code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package goggles.macros.lex
/** One piece of a lexed expression; carries its start position in the
  * original source string (`offset`).
  */
sealed trait Fragment {
  def offset: Int
}
object Fragment {
  // Literal text reproduced verbatim from the source, starting at `offset`.
  case class Verbatim(text: String, offset: Int) extends Fragment
  // An argument position (interpolation hole) at `offset`; carries no text.
  case class Argument(offset: Int) extends Fragment
} | kenbot/goggles | macros/src/main/scala/goggles/macros/lex/Fragment.scala | Scala | mit | 211 |
/*
* Copyright (c) 2017. Yuriy Stul
*/
package com.stulsoft.pquartz.examples
import com.typesafe.scalalogging.LazyLogging
import org.quartz.JobBuilder._
import org.quartz.SimpleScheduleBuilder._
import org.quartz.TriggerBuilder._
import org.quartz._
import org.quartz.impl.StdSchedulerFactory
/** Quartz job that simply logs a greeting every time it fires. */
class MyJob1 extends Job with LazyLogging {
  override def execute(context: JobExecutionContext): Unit = {
    logger.info("Hello World! MyJob1 is executing.")
  }
}
/**
  * Simple schedule: runs [[MyJob1]] immediately, then every 40 seconds, forever.
  *
  * Uses an explicit `main` method instead of the `App` trait to avoid the
  * delayed-initialization pitfalls of `App`.
  *
  * @author Yuriy Stul
  */
object Example1 extends LazyLogging {
  def main(args: Array[String]): Unit = {
    // Grab the Scheduler instance from the Factory
    val scheduler = StdSchedulerFactory.getDefaultScheduler

    // Define the job and tie it to our MyJob1 class
    val job = newJob(classOf[MyJob1]).withIdentity("job1", "group1").build

    // Trigger the job to run now, and then repeat every 40 seconds
    val trigger = newTrigger()
      .withIdentity("trigger1", "group1")
      .startNow()
      .withSchedule(simpleSchedule()
        .withIntervalInSeconds(40)
        .repeatForever())
      .build()

    // Tell Quartz to schedule the job using our trigger, then start the scheduler
    scheduler.scheduleJob(job, trigger)
    scheduler.start()
  }
}
| ysden123/poc | pquartz/src/main/scala/com/stulsoft/pquartz/examples/Example1.scala | Scala | mit | 1,214 |
package coursier.parse
import coursier.core.{Module, ModuleName}
import dataclass.data
/** A module coordinate that is either a plain Java module or a Scala module
  * whose name still needs a Scala version suffix appended.
  */
sealed abstract class JavaOrScalaModule extends Product with Serializable {
  /** Attributes of the underlying module. */
  def attributes: Map[String, String]
  /** Resolves this coordinate to a concrete [[Module]]. */
  def module(scalaBinaryVersion: String, scalaVersion: String): Module
  /** Convenience overload deriving the binary version from `scalaVersion`. */
  final def module(scalaVersion: String): Module =
    module(JavaOrScalaModule.scalaBinaryVersion(scalaVersion), scalaVersion)
}
object JavaOrScalaModule {

  /** Binary version for `scalaVersion`: milestone / release-candidate versions
    * keep the full version string, stable releases are truncated to
    * `major.minor`.
    */
  def scalaBinaryVersion(scalaVersion: String): String = {
    val isPreRelease = scalaVersion.contains("-M") || scalaVersion.contains("-RC")
    if (isPreRelease) scalaVersion
    else scalaVersion.split('.').take(2).mkString(".")
  }

  /** A plain Java module: the Scala version is irrelevant. */
  @data class JavaModule(module: Module) extends JavaOrScalaModule {
    def attributes: Map[String, String] = module.attributes
    override def toString =
      module.toString
    def module(scalaBinaryVersion: String, scalaVersion: String): Module =
      module
  }

  /** A Scala module whose final name receives a version suffix: the full
    * Scala version when `fullCrossVersion` is set, else the binary version.
    */
  @data class ScalaModule(
    baseModule: Module,
    fullCrossVersion: Boolean
  ) extends JavaOrScalaModule {
    def attributes: Map[String, String] = baseModule.attributes
    override def toString = {
      val separator = if (fullCrossVersion) ":::" else "::"
      s"${baseModule.organization.value}$separator${baseModule.nameWithAttributes}"
    }
    def module(scalaBinaryVersion: String, scalaVersion: String): Module = {
      val suffix = if (fullCrossVersion) s"_$scalaVersion" else s"_$scalaBinaryVersion"
      baseModule.withName(ModuleName(baseModule.name.value + suffix))
    }
  }
}
| alexarchambault/coursier | modules/coursier/shared/src/main/scala/coursier/parse/JavaOrScalaModule.scala | Scala | apache-2.0 | 1,587 |
package io.chapbook.util.fmt
import java.nio.charset.StandardCharsets

import org.apache.commons.codec.binary.Base64
/**
 * Wrappers for Base64 functions.
 *
 * Encoding and decoding both use UTF-8. Previously `decode` built the
 * result with the platform default charset while `encode` used UTF-8, so
 * round-trips could corrupt non-ASCII text on machines whose default
 * charset is not UTF-8.
 */
object B64 {

  /** Encodes a string (as UTF-8 bytes) to URL-safe, unpadded Base64. */
  def encode(str: String): String =
    Base64.encodeBase64URLSafeString(str.getBytes(StandardCharsets.UTF_8))

  /** Encodes raw bytes to URL-safe, unpadded Base64. */
  def encode(str: Array[Byte]): String =
    Base64.encodeBase64URLSafeString(str)

  /** Decodes a Base64 string and interprets the bytes as UTF-8 text. */
  def decode(str: String): String =
    new String(Base64.decodeBase64(str), StandardCharsets.UTF_8)

  /** Decodes a Base64 string to raw bytes. */
  def decodeBytes(str: String): Array[Byte] =
    Base64.decodeBase64(str)

  /** True if `str` consists solely of valid Base64 characters. */
  def isValid(str: String): Boolean =
    Base64.isBase64(str)
} | mcmathja/jwt | src/main/scala/io/chapbook/util/fmt/B64.scala | Scala | mit | 486 |
import leon.collection._
import leon.collection.List
import leon.lang._
import leon.proof.check
import leon.lang.synthesis._
import scala.language.postfixOps
/** The Concurrency object defines the semantics of concurrent programs.
 *
 * It gives the definition of libraries, and gives a function
 * isLibraryExecution which describes valid executions of the library.
 * We show in the files AtomicStack and TreiberStack how to instantiate it in
 * order to describe specific implementations.
 */
object Concurrency {
  /** The class Method gives a syntax to define a method of a library.
   *
   * A Method is a tuple (initials,transitions,finals) where:
   * "initials" gives the initial state of the method depending on the argument
   * "transitions" is the transition relation, which specifies how local and global
   * states are updated
   * "finals" gives the final states, and the corresponding return value;
   * a state mapped to None means it's not final and the method cannot return here
   *
   * ArgData is the type of argument values, given to the method
   * RetData is the type of the values returned by the method
   * LocalState is the type representing the local variables and control-flow positions
   * of the method
   */
  case class Method[ArgData,RetData,LocalState,GlobalState](
    initials: ArgData => LocalState,
    transitions: (LocalState,GlobalState) => (LocalState,GlobalState),
    finals: LocalState => Option[RetData]
  )

  /** A Library associates to each method name a Method instance */
  case class Library[MethodName,ArgData,RetData,LocalState,GlobalState](
    methods: MethodName => Method[ArgData,RetData,LocalState,GlobalState]
  )

  /** The Event class represents low-level events.
   *
   * Each event is executed by a particular thread (type Tid).
   * An event can be a call event. In which case, the event has information
   * about the method `m' called, and the argument `arg' with which m was
   * called.
   *
   * An event can be a return event. In which case, the event has the same
   * information as the corresponding call event, plus the return
   * value `rv' (in RetData) which was returned.
   *
   * Otherwise, an event is an internal event (inside a method).
   */
  abstract class Event[Tid,MethodName,ArgData,RetData]
  // Thread `tid' calls method `m' with argument `arg'.
  case class CallEvent[Tid,MethodName,ArgData,RetData]
    (tid: Tid, m: MethodName, arg: ArgData) extends Event[Tid,MethodName,ArgData,RetData]
  // Thread `tid' returns from method `m' (called with `arg') with value `rv'.
  case class RetEvent[Tid,MethodName,ArgData,RetData]
    (tid: Tid, m: MethodName, arg: ArgData, rv: RetData) extends Event[Tid,MethodName,ArgData,RetData]
  // An internal step of thread `tid' inside some method.
  case class InternalEvent[Tid,MethodName,ArgData,RetData]
    (tid: Tid) extends Event[Tid,MethodName,ArgData,RetData]

  /** The Configuration class describes the whole state of a concurrent system.
   *
   * More precisely, it is a pair composed of a global state, and a map giving
   * for each thread, the local state of the method in which the thread is.
   * The map also stores information about the method name and the argument
   * value with which the method was called.
   * A thread mapped to None means that the thread is not currently calling
   * any method.
   *
   * Intuitively, the global state represents the valuation of the global
   * variables which are shared between the different methods. For programs
   * which can use memory allocation, it should also represent the heap.
   */
  case class Configuration[Tid,MethodName,ArgData,LocalState,GlobalState](
    gs: GlobalState,
    control: List[(Tid,Option[(MethodName,ArgData,LocalState)])]
  )

  /** This class describes clients of a library.
   *
   * A client can be composed of multiple threads. It specifies for each
   * thread, the sequence of calls made to the library, with the expected
   * return values.
   */
  case class Client[Tid,MethodName,ArgData,RetData](threads: Tid => List[Event[Tid,MethodName,ArgData,RetData]])
}
object AtomicStack {
  import Concurrency._

  /** Represents the states of the control-flow graph of the push and pop
   * methods.
   */
  abstract class StackState
  case class ValueState(v: BigInt) extends StackState
  case class EmptyState() extends StackState
  case class InitState() extends StackState
  case class FinalState() extends StackState
  case class BlockState() extends StackState

  // Thread identifiers: this model uses exactly two threads, T1 and T2.
  abstract class StackTid
  case class T1() extends StackTid
  case class T2() extends StackTid

  /** We now describe the Atomic Stack library.
   *
   * The arguments taken by push and pop are of type Option[BigInt].
   * Typically the pop method won't take an argument (None), while
   * push will take a BigInt argument (Some[BigInt]).
   *
   * Similarly, the type of return values is also Option[BigInt].
   *
   */
  // push called without an argument is stuck (BlockState); with Some(v) it
  // starts ready to push v.
  def initialsPush(arg: Option[BigInt]): StackState = arg match {
    case None() => BlockState()
    case Some(arg) => ValueState(arg)
  }
  // Pushing prepends the value onto the global stack in one atomic step.
  def transitionsPush(ls: StackState, gs: List[BigInt]): (StackState,List[BigInt]) = (ls,gs) match {
    case (ValueState(arg),_) => (FinalState(), arg :: gs)
    case _ => (BlockState(), gs)
  }
  // push returns no value; it is only final once the value is on the stack.
  def finalsPush(ls: StackState): Option[Option[BigInt]] = ls match {
    case FinalState() => Some(None())
    case _ => None()
  }
  val PushMethod: Method[Option[BigInt],Option[BigInt],StackState,List[BigInt]] = {
    Method(initialsPush,transitionsPush,finalsPush)
  }

  def initialsPop(arg: Option[BigInt]): StackState = InitState()
  // Popping atomically removes the head of the stack, or observes emptiness.
  def transitionsPop(ls: StackState, gs: List[BigInt]): (StackState,List[BigInt]) = (ls,gs) match {
    case (InitState(),Nil()) => (EmptyState(), Nil())
    case (InitState(),Cons(rv,rvs)) => (ValueState(rv),rvs)
    case _ => (BlockState(), gs)
  }
  // pop returns None on an empty stack and Some(v) for a popped value v.
  def finalsPop(ls: StackState): Option[Option[BigInt]] = ls match {
    case EmptyState() => Some(None())
    case ValueState(arg) => Some(Some(arg))
    case _ => None()
  }
  val PopMethod: Method[Option[BigInt],Option[BigInt],StackState,List[BigInt]] = {
    Method(initialsPop,transitionsPop,finalsPop)
  }

  abstract class StackMethodName
  case class Push() extends StackMethodName
  case class Pop() extends StackMethodName

  def methods(name: StackMethodName): Method[Option[BigInt],Option[BigInt],StackState,List[BigInt]] = name match {
    case Push() => PushMethod
    case Pop() => PopMethod
  }
  val libAtomicStack = Library[StackMethodName,Option[BigInt],Option[BigInt],StackState,List[BigInt]](methods)

  // Sample client: T1 pushes 5 and T2 pops it back.
  def threads(tid: StackTid): List[Event[StackTid,StackMethodName,Option[BigInt],Option[BigInt]]] = tid match {
    case T1() =>
      List(
        CallEvent(T1(),Push(),Some(5)),
        RetEvent(T1(),Push(),Some(5),None())
      )
    case T2() =>
      List(
        CallEvent(T2(),Pop(),None()),
        RetEvent(T2(),Pop(),None(),Some(5))
      )
  }
  val client: Client[StackTid,StackMethodName,Option[BigInt],Option[BigInt]] = Client(threads)

  // The functions below are Leon synthesis problems: `???[String]` is a hole
  // and the `passes` clause gives the expected input/output examples.
  def threadToStringSimplest(p: StackTid): String = {
    ???[String]
  } ensuring {
    res => (p, res) passes {
      case T1()
      =>
      "T1: call Push(5)"
    }
  }
  def threadToStringSimple0(p: Event[StackTid,StackMethodName,Option[BigInt],Option[BigInt]]): String = {
    ???[String]
  } ensuring {
    res => (p, res) passes {
      case CallEvent(T1(), Push(), Some(BigInt(5)))
      =>
      "T1: call Push(5)"
    }
  }
  def threadToStringSimple1(p: List[Event[StackTid,StackMethodName,Option[BigInt],Option[BigInt]]]): String = {
    ???[String]
  } ensuring {
    res => (p, res) passes {
      case Cons(CallEvent(T1(), Push(), Some(BigInt(5))),
           Cons(InternalEvent(T1()), Nil()))
      =>
      "T1: call Push(5)\\nT1: internal"
    }
  }
  def threadToStringSimple2(p: List[Event[StackTid,StackMethodName,Option[BigInt],Option[BigInt]]]): String = {
    ???[String]
  } ensuring {
    res => (p, res) passes {
      case Cons(RetEvent(T1(), Push(), Some(BigInt(5)), None()),
           Cons(InternalEvent(T2()),
           Cons(RetEvent(T2(), Pop(), None(), Some(BigInt(5))), Nil())))
      =>
      "T1: ret Push(5)\\nT2: internal\\nT2: ret Pop() -> 5"
    }
  }
  /** This is THE method we want to render */
  def threadToString(p: List[Event[StackTid,StackMethodName,Option[BigInt],Option[BigInt]]]): String = {
    ???[String]
  } ensuring {
    res => (p, res) passes {
      case Cons(CallEvent(T1(), Push(), Some(BigInt(5))),
           Cons(InternalEvent(T1()),
           Cons(CallEvent(T2(), Pop(), None()),
           Cons(RetEvent(T1(), Push(), Some(BigInt(5)), None()),
           Cons(InternalEvent(T2()),
           Cons(RetEvent(T2(), Pop(), None(), Some(BigInt(5))), Nil()))))))
      =>
      "T1: call Push(5)\\nT1: internal\\nT2: call Pop()\\nT1: ret Push(5)\\nT2: internal\\nT2: ret Pop() -> 5"
    }
  }
  // Warning: Spacing differs from records to records.
  // Warning: The displaying of a tuple might depend on its operands.
  def configurationToString(p: List[Configuration[StackTid, StackMethodName, Option[BigInt], StackState, List[BigInt]]]): String = {
    ???[String]
  } ensuring {
    res => (p, res) passes {
      case Cons(Configuration(Nil(), Cons((T1(), Some((Push(), Some(BigInt(5)), ValueState(BigInt(5))))), Nil())),
           Cons(Configuration(Cons(BigInt(5), Nil()), Cons((T1(), Some((Push(), Some(BigInt(5)), FinalState()))), Nil())),
           Cons(Configuration(Cons(BigInt(5), Nil()), Cons((T2(), Some((Pop(), None(), InitState()))), Cons((T1(), Some((Push(), Some(BigInt(5)), FinalState()))), Nil()))),
           Cons(Configuration(Cons(BigInt(5), Nil()), Cons((T2(), Some((Pop(), None(), InitState()))), Cons((T1(), None()), Nil()))),
           Cons(Configuration(Nil(), Cons((T2(), Some((Pop(), None(), ValueState(BigInt(5))))), Cons((T1(), None()), Nil()))),
           Cons(Configuration(Nil(), Cons((T2(), None()), Cons((T1(), None()), Nil()))), Nil())))))) =>
"""([], {
T1 -> Push(5): ValueState(5)
})
([5], {
T1 -> Push(5): FinalState
})
([5], {
T2 -> Pop(): InitState;
T1 -> Push(5): FinalState
})
([5], {
T2 -> Pop(): InitState;
T1 -> idle
})
([], {
T2 -> Pop(): ValueState(5);
T1 -> idle
})
([], {
T2 -> idle;
T1 -> idle
})"""
    }
  }
  /*
  /// Out of order configurationToString
  def configurationToStringOOO(p: List[Configuration[StackTid, StackMethodName, Option[BigInt], StackState, List[BigInt]]]): String = {
  ???[String]
  } ensuring {
  res => (p, res) passes {
  case Cons(Configuration(Nil(), Map(T1() -> Some((Push(), Some(BigInt(5)), ValueState(BigInt(5)))))),
  Cons(Configuration(Cons(BigInt(5), Nil()), Map(T1() -> Some((Push(), Some(BigInt(5)), FinalState())))),
  Cons(Configuration(Cons(BigInt(5), Nil()), Map(T2() -> Some((Pop(), None(), InitState())), T1() -> Some((Push(), Some(BigInt(5)), FinalState())))),
  Cons(Configuration(Cons(BigInt(5), Nil()), Map(T2() -> Some((Pop(), None(), InitState())), T1() -> None())),
  Cons(Configuration(Nil(), Map(T2() -> Some((Pop(), None(), ValueState(BigInt(5)))), T1() -> None())),
  Cons(Configuration(Nil(), Map(T2() -> None(), T1() -> None())), Nil())))))) =>
  """([], {
T1 -> ValueState(5) in Push(5)
})
([5], {
T1 -> FinalState in Push(5)
})
([5], {
T2 -> InitState in Pop();
T1 -> FinalState in Push(5)
})
([5], {
T2 -> InitState in Pop();
T1 -> idle
})
([], {
T2 -> ValueState(5) in Pop();
T1 -> idle
})
([], {
T2 -> idle;
T1 -> idle
})"""
  }
  }*/
}
| regb/leon | testcases/stringrender/Example-Stack.scala | Scala | gpl-3.0 | 11,603 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.cep.scala
import java.util.{UUID, List => JList, Map => JMap}
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.cep.pattern.{Pattern => JPattern}
import org.apache.flink.cep.scala.pattern.Pattern
import org.apache.flink.cep.{EventComparator, PatternFlatSelectFunction, PatternFlatTimeoutFunction, PatternSelectFunction, PatternTimeoutFunction, PatternStream => JPatternStream}
import org.apache.flink.streaming.api.scala.{asScalaStream, _}
import org.apache.flink.util.Collector
import org.apache.flink.cep.operator.CEPOperatorUtils
import org.apache.flink.cep.scala.pattern.Pattern
import scala.collection.Map
/**
* Stream abstraction for CEP pattern detection. A pattern stream is a stream which emits detected
* pattern sequences as a map of events associated with their names. The pattern is detected using
* a [[org.apache.flink.cep.nfa.NFA]]. In order to process the detected sequences, the user has to
* specify a [[PatternSelectFunction]] or a [[PatternFlatSelectFunction]].
*
* @param jPatternStream Underlying pattern stream from Java API
* @tparam T Type of the events
*/
class PatternStream[T](jPatternStream: JPatternStream[T]) {
// Underlying Java-API pattern stream, exposed to the Flink Scala package.
private[flink] def wrappedPatternStream = jPatternStream
// Pattern being detected, wrapped back into the Scala API.
def getPattern: Pattern[T, T] = Pattern(jPatternStream.getPattern.asInstanceOf[JPattern[T, T]])
// Input stream the pattern is applied to, as a Scala DataStream.
def getInputStream: DataStream[T] = asScalaStream(jPatternStream.getInputStream)
// Comparator used by the underlying stream (NOTE(review): presumably null when none was set — confirm).
def getComparator: EventComparator[T] = jPatternStream.getComparator
/**
 * Selects result elements from detected pattern sequences.
 *
 * The given [[PatternSelectFunction]] is invoked once per detected pattern
 * sequence and must produce exactly one output element.
 *
 * @param patternSelectFunction called for each detected pattern sequence
 * @tparam R type of the resulting elements
 * @return [[DataStream]] containing the selected results
 */
def select[R: TypeInformation](patternSelectFunction: PatternSelectFunction[T, R])
  : DataStream[R] = {
  val resultType = implicitly[TypeInformation[R]]
  asScalaStream(jPatternStream.select(patternSelectFunction, resultType))
}
/**
 * Selects result elements from detected pattern sequences, and timeout
 * elements from partial matches that timed out.
 *
 * Internally the timeouts travel through a uniquely named side output and
 * are then merged back with the main results into one stream of [[Either]]
 * values (`Left` = timeout, `Right` = match).
 *
 * @param patternTimeoutFunction called for each timed-out partial match
 * @param patternSelectFunction  called for each detected pattern sequence
 * @tparam L type of the resulting timeout elements
 * @tparam R type of the resulting elements
 * @deprecated Use the overload taking an [[OutputTag]] and read timeouts
 *             from the side output instead.
 * @return stream of `Either[L, R]` containing both results and timeouts
 */
@deprecated
def select[L: TypeInformation, R: TypeInformation](
    patternTimeoutFunction: PatternTimeoutFunction[T, L],
    patternSelectFunction: PatternSelectFunction[T, R])
  : DataStream[Either[L, R]] = {
  val timeoutTag = OutputTag[L](UUID.randomUUID().toString)
  val selected = select(timeoutTag, patternTimeoutFunction, patternSelectFunction)
  selected
    .connect(selected.getSideOutput[L](timeoutTag))
    .map(r => Right(r), l => Left(l))
}
/**
 * Selects result elements from detected pattern sequences. Partial matches
 * that time out are passed to `patternTimeoutFunction` and emitted to the
 * side output identified by `outputTag`; retrieve them via
 * [[DataStream.getSideOutput()]] on the returned stream.
 *
 * @param outputTag identifies the side output carrying timed-out matches
 * @param patternTimeoutFunction called for each timed-out partial match
 * @param patternSelectFunction  called for each detected pattern sequence
 * @tparam L type of the resulting timeout elements
 * @tparam R type of the resulting elements
 * @return stream of results, with timeouts available as a side output
 */
def select[L: TypeInformation, R: TypeInformation](
    outputTag: OutputTag[L],
    patternTimeoutFunction: PatternTimeoutFunction[T, L],
    patternSelectFunction: PatternSelectFunction[T, R])
  : DataStream[R] = {
  val selectFun = cleanClosure(patternSelectFunction)
  val timeoutFun = cleanClosure(patternTimeoutFunction)
  val resultType = implicitly[TypeInformation[R]]
  asScalaStream(
    jPatternStream.select(outputTag, timeoutFun, resultType, selectFun)
  )
}
/**
 * Applies a flat select function to each detected pattern sequence. The
 * [[PatternFlatSelectFunction]] may emit an arbitrary number of elements
 * per match.
 *
 * @param patternFlatSelectFunction called for each detected pattern sequence
 * @tparam R type of the resulting elements
 * @return [[DataStream]] containing the emitted results
 */
def flatSelect[R: TypeInformation](patternFlatSelectFunction: PatternFlatSelectFunction[T, R])
  : DataStream[R] = {
  val resultType = implicitly[TypeInformation[R]]
  asScalaStream(jPatternStream.flatSelect(patternFlatSelectFunction, resultType))
}
/**
 * Applies a flat select function to each detected pattern sequence, plus a
 * flat timeout function to partial matches that timed out. Both may emit an
 * arbitrary number of elements. Results and timeouts are merged into one
 * stream of [[Either]] values (`Left` = timeout, `Right` = match).
 *
 * @param patternFlatTimeoutFunction called for each timed-out partial match
 * @param patternFlatSelectFunction  called for each detected pattern sequence
 * @tparam L type of the resulting timeout elements
 * @tparam R type of the resulting elements
 * @deprecated Use the overload taking an [[OutputTag]] and read timeouts
 *             from the side output instead.
 * @return stream of `Either[L, R]` containing both results and timeouts
 */
@deprecated
def flatSelect[L: TypeInformation, R: TypeInformation](
    patternFlatTimeoutFunction: PatternFlatTimeoutFunction[T, L],
    patternFlatSelectFunction: PatternFlatSelectFunction[T, R])
  : DataStream[Either[L, R]] = {
  // Use a unique side-output id, consistent with the deprecated `select`
  // overload above: a fixed id ("dummy-timeouted") could clash if this
  // method were applied more than once within the same job.
  val outputTag = OutputTag[L](UUID.randomUUID().toString)
  val mainStream = flatSelect(outputTag, patternFlatTimeoutFunction, patternFlatSelectFunction)
  mainStream.connect(mainStream.getSideOutput[L](outputTag)).map(r => Right(r), l => Left(l))
}
/**
 * Applies a flat select function to each detected pattern sequence; an
 * arbitrary number of elements may be emitted per match. Partial matches
 * that time out are handed to `patternFlatTimeoutFunction` and emitted to
 * the side output identified by `outputTag` (fetch it via
 * [[DataStream.getSideOutput()]]).
 *
 * @param outputTag identifies the side output carrying timed-out matches
 * @param patternFlatTimeoutFunction called for each timed-out partial match
 * @param patternFlatSelectFunction  called for each detected pattern sequence
 * @tparam L type of the resulting timeout elements
 * @tparam R type of the resulting elements
 * @return stream of results, with timeouts available as a side output
 */
def flatSelect[L: TypeInformation, R: TypeInformation](
    outputTag: OutputTag[L],
    patternFlatTimeoutFunction: PatternFlatTimeoutFunction[T, L],
    patternFlatSelectFunction: PatternFlatSelectFunction[T, R])
  : DataStream[R] = {
  val flatSelectFun = cleanClosure(patternFlatSelectFunction)
  val flatTimeoutFun = cleanClosure(patternFlatTimeoutFunction)
  val resultType = implicitly[TypeInformation[R]]
  asScalaStream(
    jPatternStream.flatSelect(
      outputTag,
      flatTimeoutFun,
      resultType,
      flatSelectFun))
}
/**
 * Variant of `select` taking a Scala function. The function is
 * closure-cleaned and adapted to a [[PatternSelectFunction]]; it is invoked
 * once per detected pattern sequence and must return exactly one element.
 *
 * @param patternSelectFun called for each detected pattern sequence
 * @tparam R type of the resulting elements
 * @return [[DataStream]] containing the selected results
 */
def select[R: TypeInformation](patternSelectFun: Map[String, Iterable[T]] => R): DataStream[R] = {
  val cleanFun = cleanClosure(patternSelectFun)
  select(new PatternSelectFunction[T, R] {
    override def select(in: JMap[String, JList[T]]): R = cleanFun(mapToScala(in))
  })
}
/**
 * Variant of the deprecated timeout-handling `select` taking Scala
 * functions. Both functions are closure-cleaned, adapted to the Java
 * interfaces, and results/timeouts are merged into one `Either` stream
 * (`Left` = timeout, `Right` = match).
 *
 * @param patternTimeoutFunction called for each timed-out partial match
 * @param patternSelectFunction  called for each detected pattern sequence
 * @tparam L type of the resulting timeout elements
 * @tparam R type of the resulting elements
 * @deprecated Use the overload taking an [[OutputTag]] and read timeouts
 *             from the side output instead.
 * @return stream of `Either[L, R]` containing both results and timeouts
 */
@deprecated
def select[L: TypeInformation, R: TypeInformation](
    patternTimeoutFunction: (Map[String, Iterable[T]], Long) => L) (
    patternSelectFunction: Map[String, Iterable[T]] => R)
  : DataStream[Either[L, R]] = {
  val selectFun = cleanClosure(patternSelectFunction)
  val timeoutFun = cleanClosure(patternTimeoutFunction)
  select(
    new PatternTimeoutFunction[T, L] {
      override def timeout(pattern: JMap[String, JList[T]], timeoutTimestamp: Long): L =
        timeoutFun(mapToScala(pattern), timeoutTimestamp)
    },
    new PatternSelectFunction[T, R] {
      override def select(pattern: JMap[String, JList[T]]): R =
        selectFun(mapToScala(pattern))
    })
}
/**
 * Variant of the side-output `select` taking Scala functions. Both
 * functions are closure-cleaned and adapted to the Java interfaces;
 * timed-out partial matches are emitted to the side output identified by
 * `outputTag` (fetch it via [[DataStream.getSideOutput()]]).
 *
 * @param outputTag identifies the side output carrying timed-out matches
 * @param patternTimeoutFunction called for each timed-out partial match
 * @param patternSelectFunction  called for each detected pattern sequence
 * @tparam L type of the resulting timeout elements
 * @tparam R type of the resulting elements
 * @return stream of results, with timeouts available as a side output
 */
def select[L: TypeInformation, R: TypeInformation](outputTag: OutputTag[L])(
    patternTimeoutFunction: (Map[String, Iterable[T]], Long) => L) (
    patternSelectFunction: Map[String, Iterable[T]] => R)
  : DataStream[R] = {
  val selectFun = cleanClosure(patternSelectFunction)
  val timeoutFun = cleanClosure(patternTimeoutFunction)
  select(
    outputTag,
    new PatternTimeoutFunction[T, L] {
      override def timeout(pattern: JMap[String, JList[T]], timeoutTimestamp: Long): L =
        timeoutFun(mapToScala(pattern), timeoutTimestamp)
    },
    new PatternSelectFunction[T, R] {
      override def select(pattern: JMap[String, JList[T]]): R =
        selectFun(mapToScala(pattern))
    })
}
/**
 * Variant of `flatSelect` taking a Scala function. The function is
 * closure-cleaned and adapted to a [[PatternFlatSelectFunction]]; it may
 * emit an arbitrary number of elements per detected pattern sequence.
 *
 * @param patternFlatSelectFun called for each detected pattern sequence
 * @tparam R type of the resulting elements
 * @return [[DataStream]] containing the emitted results
 */
def flatSelect[R: TypeInformation](patternFlatSelectFun: (Map[String, Iterable[T]],
    Collector[R]) => Unit): DataStream[R] = {
  val cleanFun = cleanClosure(patternFlatSelectFun)
  flatSelect(new PatternFlatSelectFunction[T, R] {
    override def flatSelect(pattern: JMap[String, JList[T]], out: Collector[R]): Unit =
      cleanFun(mapToScala(pattern), out)
  })
}
/**
 * Variant of the deprecated timeout-handling `flatSelect` taking Scala
 * functions. Both functions are closure-cleaned, adapted to the Java
 * interfaces, and results/timeouts are merged into one `Either` stream
 * (`Left` = timeout, `Right` = match).
 *
 * @param patternFlatTimeoutFunction called for each timed-out partial match
 * @param patternFlatSelectFunction  called for each detected pattern sequence
 * @tparam L type of the resulting timeout elements
 * @tparam R type of the resulting elements
 * @deprecated Use the overload taking an [[OutputTag]] and read timeouts
 *             from the side output instead.
 * @return stream of `Either[L, R]` containing both results and timeouts
 */
@deprecated
def flatSelect[L: TypeInformation, R: TypeInformation](
    patternFlatTimeoutFunction: (Map[String, Iterable[T]], Long, Collector[L]) => Unit) (
    patternFlatSelectFunction: (Map[String, Iterable[T]], Collector[R]) => Unit)
  : DataStream[Either[L, R]] = {
  val selectFun = cleanClosure(patternFlatSelectFunction)
  val timeoutFun = cleanClosure(patternFlatTimeoutFunction)
  flatSelect(
    new PatternFlatTimeoutFunction[T, L] {
      override def timeout(
          pattern: JMap[String, JList[T]],
          timeoutTimestamp: Long,
          out: Collector[L]): Unit =
        timeoutFun(mapToScala(pattern), timeoutTimestamp, out)
    },
    new PatternFlatSelectFunction[T, R] {
      override def flatSelect(pattern: JMap[String, JList[T]], out: Collector[R]): Unit =
        selectFun(mapToScala(pattern), out)
    })
}
/**
* Applies a flat select function to the detected pattern sequence. For each pattern sequence
* the provided [[PatternFlatSelectFunction]] is called. The pattern flat select function can
* produce an arbitrary number of resulting elements.
*
* Additionally a timeout function is applied to partial event patterns which have timed out. For
* each partial pattern sequence the provided [[PatternFlatTimeoutFunction]] is called. The
* pattern timeout function can produce an arbitrary number of resulting timeout events.
*
* You can get the stream of timeouted matches using [[DataStream.getSideOutput()]] on the
* [[DataStream]] resulting from the windowed operation with the same [[OutputTag]].
*
* @param outputTag [[OutputTag]] that identifies side output with timeouted patterns
* @param patternFlatTimeoutFunction The pattern flat timeout function which is called for each
* partially matched pattern sequence which has timed out.
* @param patternFlatSelectFunction The pattern flat select function which is called for each
* detected pattern sequence.
* @tparam L Type of the resulting timeout event
* @tparam R Type of the resulting event
* @return Data stream of either type which contains the resulting events and the resulting
* timeout events wrapped in a [[Either]] type.
*/
def flatSelect[L: TypeInformation, R: TypeInformation](outputTag: OutputTag[L])(
patternFlatTimeoutFunction: (Map[String, Iterable[T]], Long, Collector[L]) => Unit) (
patternFlatSelectFunction: (Map[String, Iterable[T]], Collector[R]) => Unit)
: DataStream[R] = {
val cleanSelectFun = cleanClosure(patternFlatSelectFunction)
val cleanTimeoutFun = cleanClosure(patternFlatTimeoutFunction)
val patternFlatSelectFun = new PatternFlatSelectFunction[T, R] {
override def flatSelect(pattern: JMap[String, JList[T]], out: Collector[R]): Unit =
cleanSelectFun(mapToScala(pattern), out)
}
val patternFlatTimeoutFun = new PatternFlatTimeoutFunction[T, L] {
override def timeout(
pattern: JMap[String, JList[T]],
timeoutTimestamp: Long, out: Collector[L])
: Unit = {
cleanTimeoutFun(mapToScala(pattern), timeoutTimestamp, out)
}
}
flatSelect(outputTag, patternFlatTimeoutFun, patternFlatSelectFun)
}
  /**
   * Routes elements that arrive after the allowed lateness to the side output
   * identified by the given tag, instead of silently dropping them.
   *
   * Delegates to the underlying Java pattern stream and returns this wrapper
   * for call chaining. NOTE(review): the value returned by the Java
   * `sideOutputLateData` call is discarded here — this assumes the Java API
   * mutates the stream in place; confirm against the Java `PatternStream`.
   *
   * @param lateDataOutputTag [[OutputTag]] identifying the side output for late data
   * @return this pattern stream, for fluent chaining
   */
  def sideOutputLateData(lateDataOutputTag: OutputTag[T]): PatternStream[T] = {
    jPatternStream.sideOutputLateData(lateDataOutputTag)
    this
  }
}
object PatternStream {

  /**
   * Wraps a Java API pattern stream in its Scala counterpart.
   *
   * @param jPatternStream Underlying pattern stream from the Java API
   * @tparam T Type of the events
   * @return A new Scala [[PatternStream]] delegating to the Java one
   */
  def apply[T](jPatternStream: JPatternStream[T]): PatternStream[T] =
    new PatternStream[T](jPatternStream)
}
| mylog00/flink | flink-libraries/flink-cep-scala/src/main/scala/org/apache/flink/cep/scala/PatternStream.scala | Scala | apache-2.0 | 22,756 |
package io.hydrosphere.mist.api.ml.preprocessors
import io.hydrosphere.mist.api.ml._
import org.apache.spark.ml.feature.Word2VecModel
import org.apache.spark.ml.linalg.{DenseVector, Vectors}
import org.apache.spark.mllib.feature.{Word2VecModel => OldWord2VecModel}
class LocalWord2VecModel(override val sparkTransformer: Word2VecModel) extends LocalTransformer[Word2VecModel] {

  /**
   * The underlying mllib [[OldWord2VecModel]], extracted via reflection because
   * the `wordVectors` field is not accessible through the public ml API.
   */
  lazy val parent: OldWord2VecModel = {
    val field = sparkTransformer.getClass.getDeclaredField("org$apache$spark$ml$feature$Word2VecModel$$wordVectors")
    field.setAccessible(true)
    field.get(sparkTransformer).asInstanceOf[OldWord2VecModel]
  }

  /** In-place y := a * x + y. Assumes x has at least y.length elements. */
  private def axpy(a: Double, x: Array[Double], y: Array[Double]): Unit = {
    var i = 0
    while (i < y.length) {
      y(i) = x(i) * a + y(i)
      i += 1
    }
  }

  /** In-place v := a * v. */
  private def scal(a: Double, v: Array[Double]): Unit = {
    var i = 0
    while (i < v.length) {
      v(i) = v(i) * a
      i += 1
    }
  }

  /**
   * Transforms the input column of words into a single averaged word vector:
   * the element-wise mean of the vectors of all known words in the column.
   * Unknown words are skipped; an empty column yields a zero vector.
   */
  override def transform(localData: LocalData): LocalData = {
    localData.column(sparkTransformer.getInputCol) match {
      case Some(column) =>
        val data = if (column.data.isEmpty) {
          List(Array.fill(sparkTransformer.getVectorSize)(0.0))
        } else {
          // Materialize the word -> vector map once. The previous `mapValues`
          // produced a lazy view that re-converted the Float vector to a dense
          // Double vector on every single lookup.
          val vectors: Map[String, Array[Double]] =
            parent.getVectors.map { case (word, vec) => word -> vec.map(_.toDouble) }
          val sum = Array.fill(sparkTransformer.getVectorSize)(0.0)
          column.data.map(_.asInstanceOf[String]).foreach { word =>
            vectors.get(word).foreach { vec =>
              axpy(1.0, vec, sum)
            }
          }
          // Average over the number of words (matches Spark's Word2Vec transform).
          scal(1.0 / column.data.length, sum)
          List(sum)
        }
        val newColumn = LocalDataColumn(sparkTransformer.getOutputCol, data)
        localData.withColumn(newColumn)
      case None => localData
    }
  }
}
object LocalWord2VecModel extends LocalModel[Word2VecModel] {
  // Rebuilds an ml Word2VecModel from serialized metadata and raw data.
  // Both the mllib and ml model constructors are not publicly accessible,
  // so they are invoked reflectively.
  // NOTE(review): `getConstructor` only finds constructors that are public in
  // bytecode; this relies on Scala's `private[pkg]` compiling to a public
  // JVM constructor — confirm against the targeted Spark version.
  override def load(metadata: Metadata, data: Map[String, Any]): Word2VecModel = {
    // Flat vector table and the word -> offset index, as produced at save time.
    // NOTE(review): assumes the deserializer yields List[Float] here (not
    // List[Double]) — verify against the persistence layer.
    val wordVectors = data("wordVectors").asInstanceOf[List[Float]].toArray
    val wordIndex = data("wordIndex").asInstanceOf[Map[String, Int]]
    // Reflectively build the old mllib model first...
    val oldCtor = classOf[OldWord2VecModel].getConstructor(classOf[Map[String, Int]], classOf[Array[Float]])
    oldCtor.setAccessible(true)
    val oldWord2VecModel = oldCtor.newInstance(wordIndex, wordVectors)
    // ...then wrap it in the ml API model under the original uid.
    val ctor = classOf[Word2VecModel].getConstructor(classOf[String], classOf[OldWord2VecModel])
    ctor.setAccessible(true)
    val inst = ctor.newInstance(metadata.uid, oldWord2VecModel)
      .setInputCol(metadata.paramMap("inputCol").toString)
      .setOutputCol(metadata.paramMap("outputCol").toString)
    // Restore the remaining training parameters from the saved param map.
    inst
      .set(inst.maxIter, metadata.paramMap("maxIter").asInstanceOf[Number].intValue())
      .set(inst.seed, metadata.paramMap("seed").toString.toLong) // FIXME why seed is converted to int?
      .set(inst.numPartitions, metadata.paramMap("numPartitions").asInstanceOf[Number].intValue())
      .set(inst.stepSize, metadata.paramMap("stepSize").asInstanceOf[Double])
      .set(inst.maxSentenceLength, metadata.paramMap("maxSentenceLength").asInstanceOf[Number].intValue())
      .set(inst.windowSize, metadata.paramMap("windowSize").asInstanceOf[Number].intValue())
      .set(inst.vectorSize, metadata.paramMap("vectorSize").asInstanceOf[Number].intValue())
  }

  // Implicit bridge so a Spark Word2VecModel can be used where a LocalTransformer is expected.
  override implicit def getTransformer(transformer: Word2VecModel): LocalTransformer[Word2VecModel] = new LocalWord2VecModel(transformer)
}
| KineticCookie/mist | mist-lib/src/main/scala-2.11/io/hydrosphere/mist/api/ml/preprocessors/LocalWord2VecModel.scala | Scala | apache-2.0 | 3,476 |
package io.vamp.persistence
import akka.actor.{ ActorRef, ActorSystem }
import akka.util.Timeout
import io.vamp.common.{ Config, Namespace }
import io.vamp.common.akka.ActorBootstrap
import scala.concurrent.{ ExecutionContext, Future }
object PersistenceBootstrap {

  /** Configured database backend name, normalized to lower case. */
  def databaseType()(implicit namespace: Namespace): String =
    Config.string("vamp.persistence.database.type")().toLowerCase

  /** Configured key-value store backend name, normalized to lower case. */
  def keyValueStoreType()(implicit namespace: Namespace): String =
    Config.string("vamp.persistence.key-value-store.type")().toLowerCase
}
class PersistenceBootstrap extends ActorBootstrap {

  import PersistenceBootstrap._

  /**
   * Boots the persistence layer: one actor for the configured database and
   * one for the configured key-value store. An unknown backend name is a
   * configuration error and fails fast.
   */
  def createActors(implicit actorSystem: ActorSystem, namespace: Namespace, timeout: Timeout): Future[List[ActorRef]] = {
    val database = databaseType()
    val keyValue = keyValueStoreType()

    val databaseActor = alias[PersistenceActor](database, (`type`: String) ⇒ {
      throw new RuntimeException(s"Unsupported database type: ${`type`}")
    })

    val keyValueActor = alias[KeyValueStoreActor](keyValue, (`type`: String) ⇒ {
      throw new RuntimeException(s"Unsupported key-value store type: ${`type`}")
    })

    info(s"Database: $database")
    info(s"Key-Value store: $keyValue")

    implicit val ec: ExecutionContext = actorSystem.dispatcher
    Future.sequence(List(keyValueActor, databaseActor))
  }
}
| dragoslav/vamp | persistence/src/main/scala/io/vamp/persistence/PersistenceBootstrap.scala | Scala | apache-2.0 | 1,298 |
package org.workcraft.graphics.formularendering
import java.awt.font.FontRenderContext
import java.awt.geom.AffineTransform
object PodgonFontRenderContext {

  /**
   * Shared [[FontRenderContext]] with a 1000x scale transform, antialiasing
   * and fractional metrics enabled. Built lazily on first access.
   */
  lazy val instance: FontRenderContext = {
    val scale = AffineTransform.getScaleInstance(1000, 1000)
    new FontRenderContext(scale, true, true)
  }

  /** Parameterless accessor for the shared context. */
  def apply = instance
}
package chap5
object Exe15 extends App {
  // tails of Stream(1, 2, 3): the stream itself, then each successive suffix,
  // ending with the empty stream.
  private val expected = List(List(1, 2, 3), List(2, 3), List(3), Nil)
  assert(Stream(1, 2, 3).tails.map(_.toList).toList == expected)
}
| ponkotuy/FPScala | src/main/scala/chap5/Exe15.scala | Scala | unlicense | 146 |
package haru.action
import xitrum.annotation.GET
import xitrum.Action
@GET("")
/** Root route: renders the view template associated with this action. */
class Home extends Action {
  def execute() {
    // No model data; respond with this action's default view.
    respondView()
  }
}
@GET("/HomeMin")
/** Renders the view template associated with this action (a variant of the home page). */
class HomeMin extends Action {
  def execute() {
    // No model data; respond with this action's default view.
    respondView()
  }
}
@GET("/ping")
/**
 * Plain-text liveness endpoint.
 * NOTE(review): class name `ping` violates UpperCamelCase; renaming is not done
 * here because the class is public and may be referenced elsewhere.
 */
class ping extends Action {
  def execute() {
    respondText("hello world");
  }
}
| haruio/haru-admin | src/main/scala/haru/action/Index.scala | Scala | mit | 341 |
package com.cloudray.scalapress.plugin.listings
import org.scalatest.{FunSuite, OneInstancePerTest}
import org.scalatest.mock.MockitoSugar
import com.cloudray.scalapress.item.Item
import com.cloudray.scalapress.settings.{Installation, InstallationDao}
import org.mockito.{ArgumentCaptor, Matchers, Mockito}
import com.cloudray.scalapress.plugin.listings.domain.ListingPackage
import com.cloudray.scalapress.plugin.listings.email.ListingAdminNotificationService
import org.springframework.mail.{SimpleMailMessage, MailSender}
/** @author Stephen Samuel */
class ListingAdminNotificationServiceTest extends FunSuite with OneInstancePerTest with MockitoSugar {

  val installationDao = mock[InstallationDao]
  val sender = mock[MailSender]
  val service = new ListingAdminNotificationService(sender, installationDao)

  // Site installation fixture: domain used in the backoffice link, admin address
  // used as the mail recipient.
  val installation = new Installation
  installation.domain = "coldplay.com"
  installation.adminEmail = "sammy@sammy.com"
  Mockito.when(installationDao.get).thenReturn(installation)

  // Listing fixture under test.
  val obj = new Item
  obj.id = 34
  obj.status = "Live"
  obj.name = "coldplay tshirt"

  val lp = new ListingPackage
  lp.name = "t-shirt sale"
  obj.listingPackage = lp

  /** Verifies exactly one mail was sent and returns it for inspection. */
  private def sentMessage: SimpleMailMessage = {
    val captor = ArgumentCaptor.forClass(classOf[SimpleMailMessage])
    Mockito.verify(sender).send(captor.capture)
    captor.getValue
  }

  test("test format of message") {
    service.notify(obj)
    assert(
      "Hello Admin\\n\\nA new listing has been added to your site:\\ncoldplay tshirt\\n\\n" +
        "The status of this listing is: [Live]\\nThe listing was added using: [t-shirt sale]\\n\\n" +
        "You can edit the listing in the backoffice:\\nhttp://coldplay.com/backoffice/item/34\\n\\n" +
        "Regards, Scalapress" === sentMessage.getText)
  }

  test("a paid listing should show the paid warning") {
    lp.fee = 100
    service.notify(obj)
    assert(sentMessage.getText.contains("This is a paid listing"))
  }

  test("a free listing should not show the paid warning") {
    lp.fee = 0
    service.notify(obj)
    assert(!sentMessage.getText.contains("This is a paid listing"))
  }

  test("if admin email is not set then no email should be sent") {
    installation.adminEmail = null
    service.notify(obj)
    Mockito.verify(sender, Mockito.never).send(Matchers.any[SimpleMailMessage])
  }
}
| vidyacraghav/scalapress | src/test/scala/com/cloudray/scalapress/plugin/listings/ListingAdminNotificationServiceTest.scala | Scala | apache-2.0 | 2,511 |
package poly.util.fastloop
import poly.util.macroutil._
import scala.reflect.macros.blackbox._
import scala.language.experimental.macros
/**
* Contains macro expansions that
* - Rewrites a range foreach into a while loop;
* - Attempts to inline to loop body.
*
* @author Tongfei Chen (ctongfei@gmail.com).
* @since 0.2.2
*/
object FastLoop {

  /**
   * Equivalent to {{{
   *   for (i <- a until b by step) f(i)
   * }}} (where `step` > 0) but optimized using macros.
   * It normally provides 1.5x~3x speedup.
   */
  def ascending[V](a: Int, b: Int, step: Int)(f: Int => V): Unit = macro ascendingMacroImpl[V]

  /**
   * Equivalent to {{{
   *   for (i <- a until b by step) f(i)
   * }}} (where `step` < 0) but optimized using macros.
   * It normally provides 1.5x~3x speedup.
   */
  def descending[V](a: Int, b: Int, step: Int)(f: Int => V): Unit = macro descendingMacroImpl[V]

  // Macro implementation for `ascending`: expands the range foreach into a
  // plain while loop and asks InlineUtil to inline the loop body `f`.
  def ascendingMacroImpl[V](c: Context)(a: c.Expr[Int], b: c.Expr[Int], step: c.Expr[Int])(f: c.Expr[Int => V]): c.Expr[Unit] = {
    import c.universe._
    // Fresh names avoid capturing/clashing with identifiers at the call site.
    val i = TermName(c.freshName("poly$i"))
    val limit = TermName(c.freshName("poly$limit"))
    val tree = q"""
      var $i = $a
      var $limit = $b
      while ($i < $limit) {
        $f($i)
        $i += $step
      }
    """
    new InlineUtil[c.type](c).inlineAndReset[Unit](tree)
  }

  // Macro implementation for `descending`: identical expansion except the loop
  // condition is `>` (caller is expected to pass a negative `step`).
  def descendingMacroImpl[V](c: Context)(a: c.Expr[Int], b: c.Expr[Int], step: c.Expr[Int])(f: c.Expr[Int => V]): c.Expr[Unit] = {
    import c.universe._
    // Fresh names avoid capturing/clashing with identifiers at the call site.
    val i = TermName(c.freshName("poly$i"))
    val limit = TermName(c.freshName("poly$limit"))
    val tree = q"""
      var $i = $a
      var $limit = $b
      while ($i > $limit) {
        $f($i)
        $i += $step
      }
    """
    new InlineUtil[c.type](c).inlineAndReset[Unit](tree)
  }
}
| ctongfei/poly-util | src/main/scala/poly/util/fastloop/FastLoop.scala | Scala | mit | 1,805 |
package io.skysail.app.wyt.services
import io.skysail.repo.orientdb.ScalaDbService
import io.skysail.core.model.ApplicationModel
import io.skysail.app.wyt.repository.WytRepository
import org.json4s.DefaultFormats
import io.skysail.app.wyt.domain.Pact
import scala.util.Try
import io.skysail.queryfilter.filter.Filter
import io.skysail.queryfilter.pagination.Pagination
import io.skysail.app.wyt.domain.Turn
/** Service layer for Pact entities, backed by a [[WytRepository]]. */
class PactService(dbService: ScalaDbService, appModel: ApplicationModel) {

  private var repo: WytRepository = new WytRepository(dbService)

  // json4s formats needed by `extract[Pact]` below.
  private implicit val formats = DefaultFormats

  // NOTE(review): this counter is never modified anywhere in this class, so
  // `i % 2 == 0` in getNextTurn is always true — presumably it was meant to
  // alternate per call; confirm intended behavior.
  private var i = 0

  /** Persists a pact, defaulting its turn when none was supplied. */
  def create(pact: Pact): Try[Pact] = {
    if (pact.turn == null) {
      pact.turn = Turn("default")
    }
    repo.save(pact, appModel)
  }

  //  def getById(id: String): Option[Connection] = {
  //    val entry = repo.findOne(id)
  //    if (entry.isDefined) Some(entry.get.extract[Connection]) else None
  //  }
  //
  /** Returns the pacts matching the filter, one `Pact` per result row. */
  def find(f: Filter, p: Pagination) = repo.find(f, p).map { (row => row.extract[Pact]) }.toList

  // NOTE(review): `pactId` is ignored and, because `i` never changes (see
  // above), this always returns "Georgios is next" — likely placeholder logic.
  def getNextTurn(pactId: String) = {
    if (i % 2 == 0) {
      Turn("Georgios is next")
    } else {
      Turn("Carsten is next")
    }
  }
  //
  //  def findOne(id: String): Option[Connection] = {
  //    val option = repo.findOne(id)
  //    if (option.isDefined) Some(option.get.extract[Connection]) else None
  //  }
  //
  //  def save(entity: Connection): Connection = {
  //    val vertex = repo.save(entity, appModel)
  //    //    entity.setId(vertex.getId().toString())
  //    entity.copy(id = Some(vertex.get.id.toString()))
  //  }
}
package org.scalaide.core.sbtbuilder
import org.eclipse.core.resources.IProject
import org.eclipse.core.runtime.IPath
import org.eclipse.jdt.core.IClasspathEntry
import org.eclipse.jdt.core.IJavaProject
import org.eclipse.jdt.core.JavaCore
import org.junit.AfterClass
import org.junit.Assert
import org.junit.BeforeClass
import org.junit.Test
import org.scalaide.core.IScalaProject
import org.scalaide.core.SdtConstants
import org.scalaide.core.testsetup.IProjectHelpers
import org.scalaide.core.testsetup.IProjectOperations
import org.scalaide.core.testsetup.SDTTestUtils.addToClasspath
import org.scalaide.core.testsetup.SDTTestUtils.buildWorkspace
import org.scalaide.core.testsetup.SDTTestUtils.createJavaProjectInWorkspace
import org.scalaide.core.testsetup.SDTTestUtils.createProjectInWorkspace
import org.scalaide.core.testsetup.SDTTestUtils.findProjectProblemMarkers
import org.scalaide.core.testsetup.SDTTestUtils.markersMessages
import org.scalaide.util.eclipse.EclipseUtils
import ScalaProjectDependedOnJavaProjectTest.projectJ
import ScalaProjectDependedOnJavaProjectTest.projectS
/**
 * Shared fixture for [[ScalaProjectDependedOnJavaProjectTest]]: creates one
 * Java project and one Scala project in the test workspace, with the Scala
 * project's classpath depending on the Java one.
 */
object ScalaProjectDependedOnJavaProjectTest extends IProjectOperations {
  import org.scalaide.core.testsetup.SDTTestUtils._

  private val projectJName = "scalaDependedOnJavaJ"
  private val projectSName = "scalaDependedOnJavaS"
  private var projectJ: IJavaProject = _
  private var projectS: IScalaProject = _
  private val bundleName = "org.scala-ide.sdt.core.tests"

  // Single src/main -> target/main source/output classpath entry for a project.
  private def withSrcOutputStructure(project: IProject, jProject: IJavaProject): Seq[IClasspathEntry] = {
    val mainSourceFolder = project.getFolder("/src/main")
    val mainOutputFolder = project.getFolder("/target/main")
    Seq(JavaCore.newSourceEntry(
      jProject.getPackageFragmentRoot(mainSourceFolder).getPath,
      Array[IPath](),
      jProject.getPackageFragmentRoot(mainOutputFolder).getPath))
  }

  // Runs once before the test class: creates both projects and wires the
  // Scala project's classpath to the Java project (order matters — the Java
  // project must exist before the project entry is added).
  @BeforeClass def setup(): Unit = {
    initializeProjects(bundleName, Seq(projectJName, projectSName)) {
      projectJ = createJavaProjectInWorkspace(projectJName, withSrcOutputStructure)
      projectS = createProjectInWorkspace(projectSName, withSrcOutputStructure _)
      addToClasspath(projectS, JavaCore.newProjectEntry(projectJ.getProject.getFullPath, false))
    }
  }

  // Runs once after the test class: force-deletes both projects inside a
  // workspace runnable so deletion is atomic with respect to builds.
  @AfterClass def cleanup(): Unit = {
    EclipseUtils.workspaceRunnableIn(EclipseUtils.workspaceRoot.getWorkspace) { _ =>
      projectS.underlying.delete( /* force = */ true, /* monitor = */ null)
      projectJ.getProject.delete( /* force = */ true, /* monitor = */ null)
    }
  }
}
class ScalaProjectDependedOnJavaProjectTest extends IProjectOperations with IProjectHelpers {
  import ScalaProjectDependedOnJavaProjectTest._

  /** A full workspace build of the Scala project must produce no problem markers. */
  @Test def shouldCorrectlyBuildScalaProjectWhichDependsOnJavaOne(): Unit = {
    givenCleanWorkspaceForProjects(projectJ, projectS)

    buildWorkspace()

    val problems =
      markersMessages(findProjectProblemMarkers(projectS, SdtConstants.ProblemMarkerId).toList)
    Assert.assertTrue("no error expected: " + problems.mkString(", "), problems.isEmpty)
  }
}
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic.anyvals
import org.scalatest._
import org.scalactic.Equality
import org.scalactic.TypeCheckedTripleEquals
import org.scalatest.prop.PropertyChecks
// SKIP-SCALATESTJS,NATIVE-START
import scala.collection.immutable.NumericRange
// SKIP-SCALATESTJS,NATIVE-END
import scala.collection.mutable.WrappedArray
import OptionValues._
import scala.util.{Failure, Success, Try}
import org.scalatest.Inspectors
import org.scalactic.{Good, Bad}
import org.scalactic.{Pass, Fail}
/**
 * Equality instances for the FiniteDouble spec that treat NaN == NaN as true,
 * so property checks comparing possibly-NaN results do not spuriously fail.
 */
trait FiniteDoubleSpecSupport {

  // Double equality where two NaNs compare equal (standard == gives false).
  implicit val doubleEquality: Equality[Double] =
    new Equality[Double] {
      override def areEqual(a: Double, b: Any): Boolean =
        (a, b) match {
          case (a, bDouble: Double) if a.isNaN && bDouble.isNaN  => true
          case _ => a == b
        }
    }

  // Same NaN-tolerant comparison lifted to the FiniteDouble wrapper.
  implicit val finiteDoubleEquality: Equality[FiniteDouble] =
    new Equality[FiniteDouble] {
      override def areEqual(a: FiniteDouble, b: Any): Boolean =
        (a, b) match {
          case (a, bDouble: FiniteDouble) if a.value.isNaN && bDouble.value.isNaN => true
          case _ => a == b
        }
    }

  // Try equality that (a) treats Success(NaN) == Success(NaN) as true and
  // (b) compares Failures by exception class and message rather than identity.
  implicit def tryEquality[T]: Equality[Try[T]] = new Equality[Try[T]] {
    override def areEqual(a: Try[T], b: Any): Boolean = a match {
      case Success(double: Double) if double.isNaN =>  // This is because in scala.js x/0 results to NaN not ArithmetricException like in jvm, and we need to make sure Success(NaN) == Success(NaN) is true to pass the test.
        b match {
          case Success(bDouble: Double) if bDouble.isNaN => true
          case _ => false
        }
      case _: Success[_] => a == b
      case Failure(ex) => b match {
        case _: Success[_] => false
        case Failure(otherEx) => ex.getClass == otherEx.getClass && ex.getMessage == otherEx.getMessage
        case _ => false
      }
    }
  }
}
class FiniteDoubleSpec extends funspec.AnyFunSpec with matchers.should.Matchers with PropertyChecks with TypeCheckedTripleEquals with FiniteDoubleSpecSupport {
describe("A FiniteDouble") {
describe("should offer a from factory method that") {
it("returns Some[FiniteDouble] if the passed Double is finite") {
FiniteDouble.from(50.23).value.value shouldBe 50.23
FiniteDouble.from(100.0).value.value shouldBe 100.0
FiniteDouble.from(0.0).value.value shouldBe 0.0
FiniteDouble.from(-0.00001).value.value shouldBe -0.00001
FiniteDouble.from(-99.9).value.value shouldBe -99.9
FiniteDouble.from(Double.MinPositiveValue).value.value shouldBe Double.MinPositiveValue
}
it("returns None if the passed Double is infinite") {
FiniteDouble.from(Double.NegativeInfinity) shouldBe None
FiniteDouble.from(Double.PositiveInfinity) shouldBe None
FiniteDouble.from(Double.NaN) shouldBe None
}
}
describe("should offer an ensuringValid factory method that") {
it("returns FiniteDouble if the passed Double is finite") {
FiniteDouble.ensuringValid(50.23).value shouldBe 50.23
FiniteDouble.ensuringValid(100.0).value shouldBe 100.0
FiniteDouble.ensuringValid(0.0).value shouldBe 0.0
FiniteDouble.ensuringValid(-0.00001).value shouldBe -0.00001
FiniteDouble.ensuringValid(-99.9).value shouldBe -99.9
FiniteDouble.ensuringValid(Double.MinPositiveValue).value shouldBe Double.MinPositiveValue
}
it("throws AssertionError if the passed Double is infinite") {
an [AssertionError] should be thrownBy FiniteDouble.ensuringValid(Double.PositiveInfinity)
an [AssertionError] should be thrownBy FiniteDouble.ensuringValid(Double.NegativeInfinity)
an [AssertionError] should be thrownBy FiniteDouble.ensuringValid(Double.NaN)
}
}
describe("should offer a tryingValid factory method that") {
import TryValues._
it("returns a FiniteDouble wrapped in a Success if the passed FiniteDouble is finite") {
FiniteDouble.tryingValid(50.3).success.value.value shouldBe 50.3
FiniteDouble.tryingValid(100.0).success.value.value shouldBe 100.0
FiniteDouble.tryingValid(0.0).success.value.value shouldBe 0.0
FiniteDouble.tryingValid(-1.0).success.value.value shouldBe -1.0
FiniteDouble.tryingValid(-99.9).success.value.value shouldBe -99.9
FiniteDouble.tryingValid(Double.MinPositiveValue).success.value.value shouldBe Double.MinPositiveValue
}
it("returns an AssertionError wrapped in a Failure if the passed Double is infinite") {
FiniteDouble.tryingValid(Double.NegativeInfinity).failure.exception shouldBe an [AssertionError]
FiniteDouble.tryingValid(Double.PositiveInfinity).failure.exception shouldBe an [AssertionError]
FiniteDouble.tryingValid(Double.NaN).failure.exception shouldBe an [AssertionError]
}
}
describe("should offer a passOrElse factory method that") {
it("returns a Pass if the given Double is finite") {
FiniteDouble.passOrElse(50.0)(i => i) shouldBe Pass
FiniteDouble.passOrElse(100.0)(i => i) shouldBe Pass
FiniteDouble.passOrElse(0.0)(i => i) shouldBe Pass
FiniteDouble.passOrElse(-1.1)(i => i) shouldBe Pass
FiniteDouble.passOrElse(-99.0)(i => i) shouldBe Pass
FiniteDouble.passOrElse(Double.MinPositiveValue)(i => i) shouldBe Pass
}
it("returns an error value produced by passing the given Double to the given function if the passed Double is infinite, wrapped in a Fail") {
FiniteDouble.passOrElse(Double.NegativeInfinity)(i => s"$i did not taste good") shouldBe Fail("-Infinity did not taste good")
FiniteDouble.passOrElse(Double.PositiveInfinity)(i => i) shouldBe Fail(Double.PositiveInfinity)
FiniteDouble.passOrElse(Double.NaN)(i => 1.1) shouldBe Fail(1.1)
}
}
describe("should offer a goodOrElse factory method that") {
it("returns a FiniteDouble wrapped in a Good if the given Double is greater than 0") {
FiniteDouble.goodOrElse(50.3)(i => i) shouldBe Good(FiniteDouble(50.3))
FiniteDouble.goodOrElse(100.0)(i => i) shouldBe Good(FiniteDouble(100.0))
FiniteDouble.goodOrElse(0.0)(i => i) shouldBe Good(FiniteDouble(0.0))
FiniteDouble.goodOrElse(-1.1)(i => i) shouldBe Good(FiniteDouble(-1.1))
FiniteDouble.goodOrElse(-99.0)(i => i) shouldBe Good(FiniteDouble(-99.0))
// SKIP-DOTTY-START
// not constant literal
FiniteDouble.goodOrElse(Double.MinPositiveValue)(i => i) shouldBe Good(FiniteDouble(Double.MinPositiveValue))
// SKIP-DOTTY-END
}
it("returns an error value produced by passing the given Double to the given function if the passed Double is NOT greater than 0, wrapped in a Bad") {
FiniteDouble.goodOrElse(Double.NegativeInfinity)(i => s"$i did not taste good") shouldBe Bad("-Infinity did not taste good")
FiniteDouble.goodOrElse(Double.PositiveInfinity)(i => i) shouldBe Bad(Double.PositiveInfinity)
FiniteDouble.goodOrElse(Double.NaN)(i => s"$i did not taste good") shouldBe Bad("NaN did not taste good")
}
}
describe("should offer a rightOrElse factory method that") {
it("returns a FiniteDouble wrapped in a Right if the given Double is greater than 0") {
FiniteDouble.rightOrElse(50.3)(i => i) shouldBe Right(FiniteDouble(50.3))
FiniteDouble.rightOrElse(100.0)(i => i) shouldBe Right(FiniteDouble(100.0))
FiniteDouble.rightOrElse(0.0)(i => i) shouldBe Right(FiniteDouble(0.0))
FiniteDouble.rightOrElse(-1.1)(i => i) shouldBe Right(FiniteDouble(-1.1))
FiniteDouble.rightOrElse(-99.9)(i => i) shouldBe Right(FiniteDouble(-99.9))
// SKIP-DOTTY-START
// not constant literal
FiniteDouble.rightOrElse(Double.MinPositiveValue)(i => i) shouldBe Right(FiniteDouble(Double.MinPositiveValue))
// SKIP-DOTTY-END
}
it("returns an error value produced by passing the given Double to the given function if the passed Double is infinite, wrapped in a Left") {
FiniteDouble.rightOrElse(Double.NegativeInfinity)(i => s"$i did not taste good") shouldBe Left("-Infinity did not taste good")
FiniteDouble.rightOrElse(Double.PositiveInfinity)(i => i) shouldBe Left(Double.PositiveInfinity)
FiniteDouble.rightOrElse(Double.NaN)(i => s"$i did not taste good") shouldBe Left("NaN did not taste good")
}
}
describe("should offer an isValid predicate method that") {
it("returns true if the passed Double is finite") {
FiniteDouble.isValid(50.23) shouldBe true
FiniteDouble.isValid(100.0) shouldBe true
FiniteDouble.isValid(0.0) shouldBe true
FiniteDouble.isValid(-0.0) shouldBe true
FiniteDouble.isValid(-0.00001) shouldBe true
FiniteDouble.isValid(-99.9) shouldBe true
FiniteDouble.isValid(Double.NaN) shouldBe false
FiniteDouble.isValid(Double.MinPositiveValue) shouldBe true
FiniteDouble.isValid(Double.NegativeInfinity) shouldBe false
FiniteDouble.isValid(Double.PositiveInfinity) shouldBe false
}
}
describe("should offer a fromOrElse factory method that") {
it("returns a FiniteDouble if the passed Double is finite") {
FiniteDouble.fromOrElse(50.23, FiniteDouble(42.0)).value shouldBe 50.23
FiniteDouble.fromOrElse(100.0, FiniteDouble(42.0)).value shouldBe 100.0
FiniteDouble.fromOrElse(0.0, FiniteDouble(42.0)).value shouldBe 0.0
FiniteDouble.fromOrElse(-0.00001, FiniteDouble(42.0)).value shouldBe -0.00001
FiniteDouble.fromOrElse(-99.9, FiniteDouble(42.0)).value shouldBe -99.9
FiniteDouble.fromOrElse(Double.MinPositiveValue, FiniteDouble(42.0)).value shouldBe Double.MinPositiveValue
}
it("returns a given default if the passed Double is infinite") {
FiniteDouble.fromOrElse(Double.NegativeInfinity, FiniteDouble(42.0)).value shouldBe 42.0
FiniteDouble.fromOrElse(Double.PositiveInfinity, FiniteDouble(42.0)).value shouldBe 42.0
FiniteDouble.fromOrElse(Double.NaN, FiniteDouble(42.0)).value shouldBe 42.0
}
}
it("should offer MaxValue, MinValue, and MinPositiveValue factory methods") {
FiniteDouble.MaxValue shouldEqual FiniteDouble.from(Double.MaxValue).get
FiniteDouble.MinValue shouldEqual FiniteDouble.from(Double.MinValue).get
FiniteDouble.MinPositiveValue shouldEqual FiniteDouble.from(Double.MinPositiveValue).get
}
it("should not offer a PositiveInfinity factory method") {
"FiniteDouble.PositiveInfinity" shouldNot compile
}
it("should not offer a NegativeInfinity factory method") {
"FiniteDouble.NegativeInfinity" shouldNot compile
}
it("should not offer a isPosInfinity method") {
"FiniteDouble(1.0).isPosInfinity" shouldNot compile
}
it("should not offer a isNegInfinity method") {
"FiniteDouble(1.0).isNegInfinity" shouldNot compile
}
it("should be sortable") {
val xs = List(FiniteDouble(2.2), FiniteDouble(4.4), FiniteDouble(1.1),
FiniteDouble(3.3))
xs.sorted shouldEqual List(FiniteDouble(1.1), FiniteDouble(2.2), FiniteDouble(3.3),
FiniteDouble(4.4))
}
describe("when created with apply method") {
it("should compile when 8 is passed in") {
"FiniteDouble(8)" should compile
FiniteDouble(8).value shouldEqual 8.0
"FiniteDouble(8L)" should compile
FiniteDouble(8L).value shouldEqual 8.0
"FiniteDouble(8.0F)" should compile
FiniteDouble(8.0F).value shouldEqual 8.0
"FiniteDouble(8.0)" should compile
FiniteDouble(8.0).value shouldEqual 8.0
}
it("should compile when 0 is passed in") {
"FiniteDouble(0)" should compile
FiniteDouble(0).value shouldEqual 0
"FiniteDouble(0L)" should compile
FiniteDouble(0L).value shouldEqual 0.0
"FiniteDouble(0.0F)" should compile
FiniteDouble(0.0F).value shouldEqual 0.0
"FiniteDouble(0.0)" should compile
FiniteDouble(0.0).value shouldEqual 0.0
}
it("should compile when -8 is passed in") {
"FiniteDouble(-8)" should compile
FiniteDouble(-8).value shouldEqual -8.0
"FiniteDouble(-8L)" should compile
FiniteDouble(-8L).value shouldEqual -8.0
"FiniteDouble(-8.0F)" should compile
FiniteDouble(-8.0F).value shouldEqual -8.0
"FiniteDouble(-8.0)" should compile
FiniteDouble(-8.0).value shouldEqual -8.0
}
it("should not compile when Double.NegativeInfinity is passed in") {
"FiniteDouble(Double.NegativeInfinity)" shouldNot compile
}
it("should not compile when Double.PositiveInfinity is passed in") {
"FiniteDouble(Double.PositiveInfinity)" shouldNot compile
}
it("should not compile when x is passed in") {
val a: Int = -8
"FiniteDouble(a)" shouldNot compile
val b: Long = -8L
"FiniteDouble(b)" shouldNot compile
val c: Float = -8.0F
"FiniteDouble(c)" shouldNot compile
val d: Double = -8.0
"FiniteDouble(d)" shouldNot compile
}
it("should offer a unary + method that is consistent with Double") {
forAll { (pDouble: FiniteDouble) =>
(+pDouble).toDouble shouldEqual (+(pDouble.toDouble))
}
}
it("should offer a unary - method that returns another FiniteDouble") {
forAll { (pDouble: FiniteDouble) =>
(-pDouble) shouldEqual (FiniteDouble.ensuringValid(-(pDouble.toDouble)))
}
}
}
describe("when specified as a plain-old Double") {
def takesFiniteDouble(pos: FiniteDouble): Double = pos.value
it("should compile when 8 is passed in") {
"takesFiniteDouble(8)" should compile
takesFiniteDouble(8) shouldEqual 8.0
"takesFiniteDouble(8L)" should compile
takesFiniteDouble(8L) shouldEqual 8.0
"takesFiniteDouble(8.0F)" should compile
takesFiniteDouble(8.0F) shouldEqual 8.0
"takesFiniteDouble(8.0)" should compile
takesFiniteDouble(8.0) shouldEqual 8.0
}
it("should compile when 0 is passed in") {
"takesFiniteDouble(0)" should compile
takesFiniteDouble(0) shouldEqual 0.0
"takesFiniteDouble(0L)" should compile
takesFiniteDouble(0L) shouldEqual 0.0
"takesFiniteDouble(0.0F)" should compile
takesFiniteDouble(0.0F) shouldEqual 0.0
"takesFiniteDouble(0.0)" should compile
takesFiniteDouble(0.0) shouldEqual 0.0
}
it("should compile when -8 is passed in") {
"takesFiniteDouble(-8)" should compile
takesFiniteDouble(-8) shouldEqual -8.0
"takesFiniteDouble(-8L)" should compile
takesFiniteDouble(-8L) shouldEqual -8.0
"takesFiniteDouble(-8.0F)" should compile
takesFiniteDouble(-8.0F) shouldEqual -8.0
"takesFiniteDouble(-8.0)" should compile
takesFiniteDouble(-8.0) shouldEqual -8.0
}
it("should not compile when 0 is passed in") {
"takesFiniteDouble(Double.NegativeInfinity)" shouldNot compile
"takesFiniteDouble(Double.PositiveInfinity)" shouldNot compile
}
it("should not compile when x is passed in") {
val x: Int = -8
"takesFiniteDouble(x)" shouldNot compile
val b: Long = -8L
"takesFiniteDouble(b)" shouldNot compile
val c: Float = -8.0F
"takesFiniteDouble(c)" shouldNot compile
val d: Double = -8.0
"takesFiniteDouble(d)" shouldNot compile
}
}
    // Property check: min/max delegate to the underlying Double's min/max.
    it("should offer 'min' and 'max' methods that are consistent with Double") {
      forAll { (pdouble1: FiniteDouble, pdouble2: FiniteDouble) =>
        pdouble1.max(pdouble2).toDouble shouldEqual pdouble1.toDouble.max(pdouble2.toDouble)
        pdouble1.min(pdouble2).toDouble shouldEqual pdouble1.toDouble.min(pdouble2.toDouble)
      }
    }
    // Property check: isWhole mirrors Double.isWhole for any finite value.
    it("should offer an 'isWhole' method that is consistent with Double") {
      forAll { (pdouble: FiniteDouble) =>
        pdouble.isWhole shouldEqual pdouble.toDouble.isWhole
      }
    }
    // Property check: rounding operations agree with their Double counterparts.
    it("should offer 'round', 'ceil', and 'floor' methods that are consistent with Double") {
      forAll { (pdouble: FiniteDouble) =>
        pdouble.round.toDouble shouldEqual pdouble.toDouble.round
        pdouble.ceil.toDouble shouldEqual pdouble.toDouble.ceil
        pdouble.floor.toDouble shouldEqual pdouble.toDouble.floor
      }
    }
it("should offer 'toRadians' and 'toDegrees' methods that are consistent with Double") {
forAll { (pdouble: FiniteDouble) =>
pdouble.toRadians shouldEqual pdouble.toDouble.toRadians
}
}
    // ensuringValid applies the function and re-validates: finite results are
    // wrapped, non-finite results (the infinities) trip an AssertionError.
    it("should offer an ensuringValid method that takes a Double => Double, throwing AssertionError if the result is invalid") {
      FiniteDouble(33.0).ensuringValid(_ + 1.0) shouldEqual FiniteDouble(34.0)
      FiniteDouble(0.0).ensuringValid(_ + Double.MinValue) shouldEqual FiniteDouble.MinValue
      FiniteDouble(0.0).ensuringValid(_ + Double.MaxValue) shouldEqual FiniteDouble.MaxValue
      FiniteDouble(0.0).ensuringValid(_ + Double.MinPositiveValue) shouldEqual FiniteDouble.MinPositiveValue
      an [AssertionError] should be thrownBy { FiniteDouble.MaxValue.ensuringValid(_ => Double.PositiveInfinity) }
      an [AssertionError] should be thrownBy { FiniteDouble.MaxValue.ensuringValid(_ => Double.NegativeInfinity) }
    }
}
}
| scalatest/scalatest | jvm/scalactic-test/src/test/scala/org/scalactic/anyvals/FiniteDoubleSpec.scala | Scala | apache-2.0 | 18,084 |
package core
import play.api.cache.SyncCacheApi
/** Mix-in exposing a Play synchronous cache; implementors supply the instance. */
trait CacheAware {
  val cache: SyncCacheApi
}
trait DefaultCacheAware extends CacheAware {
val cache: SyncCacheApi = DefaultCacheProvider.getInstance().cache
} | tegonal/lasius | app/core/CacheAware.scala | Scala | gpl-3.0 | 213 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.util.concurrent.ExecutionException
import java.util.concurrent.atomic.AtomicReference
import java.util.{Properties}
import kafka.common.TopicAndPartition
import kafka.integration.KafkaServerTestHarness
import kafka.server._
import kafka.utils._
import org.apache.kafka.clients.consumer._
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.{ClusterResource, ClusterResourceListener, TopicPartition}
import org.apache.kafka.test.{TestUtils => _, _}
import org.junit.Assert._
import org.junit.{Before, Test}
import scala.collection.JavaConverters._
import org.apache.kafka.test.TestUtils.isValidClusterId
import scala.collection.mutable.ArrayBuffer
/** The test cases here verify the following conditions.
* 1. The ProducerInterceptor receives the cluster id after the onSend() method is called and before onAcknowledgement() method is called.
* 2. The Serializer receives the cluster id before the serialize() method is called.
* 3. The producer MetricReporter receives the cluster id after send() method is called on KafkaProducer.
* 4. The ConsumerInterceptor receives the cluster id before the onConsume() method.
* 5. The Deserializer receives the cluster id before the deserialize() method is called.
* 6. The consumer MetricReporter receives the cluster id after poll() is called on KafkaConsumer.
* 7. The broker MetricReporter receives the cluster id after the broker startup is over.
* 8. The broker KafkaMetricReporter receives the cluster id after the broker startup is over.
* 9. All the components receive the same cluster id.
*/
object EndToEndClusterIdTest {

  // Each mock reporter stores the ClusterResource pushed to it in a static
  // AtomicReference so the test can later assert that all pluggable components
  // observed the same cluster id. Statics are required because Kafka
  // instantiates these reporter classes reflectively by class name.

  object MockConsumerMetricsReporter {
    val CLUSTER_META = new AtomicReference[ClusterResource]
  }

  class MockConsumerMetricsReporter extends MockMetricsReporter with ClusterResourceListener {
    // Explicit `: Unit =` instead of deprecated procedure syntax.
    override def onUpdate(clusterMetadata: ClusterResource): Unit = {
      MockConsumerMetricsReporter.CLUSTER_META.set(clusterMetadata)
    }
  }

  object MockProducerMetricsReporter {
    val CLUSTER_META = new AtomicReference[ClusterResource]
  }

  class MockProducerMetricsReporter extends MockMetricsReporter with ClusterResourceListener {
    override def onUpdate(clusterMetadata: ClusterResource): Unit = {
      MockProducerMetricsReporter.CLUSTER_META.set(clusterMetadata)
    }
  }

  object MockBrokerMetricsReporter {
    val CLUSTER_META = new AtomicReference[ClusterResource]
  }

  class MockBrokerMetricsReporter extends MockMetricsReporter with ClusterResourceListener {
    override def onUpdate(clusterMetadata: ClusterResource): Unit = {
      MockBrokerMetricsReporter.CLUSTER_META.set(clusterMetadata)
    }
  }
}
class EndToEndClusterIdTest extends KafkaServerTestHarness {
import EndToEndClusterIdTest._
  // A single broker with one producer/consumer is enough to exercise the
  // cluster-id plumbing end to end.
  val producerCount = 1
  val consumerCount = 1
  val serverCount = 1
  lazy val producerConfig = new Properties
  lazy val consumerConfig = new Properties
  lazy val serverConfig = new Properties
  val numRecords = 1
  val topic = "e2etopic"
  val part = 0
  val tp = new TopicPartition(topic, part)
  val topicAndPartition = new TopicAndPartition(topic, part)
  // Register the mock broker reporter (by name, loaded reflectively) so the
  // broker pushes its cluster id during startup.
  this.serverConfig.setProperty(KafkaConfig.MetricReporterClassesProp, "kafka.api.EndToEndClusterIdTest$MockBrokerMetricsReporter")
  // Builds the broker configs for the harness, layering serverConfig (which
  // carries the mock metrics reporter registration) on top of the generated
  // defaults before converting to KafkaConfig.
  override def generateConfigs() = {
    val cfgs = TestUtils.createBrokerConfigs(serverCount, zkConnect, interBrokerSecurityProtocol = Some(securityProtocol),
      trustStoreFile = trustStoreFile, saslProperties = serverSaslProperties)
    cfgs.foreach(_.putAll(serverConfig))
    cfgs.map(KafkaConfig.fromProps)
  }
@Before
override def setUp() {
super.setUp
// create the consumer offset topic
TestUtils.createTopic(this.zkUtils, topic, 2, serverCount, this.servers)
}
  @Test
  def testEndToEnd() {
    // Verifies that the cluster id reaches every pluggable component (broker
    // reporter, producer interceptor/serializer/reporter, consumer
    // interceptor/deserializer/reporter) at the documented point in its
    // lifecycle, and that all components see the same id.
    val appendStr = "mock"
    MockConsumerInterceptor.resetCounters()
    MockProducerInterceptor.resetCounters()
    // The broker reporter was registered via serverConfig, so it must already
    // hold a valid cluster id after broker startup.
    assertNotNull(MockBrokerMetricsReporter.CLUSTER_META)
    isValidClusterId(MockBrokerMetricsReporter.CLUSTER_META.get.clusterId)
    val producerProps = new Properties()
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
    producerProps.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, "org.apache.kafka.test.MockProducerInterceptor")
    producerProps.put("mock.interceptor.append", appendStr)
    producerProps.put(ProducerConfig.METRIC_REPORTER_CLASSES_CONFIG, "kafka.api.EndToEndClusterIdTest$MockProducerMetricsReporter")
    val testProducer = new KafkaProducer(producerProps, new MockSerializer, new MockSerializer)
    // Send one record and make sure clusterId is set after send and before onAcknowledgement
    sendRecords(testProducer, 1, tp)
    assertNotEquals(MockProducerInterceptor.CLUSTER_ID_BEFORE_ON_ACKNOWLEDGEMENT, MockProducerInterceptor.NO_CLUSTER_ID)
    assertNotNull(MockProducerInterceptor.CLUSTER_META)
    assertEquals(MockProducerInterceptor.CLUSTER_ID_BEFORE_ON_ACKNOWLEDGEMENT.get.clusterId, MockProducerInterceptor.CLUSTER_META.get.clusterId)
    isValidClusterId(MockProducerInterceptor.CLUSTER_META.get.clusterId)
    // Make sure that serializer gets the cluster id before serialize method.
    assertNotEquals(MockSerializer.CLUSTER_ID_BEFORE_SERIALIZE, MockSerializer.NO_CLUSTER_ID)
    assertNotNull(MockSerializer.CLUSTER_META)
    isValidClusterId(MockSerializer.CLUSTER_META.get.clusterId)
    assertNotNull(MockProducerMetricsReporter.CLUSTER_META)
    isValidClusterId(MockProducerMetricsReporter.CLUSTER_META.get.clusterId)
    this.consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
    this.consumerConfig.setProperty(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG, "org.apache.kafka.test.MockConsumerInterceptor")
    this.consumerConfig.put(ConsumerConfig.METRIC_REPORTER_CLASSES_CONFIG, "kafka.api.EndToEndClusterIdTest$MockConsumerMetricsReporter")
    val testConsumer = new KafkaConsumer(this.consumerConfig, new MockDeserializer, new MockDeserializer)
    testConsumer.assign(List(tp).asJava)
    testConsumer.seek(tp, 0)
    // consume and verify that values are modified by interceptors
    consumeRecords(testConsumer, numRecords)
    // Check that cluster id is present after the first poll call.
    assertNotEquals(MockConsumerInterceptor.CLUSTER_ID_BEFORE_ON_CONSUME, MockConsumerInterceptor.NO_CLUSTER_ID)
    assertNotNull(MockConsumerInterceptor.CLUSTER_META)
    isValidClusterId(MockConsumerInterceptor.CLUSTER_META.get.clusterId)
    assertEquals(MockConsumerInterceptor.CLUSTER_ID_BEFORE_ON_CONSUME.get.clusterId, MockConsumerInterceptor.CLUSTER_META.get.clusterId)
    assertNotEquals(MockDeserializer.CLUSTER_ID_BEFORE_DESERIALIZE, MockDeserializer.NO_CLUSTER_ID)
    assertNotNull(MockDeserializer.CLUSTER_META)
    isValidClusterId(MockDeserializer.CLUSTER_META.get.clusterId)
    assertEquals(MockDeserializer.CLUSTER_ID_BEFORE_DESERIALIZE.get.clusterId, MockDeserializer.CLUSTER_META.get.clusterId)
    assertNotNull(MockConsumerMetricsReporter.CLUSTER_META)
    isValidClusterId(MockConsumerMetricsReporter.CLUSTER_META.get.clusterId)
    // Make sure everyone receives the same cluster id.
    assertEquals(MockProducerInterceptor.CLUSTER_META.get.clusterId, MockSerializer.CLUSTER_META.get.clusterId)
    assertEquals(MockProducerInterceptor.CLUSTER_META.get.clusterId, MockProducerMetricsReporter.CLUSTER_META.get.clusterId)
    assertEquals(MockProducerInterceptor.CLUSTER_META.get.clusterId, MockConsumerInterceptor.CLUSTER_META.get.clusterId)
    assertEquals(MockProducerInterceptor.CLUSTER_META.get.clusterId, MockDeserializer.CLUSTER_META.get.clusterId)
    assertEquals(MockProducerInterceptor.CLUSTER_META.get.clusterId, MockConsumerMetricsReporter.CLUSTER_META.get.clusterId)
    assertEquals(MockProducerInterceptor.CLUSTER_META.get.clusterId, MockBrokerMetricsReporter.CLUSTER_META.get.clusterId)
    testConsumer.close()
    testProducer.close()
    MockConsumerInterceptor.resetCounters()
    MockProducerInterceptor.resetCounters()
  }
private def sendRecords(producer: KafkaProducer[Array[Byte], Array[Byte]], numRecords: Int, tp: TopicPartition) {
val futures = (0 until numRecords).map { i =>
val record = new ProducerRecord(tp.topic(), tp.partition(), s"$i".getBytes, s"$i".getBytes)
debug(s"Sending this record: $record")
producer.send(record)
}
try {
futures.foreach(_.get)
} catch {
case e: ExecutionException => throw e.getCause
}
}
  // Polls until `numRecords` records arrive (giving up after numRecords * 50
  // poll attempts), then asserts the topic, partition and sequential offsets
  // of each record received.
  private def consumeRecords(consumer: Consumer[Array[Byte], Array[Byte]],
                             numRecords: Int = 1,
                             startingOffset: Int = 0,
                             topic: String = topic,
                             part: Int = part) {
    val records = new ArrayBuffer[ConsumerRecord[Array[Byte], Array[Byte]]]()
    val maxIters = numRecords * 50
    var iters = 0
    while (records.size < numRecords) {
      for (record <- consumer.poll(50).asScala) {
        records += record
      }
      if (iters > maxIters)
        throw new IllegalStateException("Failed to consume the expected records after " + iters + " iterations.")
      iters += 1
    }
    // Offsets are expected to be consecutive starting at startingOffset.
    for (i <- 0 until numRecords) {
      val record = records(i)
      val offset = startingOffset + i
      assertEquals(topic, record.topic)
      assertEquals(part, record.partition)
      assertEquals(offset.toLong, record.offset)
    }
  }
}
| ijuma/kafka | core/src/test/scala/integration/kafka/api/EndToEndClusterIdTest.scala | Scala | apache-2.0 | 10,255 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package common
import java.time.Instant
import scala.concurrent.duration._
trait TimingHelpers {

  /** Elapsed time between two instants, as a scala.concurrent.duration.Duration. */
  def between(start: Instant, end: Instant): Duration =
    Duration.fromNanos(java.time.Duration.between(start, end).toNanos)

  /**
   * Runs `block` and returns the time it took together with its result.
   * Uses the monotonic System.nanoTime instead of Instant.now so that
   * wall-clock adjustments (e.g. NTP) cannot yield negative or skewed
   * measurements.
   */
  def durationOf[A](block: => A): (Duration, A) = {
    val startNanos = System.nanoTime()
    val value = block
    val elapsed = Duration.fromNanos(System.nanoTime() - startNanos)
    (elapsed, value)
  }
}
| starpit/openwhisk | tests/src/test/scala/common/TimingHelpers.scala | Scala | apache-2.0 | 1,197 |
import scala.tools.partest.ReplTest
/** REPL session test (t4172): a locally defined class plus an anonymous
  * refinement of it inside a tuple; the harness replays `code` in a fresh REPL
  * and diffs the transcript against the check file, so the string below must
  * not be altered. */
object Test extends ReplTest {
  def code = """
val c = { class C { override def toString = "C" }; ((new C, new C { def f = 2 })) }
  """
}
| yusuke2255/dotty | tests/pending/run/t4172.scala | Scala | bsd-3-clause | 181 |
package toguru.impl
import com.codahale.metrics.{Gauge, JmxReporter, MetricRegistry}
import org.komamitsu.failuredetector.PhiAccuralFailureDetector
import scala.concurrent.duration.Duration
/** JMX metrics and connectivity tracking for the toguru client: a phi-accrual
  * failure detector fed by successful fetches, plus gauges/counters exposed
  * under the `toguru-client` JMX domain. */
trait ToguruClientMetrics {

  /** How often the client polls the toguru server; sizes the failure detector. */
  def pollInterval: Duration

  /** Sequence number of the last successfully applied toggle state, if any. */
  def currentSequenceNo: Option[Long]

  val RegistryDomain = "toguru-client"
  val ConnectivityPhiGauge = "connectivity-phi-gauge"
  val SequenceNoGauge = "sequence-no-gauge"
  val ConnectErrorCount = "connect-error-count"
  val FetchFailureCount = "fetch-failure-count"

  val metricsRegistry = new MetricRegistry()
  val reporter = JmxReporter.forRegistry(metricsRegistry).inDomain(RegistryDomain).build()
  reporter.start()

  // Phi rises when heartbeats (fetch successes) stop arriving; thresholds are
  // derived from the poll interval.
  val connectivity = {
    val builder = new PhiAccuralFailureDetector.Builder()
    builder.setThreshold(16)
    builder.setMaxSampleSize(200)
    builder.setAcceptableHeartbeatPauseMillis(pollInterval.toMillis)
    builder.setFirstHeartbeatEstimateMillis(pollInterval.toMillis)
    builder.setMinStdDeviationMillis(100)
    builder.build()
  }

  // this heartbeat call is needed to kickoff phi computation
  connectivity.heartbeat()

  metricsRegistry.register(ConnectivityPhiGauge, new Gauge[Double] { def getValue = connectivity.phi() })
  metricsRegistry.register(SequenceNoGauge, new Gauge[Long] { def getValue: Long = currentSequenceNo.getOrElse(0) })
  val connectErrors = metricsRegistry.counter(ConnectErrorCount)
  val fetchFailures = metricsRegistry.counter(FetchFailureCount)

  def fetchSuccess() = connectivity.heartbeat()
  def fetchFailed() = fetchFailures.inc()
  def connectError() = connectErrors.inc()
  def healthy() = connectivity.isAvailable()

  /** Removes every metric this trait registered and stops JMX reporting.
    * Fix: SequenceNoGauge was previously left behind, leaking the gauge and
    * breaking re-registration of a new instance in the same registry. */
  def deregister() = {
    metricsRegistry.remove(ConnectivityPhiGauge)
    metricsRegistry.remove(SequenceNoGauge)
    metricsRegistry.remove(ConnectErrorCount)
    metricsRegistry.remove(FetchFailureCount)
    reporter.close()
  }
}
| andreas-schroeder/toguru-scala-client | src/main/scala/toguru/impl/ToguruClientMetrics.scala | Scala | mit | 1,865 |
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.scalastyle
import scala.Option.option2Iterable
import scala.xml.NodeSeq.seqToNodeSeq
import scala.xml.Atom
import scala.xml.Comment
import scala.xml.Elem
import scala.xml.EntityRef
import scala.xml.Group
import scala.xml.NamespaceBinding
import scala.xml.Node
import scala.xml.PrettyPrinter
import scala.xml.ProcInstr
import scala.xml.Text
import scala.xml.TextBuffer
import scala.xml.Utility
import scala.xml.XML
import scala.xml.Attribute
import scala.xml.MinimizeMode
object Level {
  val Warning = "warning"
  val Error = "error"
  val Info = "info"

  /** Parses a serialized level name; anything unrecognized (including the
    * explicit "warning") resolves to WarningLevel, as before. */
  def apply(s: String): Level =
    Map(Error -> ErrorLevel, Info -> InfoLevel, Warning -> WarningLevel)
      .getOrElse(s, WarningLevel)
}
/** Severity of a reported violation; `name` is the serialized form used in the XML config. */
sealed abstract class Level(val name: String)
case object ErrorLevel extends Level(Level.Error)
case object WarningLevel extends Level(Level.Warning)
case object InfoLevel extends Level(Level.Info)
object ParameterType {
  val Integer = "integer"
  val String = "string"
  val Boolean = "boolean"

  /** Parses a serialized parameter-type name; "string" and any unrecognized
    * name both map to StringType, matching the original match expression. */
  def apply(s: String): ParameterType =
    Map(Integer -> IntegerType, Boolean -> BooleanType)
      .getOrElse(s, StringType)
}
/** Declared type of a checker parameter; `name` is the serialized form. */
sealed abstract class ParameterType(val name: String)
case object IntegerType extends ParameterType(ParameterType.Integer)
case object StringType extends ParameterType(ParameterType.String)
case object BooleanType extends ParameterType(ParameterType.Boolean)

/** One configured checker instance as read from / written to the XML configuration. */
case class ConfigurationChecker(className: String, level: Level, enabled: Boolean, parameters: Map[String, String],
                                customMessage: Option[String], customId: Option[String])
/** Reads and writes the scalastyle XML configuration (the list of enabled
  * checkers with their levels, parameters and optional custom message/id). */
object ScalastyleConfiguration {
  val DefaultConfiguration: String = "/default_config.xml"
  val Enabled = "enabled"
  val Disabled = "disabled"
  val True = "true"
  val False = "false"
  val Name = "name"
  // Loads the default configuration bundled on the classpath.
  def getDefaultConfiguration(): ScalastyleConfiguration = {
    val is = this.getClass().getClassLoader().getResourceAsStream(DefaultConfiguration)
    fromXml(XML.load(is))
  }
  def readFromXml(file: String): ScalastyleConfiguration = fromXml(XML.loadFile(file))
  def readFromString(s: String): ScalastyleConfiguration = fromXml(XML.loadString(s))
  // Parses the root element: commentFilter defaults to enabled unless
  // explicitly set to "disabled" (case-insensitive).
  private[this] def fromXml(elem: Elem) = {
    val commentFilter = elem.attribute("commentFilter").getOrElse(scala.xml.Text(Enabled)).text.toLowerCase() != Disabled
    val name = (elem \\\\ Name).text
    ScalastyleConfiguration(name, commentFilter, (elem \\\\ "check").map(toCheck).toList)
  }
  // Text of the first matching descendant element, if any.
  private def contentsOf(node: Node, n: String) = {
    val ns = (node \\\\ n)
    if (ns.size == 0) None else (Some(ns(0).text))
  }
  // Parses one <check> element. "class" and "level" attributes are required
  // (Option.get will throw on malformed input); "enabled" defaults to false.
  def toCheck(node: Node): ConfigurationChecker = {
    val className = node.attribute("class").get.text
    val level = Level(node.attribute("level").get.text)
    val enabled = node.attribute(Enabled).getOrElse(scala.xml.Text(False)).text.toLowerCase() == True
    val customMessage = contentsOf(node, "customMessage")
    val customId = node.attribute("customId").flatMap(x => Some(x.text))
    // A parameter's value may come from a "value" attribute or the element text.
    ConfigurationChecker(className, level, enabled, (node \\\\ "parameters" \\\\ "parameter").map(e => {
      val attributeValue = e.attribute("value")
      val value = if (attributeValue.isDefined) attributeValue.get.text else e.text
      (e.attribute(Name).head.text -> value)
    }).toMap, customMessage, customId)
  }
  // Wraps arbitrary text in a CDATA section so it survives serialization verbatim.
  private[this] def toCDATA(s: String) = scala.xml.Unparsed("<![CDATA[" + s + "]]>")
  // Serializes the configuration back to the <scalastyle> XML structure.
  def toXml(scalastyleConfiguration: ScalastyleConfiguration): scala.xml.Elem = {
    val elements = scalastyleConfiguration.checks.map(c => {
      val parameters = if (c.parameters.size > 0) {
        val ps = c.parameters.map(p => {
          val text = toCDATA(p._2)
          <parameter name={p._1}>{text}</parameter>
        })
        <parameters>{ps}</parameters>
      } else {
        scala.xml.Null
      }
      val customMessage = c.customMessage match {
        case Some(s) => {
          val text = toCDATA(s)
          <customMessage>{text}</customMessage>
        }
        case None => scala.xml.Null
      }
      val check = <check class={c.className} level={c.level.name} enabled={if (c.enabled) True else False}>{customMessage}{parameters}</check>
      // customId is emitted as an attribute only when present.
      c.customId match {
        case Some(x) => check % Attribute(None, "customId", Text(x), scala.xml.Null)
        case None => check
      }
    })
    <scalastyle commentFilter={if (scalastyleConfiguration.commentFilter) Enabled else Disabled}>
      <name>{scalastyleConfiguration.name}</name>
      {elements}
    </scalastyle>
  }
  // Pretty-prints via the CDATA-preserving printer defined below.
  def toXmlString(scalastyleConfiguration: ScalastyleConfiguration, width: Int, step: Int): String =
    new XmlPrettyPrinter(width, step).format(toXml(scalastyleConfiguration))
}
/** A complete parsed configuration: display name, comment-filter flag and checkers. */
case class ScalastyleConfiguration(name: String, commentFilter: Boolean, checks: List[ConfigurationChecker])

// definition: the checker catalogue (metadata about available checkers),
// as opposed to the user configuration above.
/** One declared parameter of a checker definition, with its default value. */
case class DefinitionParameter(name: String, typeName: ParameterType, multiple: Boolean, defaultValue: String)
/** One available checker with its default level and declared parameters. */
case class DefinitionChecker(className: String, id: String, level: Level, parameters: Map[String, DefinitionParameter])
/** Reads the checker-definition catalogue XML (<checker> elements). */
object ScalastyleDefinition {
  def readFromXml(stream: java.io.InputStream): ScalastyleDefinition = {
    val elem = XML.load(stream)
    ScalastyleDefinition((elem \\\\ "checker").map(toCheck).toList)
  }
  // Parses one <checker> element and its declared <parameter>s.
  def toCheck(node: Node): DefinitionChecker = {
    val className = stringAttr(node, "class")
    val id = stringAttr(node, "id")
    val defaultLevel = levelAttr(node, "defaultLevel")
    DefinitionChecker(className, id, defaultLevel, (node \\\\ "parameters" \\\\ "parameter").map(e => {
      val parameterName = stringAttr(e, "name")
      val parameterType = typeAttr(e, "type")
      val multiple = booleanAttr(e, "multiple")
      val defaultValue = stringAttr(e, "default")
      (parameterName -> DefinitionParameter(parameterName, parameterType, multiple, defaultValue))
    }).toMap)
  }
  // Typed attribute accessors, each with a sensible default for absent attributes.
  def stringAttr(node: Node, id: String): String = {
    attr(node, id, "", {s => s})
  }
  def levelAttr(node: Node, id: String): Level = {
    attr(node, id, Level.Warning, {s => Level(s)})
  }
  def typeAttr(node: Node, id: String): ParameterType = {
    attr(node, id, "string", {s => ParameterType(s)})
  }
  def booleanAttr(node: Node, id: String): Boolean = {
    attr(node, id, "false", {s => "true" == s.toLowerCase()})
  }
  // Applies fn to the attribute text, or to defaultValue when the attribute is absent.
  def attr[T](node: Node, id: String, defaultValue: String, fn: (String) => T): T = {
    node.attribute(id) match {
      case Some(x) => fn(x.text)
      case _ => fn(defaultValue)
    }
  }
}
/** The full parsed checker catalogue. */
case class ScalastyleDefinition(checkers: List[DefinitionChecker])
// it's unfortunate that we have to do this, but the scala xml PrettyPrinter converts CDATA sections to
// Text, which means that multiple lines get wrapped into one. So we extend PrettyPrinter
// so that they don't get eaten
// see also https://issues.scala-lang.org/browse/SI-3368
/** PrettyPrinter variant that leaves node text (notably CDATA sections)
  * untouched instead of re-flowing it; see the comment above. */
class XmlPrettyPrinter(width: Int, step: Int) extends PrettyPrinter(width, step) {
  // this is the method which has changed: always preserve node text verbatim.
  private def doPreserve(node: Node) = true
  // This is just a copy of what's in scala.xml.PrettyPrinter
  /** @param tail: what we'd like to squeeze in */
  protected override def traverse(node: Node, pscope: NamespaceBinding, ind: Int): Unit = node match {
    case Text(s) if s.trim() == "" =>
      ;
    case _:Atom[_] | _:Comment | _:EntityRef | _:ProcInstr =>
      makeBox(ind, node.toString().trim() )
    case g @ Group(xs) =>
      traverse(xs.iterator, pscope, ind)
    case _ =>
      // Serialize the node once; because doPreserve is always true, the
      // TextBuffer re-flow branch of the stock PrettyPrinter is never taken.
      val test = {
        val sb = new StringBuilder()
        Utility.serialize(node, pscope, sb, false, minimizeTags = MinimizeMode.Default)
        if (doPreserve(node)) sb.toString else TextBuffer.fromString(sb.toString()).toText(0).data
      }
      if (childrenAreLeaves(node) && fits(test)) {
        makeBox(ind, test)
      } else {
        val (stg, len2) = startTag(node, pscope)
        val etg = endTag(node)
        if (stg.length < width - cur) { // start tag fits
          makeBox(ind, stg)
          makeBreak()
          traverse(node.child.iterator, node.scope, ind + step)
          makeBox(ind, etg)
        } else if (len2 < width - cur) {
          // <start label + attrs + tag + content + end tag
          makeBox(ind, stg.substring(0, len2))
          makeBreak() // todo: break the rest in pieces
          makeBox(ind, stg.substring(len2, stg.length))
          makeBreak()
          traverse(node.child.iterator, node.scope, ind + step)
          makeBox(cur, etg)
          makeBreak()
        } else { // give up
          makeBox(ind, test)
          makeBreak()
        }
      }
  }
}
| jkerfs/scalastyle | src/main/scala/org/scalastyle/ScalastyleConfiguration.scala | Scala | apache-2.0 | 9,466 |
package com.github.bzumhagen.sct
import com.github.bzumhagen.sct.ChangeGroup.load
import com.github.zafarkhaja.semver.Version
/** Binding for the verbose changelog template: exposes change groups ordered
  * newest version first, plus the raw changes themselves. */
class VerboseChangeBinding(val template: String, val changes: Seq[ChangelogChange]) extends ChangeBinding {
  require(changes.nonEmpty, "Cannot build change bindings without changes")

  override def buildChangeBindings: Map[String, Any] = {
    val byVersion = changes.groupBy(_.version).toSeq
    val newestFirst = byVersion.sortWith { case ((left, _), (right, _)) => left.greaterThan(right) }
    val orderedGroups = newestFirst.map { case (_, versionChanges) => versionChanges }
    Map(
      "changeGroups" -> orderedGroups.flatMap(load),
      "changes" -> changes
    )
  }
}
| bzumhagen/sct | src/main/scala/com/github/bzumhagen/sct/VerboseChangeBinding.scala | Scala | mit | 593 |
package models
/** Atom-feed settings: titles, author, feed identity and rendering limits. */
case class FeedConfig(
  title: String,
  subtitle: String,
  authorName: String,
  feedId: String,
  baseUrl: String,
  summaryLength: Int,
  bodyLength: Int,
  copyright: String
)
/** The production configuration, exposed as a singleton instance of the case class. */
object FeedConfig extends FeedConfig(
  title = "wasis.nu/mit/blog",
  subtitle = "Blog about technical stuff.",
  authorName = "Cornelius Lilge",
  feedId = "urn:uuid:14369a20-1023-11e4-9191-0800200c9a66",
  baseUrl = "https://wasis.nu/mit/blog",
  summaryLength = 200,
  bodyLength = 1000,
  copyright = "(c) 2014 - wasis.nu"
)
| sne11ius/playlog | app/models/FeedConfig.scala | Scala | gpl-3.0 | 529 |
package com.ergodicity.marketdb.loader
import org.slf4j.LoggerFactory
import org.scalatest.WordSpec
import com.twitter.finagle.kestrel.Client
import org.mockito.Mockito._
import org.mockito.Matchers._
import util.{BatchSettings, Iteratees}
import com.ergodicity.marketdb.model.TradeProtocol._
class TradeDataToKestrelTest extends WordSpec {
val log = LoggerFactory.getLogger(classOf[TradeDataIterateeTest])
val Queue = "Queue"
val RtsTrades = () => {
RtsTradeHistory(InputStreamRef(this.getClass.getResourceAsStream("/data/FT120201.zip")))
}
  // The fixture zip contains 60 trades; each case varies BatchSettings
  // (batch size, optional record limit) and verifies both the report counts
  // and the number of bulk writes issued to the mocked Kestrel client.
  "Kestrel Iteratee" must {
    "push messages to Kestrel" in {
      import TradeDataIteratee._
      import Iteratees._
      val client = mock(classOf[Client])
      // Batch of 1000 swallows all 60 records in a single write.
      implicit val settings = BatchSettings(1000, None)
      val reportIo = RtsTrades().enumTradeData(kestrelBulkLoader(Queue, client)) map (_.run)
      val report = reportIo.unsafePerformIO
      log.info("Report: " + report)
      assert(report.count == 60)
      assert(report.list.size == 0)
      verify(client, only()).write(anyString(), any())
    }
    "push messages to Kestrel with specified bulk size" in {
      import TradeDataIteratee._
      import Iteratees._
      val client = mock(classOf[Client])
      // 60 records / batch size 40 => two writes.
      implicit val settings = BatchSettings(40, None)
      val reportIo = RtsTrades().enumTradeData(kestrelBulkLoader(Queue, client)) map (_.run)
      val report = reportIo.unsafePerformIO
      log.info("Report: " + report)
      assert(report.count == 60)
      assert(report.list.size == 0)
      verify(client, times(2)).write(anyString(), any())
    }
    "push messages to Kestrel with specified bulk size and limit" in {
      import TradeDataIteratee._
      import Iteratees._
      val client = mock(classOf[Client])
      // Limit 50 caps the count; still two writes (40 + 10).
      implicit val settings = BatchSettings(40, Some(50))
      val reportIo = RtsTrades().enumTradeData(kestrelBulkLoader(Queue, client)) map (_.run)
      val report = reportIo.unsafePerformIO
      log.info("Report: " + report)
      assert(report.count == 50)
      assert(report.list.size == 0)
      verify(client, times(2)).write(anyString(), any())
    }
    "push messages to Kestrel with specified bulk size and limit below bulk size" in {
      import TradeDataIteratee._
      import Iteratees._
      val client = mock(classOf[Client])
      // Limit 20 is below the batch size, so a single (short) write suffices.
      implicit val settings = BatchSettings(40, Some(20))
      val reportIo = RtsTrades().enumTradeData(kestrelBulkLoader(Queue, client)) map (_.run)
      val report = reportIo.unsafePerformIO
      log.info("Report: " + report)
      assert(report.count == 20)
      assert(report.list.size == 0)
      verify(client, times(1)).write(anyString(), any())
    }
    // NOTE(review): "uniqie" is a typo for "unique" in the test name below;
    // left unchanged here because the name is a runtime string.
    "push any uniqie payload" in {
      import TradeDataIteratee._
      import Iteratees._
      val client = mock(classOf[Client])
      // Batch size 1 => one write per record.
      implicit val settings = BatchSettings(1, None)
      val reportIo = RtsTrades().enumTradeData(kestrelBulkLoader(Queue, client)) map (_.run)
      val report = reportIo.unsafePerformIO
      log.info("Report: " + report)
      assert(report.count == 60)
      assert(report.list.size == 0)
      verify(client, times(60)).write(anyString(), any())
    }
  }
} | ezhulenev/marketdb | marketdb-loader/src/test/scala/com/ergodicity/marketdb/loader/TradeDataToKestrelTest.scala | Scala | mit | 3,209 |
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.3
* @date Sun Nov 1 13:59:40 EST 2015
* @see LICENSE (MIT style license file).
*
* @see vlsicad.eecs.umich.edu/BK/Slots/cache/www.cise.ufl.edu/~davis/Morgan/
* @see www.optimization-online.org/DB_FILE/2013/05/3897.pdf
* @see www.maths.ed.ac.uk/hall/HuHa12/ERGO-13-001.pdf
* @see www.era.lib.ed.ac.uk/bitstream/handle/1842/7952/Huangfu2013.pdf?sequence=2&isAllowed=y
*/
// U N D E R D E V E L O P M E N T
package scalation.minima
import scala.collection.mutable.{ArrayBuffer, ArrayStack}
import scala.util.control.Breaks.{breakable, break}
import scalation.linalgebra.{MatriD, VectoD, VectorD, VectorI}
import scalation.random.Randi
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Ftran` object ...
*/
object Ftran
{
    // All vectors are sized m+1 and indexed 1..m (index 0 unused), matching
    // the 1-based pseudo-code in the referenced thesis figures.
    val m = 10
    val x = new VectorD (m+1)
    val Xindex = new VectorI (m+1)
    val Lstart = new VectorI (m+1)
    val Lend = new VectorI (m+1)
    val Lindex = new VectorI (m+1)
    val Lvalue = new VectorD (m+1)
    val Lpiv_i = new VectorI (m+1)
    val Ustart = new VectorI (m+1)
    val Uend = new VectorI (m+1)
    val Uindex = new VectorI (m+1)
    val Uvalue = new VectorD (m+1)
    val Upiv_i = new VectorI (m+1)
    val Upiv_x = new VectorD (m+1)

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Solve L U y = x in place in `x`, using the permuted LU eta factors.
     *  Figure 2.4: Standard 'ftran' with permuted LU factors
     */
    def ftran ()
    {
        // Solve with the lower factor
        for (i <- 1 to m) {
            val pivot = x(Lpiv_i(i))
            if (pivot != 0)                            // skip zero pivots (hyper-sparsity)
                for (k <- Lstart(i) until Lend(i))
                    x(Lindex(k)) += pivot * Lvalue(k)
        } // for

        // 2. Solve with the upper factor
        for (i <- m to 1 by -1) {                      // backward over the U etas
            var pivot = x(Upiv_i(i))
            if (pivot != 0) {
                pivot = pivot / Upiv_x(i)              // scale by the stored pivot value
                x(Upiv_i(i)) = pivot
                for (k <- Ustart(i) until Uend(i))
                    x(Uindex(k)) += pivot * Uvalue(k)
            } // if
        } // for
    } // ftran

    val Ulookup = new VectorI (m+1)
    val URstart = new VectorI (m+1)
    val URindex = new VectorI (m+1)
    val URcount = new VectorI (m+1)
    val URvalue = new VectorD (m+1)

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Build the row-wise (UR) representation of the permuted upper factor
     *  by the classic count / accumulate / fill three-pass scheme.
     *  Figure 2.5: Form row-wise representation for a permuted factor.
     */
    def permute ()
    {
        // 1. Counting non-zero entries for each UR eta matrix j
        for (i <- 1 to m) {
            for (k <- Ustart(i) until Uend(i)) {
                val iRow = Ulookup(Uindex(k)) // index in the triangular factor
                URcount(iRow) += 1
            } // for
        } // for
        // 2. Constructing the URstart pointer by accumulation
        URstart(1) = 1
        for (i <- 2 to m) URstart(i) = URstart(i-1) + URcount(i-1)
        // 3. Filling UR element, URend becomes ready afterwards
        // NOTE(review): URend aliases URstart (VectorI is mutable, no copy is
        // taken), so after this loop URstart holds the END pointers of each
        // row, not the starts — confirm downstream code expects this, or copy.
        val URend = URstart
        for (i <- 1 to m) {
            for (k <- Ustart(i) until Uend(i)) {
                val iRow = Ulookup(Uindex(k))
                val iPut = URend(iRow)
                URend(iRow) += 1
                URindex(iPut) = Upiv_i(i) // index in the permuted factor
                URvalue(iPut) = Uvalue(k)
            } // for
        } // for
    } // permute

    val Hlookup = new VectorI (m+1)
    val Hstart = new VectorI (m+1)
    val Hend = new VectorI (m+1)
    val Hindex = new VectorI (m+1)
    // Explicit stack replaces recursion so deep eta chains cannot overflow.
    val stack = new ArrayStack [Tuple2 [Int, Int]] ()
    val list = ArrayBuffer [Int] ()
    val visited = Array.ofDim [Int] (m+1)

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Iterative depth-first search over the eta-matrix dependency graph,
     *  appending each fully explored eta to `list` (the FTRAN to-do list).
     *  Figure 2.6: DFS based hyper-sparse 'ftran': search stage
     *  @param Xcount  number of non-zeros in the right-hand side (entries of Xindex)
     */
    def dfs_search (Xcount: Int)
    {
        for (t <- 1 to Xcount) {
            var (i, k) = (0, 0) // ith eta matrix of H, the next non-zero position to visit
            i = Hlookup(Xindex(t))
            k = Hstart(i)
            if (visited(i) == 0) {
                visited(i) = 1
                var go = true
                while (go) { // keep searching current ETA until finish
                    if (k < Hend(i)) {
                        val child = Hlookup(Hindex(k)) // move to a child if it is not yet been visited
                        k += 1
                        if (visited(child) == 0) {
                            visited(child) = 1
                            stack.push ((i, k)) // store current eta (the father) to stack
                            i = child
                            k = Hstart(child) // start to search the child
                        } // if
                    } else {
                        list += i // put current eta to the FTRAN to-do list
                        if (stack.isEmpty) go = false // get another eta (the father) from the stack or quit
                        else { val ik = stack.pop (); i = ik._1; k = ik._2 }
                    } // if
                } // while
            } // if
        } // for
    } // dfs_search

} // Ftran object
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `SimplexFT` class solves Linear Programming (LP) problems using the Forrest-Tomlin
 *  (FT) Simplex Algorithm.  Given a constraint matrix 'a', constant vector 'b'
 *  and cost vector 'c', find values for the solution/decision vector 'x' that
 *  minimize the objective function 'f(x)', while satisfying all of the constraints,
 *  i.e.,
 *
 *  minimize    f(x) = c x
 *  subject to  a x <= b, x >= 0
 *
 *  The FT Simplex Algorithm performs LU Factorization/Decomposition of the
 *  basis-matrix ('ba' = 'B') rather than computing inverses ('b_inv').  It has
 *  benefits over the (Revised) Simplex Algorithm (less run-time, less memory,
 *  and much reduced chance of round off errors).
 *
 *  NOTE(review): several members are tagged FIX below -- the adjusted cost c_
 *  is currently NOT updated on pivots, so results should be cross-checked
 *  against `CheckLP` until that is completed.
 *
 *  @param a    the constraint matrix
 *  @param b    the constant/limit vector
 *  @param c    the cost/revenue vector
 *  @param x_B  the initial basis (set of indices where x_i is in the basis)
 */
class SimplexFT (a: MatriD, b: VectoD, c: VectoD, var x_B: Array [Int] = null)
      extends MinimizerLP
{
    private val DEBUG    = false // debug flag
    private val CHECK    = true // CHECK mode => check feasibility for each pivot
    private val M        = a.dim1 // number of constraints (rows in a)
    private val N        = a.dim2 // number of original variables (columns in a)
    private val MAX_ITER = 200 * N // maximum number of iterations

    // sanity-check dimensions before any linear algebra is attempted
    if (b.dim != M) flaw ("constructor", "b.dim = " + b.dim + " != " + M)
    if (c.dim != N) flaw ("constructor", "c.dim = " + c.dim + " != " + N)

    if (x_B == null) x_B = setBasis ()                // default to the slack-variable basis

    private val ba: MatriD = a.selectCols (x_B) // basis-matrix (selected columns from matrix-a)
    private val lu = ba.lud // perform an LU Decomposition on the basis-matrix
//  private var l_inv = lu._1.inverse // L-inverted
//  private var u_inv = lu._2.inverse // U-inverted (b_inv = u_inv * l_inv)
    private val c_B = c.select (x_B) // cost for basic variables
//  private val c_ = c_B * (u_inv * l_inv) // adjusted cost via inverse
    private val c_ : VectoD = c_B // adjusted cost via back-substitution - FIX
//  private val b_ = (u_inv * l_inv) * b // adjusted constants via inverse
    private val b_ = ba.solve (lu, b) // adjusted constants via back-substitution
    private var u: VectoD = null // vector used for leaving
    private var z: VectoD = null // vector used for entering

    val checker = new CheckLP (a, b, c)               // verifies feasibility/optimality of solutions

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** There are M+N variables, N decision and M slack variables, of which,
     *  for each iteration, M are chosen for a Basic Feasible Solution (BFS).
     *  The the variables not in the basis are set to zero.  Setting j to N
     *  will start with the slack variables in the basis (only works if b >= 0).
     *  @param j  the offset to start the basis
     *  @param l  the size of the basis
     */
    def setBasis (j: Int = N-M, l: Int = M): Array [Int] =
    {
        val idx = Array.ofDim [Int] (l)
        for (i <- 0 until l) idx(i) = i + j           // consecutive indices j, j+1, ..., j+l-1
        idx
    } // setBasis

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Find the best variable x_l to enter the basis.  Use Dantiz's Rule: index of
     *  max positive (cycling possible) z value.  Return -1 to indicate no such column.
     */
    def entering (): Int =
    {
        z = c_ *: a - c                               // reduced costs (c_ is not yet re-priced, see FIX above)
        z.argmaxPos ()
    } // entering

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Find the best variable x_k to leave the basis given that x_l is entering.
     *  Determine the index of the leaving variable corresponding to ROW k using
     *  the Min-Ratio Rule.  Return -1 to indicate no such row.
     *  @param l  the variable chosen to enter the basis
     */
    def leaving (l: Int): Int =
    {
//      u = (u_inv * l_inv) * a.col(l)
        u = ba.solve (lu._1, lu._2, a.col(l))         // update column via two triangular solves
        if (unbounded (u)) return -1
        var k = 0
        var r_min = Double.PositiveInfinity
        // Min-Ratio Rule over rows with a positive update component
        for (i <- 0 until M if u(i) > 0) {
            val r = b_(i) / u(i)
            if (r < r_min) { r_min = r; k = i}
        } // for
        k
    } // leaving

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Check if u <= 0., the solution is unbounded.
     *  @param u  the vector for leaving
     */
    def unbounded (u: VectoD): Boolean =
    {
        for (i <- 0 until u.dim if u(i) > 0.0) return false
        flaw ("unbounded", "the solution is UNBOUNDED")
        true
    } // unbounded

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Pivot by replacing 'x_k' with 'x_l' in the basis.  Update 'b_inv' (actually 'lu'),
     *  'b_' and 'c_'.
     *  NOTE(review): the FIX-tagged updates of the factorization and of c_ are
     *  not yet implemented; only the basis list and b_ are updated here.
     *  @param k  the leaving variable
     *  @param l  the entering variable
     */
    def pivot (k: Int, l: Int)
    {
        println ("pivot: entering = " + l + " leaving = " + k)
        x_B(k) = l // update basis (l replaces k)
//      b_inv(k) /= u(k) // FIX
        b_(k) /= u(k)
        for (i <- 0 until M if i != k) {
//          b_inv(i) -= b_inv(k) * u(i) // FIX
            b_ (i) -= b_(k) * u(i)
        } // for
//      c_ -= b_inv(k) * z(l) // FIX
    } // pivot

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Solve a Linear Programming (LP) problem using the FT Simplex Algorithm.
     *  Iteratively pivot until there an optimal solution is found or it is
     *  determined that the solution is unbounded.  Return the optimal vector 'x'.
     */
    def solve (): VectoD =
    {
        if (DEBUG) showTableau (0) // for iter = 0
        var k = -1 // the leaving variable (row)
        var l = -1 // the entering variable (column)

        breakable { for (it <- 1 to MAX_ITER) {
            l = entering (); if (l == -1) break // -1 => optimal solution found
            k = leaving (l); if (k == -1) break // -1 => solution is unbounded
            pivot (k, l) // pivot: k leaves and l enters
            if (CHECK && infeasible) break // quit if infeasible
            if (DEBUG) showTableau (it)
        }} // for

        primal // return the optimal vector x
    } // solve

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Determine whether the current solution 'x = primal' is still primal feasible.
     */
    def infeasible: Boolean =
    {
        if ( ! checker.isPrimalFeasible (primal)) {
            flaw ("infeasible", "solution x is no longer PRIMAL FEASIBLE")
            true
        } else {
            false
        } // if
    } // infeasible

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the primal (basis only) solution vector 'x'.
     */
    def primal: VectoD = ba.solve (lu, b) // (u_inv * l_inv) * b

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the full primal solution vector 'xx' (basis values scattered
     *  into their original positions, non-basic variables left at zero).
     *  @param x  the basis-only primal solution
     */
    def primalFull (x: VectoD): VectorD =
    {
        val xx = new VectorD (N)
        for (i <- 0 until x_B.length) xx(x_B(i)) = x(i)
        xx
    } // primalFull

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the dual solution vector 'y'.
     */
    def dual: VectoD = z.slice (N - M, N).asInstanceOf [VectoD] // FIX

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the optimal objective function value 'f(x) = c x'.
     *  @param x  the primal solution vector
     */
    def objF (x: VectoD): Double = c.select (x_B) dot x

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Show the current FT tableau.
     *  @param iter  the number of iterations do far
     */
    def showTableau (iter: Int)
    {
        println ("showTableau: --------------------------------------------------------")
        println (this)
        println ("showTableau: after " + iter + " iterations, with limit of " + MAX_ITER)
    } // showTableau

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert the current FT tableau 'basis', b_inv', b_', and c_' to a string.
     *  NOTE(review): recomputes b_inv by explicit inversion just for display.
     */
    override def toString: String =
    {
        val b_inv = a.selectCols (x_B).inverse // compute b_inv to show tableau
        var s = new StringBuilder ()
        for (i <- 0 until M) {
            s ++= "x" + x_B(i) + " | " + b_inv(i) + " | " + b_(i) + "\\n"
        } // for
        s ++= "c_ | " + c_ + "\\n"
        s.toString
    } // toString

} // SimplexFT class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `SimplexFT` object is used to test the `SimplexFT` class.
 *  Runs four LP test cases (three textbook examples plus one randomly
 *  generated problem) and prints the primal/dual solutions and objective.
 */
object SimplexFTTest extends App
{
    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test the FT Simplex Algorithm for solving Linear Programming problems.
     *  @param a    the constraint matrix
     *  @param b    the limit/RHS vector
     *  @param c    the cost vector
     *  @param x_B  the indices of the initial basis (currently unused: the
     *              default basis is exercised instead -- see the commented line)
     */
    def test (a: MatriD, b: VectoD, c: VectoD, x_B: Array [Int] = null)
    {
//      val lp = new SimplexFT (a, b, c, x_B) // test with user specified basis
        val lp = new SimplexFT (a, b, c) // test with default basis
        val x  = lp.solve () // the primal solution vector x
        val xx = lp.primalFull (x) // the full primal solution vector xx
        val y  = lp.dual // the dual solution vector y
        val f  = lp.objF (x) // the minimum value of the objective function

        println ("primal    x = " + x)
        println ("dual      y = " + y)
        println ("objF      f = " + f)
        println ("optimal?    = " + lp.check (xx, y, f))
    } // test

    import scalation.linalgebra.MatrixD

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test case 1: Initialize matrix 'a', vectors 'b' and 'c', and optionally
     *  the basis 'x_B'.  For FT Simplex, matrix 'a' must be augmented with
     *  an identity matrix and vector 'c' augmented with zeros.
     *-------------------------------------------------------------------------
     *  Minimize    z = -1x_0 - 2x_1 + 1x_2 - 1x_3 - 4x_4 + 2x_5
     *  Subject to       1x_0 + 1x_1 + 1x_2 + 1y_3 + 1y_4 + 1x_5 <= 6
     *                   2x_0 - 1x_1 - 2x_2 + 1y_3 + 0y_4 + 0x_5 <= 4
     *                   0x_0 + 0x_1 + 1x_2 + 1y_3 + 2y_4 + 1x_5 <= 4
     *  where z is the objective variable and x is the decision vector.
     *-------------------------------------------------------------------------
     *  Solution:  primal  x_1 = 4, x_7 = 8, x_4 = 2
     *             dual    y_1 = -2, y_2 = 0, y_3 = -1
     *             objF    f = -16
     *  i.e., x = (4, 8, 2), x_B = (1, 7, 4), y = (-2, 0, -1), f = -16
     *  @see Linear Programming and Network Flows, Example 5.1
     */
    def test1 ()
    {
        val a = new MatrixD ((3, 9), 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, // constraint matrix
                                     2.0, -1.0, -2.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0,
                                     0.0, 0.0, 1.0, 1.0, 2.0, 1.0, 0.0, 0.0, 1.0)
        val c = VectorD (-1.0, -2.0, 1.0, -1.0, -4.0, 2.0, 0.0, 0.0, 0.0) // cost vector
        val b = VectorD (6.0, 4.0, 4.0) // constant vector
        val x_B = Array (6, 7, 8) // starting basis (documents the slack basis; not passed below)
        test (a, b, c) // x_B is optional
    } // test1

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test case 2:
     *  Solution:  x = (2/3, 10/3, 0), x_B = (0, 1, 5), f = -22/3
     *  @see Linear Programming and Network Flows, Example 5.2
     */
    def test2 ()
    {
        val a = new MatrixD ((3, 6), 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, // constraint matrix
                                     -1.0, 2.0, -2.0, 0.0, 1.0, 0.0,
                                     2.0, 1.0, 0.0, 0.0, 0.0, 1.0)
        val c = VectorD (-1.0, -2.0, 1.0, 0.0, 0.0, 0.0) // cost vector
        val b = VectorD (4.0, 6.0, 5.0) // constant vector
        val x_B = Array (3, 4, 5) // starting basis (not passed; default basis used)
        test (a, b, c)
    } // test2

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test case 3:
     *  Solution:  x = (1/3, 0, 13/3), x_B = (0, 2, 4), f = -17
     *  @see Linear Programming and Network Flows, Example 3.9
     */
    def test3 ()
    {
        val a = new MatrixD ((3, 6), 1.0, 1.0, 2.0, 1.0, 0.0, 0.0, // constraint matrix
                                     1.0, 1.0, -1.0, 0.0, 1.0, 0.0,
                                     -1.0, 1.0, 1.0, 0.0, 0.0, 1.0)
        val c = VectorD (1.0, 1.0, -4.0, 0.0, 0.0, 0.0) // cost vector
        val b = VectorD (9.0, 2.0, 4.0) // constant vector
        val x_B = Array (3, 4, 5) // starting basis
        test (a, b, c, x_B)
    } // test3

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test case 4: randomly generated LP problem.
     *  Builds a 10x20 system (10 decision + 10 slack columns), with random
     *  non-negative coefficients, positive limits and negative costs.
     */
    def test4 ()
    {
        val rn = Randi (0, 8)
        val (m, n) = (10, 10)
        val a = new MatrixD (m, m+n)
        val b = new VectorD (m)
        val c = new VectorD (m+n)
        for (i <- 0 until m) {
            for (j <- 0 until n) a(i, j) = rn.igen
            for (j <- n until m+n) a(i, j) = if (j-n == i) 1.0 else 0.0 // identity block for slacks
        } // for
        for (i <- 0 until m) b(i) = 100.0 * (rn.igen + 1)
        for (j <- 0 until n) c(j) = -10.0 * (rn.igen + 1)
        test (a, b, c)
    } // test4

    println ("\\ntest1 ========================================================")
    test1 ()
    println ("\\ntest2 ========================================================")
    test2 ()
    println ("\\ntest3 ========================================================")
    test3 ()
    println ("\\ntest4 ========================================================")
    test4 ()

} // SimplexFTTest object
| NBKlepp/fda | scalation_1.3/scalation_modeling/src/main/scala/scalation/minima/SimplexFT.scala | Scala | mit | 20,667 |
package org.github.sguzman.scala.game.scalebra
import akka.actor.{Actor, ActorLogging, ActorSystem, Props}
import org.github.sguzman.scala.game.scalebra.actor.{Start, Stop}
import org.github.sguzman.scala.game.scalebra.util.log.L
import org.github.sguzman.scala.game.scalebra.mvc.controller.Input
import org.github.sguzman.scala.game.scalebra.mvc.view.View
/**
* @author Salvador Guzman <guzmansalv@gmail.com>
*
* SCALEBRA - A Snake Clone
*
* Scalebra is a portmanteau of Scala and Culebra which is Spanish for snake.
* This is a simple, basic clone of snake.
*
* For concurrency, this game uses Akka. I tried to set up each subsystem as
* it's own actor. So far, there are only 2 such systems; rendering and input.
*/
/** Actor-system bootstrap and program entry point for Scalebra.
 *  Creates the actor system, the supervisor, and the View/Input actors,
 *  then keeps the main thread alive while the actors do the work.
 */
object Scalebra {
  /** Akka actor system */
  val system = ActorSystem("Scalebra")
  /** Supervisor actor */
  val supervisor = system.actorOf(Props[Scalebra], "Root")
  /** View rendering actor */
  val viewAc = system.actorOf(Props[View], "View")
  /** Input handling actor */
  val inputAc = system.actorOf(Props[Input], "Input")

  /**
   * Driver program for entire game.  Sends Start to the supervisor and then
   * parks the main thread so the JVM does not exit while actors run.
   *
   * @param args Array[String] (unused)
   */
  def main(args: Array[String]) = {
    L.i("Init System", "Scalebra")
    supervisor ! Start()
    while (true) {
      //L.i("Looping", "Scalebra")
      // Fix: the original empty loop busy-spun and pegged a CPU core.
      // Sleeping keeps the thread alive with negligible cost.
      Thread.sleep(100)
    }
  }
}
/**
 * Root supervisor actor for the Scalebra actor system.  It reacts to the two
 * lifecycle messages (Start / Stop) by logging the event and relaying the
 * corresponding command to the View actor.  Nothing else is supervised yet.
 */
class Scalebra extends Actor with ActorLogging {
  override def receive: Receive = {
    case _: Start => relay(Start(), "Start object received... starting View actor")
    case _: Stop  => relay(Stop(), "Stop object received... shutting it down. Shutting it all down!!!")
  }

  /** Log the lifecycle event, then forward the message to the View actor. */
  private def relay(msg: Any, note: String): Unit = {
    L.i(note, "Scalebra")
    Scalebra.viewAc ! msg
  }
}
} | sguzman/Scalebra | src/main/scala/org/github/sguzman/scala/game/scalebra/Scalebra.scala | Scala | mit | 1,871 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.iota.fey.performer
import akka.actor.{ActorSystem, Props}
import org.apache.iota.fey.FeyGenericActor.PROCESS
import scala.concurrent.duration._
/** Manual smoke-test harness for the ZMQ performer actors.
 *  All scenarios are currently commented out, so running this produces no
 *  output; uncomment the publisher or subscriber block below to exercise
 *  the corresponding actor by hand.
 */
object Application extends App {

  //print("Starting")
  //implicit val system = ActorSystem("ZMQ-RUN")
  //val publish = system.actorOf(Props(classOf[ZMQPublisher], Map.empty,1.minutes, Map.empty, 1.seconds,"","",false ), name = "PUBLISH")
  //publish ! PROCESS("Publish it")

  //  val subscribe = system.actorOf(Props(classOf[ZMQPublisher], Map.empty,1.minutes, Map.empty, 1.seconds,"","",false ), name = "SUBSCRIBE")
  //
  //  subscribe ! PROCESS("Subscribe to it")

}
| barbaragomes/incubator-iota | performers/zmq/src/main/scala/org/apache/iota/fey/performer/Application.scala | Scala | apache-2.0 | 1,443 |
package com.github.tanacasino.btree
import org.scalatest._
/** Unit tests for `BTree`: verifies `size`, `max` and construction from a
 *  sorted list against hand-built trees of increasing depth.
 */
class BTreeSpec extends FunSpec with Matchers {

  describe("BTree") {
    describe("Create BTree with Leaf and Branch combination") {
      // Fixtures: a lone leaf, a full tree of 3 nodes, and a full tree of 7 nodes.
      val oneNode = BTree(Leaf(1))
      val threeNodes = BTree(Branch(Leaf(1), 2, Leaf(3)))
      val sevenNodes = BTree(Branch(Branch(Leaf(1), 2, Leaf(3)), 4, Branch(Leaf(5), 6, Leaf(7))))

      // Echo the fixtures so a failing run shows what was built.
      println(oneNode)
      println(threeNodes)
      println(sevenNodes)

      it("should compute size of BTree") {
        oneNode.size should be (1)
        threeNodes.size should be (3)
        sevenNodes.size should be (7)
      }

      it("should compute max value of BTree") {
        oneNode.max should be (1)
        threeNodes.max should be (3)
        sevenNodes.max should be (7)
      }

      it("should create BTree from list") {
        BTree(List(1)) should be (oneNode)
        BTree(List(1, 2, 3)) should be (threeNodes)
        BTree(List(1, 2, 3, 4, 5, 6, 7)) should be (sevenNodes)
      }
    }
  }
}
| tanacasino/learning-scala | src/test/scala/com/github/tanacasino/btree/BTreeSpec.scala | Scala | mit | 972 |
// IDE type-inference test data (issue SCL-5733): tuples of B and C inside Seq
// must widen to the common parent A, while the lone tuple keeps B.  The
// /*start*/../*end*/ markers and the trailing comment are consumed by the
// test harness -- do not reformat them.
object SCL5733 {
  class A
  class B extends A
  class C extends A

  val b = new B
  val c = new C

  val seq1 = Seq((b, 1), (c, 1))
  val seq2 = Seq(b -> 1, c -> 1)
  val tuple = b -> 1
  /*start*/(seq1, seq2, tuple)/*end*/
/*
(Seq[(SCL5733.A, Int)], Seq[(SCL5733.A, Int)], (SCL5733.B, Int))
[Scala_2_13](Seq[(SCL5733.A, Int)], Seq[(SCL5733.A, Int)], (SCL5733.B, Int))
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.scala.typeutils
import org.apache.flink.annotation.{Public, PublicEvolving}
import org.apache.flink.api.common.ExecutionConfig
import org.apache.flink.api.common.typeinfo.{AtomicType, TypeInformation}
import org.apache.flink.api.common.typeutils.{TypeComparator, TypeSerializer}
import scala.collection.JavaConverters._
/**
 * TypeInformation for [[Option]].
 *
 * Delegates most properties to the element's type information.  An Option can
 * only be used as a key when its element type can (i.e. is an [[AtomicType]]
 * with a comparator).
 *
 * @param elemTypeInfo type information of the element type `A`; may be `null`
 *        when the element type is unknown (e.g. for `DataSet[None]`)
 */
@Public
class OptionTypeInfo[A, T <: Option[A]](private val elemTypeInfo: TypeInformation[A])
  extends TypeInformation[T] with AtomicType[T] {

  @PublicEvolving
  override def isBasicType: Boolean = false

  @PublicEvolving
  override def isTupleType: Boolean = false

  // An Option is a valid key exactly when its element is.
  @PublicEvolving
  override def isKeyType: Boolean = elemTypeInfo.isKeyType

  @PublicEvolving
  override def getTotalFields: Int = 1

  @PublicEvolving
  override def getArity: Int = 1

  @PublicEvolving
  override def getTypeClass = classOf[Option[_]].asInstanceOf[Class[T]]

  @PublicEvolving
  override def getGenericParameters = Map[String, TypeInformation[_]]("A" -> elemTypeInfo).asJava

  /** Build a comparator by wrapping the element's comparator; only valid for key types. */
  @PublicEvolving
  override def createComparator(ascending: Boolean, executionConfig: ExecutionConfig) = {
    if (isKeyType) {
      val elemComparator = elemTypeInfo.asInstanceOf[AtomicType[A]]
        .createComparator(ascending, executionConfig)
      new OptionTypeComparator[A](ascending, elemComparator).asInstanceOf[TypeComparator[T]]
    } else {
      // Fix: the original message was truncated ("Element type that doesn't support ").
      throw new UnsupportedOperationException(
        s"Element type $elemTypeInfo does not support comparison, " +
          "so Option of it cannot be used as a key type.")
    }
  }

  @PublicEvolving
  def createSerializer(executionConfig: ExecutionConfig): TypeSerializer[T] = {
    if (elemTypeInfo == null) {
      // this happens when the type of a DataSet is None, i.e. DataSet[None]
      new OptionSerializer(new NothingSerializer).asInstanceOf[TypeSerializer[T]]
    } else {
      new OptionSerializer(elemTypeInfo.createSerializer(executionConfig))
        .asInstanceOf[TypeSerializer[T]]
    }
  }

  override def toString = s"Option[$elemTypeInfo]"

  // Equality is based solely on the element type information.
  override def equals(obj: Any): Boolean = {
    obj match {
      case optTpe: OptionTypeInfo[_, _] =>
        optTpe.canEqual(this) && elemTypeInfo.equals(optTpe.elemTypeInfo)
      case _ => false
    }
  }

  def canEqual(obj: Any): Boolean = {
    obj.isInstanceOf[OptionTypeInfo[_, _]]
  }

  override def hashCode: Int = {
    elemTypeInfo.hashCode()
  }
}
| WangTaoTheTonic/flink | flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/OptionTypeInfo.scala | Scala | apache-2.0 | 3,157 |
package io.github.suitougreentea.VariousMinos.game
import scala.beans.BeanProperty
import io.github.suitougreentea.VariousMinos.Phase
import io.github.suitougreentea.VariousMinos.PhaseExecuter
import org.newdawn.slick.GameContainer
import org.newdawn.slick.state.StateBasedGame
import org.newdawn.slick.state.BasicGameState
import io.github.suitougreentea.VariousMinos.Field
import io.github.suitougreentea.VariousMinos.CommonRenderer
import io.github.suitougreentea.VariousMinos.Resource
import org.newdawn.slick.Graphics
import org.newdawn.slick.Input
import org.newdawn.slick.Color
import io.github.suitougreentea.VariousMinos.Position
import io.github.suitougreentea.VariousMinos.MinoList
import io.github.suitougreentea.VariousMinos.Block
import io.github.suitougreentea.VariousMinos.Mino
import scala.collection.mutable.HashSet
import io.github.suitougreentea.VariousMinos.Buttons
import io.github.suitougreentea.VariousMinos.MinoGeneratorBombInfinite
import io.github.suitougreentea.VariousMinos.rule.Rule
import io.github.suitougreentea.VariousMinos.CommonRendererBomb
import io.github.suitougreentea.util.TextAlign
class GameBomb(val wrapper: GameWrapper, val handler: HandlerBomb, val rule: Rule) extends Game with CommonRendererBomb {
  val _this = this // stable self-reference handed to the handler callbacks
  var field = new Field(rule) // the playfield (10 columns wide)
  // Bomb blast half-sizes (width, height) indexed by (filled lines + chain - 2); capped at (8, 8)
  val bombSize = Array ((3, 0), (3, 1), (3, 2), (3, 3), (4, 4), (4, 4), (5, 5), (5, 5), (6, 6), (6, 6), (7, 7), (7, 7), (8, 8), (8, 8), (8, 8), (8, 8), (8, 8), (8, 8), (8, 8), (8, 8), (8, 8), (8, 8))
  /** Phase -1 (Ready): pre-game countdown.  Assembles any pre-placed 2x2
   *  clusters of small bombs into big bombs, lets the player browse the
   *  next-mino list, and enters the moving phase when A is pressed.
   */
  val phaseReady : Phase = new Phase {
    val id = -1
    var beforeTime = 10 // frames spent before the working stage
    var afterTime = 30  // frames spent after the working stage

    override def handleBeforeBefore(executer: PhaseExecuter){
      searchBigBomb()
    }
    override def procedureBefore(executer: PhaseExecuter){
      // Nothing to assemble -> skip straight to the working stage
      if(makingBigBombSet.size == 0) executer.moveToWorking()
      else super.procedureBefore(executer)
    }
    override def handleAfterBefore(executer: PhaseExecuter){
      makeBigBomb()
    }
    override def handleAfterAfter(executer: PhaseExecuter) {
      handler.start(_this)
    }
    def procedureWorking(executer: PhaseExecuter){
      val c = wrapper.control
      // A starts the game proper
      if(c.pressed(Buttons.A)) {
        nextMinoDisplayCursor = 0
        executer.enterPhase(phaseMoving, true)
      }
      // Display type 1 is a scrollable list of upcoming minos
      if(nextMinoDisplayType == 1) {
        if(c.pressed(Buttons.UP) && nextMinoDisplayCursor > 0) nextMinoDisplayCursor -= 1
        if(c.pressed(Buttons.DOWN) && nextMinoDisplayCursor + 6 < field.generator.size) nextMinoDisplayCursor += 1
      }
    }
    override def procedureAfter(executer: PhaseExecuter) {
      super.procedureAfter(executer)
      val c = wrapper.control
      // Pre-charge the initial-move (auto-repeat) timer while counting down
      if(c.down(Buttons.LEFT)) {
        moveDirection = -1
        if(firstMoveTimer != firstMoveTimerMax) firstMoveTimer += 1
      }
      if(c.down(Buttons.RIGHT)) {
        moveDirection = 1
        if(firstMoveTimer != firstMoveTimerMax) firstMoveTimer += 1
      }
    }
  }
  /** Phase 0 (Moving): spawn the next mino and let the player steer it until
   *  it locks down, then hand over to the counting phase.  Handles initial
   *  hold/move/rotate on the spawn frame, auto-repeat movement, soft/hard
   *  drop, gravity, and the lockdown timers.
   */
  val phaseMoving : Phase = new Phase {
    val id = 0
    var beforeTime = 10
    var afterTime = 0

    // Spawn frame: reset per-piece state, spawn (or report exhaustion), and
    // apply initial hold / movement / rotation from buttons already held.
    override def handleAfterBefore(executer: PhaseExecuter) {
      val c = wrapper.control
      handler.beforeNewMino(_this)
      fallCounter = 0
      softDropCounter = 0
      lockdownTimer = 0
      forceLockdownTimer = 0
      lastLines = field.filledLines.length
      if(field.generator.size == 0 && !field.generator.infinite){
        handler.noNewMino(_this) // generator exhausted (finite supply)
      } else {
        field.newMino()
        // "all bomb" mode: respawn the piece made entirely of bomb blocks (id 64)
        if(nextAllBombFlag) {
          field.currentMino = new Mino(field.currentMino.minoId, field.rule.spawn.getRotation(field.currentMino.minoId), new Block(64))
        }
        handler.newMino(_this)
      }
      // Initial hold: C held (but not pressed this frame) at spawn
      if(c.down(Buttons.C) && !c.pressed(Buttons.C) && rule.enableInitialHold){
        if(field.hold()) {
          if(nextAllBombFlag) {
            field.currentMino = new Mino(field.currentMino.minoId, field.rule.spawn.getRotation(field.currentMino.minoId), new Block(64))
          }
        }
      }
      // Initial movement / rotation from held directions and buttons
      if(c.down(Buttons.LEFT) && !c.pressed(Buttons.LEFT) && rule.enableInitialMove){
        field.moveMinoLeft()
      }
      if(c.down(Buttons.RIGHT) && !c.pressed(Buttons.RIGHT) && rule.enableInitialMove){
        field.moveMinoRight()
      }
      if(c.down(Buttons.A) && !c.pressed(Buttons.A) && rule.enableInitialRotate){
        field.rotateMinoCCW()
      }
      if(c.down(Buttons.B) && !c.pressed(Buttons.B) && rule.enableInitialRotate){
        field.rotateMinoCW()
      }
      // Spawn overlaps existing blocks -> game over condition
      if(field.checkHit()) handler.stuck(_this)
      // Apply the spawn frame's share of gravity
      fallCounter += fallCounterDelta
      while(fallCounter >= 1) {
        field.moveMinoDown()
        fallCounter -= 1
      }
    }
    override def procedureBefore(executer: PhaseExecuter) {
      super.procedureBefore(executer)
      val c = wrapper.control
      // Keep charging the initial-move timer during the entry delay
      if(c.down(Buttons.LEFT)) {
        moveDirection = -1
        if(firstMoveTimer != firstMoveTimerMax) firstMoveTimer += 1
      }
      if(c.down(Buttons.RIGHT)) {
        moveDirection = 1
        if(firstMoveTimer != firstMoveTimerMax) firstMoveTimer += 1
      }
    }
    def procedureWorking(executer: PhaseExecuter) {
      val c = wrapper.control
      // Rotation; optionally resets the lockdown timer (rule-controlled)
      if(c.pressed(Buttons.A)){
        if(field.rotateMinoCCW() && rule.resetByRotating) lockdownTimer = 0
      }
      if(c.pressed(Buttons.B)){
        if(field.rotateMinoCW() && rule.resetByRotating) lockdownTimer = 0
      }
      // Tapping a direction moves once and restarts the auto-repeat charge
      if(c.pressed(Buttons.LEFT)){
        if(field.moveMinoLeft() && rule.resetByMoving) lockdownTimer = 0
        moveDirection = -1
        firstMoveTimer = 0
        moveCounter = 0
      }
      if(c.pressed(Buttons.RIGHT)){
        if(field.moveMinoRight() && rule.resetByMoving) lockdownTimer = 0
        moveDirection = 1
        firstMoveTimer = 0
        moveCounter = 0
      }
      // Holding a direction: once the charge completes, auto-repeat movement
      if(c.down(Buttons.LEFT)) {
        if(moveDirection == -1) {
          if(firstMoveTimer == firstMoveTimerMax){
            moveCounter += moveCounterDelta
            while(moveCounter >= 1) {
              if(field.moveMinoLeft() && rule.resetByMoving) lockdownTimer = 0
              moveCounter -= 1
            }
          } else {
            firstMoveTimer += 1
          }
        }
      }
      if(c.down(Buttons.RIGHT)) {
        if(moveDirection == 1) {
          if(firstMoveTimer == firstMoveTimerMax){
            moveCounter += moveCounterDelta
            while(moveCounter >= 1) {
              if(field.moveMinoRight() && rule.resetByMoving) lockdownTimer = 0
              moveCounter -= 1
            }
          } else {
            firstMoveTimer += 1
          }
        }
      }
      // Soft drop; with downKeyLock the piece locks on contact with the ghost
      if(c.down(Buttons.DOWN)){
        softDropCounter += softDropCounterDelta
        while(softDropCounter >= 1) {
          if(field.currentMinoY == field.ghostY && rule.downKeyLock){
            field.hardDrop()
            executer.enterPhase(phaseCounting, true)
          } else {
            if(field.moveMinoDown() && rule.resetByFalling) lockdownTimer = 0
          }
          softDropCounter -= 1
        }
      }
      // Up key: hard drop (locking or sonic-drop, rule-controlled)
      if(c.pressed(Buttons.UP)) {
        if(rule.enableUpKey){
          if(rule.upKeyLock){
            field.hardDrop()
            executer.enterPhase(phaseCounting, true)
          } else {
            field.currentMinoY = field.ghostY
          }
        }
      }
      // Hold swap mid-flight resets the per-piece counters
      if(c.pressed(Buttons.C)){
        if(field.hold()) {
          if(nextAllBombFlag) {
            field.currentMino = new Mino(field.currentMino.minoId, field.rule.spawn.getRotation(field.currentMino.minoId), new Block(64))
          }
        }
        fallCounter = 0
        softDropCounter = 0
        lockdownTimer = 0
      }
      // Gravity for this frame
      fallCounter += fallCounterDelta
      while(fallCounter >= 1) {
        if(field.moveMinoDown() && rule.resetByFalling) lockdownTimer = 0
        fallCounter -= 1
      }
      // Grounded: both timers tick; forceLockdownTimer cannot be reset by input
      if(field.currentMinoY == field.ghostY) {
        if(lockdownTimer == lockdownTimerMax || forceLockdownTimer == forceLockdownTimerMax) {
          field.hardDrop()
          executer.enterPhase(phaseCounting, true)
        }
        lockdownTimer += 1
        forceLockdownTimer += 1
      }
    }
    override def handleBeforeAfter(executer: PhaseExecuter) {
      nextAllBombFlag = false // the all-bomb effect applies to a single piece
    }
  }
  /** Phase 1 (Counting): after a lock, decide what the filled lines trigger.
   *  Lines containing a bomb ignite an explosion wave (chain counter goes up);
   *  otherwise the handler is told and play proceeds to the falling phase.
   */
  val phaseCounting : Phase = new Phase {
    val id = 1
    var beforeTime = 0
    var afterTime = 0

    override def procedureBefore(executer: PhaseExecuter) {
      // No new filled lines since the piece spawned -> nothing to count
      if(field.filledLines.length == lastLines) executer.enterPhase(phaseFalling, false)
      else super.procedureBefore(executer)
    }
    def procedureWorking(executer: PhaseExecuter) {
      if(existBombLine){
        chain += 1
        handler.fillLine(_this, field.filledLines.length + chain - 1, chain, false)
        executer.enterPhase(phaseErasing, true)
      } else {
        handler.fillLine(_this, field.filledLines.length + chain, chain, true)
        executer.enterPhase(phaseFalling, true)
      }
    }
  }
  // Bombs ignited in the current wave as (x, y, isBig) triples
  var bombList: HashSet[(Int, Int, Boolean)] = HashSet.empty
  var bombTimer = 0        // frame counter within one explosion wave
  var bombTimerMiddle = 8  // frame at which the blast actually clears blocks
  var bombTimerMax = 30    // total frames per explosion wave
  /** Phase 2 (Erasing): run explosion waves.  Bombs sitting on filled lines
   *  are collected, their blasts clear surrounding blocks, and any bombs
   *  caught in a blast are queued for the next wave until no new bombs fire.
   *  Block-id conventions visible here: 64 small bomb; 65-68 the four cells
   *  of a big bomb; 70-73 / 76-79 multi-hit blocks (id decremented per hit);
   *  74 / 80 / 86 blast-immune blocks.
   */
  val phaseErasing : Phase = new Phase {
    val id = 2
    var beforeTime = 20
    var afterTime = 0
    var bombListNew: HashSet[(Int, Int, Boolean)] = HashSet.empty  // bombs ignited by the current wave
    var erasedBlocksList = IndexedSeq.fill(field.height)(Array.fill(10)(false)) // cells already hit this wave
    var erasedBlocks = 0                                           // total cleared blocks, reported to the handler

    // Seed the first wave with every bomb sitting on a filled line.
    override def handleBeforeBefore(executer: PhaseExecuter) {
      bombList = HashSet.empty
      lastLines = field.filledLines.length
      for(iy <- field.filledLines; ix <- 0 until 10){
        field(ix, iy).id match {
          case 64 => bombList += Tuple3(ix, iy, false)
          // big-bomb cells: register the big bomb once, keyed by one corner
          case 65 if(!bombList.contains(Tuple3(ix, iy - 1, true))) => bombList += Tuple3(ix, iy - 1, true)
          case 67 => bombList += Tuple3(ix, iy, true)
          case _ =>
        }
      }
    }
    def procedureWorking(executer: PhaseExecuter) {
      // Mid-wave frame: apply every blast in the current bombList
      if(bombTimer == bombTimerMiddle){
        for(e <- bombList) {
          // Blast radius grows with lines cleared plus chain depth
          var (width, height) = bombSize(lastLines + chain - 1 - 1)
          var (x, y, big) = e
          var yr, xr: Range = null
          if(big) {
            // big bombs always blast a fixed 10x10 area around the corner cell
            yr = (y - 4) to (y + 5)
            xr = (x - 4) to (x + 5)
          }else{
            yr = (y - height) to (y + height)
            xr = (x - width) to (x + width)
          }
          for(iy <- yr; ix <- xr) {
            if(0 <= ix && ix < 10 && 0 <= iy && iy < field.height) {
              var id = field(ix, iy).id
              // A bomb caught in the blast joins the next wave instead of clearing
              if(id == 64 && !bombList.contains(Tuple3(ix, iy, false))){
                bombListNew += Tuple3(ix, iy, false)
              } else if(id == 65 && !bombList.contains(Tuple3(ix, iy - 1, true))) {
                bombListNew += Tuple3(ix, iy - 1, true)
              } else if(id == 66 && !bombList.contains(Tuple3(ix - 1, iy - 1, true))) {
                bombListNew += Tuple3(ix - 1, iy - 1, true)
              } else if(id == 67 && !bombList.contains(Tuple3(ix, iy, true))) {
                bombListNew += Tuple3(ix, iy, true)
              } else if(id == 68 && !bombList.contains(Tuple3(ix - 1, iy, true))) {
                bombListNew += Tuple3(ix - 1, iy, true)
              } else if(((70 <= id && id <= 73) || (76 <= id && id <= 79)) && !erasedBlocksList(iy)(ix)){
                // multi-hit block: downgrade by one per wave rather than clearing
                field(ix, iy).id -= 1
                erasedBlocks += 1
                erasedBlocksList(iy)(ix) = true
              } else if(id == 74 || id == 80 || id == 86){
                // blast-immune block: untouched
              } else {
                if(field(ix, iy).id > 0 && !erasedBlocksList(iy)(ix)){
                  field(ix, iy) = new Block(0)
                  erasedBlocks += 1
                  erasedBlocksList(iy)(ix) = true
                }
              }
            }
          }
        }
      }
      // End of wave: either chain into the next wave or leave the phase
      if(bombTimer == bombTimerMax) {
        erasedBlocksList = IndexedSeq.fill(field.height)(Array.fill(10)(false))
        bombTimer = 0
        if(bombListNew.size == 0){
          executer.enterPhase(phaseFalling, true)
        } else {
          bombList = bombListNew
          bombListNew = HashSet.empty
        }
      }
      bombTimer += 1
    }
    override def handleBeforeAfter(executer: PhaseExecuter){
      handler.afterBomb(_this, erasedBlocks)
      erasedBlocks = 0
    }
  }
  var fallingPieceCounter = 0f       // fractional accumulator for falling-piece gravity
  var fallingPieceCounterDelta = 1f  // cells fallen per frame (1 = one row per frame)
  /** Phase 3 (Falling): drop the disconnected pieces left behind by line
   *  clears and explosions one row at a time until everything has settled,
   *  then either re-enter counting (new bomb lines formed) or finish the
   *  chain via the big-bomb assembly phase.
   */
  val phaseFalling : Phase = new Phase {
    val id = 3
    var beforeTime = 0
    var afterTime = 0

    override def handleBeforeBefore(executer: PhaseExecuter) {
      field.makeFallingPieces()
    }
    override def procedureBefore(executer: PhaseExecuter){
      // Nothing is airborne -> go straight to big-bomb assembly
      if(field.fallingPieceSet.size == 0) {
        executer.enterPhase(phaseMakingBigBomb, false)
      }
      else super.procedureBefore(executer)
    }
    def procedureWorking(executer: PhaseExecuter) {
      fallingPieceCounter += fallingPieceCounterDelta
      while (fallingPieceCounter >= 1){
        for(e <- field.fallingPieceSet){
          if(e.containsPersistentBlock){
            // TODO: I really don't know why this bug occurs; this statement is to prevent black blocks from falling.
            // TODO: If this bug is fixed, remove this statement
            field.setFallingPiece(e)
            field.fallingPieceSet -= e
          } else {
            // Move down one row; on collision, back up and freeze the piece
            e.y -= 1
            if(field.checkHitFallingPiece(piece = e)) {
              e.y += 1
              field.setFallingPiece(e)
              field.fallingPieceSet -= e
            }
          }
        }
        // Independently-falling single blocks follow the same down/settle rule
        for(e <- field.fallingPieceSetIndependent){
          e.y -= 1
          if(field.checkHitFallingPiece(piece = e)) {
            e.y += 1
            field.setFallingPiece(e)
            field.fallingPieceSetIndependent -= e
          }
        }
        fallingPieceCounter -= 1
      }
      // Everything settled: continue the chain or wrap up
      if(field.fallingPieceSet.size == 0 && field.fallingPieceSetIndependent.size == 0){
        lastLines = 0
        if(field.filledLines.length > 0 && existBombLine) executer.enterPhase(phaseCounting, true)
        else {
          chain = 0
          executer.enterPhase(phaseMakingBigBomb, true)
        }
      }
    }
  }
var makingBigBombSet: HashSet[(Int, Int)] = HashSet.empty
  // Scans the field for 2x2 squares of small bombs (block id 64) and records each
  // square's lower-left origin in makingBigBombSet so makeBigBomb() can upgrade
  // them to big bombs (ids 65-68).
  def searchBigBomb() {
    // Upper rows take priority (scanned first, from the top of the field down).
    for(iy <- field.height - 1 to 0 by -1; ix <- 0 until 10){
      if(field(ix, iy).id == 64){
        // Skip cells already covered by a previously registered big-bomb square.
        if(!alongToBigBombSet(ix, iy)){
          // NOTE(review): reads field(ix + 1, iy) at ix == 9 and field(ix, iy - 1)
          // at iy == 0; assumes Field.apply tolerates out-of-range coordinates - confirm.
          if(field(ix + 1, iy).id == 64 && !alongToBigBombSet(ix + 1, iy) &&
            field(ix, iy - 1).id == 64 && !alongToBigBombSet(ix, iy - 1) &&
            field(ix + 1, iy - 1).id == 64 && !alongToBigBombSet(ix + 1, iy - 1)){
            makingBigBombSet += Tuple2(ix, iy)
          }
        }
      }
    }
  }
def alongToBigBombSet(ix: Int, iy: Int) = makingBigBombSet.contains(Tuple2(ix - 1, iy)) || makingBigBombSet.contains(Tuple2(ix - 1, iy)) || makingBigBombSet.contains(Tuple2(ix, iy + 1)) || makingBigBombSet.contains(Tuple2(ix - 1, iy + 1))
def makeBigBomb(): Int = {
var i = 0
for(e <- makingBigBombSet){
var (x, y) = e
field(x, y) = new Block(65)
field(x + 1, y) = new Block(66)
field(x, y - 1) = new Block(67)
field(x + 1, y - 1) = new Block(68)
i += 1
}
makingBigBombSet = HashSet.empty
i
}
  // Phase 4: big-bomb formation. Looks for 2x2 small-bomb squares to merge; also
  // detects the "all clear" condition (no erasable blocks left) and handles the
  // pending field-erase flag before handing control back to the moving phase.
  val phaseMakingBigBomb : Phase = new Phase {
    val id = 4
    var beforeTime = 10
    var afterTime = 10
    override def handleBeforeBefore(executer: PhaseExecuter){
      chain = 0
      // flag == true when at least one erasable, non-empty block remains on the field.
      var flag = false
      for(iy <- 0 until field.height; ix <- 0 until 10){
        if(field(ix, iy).id != 0 && !field(ix, iy).unerasable) flag = true
      }
      if(flag){
        searchBigBomb()
      } else {
        // Field is clear of erasable blocks: notify the handler (all-clear bonus).
        handler.allClear(_this)
      }
    }
    override def procedureBefore(executer: PhaseExecuter){
      if(allEraseFlag){
        // One-shot flag: consume it and erase the whole field instead.
        allEraseFlag = false
        executer.enterPhase(phaseErasingField, false)
      }
      else if(makingBigBombSet.size == 0) executer.enterPhase(phaseMoving, false)
      else super.procedureBefore(executer)
    }
    def procedureWorking(executer: PhaseExecuter) {
      handler.makeBigBomb(_this, makeBigBomb())
      executer.enterPhase(phaseMoving, true)
    }
  }
val phaseErasingField: Phase = new Phase {
val id = 5
var beforeTime = 0
var afterTime = 180
def procedureWorking(executer: PhaseExecuter) {
executer.enterPhase(phaseMoving, true)
}
override def procedureAfter(executer: PhaseExecuter) {
if(executer.timer % 5 == 0) {
for(i <- 0 until 10){
field(i, executer.timer / 5) = new Block(0)
}
}
super.procedureAfter(executer)
}
}
def existBombLine: Boolean = {
for(iy <- field.filledLines; ix <- 0 until 10){
field(ix, iy).id match {
case 64 | 65 | 66 | 67 | 68 => return true
case _ =>
}
}
return false
}
  // Per-game mutable state. The float counter/delta pairs use the same
  // fractional-accumulator pattern as fallingPieceCounter above.
  private var fallCounter = 0f
  var fallCounterDelta = 0f
  private var softDropCounter = 0f
  var softDropCounterDelta = 1f
  // Frames the active mino has rested before locking (drives the alpha fade in render()).
  private var lockdownTimer = 0
  var lockdownTimerMax = 60
  private var forceLockdownTimer = 0
  var forceLockdownTimerMax = 180
  // Presumably horizontal-input auto-repeat state (direction held, initial delay,
  // repeat accumulator) - used by code outside this view; confirm against phaseMoving.
  private var moveDirection = 0
  private var firstMoveTimer = 0
  var firstMoveTimerMax = 5
  private var moveCounter = 0f
  var moveCounterDelta = 0.5f
  // Lines cleared by the last lock-down and the current chain count; together they
  // select the bomb explosion size (see bombSize indexing in render()).
  private var lastLines = 0
  private var chain = 0
  var allEraseFlag = false
  var nextAllBombFlag = false
  // Next-queue display mode - 0: Normal, 1: Puzzle
  var nextMinoDisplayType = 0
  // Scroll cursor into the next queue; only used when nextMinoDisplayType == 1.
  var nextMinoDisplayCursor = 0
  // One-time setup: notify the handler, reset the field, start in the ready phase.
  handler.init(this)
  field.init()
  var executer: PhaseExecuter = new PhaseExecuter(phaseReady)
  // Advances the game by one frame: run the current phase's logic, then let the
  // handler observe the updated state.
  def update() {
    executer.exec()
    handler.update(this)
  }
  // Draws the whole game frame: field, active mino (with lockdown fade), bomb
  // explosion sprites, falling pieces, next queue, hold mino, frames and a debug
  // state readout. Pixel offsets are hard-coded to the fixed screen layout.
  def render(g: Graphics) {
    g.setBackground(Color.darkGray)
    g.clear()
    //Resource.design.draw()
    g.pushTransform()
    // Field origin in screen space; cells are drawn in 16px units from here.
    g.translate(168, 512)
    drawField(g)(field)
    // Phase 0 (moving) while working: show ghost and the active mino, fading it
    // toward lock as lockdownTimer approaches its max.
    if(executer.currentPhase.id == 0 && executer.currentPosition == Position.WORKING) {
      drawFieldMinoGhost(g)(field)
      drawFieldMino(g)(field, 0.2f - lockdownTimer / (lockdownTimerMax toFloat) * 0.15f)
    }
    // Phase 2 (bomb) while working: draw the expanding/fading explosion sprites.
    if(executer.currentPhase.id == 2 && executer.currentPosition == Position.WORKING) {
      for(e <- bombList){
        var x = e._1 toFloat
        var y = e._2 toFloat
        var big = e._3
        var width, height = 0f
        if(big){
          // Big bombs have a fixed 2x2 footprint, centered on the square.
          x += 0.5f
          y += 0.5f
          width = 4.5f
          height = 4.5f
        } else {
          // Small-bomb radius scales with lines cleared plus chain depth.
          width = bombSize(lastLines + chain - 1 - 1)._1
          height = bombSize(lastLines + chain - 1 - 1)._2
        }
        var multiplier = 0f
        var transparency = 1f
        if(bombTimer < bombTimerMiddle){
          // Growth half: ease-out quadratic from 0 to full size.
          var t = bombTimer / (bombTimerMiddle toFloat)
          multiplier = -(Math.pow((t - 1), 2) - 1) toFloat
        } else {
          // Fade half: full size, linearly decreasing alpha.
          transparency = 1f - (bombTimer - bombTimerMiddle) / (bombTimerMax - bombTimerMiddle toFloat)
          multiplier = 1
        }
        width *= multiplier
        height *= multiplier
        var topLeftX = (x - width) * 16
        var topLeftY = -(y + height + 1) * 16
        var renderWidth = (width * 2 + 1) * 16
        var renderHeight = (height * 2 + 1) * 16
        Resource.bomb.draw(topLeftX, topLeftY, renderWidth, renderHeight, new Color(1f, 1f, 1f, transparency))
      }
    }
    for(e <- field.fallingPieceSet) {
      drawFallingPiece(g)(e)
    }
    for(e <- field.fallingPieceSetIndependent) {
      drawFallingPiece(g)(e)
    }
    g.popTransform()
    // Next-queue rendering; layout differs between Normal (0) and Puzzle (1) modes.
    nextMinoDisplayType match {
      case 0 => {
        if(rule.numNext >= 1 && !field.generator(0).isEmpty) {
          g.pushTransform()
          g.translate(216, 136)
          drawNextMino(g)(field.generator(0).get)
          g.popTransform()
        }
        if(rule.numNext >= 2 && !field.generator(1).isEmpty) {
          g.pushTransform()
          g.translate(304, 128)
          drawNextMino(g)(field.generator(1).get, true)
          g.popTransform()
        }
        g.pushTransform()
        g.translate(352, 128)
        for(i <- 2 until rule.numNext){
          if(!field.generator(i).isEmpty) drawNextMino(g)(field.generator(i).get, true)
          g.translate(0, 32)
        }
        g.popTransform()
      }
      case 1 => {
        if(rule.numNext >= 1 && !field.generator(0).isEmpty) {
          g.pushTransform()
          g.translate(216, 136)
          drawNextMino(g)(field.generator(0).get)
          g.popTransform()
        }
        g.pushTransform()
        g.translate(352, 128)
        // Puzzle mode shows a 5-entry scrollable window starting at the cursor.
        for(i <- 1 + nextMinoDisplayCursor until 6 + nextMinoDisplayCursor){
          if(!field.generator(i).isEmpty) drawNextMino(g)(field.generator(i).get, true)
          g.translate(0, 32)
        }
        g.popTransform()
        if(nextMinoDisplayCursor > 0) Resource.boldfont.drawString("^", 372, 96, TextAlign.CENTER)
        if(nextMinoDisplayCursor + 6 < field.generator.size) Resource.boldfont.drawString("v", 372, 256, TextAlign.CENTER)
      }
    }
    g.pushTransform()
    g.translate(160, 128)
    if(field.holdMino != null) drawNextMino(g)(field.holdMino, true)
    g.popTransform()
    Resource.frame.draw(152, 144)
    Resource.frame.draw(456, 144)
    g.setColor(new Color(1f, 1f, 1f))
    // Debug overlay of the internal state.
    // NOTE(review): both the "Lines:" and "LastLines:" placeholders are fed
    // `lastLines`; one of them likely was meant to be a different counter - confirm.
    Resource.boldfont.drawString("PhaseID: %d\\nPosition: %s\\nTimer: %d\\nFall: %f\\nSoft: %f\\nLock: %d\\nForce: %d\\nDirection: %d\\nFirstMove: %d\\nMove: %f\\nLines: %d\\nBomb: %d\\nFallPiece: %f\\nChain: %d\\nLastLines: %d".
      format(executer.currentPhase.id,
        executer.currentPosition.toString(),
        executer.timer,
        fallCounter,
        softDropCounter,
        lockdownTimer,
        forceLockdownTimer,
        moveDirection,
        firstMoveTimer,
        moveCounter,
        lastLines,
        bombTimer,
        fallingPieceCounter,
        chain,
        lastLines),
      472, 160)
    handler.render(this, g)
  }
/*override def graphicId(): Int = {
var id = block.id
if(0 <= id && id < 64) id
else if(id == 64) 128
else if(id == 65) 129
else if(id == 66) 130
else if(id == 67) 193
else if(id == 68) 194
else if(69 <= id && id <= 86) (id - 69) + 133
else id
}*/
} | suitougreentea/VariousMinos2 | src/main/scala/io/github/suitougreentea/VariousMinos/game/GameBomb.scala | Scala | mit | 22,123 |
import sbt._
import Keys._
import PlayProject._
// sbt build definition (old-style Build.scala) for the Play2-ReactiveMongo sample.
object ApplicationBuild extends Build {
  val appName = "Play2-ReactiveMongo-Sample"
  val appVersion = "1.0-SNAPSHOT"
  // Only external dependency: the ReactiveMongo integration module for Play 2.
  val appDependencies = Seq(
    "org.reactivemongo" %% "play2-reactivemongo" % "0.9-SNAPSHOT"
  )
  // The snapshot dependency above is published to Sonatype's snapshots repository.
  val main = play.Project(appName, appVersion, appDependencies).settings(
    resolvers += "Sonatype snapshots" at "http://oss.sonatype.org/content/repositories/snapshots/"
  )
}
| Rhinofly/Play-ReactiveMongo | samples/Play-ReactiveMongo-Sample/project/Build.scala | Scala | apache-2.0 | 479 |
package org.jetbrains.plugins.scala
package debugger.evaluation
import com.intellij.debugger.SourcePosition
import com.intellij.debugger.engine.evaluation.CodeFragmentFactoryContextWrapper
import com.intellij.debugger.engine.evaluation.expression._
import com.intellij.debugger.engine.{JVMName, JVMNameUtil}
import com.intellij.lang.java.JavaLanguage
import com.intellij.psi._
import com.intellij.psi.search.LocalSearchScope
import com.intellij.psi.search.searches.ReferencesSearch
import com.intellij.psi.util.CachedValueProvider.Result
import com.intellij.psi.util.{CachedValueProvider, CachedValuesManager, PsiTreeUtil}
import org.jetbrains.plugins.scala.debugger.ScalaPositionManager
import org.jetbrains.plugins.scala.debugger.evaluation.evaluator._
import org.jetbrains.plugins.scala.debugger.evaluation.util.DebuggerUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base._
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.expr.xml.ScXmlPattern
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScClassParameter, ScParameter, ScParameterClause}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportStmt
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.{ScClassParents, ScTemplateBody}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.{ScEarlyDefinitions, ScModifierListOwner, ScNamedElement, ScTypedDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.{ImplicitParametersOwner, ScPackage}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.ScSyntheticFunction
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.Parameter
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
import scala.reflect.NameTransformer
/**
* Nikolay.Tropin
* 2014-09-28
*/
private[evaluation] trait ScalaEvaluatorBuilderUtil {
this: ScalaEvaluatorBuilder =>
import org.jetbrains.plugins.scala.debugger.evaluation.ScalaEvaluatorBuilderUtil._
def fileName = contextClass.toOption.flatMap(_.getContainingFile.toOption).map(_.name).orNull
  // Builds a qualifier evaluator for a reference that resolves through an import:
  // stable objects and Java classes are referenced directly; other designators and
  // projection types are re-parsed as expressions at the reference site and evaluated.
  def importedQualifierEvaluator(ref: ScReferenceElement, resolveResult: ScalaResolveResult): Evaluator = {
    val message = ScalaBundle.message("cannot.evaluate.imported.reference")
    resolveResult.fromType match {
      case Some(ScDesignatorType(element)) =>
        element match {
          case obj: ScObject => stableObjectEvaluator(obj)
          case cl: PsiClass if cl.getLanguage.isInstanceOf[JavaLanguage] =>
            new TypeEvaluator(JVMNameUtil.getJVMQualifiedName(cl))
          case _ =>
            val expr = ScalaPsiElementFactory.createExpressionWithContextFromText(element.name, ref.getContext, ref)
            evaluatorFor(expr)
        }
      case Some(p: ScProjectionType) =>
        // Rebuilds the textual path of the projection (a.b.c / X.this.c) so it can
        // be evaluated as an ordinary expression in the reference's context.
        def exprToEvaluate(p: ScProjectionType): String = p.projected match {
          case ScDesignatorType(elem) => elem.name + "." + p.actualElement.name
          case projected: ScProjectionType => exprToEvaluate(projected) + "." + projected.actualElement.name
          case ScThisType(cl) if contextClass == cl => s"this.${p.actualElement.name}"
          case ScThisType(cl) => s"${cl.name}.this.${p.actualElement.name}"
          case _ => throw EvaluationException(message)
        }
        val expr = ScalaPsiElementFactory.createExpressionWithContextFromText(exprToEvaluate(p), ref.getContext, ref)
        evaluatorFor(expr)
      case _ => throw EvaluationException(message)
    }
  }
def thisOrImportedQualifierEvaluator(ref: ScReferenceElement): Evaluator = {
ref.bind() match {
case Some(resolveResult: ScalaResolveResult) =>
if (resolveResult.importsUsed.nonEmpty) importedQualifierEvaluator(ref, resolveResult)
else thisEvaluator(resolveResult)
case None => new ScalaThisEvaluator()
}
}
  // Evaluator for the implicit `this` qualifier of a resolved reference: determines
  // which enclosing class instance the element belongs to and how many context
  // levels up it sits relative to the evaluation point.
  def thisEvaluator(resolveResult: ScalaResolveResult): Evaluator = {
    //this reference
    val elem = resolveResult.element
    val containingClass = resolveResult.fromType match {
      case Some(ScThisType(clazz)) => clazz
      case Some(tp) => ScType.extractClass(tp, Some(elem.getProject)) match {
        case Some(x) => x
        case None => getContextClass(elem)
      }
      case _ => getContextClass(elem)
    }
    containingClass match {
      case o: ScObject if isStable(o) =>
        // Stable objects need no `this` - read the singleton instance instead.
        return stableObjectEvaluator(o)
      case _ =>
    }
    val (outerClass, iterationCount) = findContextClass(e => e == null || e == containingClass)
    if (outerClass != null)
      new ScalaThisEvaluator(iterationCount)
    else new ScalaThisEvaluator()
  }
  // Evaluator for an explicit `this`/`super`, optionally qualified (`Outer.this`,
  // `Outer.super`): finds the matching enclosing class (by resolve, then by simple
  // name) and wraps the result for `super` when requested.
  def thisOrSuperEvaluator(refOpt: Option[ScStableCodeReferenceElement], isSuper: Boolean): Evaluator = {
    def thisEval(i: Int) = if (isSuper) new ScalaSuperEvaluator(i) else new ScalaThisEvaluator(i)
    def stableEvaluator(e: Evaluator) = if (isSuper) new ScalaSuperDelegate(e) else e
    // Unqualified case: walk up to the nearest enclosing PsiClass.
    def default: Evaluator = {
      val (result, iters) = findContextClass(e => e == null || e.isInstanceOf[PsiClass])
      if (result == null) thisEval(0)
      else thisEval(iters)
    }
    refOpt match {
      case Some(ResolvesTo(clazz: PsiClass)) =>
        clazz match {
          case o: ScObject if isStable(o) => stableEvaluator(stableObjectEvaluator(o))
          case _ =>
            val (result, iters) = findContextClass(e => e == null || e == clazz)
            if (result == null) thisEval(0)
            else thisEval(iters)
        }
      case Some(ref) =>
        // Qualifier did not resolve to a class: match enclosing classes by simple name.
        val refName = ref.refName
        val (result, iters) = findContextClass {
          case null => true
          case cl: PsiClass if cl.name != null && cl.name == refName => true
          case _ => false
        }
        result match {
          case o: ScObject if isStable(o) => stableEvaluator(stableObjectEvaluator(o))
          case null => default
          case _ => thisEval(iters)
        }
      case _ => default
    }
  }
def findContextClass(stopCondition: PsiElement => Boolean): (PsiElement, Int) = {
var current: PsiElement = contextClass
var iterations = 0
while (!stopCondition(current)) {
iterations += anonClassCount(current)
current = getContextClass(current)
}
(current, iterations)
}
  // Evaluator for a call to a local (nested) function. Local functions are compiled
  // into methods of the enclosing class with captured locals appended as trailing
  // parameters, so the argument list is extended with evaluators for those locals.
  def localMethodEvaluator(fun: ScFunctionDefinition, argEvaluators: Seq[Evaluator]): Evaluator = {
    val name = NameTransformer.encode(fun.name)
    val containingClass = if (fun.isSynthetic) fun.containingClass else getContextClass(fun)
    val message = ScalaBundle.message("cannot.evaluate.local.method")
    if (contextClass == null) {
      throw EvaluationException(message)
    }
    // Receiver: stable object singleton, trait `super`, or the right `this` level.
    val thisEvaluator: Evaluator = containingClass match {
      case obj: ScObject if isStable(obj) =>
        stableObjectEvaluator(obj)
      case t: ScTrait =>
        thisOrSuperEvaluator(None, isSuper = true)
      case _ =>
        val (outerClass, iters) = findContextClass(e => e == null || e == containingClass)
        if (outerClass != null) new ScalaThisEvaluator(iters)
        else null
    }
    if (thisEvaluator != null) {
      val locals = DebuggerUtil.localParamsForFunDef(fun)
      val evaluators = argEvaluators ++ locals.map(fromLocalArgEvaluator)
      val signature = DebuggerUtil.getFunctionJVMSignature(fun)
      val positions = DebuggerUtil.getSourcePositions(fun.getNavigationElement)
      val idx = localFunctionIndex(fun)
      new ScalaMethodEvaluator(thisEvaluator, name, signature, evaluators, traitImplementation(fun), positions, idx)
    }
    else throw EvaluationException(message)
  }
def stableObjectEvaluator(qual: String): ScalaFieldEvaluator = {
val jvm = JVMNameUtil.getJVMRawText(qual)
new ScalaFieldEvaluator(new TypeEvaluator(jvm), "MODULE$")
}
def stableObjectEvaluator(obj: ScObject): Evaluator = {
val qualName =
if (obj.isPackageObject)
obj.qualifiedName + ".package"
else obj.getQualifiedNameForDebugger
val qual = qualName.split('.').map(NameTransformer.encode).mkString(".") + "$"
stableObjectEvaluator(qual)
}
def objectEvaluator(obj: ScObject, qualEvaluator: () => Evaluator): Evaluator = {
if (isStable(obj)) stableObjectEvaluator(obj)
else {
val objName = NameTransformer.encode(obj.name)
new ScalaMethodEvaluator(qualEvaluator(), objName, null /* todo? */, Seq.empty,
traitImplementation(obj), DebuggerUtil.getSourcePositions(obj.getNavigationElement))
}
}
  // Evaluator for calls to synthetic functions: primitive operators, `##`,
  // `isInstanceOf`/`asInstanceOf`, string `+`, short-circuit `&&`/`||` and numeric
  // conversions. Most operators delegate to helper methods on BOXES_RUN_TIME
  // applied to boxed operands.
  def syntheticFunctionEvaluator(synth: ScSyntheticFunction,
                                 qualOpt: Option[ScExpression],
                                 ref: ScReferenceExpression,
                                 arguments: Seq[ScExpression]): Evaluator = {
    // String concatenation is rewritten to an explicit String.concat/valueOf call.
    if (synth.isStringPlusMethod && arguments.length == 1) {
      val qualText = qualOpt.fold("this")(_.getText)
      val exprText = s"($qualText).concat(_root_.java.lang.String.valueOf(${arguments(0).getText}))"
      val expr = ScalaPsiElementFactory.createExpressionWithContextFromText(exprText, ref.getContext, ref)
      return evaluatorFor(expr)
    }
    val name = synth.name
    val argEvaluators = arguments.map(evaluatorFor(_))
    // Applies `function` to the receiver; valid only for zero-argument operators.
    def unaryEval(operatorName: String, function: Evaluator => Evaluator): Evaluator = {
      if (argEvaluators.isEmpty) {
        val eval = qualOpt match {
          case None => new ScalaThisEvaluator()
          case Some(qual) => evaluatorFor(qual)
        }
        function(eval)
      } else throw EvaluationException(ScalaBundle.message("wrong.number.of.arguments", operatorName))
    }
    def unaryEvalForBoxes(operatorName: String, boxesName: String): Evaluator = {
      unaryEval(operatorName, unaryEvaluator(_, boxesName))
    }
    // Applies `function` to (receiver, single argument); valid only for binary operators.
    def binaryEval(operatorName: String, function: (Evaluator, Evaluator) => Evaluator): Evaluator = {
      if (argEvaluators.length == 1) {
        val eval = qualOpt match {
          case None => new ScalaThisEvaluator()
          case Some(qual) => evaluatorFor(qual)
        }
        function(eval, argEvaluators(0))
      } else throw EvaluationException(ScalaBundle.message("wrong.number.of.arguments", operatorName))
    }
    def binaryEvalForBoxes(operatorName: String, boxesName: String): Evaluator = {
      binaryEval(operatorName, binaryEvaluator(_, _, boxesName))
    }
    // == / != are implemented via BoxesRunTime-style equals on boxed operands.
    def equalsEval(opName: String): Evaluator = {
      val rawText = JVMNameUtil.getJVMRawText("(Ljava/lang/Object;Ljava/lang/Object;)Z")
      binaryEval(name, (l, r) => new ScalaMethodEvaluator(BOXES_RUN_TIME, "equals", rawText, boxed(l, r)))
    }
    // isInstanceOf[T]: extracts T from the enclosing generic call (Nothing if absent).
    def isInstanceOfEval: Evaluator = {
      unaryEval("isInstanceOf", eval => {
        import org.jetbrains.plugins.scala.lang.psi.types.Nothing
        val tp = ref.getParent match {
          case gen: ScGenericCall => gen.typeArgs match {
            case Some(args) => args.typeArgs match {
              case Seq(arg) => arg.calcType
              case _ => Nothing
            }
            case None => Nothing
          }
          case _ => Nothing
        }
        val jvmName: JVMName = DebuggerUtil.getJVMQualifiedName(tp)
        new ScalaInstanceofEvaluator(eval, new TypeEvaluator(jvmName))
      })
    }
    def trueEval = expressionFromTextEvaluator("true", ref)
    def falseEval = expressionFromTextEvaluator("false", ref)
    // Short-circuit semantics expressed as conditionals over the operand evaluators.
    def conditionalOr = binaryEval("||", (first, second) => new ScalaIfEvaluator(first, trueEval, Some(second)))
    def conditionalAnd = binaryEval("&&", (first, second) => new ScalaIfEvaluator(first, second, Some(falseEval)))
    name match {
      case "isInstanceOf" => isInstanceOfEval
      case "asInstanceOf" => unaryEval(name, identity) //todo: primitive type casting?
      case "##" => unaryEval(name, eval => new ScalaMethodEvaluator(BOXES_RUN_TIME, "hashFromObject",
        JVMNameUtil.getJVMRawText("(Ljava/lang/Object;)I"), Seq(boxEvaluator(eval))))
      case "==" => equalsEval("==")
      case "!=" => unaryEvaluator(equalsEval("!="), "takeNot")
      case "unary_!" => unaryEvalForBoxes("!", "takeNot")
      case "unary_~" => unaryEvalForBoxes("~", "complement")
      case "unary_+" => unaryEvalForBoxes("+", "positive")
      case "unary_-" => unaryEvalForBoxes("-", "negate")
      case "eq" => binaryEval(name, eqEvaluator)
      case "ne" => binaryEval(name, neEvaluator)
      case "<" => binaryEvalForBoxes(name, "testLessThan")
      case ">" => binaryEvalForBoxes(name, "testGreaterThan")
      case ">=" => binaryEvalForBoxes(name, "testGreaterOrEqualThan")
      case "<=" => binaryEvalForBoxes(name, "testLessOrEqualThan")
      case "+" => binaryEvalForBoxes(name, "add")
      case "-" => binaryEvalForBoxes(name, "subtract")
      case "*" => binaryEvalForBoxes(name, "multiply")
      case "/" => binaryEvalForBoxes(name, "divide")
      case "%" => binaryEvalForBoxes(name, "takeModulo")
      case ">>" => binaryEvalForBoxes(name, "shiftSignedRight")
      case "<<" => binaryEvalForBoxes(name, "shiftSignedLeft")
      case ">>>" => binaryEvalForBoxes(name, "shiftLogicalRight")
      case "&" => binaryEvalForBoxes(name, "takeAnd")
      case "|" => binaryEvalForBoxes(name, "takeOr")
      case "^" => binaryEvalForBoxes(name, "takeXor")
      case "&&" => conditionalAnd
      case "||" => conditionalOr
      case "toInt" => unaryEvalForBoxes(name, "toInteger")
      case "toChar" => unaryEvalForBoxes(name, "toCharacter")
      case "toShort" => unaryEvalForBoxes(name, "toShort")
      case "toByte" => unaryEvalForBoxes(name, "toByte")
      case "toDouble" => unaryEvalForBoxes(name, "toDouble")
      case "toLong" => unaryEvalForBoxes(name, "toLong")
      case "toFloat" => unaryEvalForBoxes(name, "toFloat")
      case "synchronized" =>
        throw EvaluationException("synchronized statement is not supported")
      case _ =>
        throw EvaluationException("Cannot evaluate synthetic method: " + name)
    }
  }
  // Evaluator for the intrinsic methods of scala.Array (apply, length, clone,
  // update, toString); each branch validates the argument count for that method.
  def arrayMethodEvaluator(name: String, qual: Option[ScExpression], argEvaluators: Seq[Evaluator]): Evaluator = {
    val qualEval = qual match {
      case Some(q) => evaluatorFor(q)
      case None => throw EvaluationException(ScalaBundle.message("array.instance.is.not.found", name))
    }
    val message = ScalaBundle.message("wrong.number.of.arguments", s"Array.$name")
    name match {
      case "apply" =>
        if (argEvaluators.length == 1) new ScalaArrayAccessEvaluator(qualEval, argEvaluators(0))
        else throw EvaluationException(message)
      case "length" =>
        if (argEvaluators.isEmpty) new ScalaFieldEvaluator(qualEval, "length")
        else throw EvaluationException(message)
      case "clone" =>
        if (argEvaluators.isEmpty) new ScalaMethodEvaluator(qualEval, "clone", null/*todo*/, Nil)
        else throw EvaluationException(message)
      case "update" =>
        if (argEvaluators.length == 2) {
          // a(i) = x  ==>  assignment into the array element; the value is unboxed first.
          val leftEval = new ScalaArrayAccessEvaluator(qualEval, argEvaluators(0))
          new AssignmentEvaluator(leftEval, unboxEvaluator(argEvaluators(1)))
        } else throw EvaluationException(message)
      case "toString" =>
        if (argEvaluators.isEmpty) new ScalaMethodEvaluator(qualEval, "toString", null/*todo*/, Nil)
        else throw EvaluationException(message)
      case _ =>
        throw EvaluationException(ScalaBundle.message("array.method.not.supported"))
    }
  }
def isArrayFunction(fun: ScFunction): Boolean = {
fun.getContext match {
case tb: ScTemplateBody =>
fun.containingClass match {
case clazz: ScClass if clazz.qualifiedName == "scala.Array" => true
case _ => false
}
case _ => false
}
}
def isClassOfFunction(fun: ScFunction): Boolean = {
if (fun.name != "classOf") return false
fun.getContext match {
case tb: ScTemplateBody =>
fun.containingClass match {
case clazz: PsiClass if clazz.qualifiedName == "scala.Predef" => true
case _ => false
}
case _ => false
}
}
  // Evaluator for Predef.classOf[T]: extracts T from the enclosing generic call and
  // produces the corresponding Class object; evaluates to a null literal when T
  // cannot be resolved to a class.
  def classOfFunctionEvaluator(ref: ScReferenceExpression) = {
    val clazzJVMName = ref.getContext match {
      case gen: ScGenericCall =>
        gen.arguments.apply(0).getType(TypingContext.empty).map(tp => {
          ScType.extractClass(tp, Some(ref.getProject)) match {
            case Some(clazz) =>
              DebuggerUtil.getClassJVMName(clazz)
            case None => null
          }
        }).getOrElse(null)
      case _ => null
    }
    import org.jetbrains.plugins.scala.lang.psi.types.Null
    if (clazzJVMName != null) new ClassObjectEvaluator(new TypeEvaluator(clazzJVMName))
    else new ScalaLiteralEvaluator(null, Null)
  }
def valueClassInstanceEvaluator(value: Evaluator, innerType: ScType, classType: ScType): Evaluator = {
val valueClassType = new TypeEvaluator(DebuggerUtil.getJVMQualifiedName(classType))
val innerJvmName = DebuggerUtil.getJVMStringForType(innerType, isParam = true)
val signature = JVMNameUtil.getJVMRawText(s"($innerJvmName)V")
new ScalaDuplexEvaluator(new ScalaNewClassInstanceEvaluator(valueClassType, signature, Array(value)), value)
}
def repeatedArgEvaluator(exprsForP: Seq[ScExpression], expectedType: ScType, context: PsiElement): Evaluator = {
def seqEvaluator: Evaluator = {
val argTypes = exprsForP.map(_.getType().getOrAny)
val argTypeText =
if (argTypes.isEmpty) expectedType.canonicalText
else Bounds.lub(argTypes).canonicalText
val argsText = if (exprsForP.nonEmpty) exprsForP.sortBy(_.getTextRange.getStartOffset).map(_.getText).mkString(".+=(", ").+=(", ").result()") else ""
val exprText = s"_root_.scala.collection.Seq.newBuilder[$argTypeText]$argsText"
val newExpr = ScalaPsiElementFactory.createExpressionWithContextFromText(exprText, context, context)
evaluatorFor(newExpr)
}
if (exprsForP.length == 1) {
exprsForP(0) match {
case t: ScTypedStmt if t.isSequenceArg => evaluatorFor(t.expr)
case _ => seqEvaluator
}
} else seqEvaluator
}
  // Synthesizes an evaluator for an implicitly-passed argument of `fun`'s implicit
  // parameter clause: ClassManifest/ClassTag instances are rebuilt textually from
  // the type argument; other implicits are re-referenced by name (qualified through
  // their stable owner object when needed) and evaluated at the call site.
  def implicitArgEvaluator(fun: ScMethodLike, param: ScParameter, owner: ImplicitParametersOwner): Evaluator = {
    assert(param.owner == fun)
    val implicitParameters = fun.effectiveParameterClauses.lastOption match {
      case Some(clause) if clause.isImplicit => clause.effectiveParameters
      case _ => Seq.empty
    }
    val i = implicitParameters.indexOf(param)
    val cannotFindMessage = ScalaBundle.message("cannot.find.implicit.parameters")
    owner.findImplicitParameters match {
      case Some(resolveResults) if resolveResults.length == implicitParameters.length =>
        if (resolveResults(i) == null) throw EvaluationException(cannotFindMessage)
        // Build the source text of an expression that reproduces the implicit value.
        val exprText = resolveResults(i) match {
          case ScalaResolveResult(clazz: ScTrait, substitutor) if clazz.qualifiedName == "scala.reflect.ClassManifest" =>
            val argType = substitutor.subst(clazz.getType(TypingContext.empty).get)
            argType match {
              case ScParameterizedType(tp, Seq(paramType)) => classManifestText(paramType)
              case _ =>
                throw EvaluationException(cannotFindMessage)
            }
          case ScalaResolveResult(clazz: ScTrait, substitutor) if clazz.qualifiedName == "scala.reflect.ClassTag" =>
            val argType = substitutor.subst(clazz.getType(TypingContext.empty).get)
            argType match {
              case ScParameterizedType(tp, Seq(arg)) => classTagText(arg)
              case _ =>
                throw EvaluationException(cannotFindMessage)
            }
          case ScalaResolveResult(elem, _) =>
            // Determine whether the implicit lives in a class/object or in local scope.
            val context = ScalaPsiUtil.nameContext(elem)
            val clazz = context.getContext match {
              case _: ScTemplateBody | _: ScEarlyDefinitions =>
                ScalaPsiUtil.getContextOfType(context, true, classOf[PsiClass])
              case _ if context.isInstanceOf[ScClassParameter] =>
                ScalaPsiUtil.getContextOfType(context, true, classOf[PsiClass])
              case _ => null
            }
            clazz match {
              case o: ScObject if isStable(o) => o.qualifiedName + "." + elem.name
              case o: ScObject => //todo: It can cover many cases!
                throw EvaluationException(ScalaBundle.message("implicit.parameters.from.dependent.objects"))
              case _ => elem.name //from scope
            }
        }
        val newExpr = ScalaPsiElementFactory.createExpressionWithContextFromText(exprText, owner.getContext, owner)
        evaluatorFor(newExpr)
      case None =>
        throw EvaluationException(cannotFindMessage)
    }
  }
  // Evaluator that reads a method/function parameter as a local variable of the
  // current stack frame. For local function definitions the slot index accounts
  // for captured locals, which are appended after the declared parameters.
  def parameterEvaluator(fun: PsiElement, resolve: PsiElement): Evaluator = {
    val name = NameTransformer.encode(resolve.asInstanceOf[PsiNamedElement].name)
    val evaluator = new ScalaLocalVariableEvaluator(name, fileName)
    fun match {
      case funDef: ScFunctionDefinition =>
        // Captured locals come after the declared parameters in the compiled method.
        def paramIndex(fun: ScFunctionDefinition, context: PsiElement, elem: PsiElement): Int = {
          val locIndex = DebuggerUtil.localParamsForFunDef(fun).indexOf(elem)
          val funParams = fun.effectiveParameterClauses.flatMap(_.effectiveParameters)
          if (locIndex < 0) funParams.indexOf(elem)
          else locIndex + funParams.size
        }
        val pIndex = paramIndex(funDef, getContextClass(fun), resolve)
        evaluator.setParameterIndex(pIndex)
        evaluator.setMethodName(funDef.name)
      case funExpr: ScFunctionExpr =>
        // Anonymous functions compile to an `apply` method.
        evaluator.setParameterIndex(funExpr.parameters.indexOf(resolve))
        evaluator.setMethodName("apply")
      case _ => throw EvaluationException(ScalaBundle.message("cannot.evaluate.parameter", name))
    }
    evaluator
  }
def javaFieldEvaluator(field: PsiField, ref: ScReferenceExpression): Evaluator = {
ref.qualifier match {
case Some(qual) =>
if (field.hasModifierPropertyScala("static")) {
val eval = new TypeEvaluator(JVMNameUtil.getContextClassJVMQualifiedName(SourcePosition.createFromElement(field)))
val name = field.name
new ScalaFieldEvaluator(eval, name)
} else {
val qualEvaluator = evaluatorFor(qual)
new ScalaFieldEvaluator(qualEvaluator, field.name)
}
case None =>
val evaluator = thisOrImportedQualifierEvaluator(ref)
new ScalaFieldEvaluator(evaluator, field.name)
}
}
  // Evaluator for a call to a Java method: boxes arguments bound to non-primitive
  // parameters, boxes a primitive Scala receiver, and dispatches statically when
  // the method is static.
  def javaMethodEvaluator(method: PsiMethod, ref: ScReferenceExpression, arguments: Seq[ScExpression]): Evaluator = {
    // Box only the arguments whose corresponding declared parameter is non-primitive.
    def boxArguments(arguments: Seq[Evaluator], method: PsiElement): Seq[Evaluator] = {
      val params = method match {
        case fun: ScMethodLike => fun.effectiveParameterClauses.flatMap(_.parameters)
        case m: PsiMethod => m.getParameterList.getParameters.toSeq
        case _ => return arguments
      }
      arguments.zipWithIndex.map {
        case (arg, i) =>
          if (params.length <= i || isOfPrimitiveType(params(i))) arg
          else boxEvaluator(arg)
      }
    }
    val argEvals = boxArguments(arguments.map(evaluatorFor(_)), method)
    val methodPosition = DebuggerUtil.getSourcePositions(method.getNavigationElement)
    val signature = JVMNameUtil.getJVMSignature(method)
    ref.qualifier match {
      // Primitive receiver: box it so the Java method can be invoked on it.
      case Some(qual @ ExpressionType(tp)) if isPrimitiveScType(tp) =>
        val boxEval = boxEvaluator(evaluatorFor(qual))
        ScalaMethodEvaluator(boxEval, method.name, signature, argEvals, None, methodPosition)
      case Some(q) if method.hasModifierPropertyScala("static") =>
        val eval = new TypeEvaluator(JVMNameUtil.getContextClassJVMQualifiedName(SourcePosition.createFromElement(method)))
        val name = method.name
        ScalaMethodEvaluator(eval, name, signature, argEvals, None, methodPosition)
      case Some(q) =>
        val name = method.name
        new ScalaMethodEvaluator(evaluatorFor(q), name, signature, argEvals, None, methodPosition)
      case _ =>
        val evaluator = thisOrImportedQualifierEvaluator(ref)
        val name = method.name
        new ScalaMethodEvaluator(evaluator, name, signature, argEvals, None, methodPosition)
    }
  }
def unresolvedMethodEvaluator(ref: ScReferenceExpression, args: Seq[ScExpression]): Evaluator = {
val argEvals = args.map(evaluatorFor(_))
val name = NameTransformer.encode(ref.refName)
ref.qualifier match {
case Some(q) => new ScalaMethodEvaluator(evaluatorFor(q), name, null, argEvals)
case _ => new ScalaMethodEvaluator(thisOrImportedQualifierEvaluator(ref), name, null, argEvals)
}
}
  // Builds the evaluators for all effective parameters of `fun`, clause by clause:
  // explicit arguments are evaluated, repeated parameters are packed, implicit
  // parameters are resolved, and defaulted parameters invoke the compiler-generated
  // `name$default$N` method (which may need captured locals and the evaluators of
  // earlier clauses). Values bound to non-primitive parameters are boxed.
  def argumentEvaluators(fun: ScMethodLike, matchedParameters: Map[Parameter, Seq[ScExpression]],
                         call: ScExpression, ref: ScReferenceExpression, arguments: Seq[ScExpression]): Seq[Evaluator] = {
    val clauses = fun.effectiveParameterClauses
    val parameters = clauses.flatMap(_.effectiveParameters).map(new Parameter(_))
    def addForNextClause(previousClausesEvaluators: Seq[Evaluator], clause: ScParameterClause): Seq[Evaluator] = {
      // An expression that is itself a default-parameter initializer must not be
      // treated as an explicitly passed argument.
      def isDefaultExpr(expr: ScExpression) = expr match {
        case ChildOf(p: ScParameter) => p.isDefaultParam
        case _ => false
      }
      previousClausesEvaluators ++ clause.effectiveParameters.map {
        case param =>
          val p = new Parameter(param)
          val exprsForP = matchedParameters.find(_._1.name == p.name).map(_._2).getOrElse(Seq.empty).filter(_ != null)
          if (p.isByName) throw new NeedCompilationException(ScalaBundle.message("method.with.by-name.parameters"))
          val evaluator =
            if (p.isRepeated) repeatedArgEvaluator(exprsForP, p.expectedType, call)
            else if (exprsForP.size > 1) throw EvaluationException(ScalaBundle.message("wrong.number.of.expressions"))
            else if (exprsForP.length == 1 && !isDefaultExpr(exprsForP.head)) evaluatorFor(exprsForP.head)
            else if (param.isImplicitParameter) implicitArgEvaluator(fun, param, call)
            else if (p.isDefault) {
              // Call the synthetic default-value method; it receives the evaluators
              // of all previous clauses plus any locals it captures.
              val paramIndex = parameters.indexOf(p) + 1
              val methodName = defaultParameterMethodName(fun, paramIndex)
              val localParams = p.paramInCode.toSeq.flatMap(DebuggerUtil.localParamsForDefaultParam(_))
              val localParamRefs =
                localParams.map(td => ScalaPsiElementFactory.createExpressionWithContextFromText(td.name, call.getContext, call))
              val localEvals = localParamRefs.map(evaluatorFor(_))
              functionEvaluator(ref.qualifier, ref, methodName, previousClausesEvaluators ++ localEvals)
            }
            else throw EvaluationException(ScalaBundle.message("cannot.evaluate.parameter", p.name))
          if (!isOfPrimitiveType(param)) boxEvaluator(evaluator)
          else evaluator
      }
    }
    val argEvaluators: Seq[Evaluator] = clauses.foldLeft(Seq.empty[Evaluator])(addForNextClause)
    // Fallback: if any evaluator could not be built, evaluate the raw argument list.
    if (argEvaluators.contains(null)) arguments.map(arg => evaluatorFor(arg))
    else argEvaluators
  }
/**
 * Builds an evaluator invoking method `funName` resolved through `ref`.
 *
 * The qualifier is taken from the resolve result: `apply` on an object uses the object
 * itself, otherwise the explicit or implied qualifier. Private trait methods are called
 * statically on the trait implementation class, with the instance as first argument.
 */
def functionEvaluator(qualOption: Option[ScExpression], ref: ScReferenceExpression,
                      funName: String, argEvaluators: Seq[Evaluator]): Evaluator = {
  def qualEvaluator(r: ScalaResolveResult) = {
    def defaultQualEvaluator = qualifierEvaluator(qualOption, ref)
    r.getActualElement match {
      case o: ScObject if funName == "apply" => objectEvaluator(o, defaultQualEvaluator _)
      case _ => defaultQualEvaluator
    }
  }
  // Operator identifiers must be JVM-encoded (e.g. "+" -> "$plus").
  val name = NameTransformer.encode(funName)
  ref.bind() match {
    case Some(r) if r.tuplingUsed => throw EvaluationException(ScalaBundle.message("tupling.not.supported"))
    case None => throw EvaluationException(ScalaBundle.message("cannot.evaluate.method", funName))
    case Some(r @ privateTraitMethod(tr, fun)) =>
      // Private trait methods live in the trait's implementation class; call them there.
      val traitTypeEval = new TypeEvaluator(DebuggerUtil.getClassJVMName(tr, withPostfix = true))
      val qualEval = qualEvaluator(r)
      new ScalaMethodEvaluator(traitTypeEval, name, null, qualEval +: argEvaluators)
    case Some(r) =>
      val resolve = r.element
      val qualEval = qualEvaluator(r)
      val signature = resolve match {
        case fun: ScFunction => DebuggerUtil.getFunctionJVMSignature(fun)
        case _ => null
      }
      new ScalaMethodEvaluator(qualEval, name, signature, argEvaluators,
        traitImplementation(resolve), DebuggerUtil.getSourcePositions(resolve.getNavigationElement))
  }
}
/**
 * Evaluator for a method call: dispatches on what the deepest invoked reference
 * resolves to — local function, `classOf`, synthetic function, `Array` method,
 * regular Scala function, Java method, or an unresolved symbol.
 */
def methodCallEvaluator(call: ScExpression, arguments: Seq[ScExpression], matchedParameters: Map[Parameter, Seq[ScExpression]]): Evaluator = {
  val ref = call match {
    case hasDeepestInvokedReference(r) => r
    case _ => throw EvaluationException(ScalaBundle.message("cannot.evaluate.method", call.getText))
  }
  val qualOption = ref.qualifier
  val resolve = ref.resolve()
  resolve match {
    case fun: ScFunctionDefinition if isLocalFunction(fun) =>
      val args = argumentEvaluators(fun, matchedParameters, call, ref, arguments)
      localMethodEvaluator(fun, args)
    case fun: ScFunction if isClassOfFunction(fun) =>
      classOfFunctionEvaluator(ref)
    case synth: ScSyntheticFunction =>
      syntheticFunctionEvaluator(synth, qualOption, ref, arguments) //todo: use matched parameters
    case fun: ScFunction if isArrayFunction(fun) =>
      val args = argumentEvaluators(fun, matchedParameters, call, ref, arguments)
      arrayMethodEvaluator(fun.name, qualOption, args)
    case fun: ScFunction =>
      ref match {
        case isInsideValueClass(c) if qualOption.isEmpty =>
          // Inside a value class the receiver is the hidden underlying value; rewrite the
          // call onto a freshly constructed instance so it can be evaluated normally.
          val clName = c.name
          val paramName = c.allClauses.flatMap(_.parameters).map(_.name).headOption.getOrElse("$this")
          val text = s"new $clName($paramName).${call.getText}"
          val expr = ScalaPsiElementFactory.createExpressionFromText(text, call.getContext)
          evaluatorFor(expr)
        case _ =>
          val args: Seq[Evaluator] = argumentEvaluators(fun, matchedParameters, call, ref, arguments)
          functionEvaluator(qualOption, ref, fun.name, args)
      }
    case method: PsiMethod =>
      javaMethodEvaluator(method, ref, arguments)
    case _ =>
      unresolvedMethodEvaluator(ref, arguments)
  }
}
/**
 * Evaluator for a reference used without an argument list: locals, parameters,
 * pattern bindings, local objects, fields, stable objects, packages, and
 * parameterless method calls.
 *
 * Values captured from an enclosing scope may live either in a local variable or in
 * an outer-class field, so both strategies are tried via [[ScalaDuplexEvaluator]]
 * where the location is ambiguous. Case order below is significant.
 */
def evaluatorForReferenceWithoutParameters(qualifier: Option[ScExpression],
                                           resolve: PsiElement,
                                           ref: ScReferenceExpression): Evaluator = {
  // Walks the `this` chain outwards until `containingClass` is reached, then reads field `name`.
  def withOuterFieldEvaluator(containingClass: PsiElement, name: String, message: String) = {
    val (innerClass, iterationCount) = findContextClass { e =>
      e == null || {val nextClass = getContextClass(e); nextClass == null || nextClass == containingClass}
    }
    if (innerClass == null) throw EvaluationException(message)
    val thisEval = new ScalaThisEvaluator(iterationCount)
    new ScalaFieldEvaluator(thisEval, name)
  }
  // Local value: parameter, pattern binding, enumerator, lazy val, or plain local variable.
  def calcLocal(named: PsiNamedElement): Evaluator = {
    val name = NameTransformer.encode(named.name)
    val containingClass = getContextClass(named)
    val localVariableEvaluator: Evaluator = ScalaPsiUtil.nameContext(named) match {
      case param: ScParameter =>
        param.owner match {
          case fun@(_: ScFunction | _: ScFunctionExpr) => parameterEvaluator(fun, param)
          case _ => throw EvaluationException(ScalaBundle.message("cannot.evaluate.parameter", param.name))
        }
      case caseCl: ScCaseClause => patternEvaluator(caseCl, named)
      case _: ScGenerator | _: ScEnumerator if position != null && isNotUsedEnumerator(named, position.getElementAt) =>
        throw EvaluationException(ScalaBundle.message("not.used.from.for.statement", name))
      case LazyVal(_) => localLazyValEvaluator(named)
      case _ => new ScalaLocalVariableEvaluator(name, fileName)
    }
    containingClass match {
      case `contextClass` | _: ScGenerator | _: ScEnumerator => localVariableEvaluator
      case _ if contextClass == null => localVariableEvaluator
      case _ =>
        // Captured from an outer context: the value may be copied into an outer-class field.
        val fieldEval = withOuterFieldEvaluator(containingClass, name, ScalaBundle.message("cannot.evaluate.local.variable", name))
        new ScalaDuplexEvaluator(fieldEval, localVariableEvaluator)
    }
  }
  // Local object: stored as "<name>$module", a VolatileObjectRef whose `elem` holds the instance.
  def calcLocalObject(obj: ScObject) = {
    def fromVolatileObjectReference(eval: Evaluator) = new ScalaFieldEvaluator(eval, "elem")
    val containingClass = getContextClass(obj)
    val name = NameTransformer.encode(obj.name) + "$module"
    if (containingClass == contextClass) {
      fromVolatileObjectReference(new ScalaLocalVariableEvaluator(name, fileName))
    } else {
      val fieldEval = withOuterFieldEvaluator(containingClass, name, ScalaBundle.message("cannot.evaluate.local.object", name))
      fromVolatileObjectReference(fieldEval)
    }
  }
  val labeledOrSynthetic = labeledOrSyntheticEvaluator(ref, resolve)
  if (labeledOrSynthetic.isDefined) return labeledOrSynthetic.get
  val isLocalValue = DebuggerUtil.isLocalV(resolve)
  resolve match {
    case Both(isInsideLocalFunction(fun), named: PsiNamedElement) if isLocalValue =>
      new ScalaDuplexEvaluator(calcLocal(named), parameterEvaluator(fun, resolve))
    case p: ScParameter if p.isCallByNameParameter && isLocalValue =>
      // By-name parameters are thunks at runtime: apply() yields the value.
      val localEval = calcLocal(p)
      new ScalaMethodEvaluator(localEval, "apply", null, Nil)
    case obj: ScObject if isLocalValue => calcLocalObject(obj)
    case named: PsiNamedElement if isLocalValue =>
      calcLocal(named)
    case obj: ScObject =>
      objectEvaluator(obj, () => qualifierEvaluator(qualifier, ref))
    case _: PsiMethod | _: ScSyntheticFunction =>
      methodCallEvaluator(ref, Nil, Map.empty)
    case cp: ScClassParameter if cp.isCallByNameParameter =>
      val qualEval = qualifierEvaluator(qualifier, ref)
      val name = NameTransformer.encode(cp.name)
      val fieldEval = new ScalaFieldEvaluator(qualEval, name, true)
      new ScalaMethodEvaluator(fieldEval, "apply", null, Nil)
    case privateThisField(named) =>
      // NOTE(review): the extracted binding is immediately shadowed by a cast of `resolve`;
      // presumably both refer to the same element — confirm.
      val named = resolve.asInstanceOf[ScNamedElement]
      val qualEval = qualifierEvaluator(qualifier, ref)
      val name = NameTransformer.encode(named.name)
      new ScalaFieldEvaluator(qualEval, name, true)
    case cp: ScClassParameter if qualifier.isEmpty && ValueClassType.isValueClass(cp.containingClass) =>
      //methods of value classes have hidden argument with underlying value
      new ScalaLocalVariableEvaluator("$this", fileName)
    case _: ScClassParameter | _: ScBindingPattern =>
      //this is scala "field": accessed through its accessor method, not a JVM field
      val named = resolve.asInstanceOf[ScNamedElement]
      val name = NameTransformer.encode(named.name)
      val qualEval = qualifierEvaluator(qualifier, ref)
      val withSimpleNameEval = new ScalaMethodEvaluator(qualEval, name, null /* todo */, Seq.empty,
        traitImplementation(resolve), DebuggerUtil.getSourcePositions(resolve.getNavigationElement))
      getContextClass(named) match {
        //in some cases compiler uses full qualified names for fields and methods
        case clazz: ScTemplateDefinition if ScalaPsiUtil.hasStablePath(clazz)
          && clazz.members.contains(ScalaPsiUtil.nameContext(named)) =>
          val qualName = clazz.qualifiedName
          val newName = qualName.split('.').map(NameTransformer.encode).mkString("$") + "$$" + name
          val reserveEval = new ScalaMethodEvaluator(qualEval, newName, null/* todo */, Seq.empty,
            traitImplementation(resolve), DebuggerUtil.getSourcePositions(resolve.getNavigationElement))
          new ScalaDuplexEvaluator(withSimpleNameEval, reserveEval)
        case _ => withSimpleNameEval
      }
    case field: PsiField => javaFieldEvaluator(field, ref)
    case pack: ScPackage =>
      //let's try to find package object:
      val qual = (pack.getQualifiedName + ".package$").split('.').map(NameTransformer.encode).mkString(".")
      stableObjectEvaluator(qual)
    case _ =>
      //unresolved symbol => try to resolve it dynamically
      val name = NameTransformer.encode(ref.refName)
      val fieldOrVarEval = qualifier match {
        case Some(qual) => new ScalaFieldEvaluator(evaluatorFor(qual), name)
        case None => new ScalaLocalVariableEvaluator(name, fileName)
      }
      new ScalaDuplexEvaluator(fieldOrVarEval, unresolvedMethodEvaluator(ref, Seq.empty))
  }
}
/**
 * Returns an evaluator for a label variable (injected by the code-fragment wrapper)
 * or for an unqualified synthetic variable defined in the code fragment itself;
 * `None` when neither applies.
 */
def labeledOrSyntheticEvaluator(ref: ScReferenceExpression, resolve: PsiElement): Option[Evaluator] = {
  if (resolve == null) None
  else {
    val labeled = Option(resolve.getUserData(CodeFragmentFactoryContextWrapper.LABEL_VARIABLE_VALUE_KEY))
      .map(new IdentityEvaluator(_))
    labeled.orElse {
      val definedInFragment = codeFragment.isAncestorOf(resolve)
      if (definedInFragment && ref.qualifier.isEmpty) Some(syntheticVariableEvaluator(ref.refName))
      else None
    }
  }
}
/**
 * Evaluator for a reference's qualifier: the explicit qualifier expression when
 * present, otherwise the implied `this` (or imported) qualifier of `ref`.
 */
def qualifierEvaluator(qualifier: Option[ScExpression], ref: ScReferenceExpression): Evaluator =
  qualifier match {
    case Some(qualExpr) => evaluatorFor(qualExpr)
    case None => thisOrImportedQualifierEvaluator(ref)
  }
/**
 * Evaluator for a value bound by a pattern inside a case clause.
 *
 * In a `match` statement the value is re-derived by replaying the extraction against
 * the scrutinee; inside an anonymous partial function it is derived from the
 * function's first argument. A plain local-variable lookup is always tried first
 * (duplex), since the compiler often stores the binding in a local.
 */
def patternEvaluator(caseCl: ScCaseClause, namedElement: PsiNamedElement): Evaluator = {
  val name = namedElement.name
  if (caseCl.getParent != null) {
    val pattern = caseCl.pattern
    if (pattern.isEmpty) throw EvaluationException(ScalaBundle.message("cannot.find.pattern"))
    caseCl.getParent.getParent match {
      case matchStmt: ScMatchStmt if namedElement.isInstanceOf[ScPattern] =>
        val expr = matchStmt.expr
        if (expr.isEmpty) throw EvaluationException(ScalaBundle.message("cannot.find.expression.of.match"))
        val exprEval = evaluatorFor(expr.get)
        val fromPatternEvaluator = evaluateSubpatternFromPattern(exprEval, pattern.get, namedElement.asInstanceOf[ScPattern])
        new ScalaDuplexEvaluator(new ScalaLocalVariableEvaluator(name, fileName), fromPatternEvaluator)
      case block: ScBlockExpr => //it is anonymous function
        // The scrutinee is the anonymous function's first argument of apply().
        val argEvaluator = new ScalaLocalVariableEvaluator("", fileName)
        argEvaluator.setMethodName("apply")
        argEvaluator.setParameterIndex(0)
        val fromPatternEvaluator = evaluateSubpatternFromPattern(argEvaluator, pattern.get, namedElement.asInstanceOf[ScPattern])
        new ScalaDuplexEvaluator(new ScalaLocalVariableEvaluator(name, fileName), fromPatternEvaluator)
      case _ => new ScalaLocalVariableEvaluator(name, fileName)
    }
  } else throw EvaluationException(ScalaBundle.message("invalid.case.clause"))
}
/**
 * Evaluator for an assignment statement.
 *
 * Named arguments evaluate to their right-hand side; `coll(i) = x` is desugared to
 * `coll.update(i, x)`; assignment to a setter-backed member rewrites the resolved
 * method evaluator to call `name_$eq`; otherwise a plain assignment is performed.
 */
def assignmentEvaluator(stmt: ScAssignStmt): Evaluator = {
  val message = ScalaBundle.message("assignent.without.expression")
  if (stmt.isNamedParameter) {
    stmt.getRExpression match {
      case Some(expr) => evaluatorFor(expr)
      case _ => throw EvaluationException(message)
    }
  } else {
    stmt.getLExpression match {
      case call: ScMethodCall =>
        // `a(args) = rhs` desugars to `a.update(args, rhs)`.
        val invokedText = call.getInvokedExpr.getText
        val rExprText = stmt.getRExpression.fold("null")(_.getText)
        val args = (call.args.exprs.map(_.getText) :+ rExprText).mkString("(", ", ", ")")
        val exprText = s"($invokedText).update$args"
        val expr = ScalaPsiElementFactory.createExpressionWithContextFromText(exprText, stmt.getContext, stmt)
        evaluatorFor(expr)
      case _ =>
        val leftEvaluator = evaluatorFor(stmt.getLExpression)
        val rightEvaluator = stmt.getRExpression match {
          case Some(expr) => evaluatorFor(expr)
          case _ => throw EvaluationException(message)
        }
        // A "var" member is assigned via the compiler-generated "name_$eq" setter;
        // rewrite the getter-style method evaluator accordingly.
        def createAssignEvaluator(leftEvaluator: Evaluator): Option[Evaluator] = {
          leftEvaluator match {
            case m: ScalaMethodEvaluator =>
              Some(m.copy(_methodName = m.methodName + "_$eq", argumentEvaluators = Seq(rightEvaluator))) //todo: signature?
            case ScalaDuplexEvaluator(first, second) =>
              createAssignEvaluator(first) orElse createAssignEvaluator(second)
            case _ => None
          }
        }
        createAssignEvaluator(leftEvaluator).getOrElse(new AssignmentEvaluator(leftEvaluator, rightEvaluator))
    }
  }
}
/**
 * Derives an evaluator for `subPattern`'s value by replaying the pattern extraction
 * steps from `pattern` (whose matched value is produced by `exprEval`) down to the
 * sub-pattern: tuple field access, `unapply`/`unapplySeq` calls, etc.
 */
def evaluateSubpatternFromPattern(exprEval: Evaluator, pattern: ScPattern, subPattern: ScPattern): Evaluator = {
  // Handles constructor patterns `Extractor(...)` and infix patterns `a Extractor b`
  // by invoking the resolved unapply/unapplySeq and drilling into the result.
  def evaluateConstructorOrInfix(exprEval: Evaluator, ref: ScStableCodeReferenceElement, pattern: ScPattern, nextPatternIndex: Int): Evaluator = {
    ref.resolve() match {
      case fun: ScFunctionDefinition =>
        val elem = ref.bind().get.getActualElement //object or case class
        val qual = ref.qualifier.map(q => ScalaPsiElementFactory.createExpressionWithContextFromText(q.getText, q.getContext, q))
        val refExpr = ScalaPsiElementFactory.createExpressionWithContextFromText(ref.getText, ref.getContext, ref)
        val refEvaluator = evaluatorForReferenceWithoutParameters(qual, elem, refExpr.asInstanceOf[ScReferenceExpression])
        val funName = fun.name
        val newEval =
          if (funName == "unapply") {
            val extractEval = new ScalaMethodEvaluator(refEvaluator, funName, DebuggerUtil.getFunctionJVMSignature(fun), Seq(exprEval))
            // One sub-pattern: Option.get is the value; several: get yields a tuple,
            // then the matching "_<i>" field is read.
            if (pattern.subpatterns.length == 1)
              new ScalaMethodEvaluator(extractEval, "get", null, Seq.empty)
            else if (pattern.subpatterns.length > 1) {
              val getEval = new ScalaMethodEvaluator(extractEval, "get", null, Seq.empty)
              new ScalaFieldEvaluator(getEval, s"_${nextPatternIndex + 1}")
            }
            else throw EvaluationException(ScalaBundle.message("unapply.without.arguments"))
          } else if (funName == "unapplySeq") {
            // unapplySeq yields Option[Seq[_]]; index into the sequence.
            val extractEval = new ScalaMethodEvaluator(refEvaluator, funName, DebuggerUtil.getFunctionJVMSignature(fun), Seq(exprEval))
            val getEval = new ScalaMethodEvaluator(extractEval, "get", null, Seq.empty)
            val indexExpr = ScalaPsiElementFactory.createExpressionFromText("" + nextPatternIndex, pattern.getManager)
            val indexEval = evaluatorFor(indexExpr)
            new ScalaMethodEvaluator(getEval, "apply", null, Seq(indexEval))
          } else throw EvaluationException(ScalaBundle.message("pattern.doesnot.resolves.to.unapply", ref.refName))
        val nextPattern = pattern.subpatterns(nextPatternIndex)
        evaluateSubpatternFromPattern(newEval, nextPattern, subPattern)
      case _ => throw EvaluationException(ScalaBundle.message("pattern.doesnot.resolves.to.unapply", ref.refName))
    }
  }
  if (pattern == null || subPattern == null)
    throw new IllegalArgumentException("Patterns should not be null")
  // Index of the direct child of `pattern` on the path towards `subPattern`.
  val nextPatternIndex: Int = pattern.subpatterns.indexWhere(next => next == subPattern || subPattern.parents.contains(next))
  if (pattern == subPattern) exprEval
  else if (nextPatternIndex < 0) throw new IllegalArgumentException("Pattern is not ancestor of subpattern")
  else {
    pattern match {
      case naming: ScNamingPattern => evaluateSubpatternFromPattern(exprEval, naming.named, subPattern)
      case typed: ScTypedPattern => evaluateSubpatternFromPattern(exprEval, pattern.subpatterns.head, subPattern)
      case par: ScParenthesisedPattern =>
        val withoutPars = par.subpattern.getOrElse(throw new IllegalStateException("Empty parentheses pattern"))
        evaluateSubpatternFromPattern(exprEval, withoutPars, subPattern)
      case tuple: ScTuplePattern =>
        val nextPattern = tuple.subpatterns(nextPatternIndex)
        val newEval = new ScalaFieldEvaluator(exprEval, s"_${nextPatternIndex + 1}")
        evaluateSubpatternFromPattern(newEval, nextPattern, subPattern)
      case constr: ScConstructorPattern =>
        val ref: ScStableCodeReferenceElement = constr.ref
        evaluateConstructorOrInfix(exprEval, ref, constr, nextPatternIndex)
      case infix: ScInfixPattern =>
        val ref: ScStableCodeReferenceElement = infix.reference
        evaluateConstructorOrInfix(exprEval, ref, infix, nextPatternIndex)
      //todo: handle infix with tuple right pattern
      case _: ScCompositePattern => throw EvaluationException(ScalaBundle.message("pattern.alternatives.cannot.bind.vars"))
      case _: ScXmlPattern => throw EvaluationException(ScalaBundle.message("xml.patterns.not.supported")) //todo: xml patterns
      case _ => throw EvaluationException(ScalaBundle.message("kind.of.patterns.not.supported", pattern.getText)) //todo: xml patterns
    }
  }
}
/**
 * Evaluator for a `new` expression.
 *
 * Anonymous classes are rejected (need compilation). `new Array[...](...)` is
 * rewritten to `Array.ofDim`; for other classes the JVM `<init>` constructor is
 * invoked with the evaluated constructor arguments.
 */
def newTemplateDefinitionEvaluator(templ: ScNewTemplateDefinition): Evaluator = {
  templ.extendsBlock.templateParents match {
    case Some(parents: ScClassParents) =>
      if (parents.typeElements.length != 1) {
        throw new NeedCompilationException(ScalaBundle.message("anon.classes.not.supported"))
      }
      parents.constructor match {
        case Some(constr) =>
          val tp = constr.typeElement.calcType
          ScType.extractClass(tp, Some(templ.getProject)) match {
            case Some(clazz) if clazz.qualifiedName == "scala.Array" =>
              // Array has no ordinary constructor at JVM level; use Array.ofDim.
              val typeArgs = constr.typeArgList.fold("")(_.getText)
              val args = constr.args.fold("(0)")(_.getText)
              val exprText = s"_root_.scala.Array.ofDim$typeArgs$args"
              val expr = ScalaPsiElementFactory.createExpressionWithContextFromText(exprText, templ.getContext, templ)
              evaluatorFor(expr)
            case Some(clazz) =>
              val jvmName = DebuggerUtil.getClassJVMName(clazz)
              val typeEvaluator = new TypeEvaluator(jvmName)
              val argumentEvaluators = constructorArgumentsEvaluators(templ, constr, clazz)
              constr.reference.map(_.resolve()) match {
                case Some(named: PsiNamedElement) =>
                  val signature = DebuggerUtil.constructorSignature(named)
                  new ScalaMethodEvaluator(typeEvaluator, "<init>", signature, argumentEvaluators)
                case _ =>
                  new ScalaMethodEvaluator(typeEvaluator, "<init>", null, argumentEvaluators)
              }
            case _ => throw EvaluationException(ScalaBundle.message("new.expression.without.class.reference"))
          }
        case None => throw EvaluationException(ScalaBundle.message("new.expression.without.constructor.call"))
      }
    case _ => throw EvaluationException(ScalaBundle.message("new.expression.without.template.parents"))
  }
}
/**
 * Builds the full JVM argument list for a constructor call: explicit arguments
 * (boxed where the parameter type is non-primitive), then — for Scala classes —
 * implicit arguments, a possible outer-instance reference, and captured locals.
 */
def constructorArgumentsEvaluators(newTd: ScNewTemplateDefinition,
                                   constr: ScConstructor,
                                   clazz: PsiClass): Seq[Evaluator] = {
  val constrDef = constr.reference match {
    case Some(ResolvesTo(elem)) => elem
    case _ => throw EvaluationException(ScalaBundle.message("could.not.resolve.constructor"))
  }
  val explicitArgs = constr.arguments.flatMap(_.exprs)
  val explEvaluators =
    for {
      arg <- explicitArgs
    } yield {
      val eval = evaluatorFor(arg)
      val param = ScalaPsiUtil.parameterOf(arg).flatMap(_.psiParam)
      if (param.exists(!isOfPrimitiveType(_))) boxEvaluator(eval)
      else eval
    }
  constrDef match {
    case scMethod: ScMethodLike =>
      val scClass = scMethod.containingClass.asInstanceOf[ScClass]
      val contextClass = getContextClass(scClass)
      val implicitParams = scMethod.parameterList.params.filter(_.isImplicitParameter)
      val implicitsEvals =
        for {
          typeElem <- constr.simpleTypeElement.toSeq
          p <- implicitParams
        } yield {
          val eval = implicitArgEvaluator(scMethod, p, typeElem)
          if (isOfPrimitiveType(p)) eval
          else boxEvaluator(eval)
        }
      // Inner classes take the enclosing instance as a hidden first argument
      // (not needed for stable objects or top-level classes).
      val outerThis = contextClass match {
        case obj: ScObject if isStable(obj) => None
        case null => None
        case _ => Some(new ScalaThisEvaluator())
      }
      val locals = DebuggerUtil.localParamsForConstructor(scClass)
      outerThis ++: explEvaluators ++: implicitsEvals ++: locals.map(fromLocalArgEvaluator)
    case _ => explEvaluators
  }
}
/**
 * Evaluator for a local value captured by a constructor. Local objects are passed
 * wrapped into a `scala.runtime.VolatileObjectRef`; everything else is passed as is.
 */
def fromLocalArgEvaluator(local: ScTypedDefinition): Evaluator = {
  val localName = local.asInstanceOf[PsiNamedElement].name
  val anchor = position.getElementAt
  val localRef = ScalaPsiElementFactory.createExpressionWithContextFromText(localName, anchor, anchor)
  val baseEval = evaluatorFor(localRef)
  local match {
    case _: ScObject =>
      val refTypeEval = new TypeEvaluator(JVMNameUtil.getJVMRawText("scala.runtime.VolatileObjectRef"))
      val ctorSignature = JVMNameUtil.getJVMRawText("(Ljava/lang/Object;)V")
      new ScalaNewClassInstanceEvaluator(refTypeEval, ctorSignature, Array(baseEval))
    case _ =>
      baseEval
  }
}
/** Parses `string` as an expression in the given context and builds its evaluator. */
def expressionFromTextEvaluator(string: String, context: PsiElement): Evaluator =
  evaluatorFor(ScalaPsiElementFactory.createExpressionWithContextFromText(string, context.getContext, context))
/**
 * Evaluator for a local lazy val: invokes the compiler-generated accessor method,
 * passing the value holder and the initialization bitmap as arguments.
 */
def localLazyValEvaluator(named: PsiNamedElement): Evaluator = {
  val baseName = named.name
  // The computed value is cached in a "<name>$lzy" local holder.
  val holderEval = new ScalaLocalVariableEvaluator(baseName + "$lzy", fileName)
  // Initialization flags are packed 8 per "bitmap$<n>" local.
  val bitmapIndex = lazyValIndex(named) / 8
  val bitmapEval = new ScalaLocalVariableEvaluator("bitmap$" + bitmapIndex, fileName)
  // The accessor is named "<name>$<localFunctionIndex>".
  val accessorName = baseName + "$" + localFunctionIndex(named)
  new ScalaMethodEvaluator(new ScalaThisEvaluator(), accessorName, null, Seq(holderEval, bitmapEval))
}
/** Evaluator for an `if` expression; both condition and then-branch are mandatory. */
def ifStmtEvaluator(stmt: ScIfStmt): Evaluator = {
  val condEvaluator = stmt.condition.map(evaluatorFor(_)).getOrElse(
    throw EvaluationException(ScalaBundle.message("if.statement.without.condition")))
  val thenEvaluator = stmt.thenBranch.map(evaluatorFor(_)).getOrElse(
    throw EvaluationException(ScalaBundle.message("if.statement.without.if.branch")))
  val elseEvaluator = stmt.elseBranch.map(evaluatorFor(_))
  new ScalaIfEvaluator(condEvaluator, thenEvaluator, elseEvaluator)
}
/**
 * Evaluator for literals: interpolated strings are desugared into their
 * `StringContext` call, symbol literals into `Symbol("name")`; everything else
 * evaluates as a plain literal value.
 */
def literalEvaluator(l: ScLiteral): Evaluator = l match {
  case interpolated: ScInterpolatedStringLiteral =>
    interpolated.getStringContextExpression match {
      case Some(desugared) => evaluatorFor(desugared)
      case None => ScalaLiteralEvaluator(l)
    }
  case symbolLit if symbolLit.isSymbol =>
    val symbolName = symbolLit.getValue.asInstanceOf[Symbol].name
    val factoryCall = ScalaPsiElementFactory.createExpressionFromText(s"""Symbol("$symbolName")""", l.getContext)
    evaluatorFor(factoryCall)
  case _ =>
    ScalaLiteralEvaluator(l)
}
/** Evaluator for a `while` loop; both condition and body are mandatory. */
def whileStmtEvaluator(ws: ScWhileStmt): Evaluator = {
  val condEvaluator = ws.condition.map(evaluatorFor(_)).getOrElse(
    throw EvaluationException(ScalaBundle.message("while.statement.without.condition")))
  val bodyEvaluator = ws.body.map(evaluatorFor(_)).getOrElse(
    throw EvaluationException(ScalaBundle.message("while.statement.without.body")))
  new WhileStatementEvaluator(condEvaluator, bodyEvaluator, null)
}
/** Evaluator for a `do ... while` loop; both condition and body are mandatory. */
def doStmtEvaluator(doSt: ScDoStmt): Evaluator = {
  val condEvaluator = doSt.condition.map(evaluatorFor(_)).getOrElse(
    throw EvaluationException(ScalaBundle.message("do.statement.without.condition")))
  val bodyEvaluator = doSt.getExprBody.map(evaluatorFor(_)).getOrElse(
    throw EvaluationException(ScalaBundle.message("do.statement.without.body")))
  new ScalaDoStmtEvaluator(condEvaluator, bodyEvaluator)
}
/**
 * Evaluator for a (possibly curried) method call.
 *
 * Walks the invocation chain inwards, accumulating argument lists, until a plain
 * reference is found; `apply` calls are rewritten explicitly, `update` calls are
 * rejected here (they are handled by assignment), and an implicitly converted
 * qualifier is substituted before evaluation.
 */
def scMethodCallEvaluator(methodCall: ScMethodCall): Evaluator = {
  // Rewrites `x(args)` into the explicit form `(x).apply(args)`.
  def applyCall(invokedText: String, argsText: String) = {
    val newExprText = s"($invokedText).apply$argsText"
    ScalaPsiElementFactory.createExpressionWithContextFromText(newExprText, methodCall.getContext, methodCall)
  }
  // Peels off nested ScMethodCall layers (curried calls), collecting arguments
  // and matched-parameter maps from innermost to outermost.
  @tailrec
  def collectArgumentsAndBuildEvaluator(call: ScMethodCall,
                                        collected: Seq[ScExpression] = Seq.empty,
                                        tailString: String = "",
                                        matchedParameters: Map[Parameter, Seq[ScExpression]] = Map.empty): Evaluator = {
    if (call.isApplyOrUpdateCall) {
      if (!call.isUpdateCall) {
        val expr = applyCall(call.getInvokedExpr.getText, call.args.getText + tailString)
        return evaluatorFor(expr)
      } else {
        //should be handled on assignment
        throw new NeedCompilationException("Update method is not supported")
      }
    }
    val message = ScalaBundle.message("cannot.evaluate.method", call.getText)
    call.getInvokedExpr match {
      case ref: ScReferenceExpression =>
        methodCallEvaluator(methodCall, call.argumentExpressions ++ collected, matchedParameters ++ call.matchedParametersMap)
      case newCall: ScMethodCall =>
        collectArgumentsAndBuildEvaluator(newCall, call.argumentExpressions ++ collected, call.args.getText + tailString,
          matchedParameters ++ call.matchedParametersMap)
      case gen: ScGenericCall =>
        gen.referencedExpr match {
          case ref: ScReferenceExpression if ref.resolve().isInstanceOf[PsiMethod] =>
            methodCallEvaluator(methodCall, call.argumentExpressions ++ collected, matchedParameters ++ call.matchedParametersMap)
          case ref: ScReferenceExpression =>
            ref.getType().getOrAny match {
              //isApplyOrUpdateCall does not work for generic calls
              case ScType.ExtractClass(psiClass) if psiClass.findMethodsByName("apply", true).nonEmpty =>
                val typeArgsText = gen.typeArgs.fold("")(_.getText)
                val expr = applyCall(ref.getText, s"$typeArgsText${call.args.getText}$tailString")
                evaluatorFor(expr)
              case _ => throw EvaluationException(message)
            }
          case _ =>
            throw EvaluationException(message)
        }
      case _ => throw EvaluationException(message)
    }
  }
  methodCall match {
    case hasDeepestInvokedReference(ScReferenceExpression.withQualifier(implicitlyConvertedTo(expr))) =>
      // Substitute the implicitly converted qualifier into a copy, then evaluate that.
      val copy = methodCall.copy().asInstanceOf[ScMethodCall]
      copy match {
        case hasDeepestInvokedReference(ScReferenceExpression.withQualifier(q)) =>
          q.replaceExpression(expr, removeParenthesis = false)
          evaluatorFor(copy)
        case _ =>
          val message = ScalaBundle.message("method.call.implicitly.converted.qualifier", methodCall.getText)
          throw EvaluationException(message)
      }
    case _ =>
      //todo: handle partially applied functions
      collectArgumentsAndBuildEvaluator(methodCall)
  }
}
/**
 * Evaluator for an infix expression. Compound-assignment operators like `a += b`
 * (where `+=` resolves to the plain `+`) are desugared to `a = a + b`; any other
 * infix expression is evaluated as the equivalent method call.
 */
def infixExpressionEvaluator(infix: ScInfixExpr): Evaluator = {
  val operation = infix.operation
  // True when the operator ends with '=' but resolves to the method without it.
  def isCompoundAssignment(ref: ScReferenceExpression): Boolean =
    ref.refName.endsWith("=") && (ref.resolve() match {
      case named: PsiNamedElement => named.name + "=" == ref.refName
      case _ => false
    })
  if (isCompoundAssignment(operation)) {
    val baseText = infix.getBaseExpr.getText
    val opText = operation.refName.dropRight(1)
    val argText = infix.getArgExpr.getText
    val desugaredText = s"$baseText = $baseText $opText $argText"
    val desugared = ScalaPsiElementFactory.createExpressionWithContextFromText(desugaredText, infix.getContext, infix)
    evaluatorFor(desugared)
  } else {
    evaluatorFor(ScalaPsiElementFactory.createEquivMethodCall(infix))
  }
}
/**
 * Evaluator for a block: evaluates its statements in order (imports are skipped)
 * inside a fresh synthetic-variables scope.
 */
def blockExprEvaluator(block: ScBlock): Evaluator = {
  withNewSyntheticVariablesHolder {
    val statementEvaluators = for {
      stmt <- block.statements
      if !stmt.isInstanceOf[ScImportStmt]
    } yield evaluatorFor(stmt)
    new ScalaBlockExpressionEvaluator(statementEvaluators.toSeq)
  }
}
/** A postfix expression `a op` is evaluated as the equivalent qualified reference `a.op`. */
def postfixExprEvaluator(p: ScPostfixExpr): Evaluator =
  evaluatorFor(ScalaPsiElementFactory.createEquivQualifiedReference(p))
/** A prefix expression `op x` is evaluated as the method call `(x).unary_op`. */
def prefixExprEvaluator(p: ScPrefixExpr): Evaluator = {
  val unaryCallText = s"(${p.operand.getText}).unary_${p.operation.refName}"
  val unaryCall = ScalaPsiElementFactory.createExpressionWithContextFromText(unaryCallText, p.getContext, p)
  evaluatorFor(unaryCall)
}
/**
 * Evaluator for a plain reference expression. If the qualifier is implicitly
 * converted, the converted expression is substituted into a copy first; otherwise
 * the reference is resolved and evaluated without parameters.
 */
def refExpressionEvaluator(ref: ScReferenceExpression): Evaluator =
  ref.qualifier match {
    case Some(implicitlyConvertedTo(converted)) =>
      val refCopy = ref.copy().asInstanceOf[ScReferenceExpression]
      refCopy.qualifier.get.replaceExpression(converted, removeParenthesis = false)
      evaluatorFor(refCopy)
    case qualifier =>
      val resolved: PsiElement = ref.resolve()
      evaluatorForReferenceWithoutParameters(qualifier, resolved, ref)
  }
/** A tuple literal is evaluated as a call to the corresponding `TupleN` factory. */
def tupleEvaluator(tuple: ScTuple): Evaluator = {
  val arity = tuple.exprs.length
  val argsText = tuple.exprs.map(_.getText).mkString("(", ", ", ")")
  val factoryCall = ScalaPsiElementFactory.createExpressionWithContextFromText(
    s"_root_.scala.Tuple$arity$argsText", tuple.getContext, tuple)
  evaluatorFor(factoryCall)
}
/**
 * Evaluator for a val/var definition: evaluates the right-hand side once (cached),
 * then assigns each bound name a synthetic variable holding the value extracted by
 * replaying the pattern.
 */
def valOrVarDefinitionEvaluator(pList: ScPatternList, expr: ScExpression) = {
  val cachedRhs = new ScalaCachingEvaluator(evaluatorFor(expr))
  val assignments =
    for {
      pattern <- pList.patterns
      binding <- pattern.bindings
    } yield {
      val bindingName = binding.name
      // Register the synthetic variable before building its evaluator.
      createSyntheticVariable(bindingName)
      new AssignmentEvaluator(
        syntheticVariableEvaluator(bindingName),
        evaluateSubpatternFromPattern(cachedRhs, pattern, binding))
    }
  new ScalaBlockExpressionEvaluator(cachedRhs +: assignments)
}
/** Evaluator for a `var` definition; requires a right-hand side. */
def variableDefinitionEvaluator(vd: ScVariableDefinition): Evaluator = {
  val rhs = vd.expr.getOrElse(
    throw EvaluationException(s"Variable definition needs right hand side: ${vd.getText}"))
  valOrVarDefinitionEvaluator(vd.pList, rhs)
}
/** Evaluator for a `val` definition; requires a right-hand side. */
def patternDefinitionEvaluator(pd: ScPatternDefinition): Evaluator = {
  val rhs = pd.expr.getOrElse(
    throw EvaluationException(s"Value definition needs right hand side: ${pd.getText}"))
  valOrVarDefinitionEvaluator(pd.pList, rhs)
}
/**
 * Adjusts an expression evaluator for boxing/unboxing according to the expected
 * type, and wraps the result into a value-class instance where the expression's
 * type is a value class.
 */
def postProcessExpressionEvaluator(expr: ScExpression, evaluator: Evaluator): Evaluator = {
  //boxing and unboxing actions
  def unbox(typeTo: String) = unaryEvaluator(unboxEvaluator(evaluator), typeTo)
  def box() = boxEvaluator(evaluator)
  def valueClassInstance(eval: Evaluator) = {
    expr match {
      case _: ScNewTemplateDefinition => eval
      case ExpressionType(_: ValType) => eval
      case ExpressionType(tp @ ValueClassType(inner)) =>
        valueClassInstanceEvaluator(eval, inner, tp)
      case _ => eval
    }
  }
  import org.jetbrains.plugins.scala.lang.psi.types._
  // Primitive expected types are unboxed via BoxesRunTime helpers; Unit is replaced
  // by the BoxedUnit singleton; reference types are boxed; no expectation leaves it as is.
  val unboxed = expr.smartExpectedType() match {
    case Some(Int) => unbox("toInteger")
    case Some(Byte) => unbox("toByte")
    case Some(Long) => unbox("toLong")
    case Some(Boolean) => unboxEvaluator(evaluator)
    case Some(Float) => unbox("toFloat")
    case Some(Short) => unbox("toShort")
    case Some(Double) => unbox("toDouble")
    case Some(Char) => unbox("toCharacter")
    case Some(Unit) => new BlockStatementEvaluator(Array(evaluator, unitEvaluator()))
    case None => evaluator
    case _ => box()
  }
  valueClassInstance(unboxed)
}
}
object ScalaEvaluatorBuilderUtil {
// JVM helper class used for all primitive (un)boxing and primitive-op calls below.
private val BOXES_RUN_TIME = new TypeEvaluator(JVMNameUtil.getJVMRawText("scala.runtime.BoxesRunTime"))
// Holder of the UNIT singleton representing the boxed scala.Unit value.
private val BOXED_UNIT = new TypeEvaluator(JVMNameUtil.getJVMRawText("scala.runtime.BoxedUnit"))
/** Wraps `eval` so its primitive result is boxed to the corresponding reference type. */
def boxEvaluator(eval: Evaluator): Evaluator = new ScalaBoxingEvaluator(eval)
/** Boxes every evaluator in the argument list. */
def boxed(evaluators: Evaluator*): Seq[Evaluator] =
  for (e <- evaluators) yield boxEvaluator(e)
/** Wraps `eval` so a boxed result is unboxed back to its primitive value. */
def unboxEvaluator(eval: Evaluator): Evaluator = new UnBoxingEvaluator(eval)
/** Logical negation via `BoxesRunTime.takeNot`, with boxing in and unboxing out. */
def notEvaluator(eval: Evaluator): Evaluator = {
  val signature = JVMNameUtil.getJVMRawText("(Ljava/lang/Object;)Ljava/lang/Object;")
  val negated = new ScalaMethodEvaluator(BOXES_RUN_TIME, "takeNot", signature, boxed(eval))
  unboxEvaluator(negated)
}
/** Equality comparison of two evaluated values. */
def eqEvaluator(left: Evaluator, right: Evaluator): Evaluator =
  new ScalaEqEvaluator(left, right)
/** Inequality: the negation of [[eqEvaluator]]. */
def neEvaluator(left: Evaluator, right: Evaluator): Evaluator =
  notEvaluator(eqEvaluator(left, right))
/** The boxed Unit value, read from BoxedUnit.UNIT. */
def unitEvaluator(): Evaluator =
  new ScalaFieldEvaluator(BOXED_UNIT, "UNIT")
/** Applies the named one-argument BoxesRunTime helper, boxing in and unboxing out. */
def unaryEvaluator(eval: Evaluator, boxesRunTimeName: String): Evaluator = {
  val signature = JVMNameUtil.getJVMRawText("(Ljava/lang/Object;)Ljava/lang/Object;")
  val helperCall = new ScalaMethodEvaluator(BOXES_RUN_TIME, boxesRunTimeName, signature, boxed(eval))
  unboxEvaluator(helperCall)
}
/** Applies the named two-argument BoxesRunTime helper, boxing in and unboxing out. */
def binaryEvaluator(left: Evaluator, right: Evaluator, boxesRunTimeName: String): Evaluator = {
  val signature = JVMNameUtil.getJVMRawText("(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;")
  val helperCall = new ScalaMethodEvaluator(BOXES_RUN_TIME, boxesRunTimeName, signature, boxed(left, right))
  unboxEvaluator(helperCall)
}
/** Extractor for the innermost invoked reference of a (possibly nested/generic) call chain. */
object hasDeepestInvokedReference {
  @tailrec
  final def unapply(current: ScExpression): Option[ScReferenceExpression] = current match {
    case methodCall: ScMethodCall => unapply(methodCall.deepestInvokedExpr)
    case genericCall: ScGenericCall => unapply(genericCall.referencedExpr)
    case reference: ScReferenceExpression => Some(reference)
    case _ => None
  }
}
/**
 * Text of a `ClassTag` expression for the given type, used when a synthesized
 * expression needs an implicit ClassTag argument. Unrecognized types fall back to
 * a ClassTag for java.lang.Object.
 */
def classTagText(arg: ScType): String = {
  import org.jetbrains.plugins.scala.lang.psi.types._
  arg match {
    case Short => "_root_.scala.reflect.ClassTag.Short"
    case Byte => "_root_.scala.reflect.ClassTag.Byte"
    case Char => "_root_.scala.reflect.ClassTag.Char"
    case Int => "_root_.scala.reflect.ClassTag.Int"
    case Long => "_root_.scala.reflect.ClassTag.Long"
    case Float => "_root_.scala.reflect.ClassTag.Float"
    case Double => "_root_.scala.reflect.ClassTag.Double"
    case Boolean => "_root_.scala.reflect.ClassTag.Boolean"
    case Unit => "_root_.scala.reflect.ClassTag.Unit"
    case Any => "_root_.scala.reflect.ClassTag.Any"
    case AnyVal => "_root_.scala.reflect.ClassTag.AnyVal"
    case Nothing => "_root_.scala.reflect.ClassTag.Nothing"
    case Null => "_root_.scala.reflect.ClassTag.Null"
    case Singleton => "_root_.scala.reflect.ClassTag.Object"
    //todo:
    case _ => "_root_.scala.reflect.ClassTag.apply(classOf[_root_.java.lang.Object])"
  }
}
/**
 * Text of a `ClassManifest` expression for the given type (legacy counterpart of
 * [[classTagText]]). Array types recurse on the element type; other class types use
 * `classType(classOf[...])`, falling back to java.lang.Object.
 */
def classManifestText(scType: ScType): String = {
  import org.jetbrains.plugins.scala.lang.psi.types._
  scType match {
    case Short => "_root_.scala.reflect.ClassManifest.Short"
    case Byte => "_root_.scala.reflect.ClassManifest.Byte"
    case Char => "_root_.scala.reflect.ClassManifest.Char"
    case Int => "_root_.scala.reflect.ClassManifest.Int"
    case Long => "_root_.scala.reflect.ClassManifest.Long"
    case Float => "_root_.scala.reflect.ClassManifest.Float"
    case Double => "_root_.scala.reflect.ClassManifest.Double"
    case Boolean => "_root_.scala.reflect.ClassManifest.Boolean"
    case Unit => "_root_.scala.reflect.ClassManifest.Unit"
    case Any => "_root_.scala.reflect.ClassManifest.Any"
    case AnyVal => "_root_.scala.reflect.ClassManifest.AnyVal"
    case Nothing => "_root_.scala.reflect.ClassManifest.Nothing"
    case Null => "_root_.scala.reflect.ClassManifest.Null"
    case Singleton => "_root_.scala.reflect.ClassManifest.Object"
    case JavaArrayType(arg) =>
      "_root_.scala.reflect.ClassManifest.arrayType(" + classManifestText(arg) + ")"
    case ScParameterizedType(ScDesignatorType(clazz: ScClass), Seq(arg))
      if clazz.qualifiedName == "scala.Array" =>
      "_root_.scala.reflect.ClassManifest.arrayType(" + classManifestText(arg) + ")"
    /*case ScParameterizedType(des, args) =>
      ScType.extractClass(des, Option(expr.getProject)) match {
        case Some(clazz) =>
          "_root_.scala.reflect.ClassManifest.classType(" +
        case _ => "null"
      }*/ //todo:
    case _ => ScType.extractClass(scType) match {
      case Some(clss) => "_root_.scala.reflect.ClassManifest.classType(classOf[_root_." +
        clss.qualifiedName + "])"
      case _ => "_root_.scala.reflect.ClassManifest.classType(classOf[_root_.java.lang." +
        "Object])"
    }
  }
}
  // True if a (Scala or Java) parameter is declared with a primitive type.
  def isOfPrimitiveType(param: PsiParameter) = param match { //todo specialized type parameters
    // Scala parameter: inspect the declared ScType
    case p: ScParameter =>
      val tp: ScType = p.getType(TypingContext.empty).getOrAny
      isPrimitiveScType(tp)
    // Java parameter: compare against the eight primitive PsiTypes
    case p: PsiParameter =>
      val tp = param.getType
      import com.intellij.psi.PsiType._
      Set[PsiType](BOOLEAN, INT, CHAR, DOUBLE, FLOAT, LONG, BYTE, SHORT).contains(tp)
    case _ => false
  }
  // True for the eight primitive Scala value types (note: Unit is excluded here).
  def isPrimitiveScType(tp: ScType) = {
    import org.jetbrains.plugins.scala.lang.psi.types._
    Set[ScType](Boolean, Int, Char, Double, Float, Long, Byte, Short).contains(tp)
  }
  // Extractor that, for an expression implicitly converted via an implicit
  // function, rebuilds the conversion as an explicit call expression so the
  // evaluator can work with plain source text.
  object implicitlyConvertedTo {
    def unapply(expr: ScExpression): Option[ScExpression] = {
      val implicits = expr.getImplicitConversions(fromUnder = true)
      implicits._2 match {
        case Some(fun: ScFunction) =>
          val exprText = expr.getText
          val callText = s"${fun.name}($exprText)"
          // qualify the call when the conversion lives in a stable (top-level chain of) object
          val newExprText = fun.containingClass match {
            case o: ScObject if isStable(o) => s"${o.qualifiedName}.$callText"
            case o: ScObject => //todo: It can cover many cases!
              throw EvaluationException(ScalaBundle.message("implicit.conversions.from.dependent.objects"))
            case _ => callText //from scope
          }
          Some(ScalaPsiElementFactory.createExpressionWithContextFromText(newExprText, expr.getContext, expr))
        case _ => None
      }
    }
  }
  // An object is "stable" when every enclosing context is itself an object
  // (so it is addressable via a fully qualified name).
  @tailrec
  final def isStable(o: ScObject): Boolean = {
    val context = PsiTreeUtil.getParentOfType(o, classOf[ScTemplateDefinition], classOf[ScExpression])
    if (context == null) return true
    context match {
      case o: ScObject => isStable(o)
      case _ => false
    }
  }
  // Closest enclosing context that generates a JVM class; with `strict = false`,
  // `elem` itself is returned when it already generates a class. May return null.
  def getContextClass(elem: PsiElement, strict: Boolean = true): PsiElement = {
    if (!strict && isGenerateClass(elem)) elem
    else elem.contexts.find(isGenerateClass).orNull
  }
  // Whether `elem` corresponds to a separate JVM class in the compiled output.
  // Files compiled with indy lambdas emit fewer anonfun classes, so the check differs.
  def isGenerateClass(elem: PsiElement): Boolean = {
    if (ScalaPositionManager.isCompiledWithIndyLambdas(elem.getContainingFile))
      isGenerateNonAnonfunClass(elem) || isAnonfunInsideSuperCall(elem)
    else isGenerateNonAnonfunClass(elem) || isGenerateAnonfun(elem)
  }
  // PsiClasses generate classes, except `new` expressions that do not produce an
  // anonymous class (a ScNewTemplateDefinition that just instantiates).
  def isGenerateNonAnonfunClass(elem: PsiElement): Boolean = {
    elem match {
      case newTd: ScNewTemplateDefinition if !DebuggerUtil.generatesAnonClass(newTd) => false
      case clazz: PsiClass => true
      case _ => false
    }
  }
def isAnonfunInsideSuperCall(elem: PsiElement) = {
def isInsideSuperCall(td: ScTypeDefinition) = {
val extBlock = td.extendsBlock
PsiTreeUtil.getParentOfType(elem, classOf[ScEarlyDefinitions], classOf[ScConstructor]) match {
case ed: ScEarlyDefinitions if ed.getParent == extBlock => true
case c: ScConstructor if c.getParent.getParent == extBlock => true
case _ => false
}
}
val containingClass = PsiTreeUtil.getParentOfType(elem, classOf[ScTypeDefinition])
isGenerateAnonfun(elem) && isInsideSuperCall(containingClass)
}
  // Whether `elem` compiles to an anonymous-function class. Cheap syntactic
  // checks run first; the more expensive resolve-based checks are cached on the element.
  def isGenerateAnonfun(elem: PsiElement): Boolean = {
    def isGenerateAnonfunWithCache: Boolean = {
      def computation = elem match {
        // underscore sections like `_ + 1`
        case e: ScExpression if ScUnderScoreSectionUtil.underscores(e).nonEmpty => true
        // by-name arguments and arguments of function type become thunks/functions
        case e: ScExpression if ScalaPsiUtil.isByNameArgument(e) || ScalaPsiUtil.isArgumentOfFunctionType(e) => true
        // eta-expanded method values, e.g. `foo _`
        case ScalaPsiUtil.MethodValue(_) => true
        case _ => false
      }
      def cacheProvider = new CachedValueProvider[Boolean] {
        override def compute(): Result[Boolean] = Result.create(computation, elem)
      }
      if (elem == null) false
      else CachedValuesManager.getCachedValue(elem, cacheProvider)
    }
    def isGenerateAnonfunSimple: Boolean = {
      elem match {
        case f: ScFunctionExpr => true
        // for-comprehension desugaring produces functions for body, guards and
        // all generators except the first one
        case (_: ScExpression) childOf (_: ScForStatement) => true
        case (cc: ScCaseClauses) childOf (b: ScBlockExpr) if b.isAnonymousFunction => true
        case (g: ScGuard) childOf (_: ScEnumerators) => true
        case (g: ScGenerator) childOf (enums: ScEnumerators) if !enums.generators.headOption.contains(g) => true
        case e: ScEnumerator => true
        case _ => false
      }
    }
    isGenerateAnonfunSimple || isGenerateAnonfunWithCache
  }
  // How many anonymous-function classes the compiler generates "before" `elem`
  // in its desugared context (used to compute anonfun class-name indices).
  def anonClassCount(elem: PsiElement): Int = { //todo: non irrefutable patterns?
    elem match {
      // a for-comprehension body produces one function per generator
      case (e: ScExpression) childOf (f: ScForStatement) =>
        f.enumerators.fold(1)(e => e.generators.length)
      // enumerator/generator/guard: count generators preceding it
      case (e @ (_: ScEnumerator | _: ScGenerator | _: ScGuard)) childOf (enums: ScEnumerators) =>
        enums.children.takeWhile(_ != e).count(_.isInstanceOf[ScGenerator])
      case _ => 1
    }
  }
  // 1-based index of a local function among same-named local functions / lazy
  // vals in its context class (matches the compiler's `name$N` suffixing).
  def localFunctionIndex(named: PsiNamedElement): Int = {
    elementsWithSameNameIndex(named, {
      case f: ScFunction if f.isLocal && f.name == named.name => true
      case Both(ScalaPsiUtil.inNameContext(LazyVal(_)), lzy: ScBindingPattern) if lzy.name == named.name => true
      case _ => false
    })
  }
  // 1-based index of a lazy val among same-named lazy vals in its context class.
  def lazyValIndex(named: PsiNamedElement): Int = {
    elementsWithSameNameIndex(named, {
      case Both(ScalaPsiUtil.inNameContext(LazyVal(_)), lzy: ScBindingPattern) if lzy.name == named.name => true
      case _ => false
    })
  }
  // Name of the synthetic method the compiler generates for a default parameter
  // value, e.g. `foo$default$2`; constructors use `$lessinit$greater$default$N()`.
  // NOTE(review): a non-ScFunction, non-constructor ScMethodLike would hit a
  // MatchError here — confirm callers never pass one.
  def defaultParameterMethodName(method: ScMethodLike, paramIndex: Int): String = {
    method match {
      case fun: ScFunction if !fun.isConstructor =>
        // local functions carry their disambiguating index suffix
        val suffix: String = if (!fun.isLocal) "" else "$" + localFunctionIndex(fun)
        fun.name + "$default$" + paramIndex + suffix
      case _ if method.isConstructor =>  "$lessinit$greater$default$" + paramIndex + "()"
    }
  }
  // 1-based position of `named` among elements matching `condition` inside its
  // context class; returns -1 when there is no context class, 0 when not found.
  def elementsWithSameNameIndex(named: PsiNamedElement, condition: PsiElement => Boolean): Int = {
    val containingClass = getContextClass(named)
    if (containingClass == null) return -1
    // traverse the class but do not descend into nested generated classes —
    // their members get indices of their own
    val depthFirstIterator = containingClass.depthFirst {
      case `containingClass` => true
      case elem if isGenerateClass(elem) => false
      case _ => true
    }
    val sameNameElements = depthFirstIterator.filter(condition).toList
    sameNameElements.indexOf(named) + 1
  }
def traitImplementation(elem: PsiElement): Option[JVMName] = {
val clazz = getContextClass(elem)
clazz match {
case t: ScTrait =>
Some(DebuggerUtil.getClassJVMName(t, withPostfix = true))
case _ => None
}
}
def isLocalFunction(fun: ScFunction): Boolean = {
!fun.getContext.isInstanceOf[ScTemplateBody]
}
  // True when `named` is bound by a for-comprehension enumerator/generator and
  // is not referenced inside the for body at `place` — such bindings produce no
  // accessible runtime value, so they cannot be evaluated.
  def isNotUsedEnumerator(named: PsiNamedElement, place: PsiElement): Boolean = {
    named match {
      case ScalaPsiUtil.inNameContext(enum @ (_: ScEnumerator | _: ScGenerator)) =>
        enum.getParent.getParent match {
          case ScForStatement(enums, body) =>
            enums.namings.map(_.pattern) match {
              case Seq(refPattern: ScReferencePattern) => return false //can always evaluate from single simple generator
              case _ =>
            }
            def insideBody = PsiTreeUtil.isAncestor(body, place, false)
            // no references to the binding anywhere within the body
            def isNotUsed = ReferencesSearch.search(named, new LocalSearchScope(body)).findFirst() == null
            insideBody && isNotUsed
          case _ => false
        }
      case _ => false
    }
  }
  // Extractor: the value class (AnyVal subclass) containing `elem`, if any.
  object isInsideValueClass {
    def unapply(elem: PsiElement): Option[ScClass] = {
      getContextClass(elem) match {
        case c: ScClass if ValueClassType.isValueClass(c) => Some(c)
        case _ => None
      }
    }
  }
  // Extractor: the closest enclosing local function of `elem`. An element that
  // is part of the function's own parameter declarations does not count as
  // being "inside" the function.
  object isInsideLocalFunction {
    def unapply(elem: PsiElement): Option[ScFunction] = {
      @tailrec
      def inner(element: PsiElement): Option[ScFunction] = {
        element match {
          case null => None
          case fun: ScFunction if isLocalFunction(fun) &&
                  !fun.parameters.exists(param => PsiTreeUtil.isAncestor(param, elem, false)) =>
            Some(fun)
          case other if other.getContext != null => inner(other.getContext)
          case _ => None
        }
      }
      inner(elem)
    }
  }
object privateTraitMethod {
def unapply(r: ScalaResolveResult): Option[(ScTrait, ScFunctionDefinition)] = {
r.getElement match {
case Both(fun: ScFunctionDefinition, ContainingClass(tr: ScTrait)) if fun.isPrivate => Some(tr, fun)
case _ => None
}
}
}
  // Extractor: a field declared `private[this]` — either a class parameter or a
  // val/var binding whose access modifier is private-this.
  object privateThisField {
    def unapply(elem: PsiElement): Option[ScNamedElement] = {
      elem match {
        case c: ScClassParameter if c.isPrivateThis => Some(c)
        case Both(bp: ScBindingPattern, ScalaPsiUtil.inNameContext(v @ (_: ScVariable | _: ScValue))) =>
          v match {
            case mo: ScModifierListOwner if mo.getModifierList.accessModifier.exists(am => am.isPrivate && am.isThis) => Some(bp)
            case _ => None
          }
        case _ => None
      }
    }
  }
}
| JetBrains/intellij-scala-historical | src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaEvaluatorBuilderUtil.scala | Scala | apache-2.0 | 74,466 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.filter
import java.util.Date
import com.typesafe.scalalogging.LazyLogging
import com.vividsolutions.jts.geom._
import org.geotools.data.DataUtilities
import org.geotools.filter.spatial.BBOXImpl
import org.joda.time.{DateTime, DateTimeZone}
import org.locationtech.geomesa.filter.visitor.IdDetectingFilterVisitor
import org.locationtech.geomesa.utils.geohash.GeohashUtils._
import org.locationtech.geomesa.utils.geotools.GeometryUtils
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter._
import org.opengis.filter.expression.PropertyName
import org.opengis.filter.spatial._
import org.opengis.filter.temporal.{After, Before, During, TEquals}
import org.opengis.temporal.Period
import scala.collection.GenTraversableOnce
import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer
import scala.util.{Failure, Success}
object FilterHelper {
import org.locationtech.geomesa.utils.geotools.GeometryUtils.{geoFactory => gf}
import org.locationtech.geomesa.utils.geotools.WholeWorldPolygon
val MinDateTime = new DateTime(0, 1, 1, 0, 0, 0, DateTimeZone.UTC)
val MaxDateTime = new DateTime(9999, 12, 31, 23, 59, 59, DateTimeZone.UTC)
private val SafeGeomString = "gm-safe"
  // Shim exposing FilterHelper's logger so other classes can log from this
  // object's helpers without importing FilterHelper.logger directly.
  object FilterHelperLogger extends LazyLogging {
    def log = logger
  }
/**
* Creates a new filter with valid bounds and attribute
*
* @param op spatial op
* @param sft simple feature type
* @return valid op
*/
  def visitBinarySpatialOp(op: BinarySpatialOperator, sft: SimpleFeatureType): Filter = {
    val prop = org.locationtech.geomesa.filter.checkOrderUnsafe(op.getExpression1, op.getExpression2)
    val geom = prop.literal.evaluate(null, classOf[Geometry])
    // geometry user data doubles as a "already processed" marker, preventing
    // infinite re-visiting when the rewritten filter is visited again
    if (geom.getUserData == SafeGeomString) {
      op // we've already visited this geom once
    } else {
      // check for null or empty attribute and replace with default geometry name
      val attribute = Option(prop.name).filterNot(_.isEmpty).getOrElse(if (sft == null) null else sft.getGeomField)
      // copy the geometry so we don't modify the original
      val geomCopy = gf.createGeometry(geom)
      // trim to world boundaries
      val trimmedGeom = geomCopy.intersection(WholeWorldPolygon)
      if (trimmedGeom.isEmpty) {
        // the query geometry lies entirely outside valid world bounds
        Filter.EXCLUDE
      } else {
        // add waypoints if needed so that IDL is handled correctly
        val geomWithWayPoints = if (op.isInstanceOf[BBOX]) addWayPointsToBBOX(trimmedGeom) else trimmedGeom
        val safeGeometry = tryGetIdlSafeGeom(geomWithWayPoints)
        // mark it as being visited
        safeGeometry.setUserData(SafeGeomString)
        recreateAsIdlSafeFilter(op, attribute, safeGeometry, prop.flipped)
      }
    }
  }
/**
* Creates a new filter with valid bounds and attributes. Distance will be converted into degrees.
* Note: units will still refer to 'meters', but that is due to ECQL issues
*
* @param op dwithin
* @param sft simple feature type
* @return valid dwithin
*/
  def visitDwithin(op: DWithin, sft: SimpleFeatureType): Filter = {
    val prop = org.locationtech.geomesa.filter.checkOrderUnsafe(op.getExpression1, op.getExpression2)
    val geom = prop.literal.evaluate(null, classOf[Geometry])
    if (geom.getUserData == SafeGeomString) {
      op // we've already visited this geom once
    } else {
      // normalize the distance to meters, defaulting to meters when no unit is given
      val units = Option(op.getDistanceUnits).map(_.trim).filter(_.nonEmpty).map(_.toLowerCase).getOrElse("meters")
      val multiplier = units match {
        case "meters"         => 1.0
        case "kilometers"     => 1000.0
        case "feet"           => 0.3048
        case "statute miles"  => 1609.347
        case "nautical miles" => 1852.0
        case _                => 1.0 // not part of ECQL spec...
      }
      val distanceMeters = op.getDistance * multiplier
      // convert to degrees for the rewritten filter (see the method scaladoc)
      val distanceDegrees = GeometryUtils.distanceDegrees(geom, distanceMeters)

      // check for null or empty attribute and replace with default geometry name
      val attribute = Option(prop.name).filterNot(_.isEmpty).getOrElse(if (sft == null) null else sft.getGeomField)

      // copy the geometry so we don't modify the original
      val geomCopy = gf.createGeometry(geom)
      // trim to world boundaries
      val trimmedGeom = geomCopy.intersection(WholeWorldPolygon)
      val safeGeometry = tryGetIdlSafeGeom(trimmedGeom)
      // mark it as being visited
      safeGeometry.setUserData(SafeGeomString)
      recreateAsIdlSafeFilter(op, attribute, safeGeometry, prop.flipped, distanceDegrees)
    }
  }
  // Splits a geometry on the international date line where possible; on failure
  // logs a warning and returns the original geometry unchanged.
  private def tryGetIdlSafeGeom(geom: Geometry): Geometry = getInternationalDateLineSafeGeometry(geom) match {
    case Success(g) => g
    case Failure(e) => FilterHelperLogger.log.warn(s"Error splitting geometry on IDL for $geom", e); geom
  }

  // Rebuilds the spatial filter against the (possibly IDL-split) geometry. A
  // GeometryCollection result of the split becomes an OR of per-part filters.
  private def recreateAsIdlSafeFilter(op: BinarySpatialOperator,
                                      property: String,
                                      geom: Geometry,
                                      flipped: Boolean,
                                      args: Any = null): Filter = {
    geom match {
      case g: GeometryCollection =>
        // geometry collections are OR'd together
        val asList = getGeometryListOf(g)
        asList.foreach(_.setUserData(geom.getUserData))
        ff.or(asList.map(recreateFilter(op, property, _, flipped, args)))
      case _ => recreateFilter(op, property, geom, flipped, args)
    }
  }

  // Rebuilds a single spatial predicate of the same kind as `op`, preserving
  // the original property/literal ordering via `flipped`. `args` carries the
  // distance for DWithin.
  private def recreateFilter(op: BinarySpatialOperator,
                             property: String,
                             geom: Geometry,
                             flipped: Boolean,
                             args: Any): Filter = {
    val (e1, e2) = if (flipped) (ff.literal(geom), ff.property(property)) else (ff.property(property), ff.literal(geom))
    op match {
      case op: Within     => ff.within(e1, e2)
      case op: Intersects => ff.intersects(e1, e2)
      case op: Overlaps   => ff.overlaps(e1, e2)
      // note: The ECQL spec doesn't allow for us to put the measurement
      // in "degrees", but that's how this filter will be used.
      case op: DWithin    => ff.dwithin(e1, e2, args.asInstanceOf[Double], "meters")
      // use the direct constructor so that we preserve our geom user data
      case op: BBOX       => new BBOXImpl(e1, e2)
      case op: Contains   => ff.contains(e1, e2)
    }
  }
  // True if the filter is a spatial predicate whose query geometry covers the
  // whole world — such filters match everything and can be dropped.
  def isFilterWholeWorld(f: Filter): Boolean = f match {
      case op: BBOX       => isOperationGeomWholeWorld(op)
      case op: Intersects => isOperationGeomWholeWorld(op)
      case op: Overlaps   => isOperationGeomWholeWorld(op)
      // within/contains are only all-matching for a specific operand order
      case op: Within     => isOperationGeomWholeWorld(op, SpatialOpOrder.PropertyFirst)
      case op: Contains   => isOperationGeomWholeWorld(op, SpatialOpOrder.LiteralFirst)
      case _ => false
    }

  private def isOperationGeomWholeWorld[Op <: BinarySpatialOperator]
      (op: Op, order: SpatialOpOrder.SpatialOpOrder = SpatialOpOrder.AnyOrder): Boolean = {
    val prop = checkOrder(op.getExpression1, op.getExpression2)
    // validate that property and literal are in the specified order
    prop.exists { p =>
      val ordered = order match {
        case SpatialOpOrder.AnyOrder      => true
        case SpatialOpOrder.PropertyFirst => !p.flipped
        case SpatialOpOrder.LiteralFirst  => p.flipped
      }
      ordered && Option(p.literal.evaluate(null, classOf[Geometry])).exists(isWholeWorld)
    }
  }

  // A geometry is "whole world" when its union covers the world polygon.
  def isWholeWorld[G <: Geometry](g: G): Boolean = g != null && g.union.covers(WholeWorldPolygon)

  // Flattens a (multi-)geometry into its component geometries, in order.
  def getGeometryListOf(inMP: Geometry): Seq[Geometry] =
    for( i <- 0 until inMP.getNumGeometries ) yield inMP.getGeometryN(i)

  // Densifies a bbox polygon with intermediate waypoints so that subsequent
  // IDL splitting produces correct pieces; no-op when no points were added.
  def addWayPointsToBBOX(g: Geometry): Geometry = {
    val gf = g.getFactory
    val geomArray = g.getCoordinates
    val correctedGeom = GeometryUtils.addWayPoints(geomArray).toArray
    if (geomArray.length == correctedGeom.length) g else gf.createPolygon(correctedGeom)
  }
/**
* Extracts geometries from a filter into a sequence of OR'd geometries
*
* @param filter filter to evaluate
* @param attribute attribute to consider
* @param intersect intersect AND'd geometries or return them all
* note if not intersected, 'and/or' distinction will be lost
* @return geometry bounds from spatial filters
*/
def extractGeometries(filter: Filter, attribute: String, intersect: Boolean = true): FilterValues[Geometry] =
extractUnclippedGeometries(filter, attribute, intersect).map(_.intersection(WholeWorldPolygon))
/**
* Extract geometries from a filter without validating boundaries.
*
* @param filter filter to evaluate
* @param attribute attribute to consider
* @param intersect intersect AND'd geometries or return them all
* @return geometry bounds from spatial filters
*/
  private def extractUnclippedGeometries(filter: Filter, attribute: String, intersect: Boolean): FilterValues[Geometry] = {
    filter match {
      // OR: union the geometries extracted from each child
      case o: Or =>
        val all = o.getChildren.map(extractUnclippedGeometries(_, attribute, intersect))
        val join = FilterValues.or[Geometry]((l, r) => l ++ r) _
        all.reduceLeftOption[FilterValues[Geometry]](join).getOrElse(FilterValues.empty)

      // AND: either intersect child geometries or just collect them all
      case a: And =>
        val all = a.getChildren.map(extractUnclippedGeometries(_, attribute, intersect)).filter(_.nonEmpty)
        if (intersect) {
          val intersect = FilterValues.and[Geometry]((l, r) => Option(l.intersection(r)).filterNot(_.isEmpty)) _
          all.reduceLeftOption[FilterValues[Geometry]](intersect).getOrElse(FilterValues.empty)
        } else {
          FilterValues(all.flatMap(_.values))
        }

      // Note: although not technically required, all known spatial predicates are also binary spatial operators
      case f: BinarySpatialOperator if isSpatialFilter(f) =>
        val geometry = for {
          prop <- checkOrder(f.getExpression1, f.getExpression2)
          if prop.name == null || prop.name == attribute
          geom <- Option(prop.literal.evaluate(null, classOf[Geometry]))
        } yield {
          val buffered = filter match {
            // note: the dwithin should have already between rewritten
            case dwithin: DWithin => geom.buffer(dwithin.getDistance)
            case bbox: BBOX =>
              val geomCopy = gf.createGeometry(geom)
              val trimmedGeom = geomCopy.intersection(WholeWorldPolygon)
              addWayPointsToBBOX(trimmedGeom)
            case _ => geom
          }
          tryGetIdlSafeGeom(buffered)
        }
        // an IDL split may yield a collection - flatten it into individual geometries
        FilterValues(geometry.map(flattenGeometry).getOrElse(Seq.empty))

      case _ => FilterValues.empty
    }
  }

  // Recursively unpacks GeometryCollections into a flat sequence of leaf geometries.
  private def flattenGeometry(geometry: Geometry): Seq[Geometry] = geometry match {
    case g: GeometryCollection => (0 until g.getNumGeometries).map(g.getGeometryN).flatMap(flattenGeometry)
    case _ => Seq(geometry)
  }
/**
* Extracts intervals from a filter. Intervals will be merged where possible - the resulting sequence
* is considered to be a union (i.e. OR)
*
* @param filter filter to evaluate
* @param attribute attribute to consider
* @param intersect intersect extracted values together, or return them all
* note if not intersected, 'and/or' distinction will be lost
* @return a sequence of intervals, if any. disjoint intervals will result in Seq((null, null))
*/
  def extractIntervals(filter: Filter,
                       attribute: String,
                       intersect: Boolean = true,
                       handleExclusiveBounds: Boolean = false): FilterValues[(DateTime, DateTime)] = {
    // exclusive lower bound: round up to the next whole second
    def roundSecondsUp(dt: DateTime): DateTime = dt.plusSeconds(1).withMillisOfSecond(0)
    // exclusive upper bound: round down to the previous whole second
    def roundSecondsDown(dt: DateTime): DateTime = {
      val millis = dt.getMillisOfSecond
      if (millis == 0) dt.minusSeconds(1) else dt.withMillisOfSecond(0)
    }

    extractAttributeBounds(filter, attribute, classOf[Date]).map { bounds =>
      // only adjust for exclusivity when requested and the bound is actually exclusive
      def roundLo(dt: DateTime) = if (handleExclusiveBounds && !bounds.inclusive) roundSecondsUp(dt) else dt
      def roundUp(dt: DateTime) = if (handleExclusiveBounds && !bounds.inclusive) roundSecondsDown(dt) else dt

      // open-ended bounds become the min/max representable dates
      val lower = bounds.lower.map(new DateTime(_, DateTimeZone.UTC)).map(roundLo).getOrElse(MinDateTime)
      val upper = bounds.upper.map(new DateTime(_, DateTimeZone.UTC)).map(roundUp).getOrElse(MaxDateTime)
      (lower, upper)
    }
  }
/**
* Extracts bounds from filters that pertain to a given attribute. Bounds will be merged where
* possible.
*
* @param filter filter to evaluate
* @param attribute attribute name to consider
* @param binding attribute type
* @param intersect intersect resulting values, or return all separately
* note if not intersected, 'and/or' distinction will be lost
* @return a sequence of bounds, if any
*/
  def extractAttributeBounds[T](filter: Filter,
                                attribute: String,
                                binding: Class[T],
                                intersect: Boolean = true): FilterValues[Bounds[T]] = {
    filter match {
      // OR: union the bounds extracted from each child
      case o: Or =>
        val all = o.getChildren.map(extractAttributeBounds(_, attribute, binding)).filter(_.nonEmpty)
        val join = FilterValues.or[Bounds[T]](Bounds.union[T]) _
        all.reduceLeftOption[FilterValues[Bounds[T]]](join).getOrElse(FilterValues.empty)

      // AND: intersect child bounds, or collect them all when intersect = false
      case a: And =>
        val all = a.getChildren.map(extractAttributeBounds(_, attribute, binding)).filter(_.nonEmpty)
        if (intersect) {
          val intersection = FilterValues.and[Bounds[T]](Bounds.intersection[T]) _
          all.reduceLeftOption[FilterValues[Bounds[T]]](intersection).getOrElse(FilterValues.empty)
        } else {
          FilterValues(all.flatMap(_.values))
        }

      // equality: a degenerate [value, value] inclusive range
      case f: PropertyIsEqualTo =>
        checkOrder(f.getExpression1, f.getExpression2).filter(_.name == attribute).flatMap { prop =>
          Option(prop.literal.evaluate(null, binding)).map { lit =>
            FilterValues(Seq(Bounds(Some(lit), Some(lit), inclusive = true)))
          }
        }.getOrElse(FilterValues.empty)

      case f: PropertyIsBetween =>
        try {
          val prop = f.getExpression.asInstanceOf[PropertyName].getPropertyName
          if (prop != attribute) { FilterValues.empty } else {
            val lower = f.getLowerBoundary.evaluate(null, binding)
            val upper = f.getUpperBoundary.evaluate(null, binding)
            // note that between is inclusive
            val bounds = Bounds(Option(lower), Option(upper), inclusive = true)
            FilterValues(Seq(bounds))
          }
        } catch {
          case e: Exception =>
            FilterHelperLogger.log.warn(s"Unable to extract bounds from filter '${filterToString(f)}'", e)
            FilterValues.empty
        }

      case f: During if classOf[Date].isAssignableFrom(binding) =>
        checkOrder(f.getExpression1, f.getExpression2).filter(_.name == attribute).flatMap { prop =>
          Option(prop.literal.evaluate(null, classOf[Period])).map { p =>
            // note: не checked... cast is safe because binding is a Date subtype per the guard
            val lower = p.getBeginning.getPosition.getDate.asInstanceOf[T]
            val upper = p.getEnding.getPosition.getDate.asInstanceOf[T]
            // note that during is exclusive
            val bounds = Bounds(Option(lower), Option(upper), inclusive = false)
            FilterValues(Seq(bounds))
          }
        }.getOrElse(FilterValues.empty)

      // comparison operators: one-sided ranges; `flipped` means the literal was
      // on the left, which reverses the direction of the comparison
      case f: PropertyIsGreaterThan =>
        checkOrder(f.getExpression1, f.getExpression2).filter(_.name == attribute).flatMap { prop =>
          Option(prop.literal.evaluate(null, binding)).map { lit =>
            val (lower, upper) = if (prop.flipped) (None, Some(lit)) else (Some(lit), None)
            val bounds = Bounds(lower, upper, inclusive = false)
            FilterValues(Seq(bounds))
          }
        }.getOrElse(FilterValues.empty)

      case f: PropertyIsGreaterThanOrEqualTo =>
        checkOrder(f.getExpression1, f.getExpression2).filter(_.name == attribute).flatMap { prop =>
          Option(prop.literal.evaluate(null, binding)).map { lit =>
            val (lower, upper) = if (prop.flipped) (None, Some(lit)) else (Some(lit), None)
            val bounds = Bounds(lower, upper, inclusive = true)
            FilterValues(Seq(bounds))
          }
        }.getOrElse(FilterValues.empty)

      case f: PropertyIsLessThan =>
        checkOrder(f.getExpression1, f.getExpression2).filter(_.name == attribute).flatMap { prop =>
          Option(prop.literal.evaluate(null, binding)).map { lit =>
            val (lower, upper) = if (prop.flipped) (Some(lit), None) else (None, Some(lit))
            val bounds = Bounds(lower, upper, inclusive = false)
            FilterValues(Seq(bounds))
          }
        }.getOrElse(FilterValues.empty)

      case f: PropertyIsLessThanOrEqualTo =>
        checkOrder(f.getExpression1, f.getExpression2).filter(_.name == attribute).flatMap { prop =>
          Option(prop.literal.evaluate(null, binding)).map { lit =>
            val (lower, upper) = if (prop.flipped) (Some(lit), None) else (None, Some(lit))
            val bounds = Bounds(lower, upper, inclusive = true)
            FilterValues(Seq(bounds))
          }
        }.getOrElse(FilterValues.empty)

      case f: Before =>
        checkOrder(f.getExpression1, f.getExpression2).filter(_.name == attribute).flatMap { prop =>
          Option(prop.literal.evaluate(null, binding)).map { lit =>
            val (lower, upper) = if (prop.flipped) (Option(lit), None) else (None, Option(lit))
            // note that before is exclusive
            val bounds = Bounds(lower, upper, inclusive = false)
            FilterValues(Seq(bounds))
          }
        }.getOrElse(FilterValues.empty)

      case f: After =>
        checkOrder(f.getExpression1, f.getExpression2).filter(_.name == attribute).flatMap { prop =>
          Option(prop.literal.evaluate(null, binding)).map { lit =>
            val (lower, upper) = if (prop.flipped) (None, Option(lit)) else (Option(lit), None)
            // note that after is exclusive
            val bounds = Bounds(lower, upper, inclusive = false)
            FilterValues(Seq(bounds))
          }
        }.getOrElse(FilterValues.empty)

      // LIKE with a trailing wildcard becomes a prefix range over strings
      case f: PropertyIsLike if binding == classOf[String] =>
        try {
          val prop = f.getExpression.asInstanceOf[PropertyName].getPropertyName
          if (prop != attribute) { FilterValues.empty } else {
            // Remove the trailing wildcard and create a range prefix
            val literal = f.getLiteral
            val lower = if (literal.endsWith(MULTICHAR_WILDCARD)) {
              literal.substring(0, literal.length - MULTICHAR_WILDCARD.length)
            } else {
              literal
            }
            val upper = Some(lower + WILDCARD_SUFFIX).asInstanceOf[Some[T]]
            val bounds = Bounds(Some(lower.asInstanceOf[T]), upper, inclusive = true)
            FilterValues(Seq(bounds))
          }
        } catch {
          case e: Exception =>
            FilterHelperLogger.log.warn(s"Unable to extract bounds from filter '${filterToString(f)}'", e)
            FilterValues.empty
        }

      // NOT(IS NULL): any value matches, represented as the unbounded range
      case f: Not if f.getFilter.isInstanceOf[PropertyIsNull] =>
        try {
          val isNull = f.getFilter.asInstanceOf[PropertyIsNull]
          val prop = isNull.getExpression.asInstanceOf[PropertyName].getPropertyName
          if (prop != attribute) { FilterValues.empty } else {
            val bounds = Bounds[T](None, None, inclusive = true)
            FilterValues(Seq(bounds))
          }
        } catch {
          case e: Exception =>
            FilterHelperLogger.log.warn(s"Unable to extract bounds from filter '${filterToString(f)}'", e)
            FilterValues.empty
        }

      case f: Not =>
        // we extract the sub-filter bounds, then invert them
        val inverted = extractAttributeBounds(f.getFilter, attribute, binding)
        if (inverted.isEmpty) {
          inverted
        } else if (inverted.disjoint) {
          FilterValues(Seq(Bounds(None, None, inclusive = true))) // equivalent to not null
        } else {
          // NOT(A OR B) turns into NOT(A) AND NOT(B)
          val uninverted = inverted.values.map { bound =>
            // NOT the single bound
            val not = bound.bounds match {
              case (None, None) => Seq.empty
              case (Some(lo), None) => Seq(Bounds(None, Some(lo), !bound.inclusive))
              case (None, Some(hi)) => Seq(Bounds(Some(hi), None, !bound.inclusive))
              case (Some(lo), Some(hi)) =>
                Seq(Bounds(None, Some(lo), !bound.inclusive), Bounds(Some(hi), None, !bound.inclusive))
            }
            FilterValues(not)
          }
          // AND together
          val intersect = FilterValues.and[Bounds[T]](Bounds.intersection[T]) _
          uninverted.reduceLeft[FilterValues[Bounds[T]]](intersect)
        }

      // temporal equality: same shape as PropertyIsEqualTo
      case f: TEquals =>
        checkOrder(f.getExpression1, f.getExpression2).filter(_.name == attribute).flatMap { prop =>
          Option(prop.literal.evaluate(null, binding)).map { lit =>
            val bounds = Bounds(Some(lit), Some(lit), inclusive = true)
            FilterValues(Seq(bounds))
          }
        }.getOrElse(FilterValues.empty)

      case _ => FilterValues.empty
    }
  }
  // Sorted, distinct-ish list of property names referenced by the filter.
  def propertyNames(filter: Filter, sft: SimpleFeatureType): Seq[String] =
    DataUtilities.propertyNames(filter, sft).map(_.getPropertyName).toSeq.sorted

  // True when the filter contains any feature-ID predicate.
  def hasIdFilter(filter: Filter): Boolean =
    filter.accept(new IdDetectingFilterVisitor, false).asInstanceOf[Boolean]

  // ANDs a list of filters together; None for an empty list.
  def filterListAsAnd(filters: Seq[Filter]): Option[Filter] = andOption(filters)
/**
* Simplifies filters to make them easier to process.
*
* Current simplifications:
*
* 1) Extracts out common parts in an OR clause to simplify further processing.
*
* Example: OR(AND(1, 2), AND(1, 3), AND(1, 4)) -> AND(1, OR(2, 3, 4))
*
* 2) N/A - add more simplifications here as needed
*
* @param filter filter
* @return
*/
  def simplify(filter: Filter): Filter = {
    def deduplicateOrs(f: Filter): Filter = f match {
      case and: And => ff.and(and.getChildren.map(deduplicateOrs))

      case or: Or =>
        // OR(AND(1,2,3), AND(1,2,4)) -> Seq(Seq(1,2,3), Seq(1,2,4))
        val decomposed = or.getChildren.map(decomposeAnd)
        // NOTE(review): assumes an Or always has at least one child — confirm
        // GeoTools guarantees this for filters reaching here
        val clauses = decomposed.head // Seq(1,2,3)
        // clauses present in every OR branch can be factored out
        val duplicates = clauses.filter(c => decomposed.tail.forall(_.contains(c))) // Seq(1,2)
        if (duplicates.isEmpty) { or } else {
          val deduplicated = orOption(decomposed.flatMap(d => andOption(d.filterNot(duplicates.contains))))
          andFilters(deduplicated.toSeq ++ duplicates)
        }

      case _ => f
    }
    // TODO GEOMESA-1533 simplify ANDs of ORs for DNF
    flatten(deduplicateOrs(flatten(filter)))
  }
/**
* Flattens nested ands and ors.
*
* Example: AND(1, AND(2, 3)) -> AND(1, 2, 3)
*
* @param filter filter
* @return
*/
def flatten(filter: Filter): Filter = {
filter match {
case and: And => ff.and(flattenAnd(and.getChildren))
case or: Or => ff.or(flattenOr(or.getChildren))
case f: Filter => f
}
}
private def flattenAnd(filters: Seq[Filter]): ListBuffer[Filter] = {
val remaining = ListBuffer.empty[Filter] ++ filters
val result = ListBuffer.empty[Filter]
do {
remaining.remove(0) match {
case f: And => remaining.appendAll(f.getChildren)
case f => result.append(flatten(f))
}
} while (remaining.nonEmpty)
result
}
private def flattenOr(filters: Seq[Filter]): ListBuffer[Filter] = {
val remaining = ListBuffer.empty[Filter] ++ filters
val result = ListBuffer.empty[Filter]
do {
remaining.remove(0) match {
case f: Or => remaining.appendAll(f.getChildren)
case f => result.append(flatten(f))
}
} while (remaining.nonEmpty)
result
}
  // Expected operand ordering for a spatial predicate: property-then-literal,
  // literal-then-property, or either.
  private object SpatialOpOrder extends Enumeration {
    type SpatialOpOrder = Value
    val PropertyFirst, LiteralFirst, AnyOrder = Value
  }
}
/**
* Holds values extracted from a filter. Values may be empty, in which case nothing was extracted from
* the filter. May be marked as 'disjoint', which means that mutually exclusive values were extracted
* from the filter. This may be checked to short-circuit queries that will not result in any hits.
*
* @param values values extracted from the filter. If nothing was extracted, will be empty
* @param disjoint mutually exclusive values were extracted, e.g. 'a < 1 && a > 2'
* @tparam T type parameter
*/
case class FilterValues[T](values: Seq[T], disjoint: Boolean = false) {
  // combinators preserve the disjoint flag so short-circuit info is not lost
  def map[U](f: T => U): FilterValues[U] = FilterValues(values.map(f), disjoint)
  def flatMap[U](f: T => GenTraversableOnce[U]): FilterValues[U] = FilterValues(values.flatMap(f), disjoint)
  def foreach[U](f: T => U): Unit = values.foreach(f)
  def filter(f: T => Boolean): FilterValues[T] = FilterValues(values.filter(f), disjoint)
  // a disjoint result is still "information", so it counts as non-empty
  def nonEmpty: Boolean = values.nonEmpty || disjoint
  def isEmpty: Boolean = !nonEmpty
}
object FilterValues {
  // no values extracted
  def empty[T]: FilterValues[T] = FilterValues[T](Seq.empty)
  // mutually-exclusive values extracted - the filter can never match
  def disjoint[T]: FilterValues[T] = FilterValues[T](Seq.empty, disjoint = true)

  // OR semantics: a disjoint side contributes nothing, so the other side wins;
  // two non-disjoint sides are joined with the supplied function
  def or[T](join: (Seq[T], Seq[T]) => Seq[T])(left: FilterValues[T], right: FilterValues[T]): FilterValues[T] = {
    (left.disjoint, right.disjoint) match {
      case (false, false) => FilterValues(join(left.values, right.values))
      case (false, true)  => left
      case (true,  false) => right
      case (true,  true)  => FilterValues.disjoint
    }
  }

  // AND semantics: any disjoint side makes the whole result disjoint; otherwise
  // intersect pairwise, and treat "no surviving intersections" as disjoint too
  def and[T](intersect: (T, T) => Option[T])(left: FilterValues[T], right: FilterValues[T]): FilterValues[T] = {
    if (left.disjoint || right.disjoint) {
      FilterValues.disjoint
    } else {
      // NOTE(review): `intersect(_, v)` applies arguments as (right, left) —
      // confirm intersect is commutative for all uses (Bounds/geometry are)
      val intersections = left.values.flatMap(v => right.values.flatMap(intersect(_, v)))
      if (intersections.isEmpty) {
        FilterValues.disjoint
      } else {
        FilterValues(intersections)
      }
    }
  }
} | spandanagrawal/geomesa | geomesa-filter/src/main/scala/org/locationtech/geomesa/filter/FilterHelper.scala | Scala | apache-2.0 | 26,895 |
package com.crobox.clickhouse.dsl.column
import com.crobox.clickhouse.dsl.{Column, ExpressionColumn}
/** DSL wrappers for ClickHouse string functions. Each case class is an AST node
  * for one function; the `StringOps` mixin provides infix syntax on string-valued
  * magnets, and the bare methods provide prefix-style construction.
  */
trait StringFunctions { self: Magnets =>
  abstract class StringFunctionCol[+V](val innerCol: Column) extends ExpressionColumn[V](innerCol)
  case class Length(col: EmptyNonEmptyCol[_]) extends StringFunctionCol[Int](col.column)
  // NOTE(review): type parameter is String here but Int on Length — looks like a
  // copy-paste slip, since a UTF-8 length is numeric; confirm intended result type.
  case class LengthUTF8(col: EmptyNonEmptyCol[_]) extends StringFunctionCol[String](col.column)
  case class Lower(col: StringColMagnet[_]) extends StringFunctionCol[String](col.column)
  case class Upper(col: StringColMagnet[_]) extends StringFunctionCol[String](col.column)
  case class LowerUTF8(col: StringColMagnet[_]) extends StringFunctionCol[String](col.column)
  case class UpperUTF8(col: StringColMagnet[_]) extends StringFunctionCol[String](col.column)
  case class Reverse(col: StringColMagnet[_]) extends StringFunctionCol[String](col.column)
  case class ReverseUTF8(col: StringColMagnet[_]) extends StringFunctionCol[String](col.column)
  case class Concat(col1: StringColMagnet[_], col2: StringColMagnet[_], columns: StringColMagnet[_]*)
      extends StringFunctionCol[String](col1.column)
  case class Substring(col: StringColMagnet[_], offset: NumericCol[_], length: NumericCol[_])
      extends StringFunctionCol[String](col.column)
  case class SubstringUTF8(col: StringColMagnet[_], offset: NumericCol[_], length: NumericCol[_])
      extends StringFunctionCol[String](col.column)
  case class AppendTrailingCharIfAbsent(col: StringColMagnet[_], c: StringColMagnet[_])
      extends StringFunctionCol[String](col.column)
  case class ConvertCharset(col: StringColMagnet[_], from: StringColMagnet[_], to: StringColMagnet[_])
      extends StringFunctionCol[String](col.column)
  // TODO: Enum the charsets?
  /** Infix syntax: `someStringCol.lower()`, `a || b`, etc. `self` is always the target column. */
  trait StringOps { self: StringColMagnet[_] with EmptyNonEmptyCol[_] =>
    def length() = Length(self)
    def lengthUTF8() = LengthUTF8(self)
    def lower() = Lower(self)
    def upper() = Upper(self)
    def lowerUTF8() = LowerUTF8(self)
    def upperUTF8() = UpperUTF8(self)
    def reverse() = Reverse(self)
    def reverseUTF8() = ReverseUTF8(self)
    def ||(col2: StringColMagnet[_]) = Concat(self, col2)
    def concat(col2: StringColMagnet[_], coln: StringColMagnet[_]*) =
      Concat(self, col2: StringColMagnet[_], coln: _*)
    def substring(offset: NumericCol[_], length: NumericCol[_]) =
      Substring(self, offset: NumericCol[_], length: NumericCol[_])
    // NOTE(review): `col` is unused — the target is `self`, matching `substring` above.
    // Removing it would break source compatibility, so it is only flagged here.
    def substringUTF8(col: StringColMagnet[_], offset: NumericCol[_], length: NumericCol[_]) =
      SubstringUTF8(self, offset: NumericCol[_], length: NumericCol[_])
    // NOTE(review): `col` is unused here as well — `self` is passed to the AST node.
    def appendTrailingCharIfAbsent(col: StringColMagnet[_], c: StringColMagnet[_]) =
      AppendTrailingCharIfAbsent(self, c: StringColMagnet[_])
    def convertCharset(from: StringColMagnet[_], to: StringColMagnet[_]) =
      ConvertCharset(self, from: StringColMagnet[_], to: StringColMagnet[_])
  }
  def length(col: EmptyNonEmptyCol[_]) = Length(col: EmptyNonEmptyCol[_])
  def lengthUTF8(col: EmptyNonEmptyCol[_]) = LengthUTF8(col: EmptyNonEmptyCol[_])
  def lower(col: StringColMagnet[_]) = Lower(col: StringColMagnet[_])
  def upper(col: StringColMagnet[_]) = Upper(col: StringColMagnet[_])
  def lowerUTF8(col: StringColMagnet[_]) = LowerUTF8(col: StringColMagnet[_])
  def upperUTF8(col: StringColMagnet[_]) = UpperUTF8(col: StringColMagnet[_])
  def reverse(col: StringColMagnet[_]) = Reverse(col: StringColMagnet[_])
  def reverseUTF8(col: StringColMagnet[_]) = ReverseUTF8(col: StringColMagnet[_])
  def concat(col: StringColMagnet[_], col2: StringColMagnet[_], coln: StringColMagnet[_]*) =
    Concat(col: StringColMagnet[_], col2: StringColMagnet[_], coln: _*)
  def substring(col: StringColMagnet[_], offset: NumericCol[_], length: NumericCol[_]) =
    Substring(col: StringColMagnet[_], offset: NumericCol[_], length: NumericCol[_])
  def substringUTF8(col: StringColMagnet[_], offset: NumericCol[_], length: NumericCol[_]) =
    SubstringUTF8(col: StringColMagnet[_], offset: NumericCol[_], length: NumericCol[_])
  def appendTrailingCharIfAbsent(col: StringColMagnet[_], c: StringColMagnet[_]) =
    AppendTrailingCharIfAbsent(col: StringColMagnet[_], c: StringColMagnet[_])
  def convertCharset(col: StringColMagnet[_], from: StringColMagnet[_], to: StringColMagnet[_]) =
    ConvertCharset(col: StringColMagnet[_], from: StringColMagnet[_], to: StringColMagnet[_])
}
| crobox/clickhouse-scala-client | dsl/src/main/scala/com.crobox.clickhouse/dsl/column/StringFunctions.scala | Scala | lgpl-3.0 | 4,480 |
package lila.message
import org.joda.time.DateTime
import ornicar.scalalib.Random
import lila.user.User
/** A private-message thread between two users: the creator and the invited user. */
case class Thread(
    id: String,
    name: String,
    createdAt: DateTime,
    updatedAt: DateTime,
    posts: List[Post],
    creatorId: String,
    invitedId: String,
    visibleByUserIds: List[String]) {

  /** Appends a post and bumps the update time to the post's creation time. */
  def +(post: Post) = copy(posts = posts :+ post, updatedAt = post.createdAt)

  def isCreator(user: User) = creatorId == user.id

  def isReadBy(user: User) = nbUnreadBy(user) == 0
  def isUnReadBy(user: User) = !isReadBy(user)

  /** Number of unread posts written by the other participant. */
  def nbUnreadBy(user: User): Int =
    if (isCreator(user)) posts.count(post => post.isByInvited && post.isUnRead)
    else posts.count(post => post.isByCreator && post.isUnRead)

  def nbUnread: Int = posts.count(_.isUnRead)

  /** First unread post written by the other participant, if any. */
  def firstPostUnreadBy(user: User): Option[Post] =
    posts.find(post => post.isUnRead && post.isByCreator != isCreator(user))

  def userIds = List(creatorId, invitedId)

  def hasUser(user: User) = userIds.contains(user.id)

  def otherUserId(user: User) = if (isCreator(user)) invitedId else creatorId

  def senderOf(post: Post) = if (post.isByCreator) creatorId else invitedId

  def receiverOf(post: Post) = if (post.isByCreator) invitedId else creatorId

  def isWrittenBy(post: Post, user: User) = post.isByCreator == isCreator(user)

  /** Trimmed subject line, falling back to a placeholder when blank. */
  def nonEmptyName = Some(name.trim).filter(_.nonEmpty).getOrElse("No subject")

  /** Hides the thread from the given user without touching the other participant's view. */
  def deleteFor(user: User) = copy(
    visibleByUserIds = visibleByUserIds.filterNot(_ == user.id)
  )

  def hasPostsWrittenBy(userId: String) = posts.exists(_.isByCreator == (creatorId == userId))
}
object Thread {
  // length of the random uppercase thread id
  val idSize = 8
  /** Builds a new two-party thread seeded with an initial post written by the creator.
    * The thread starts visible to both participants.
    */
  def make(
    name: String,
    text: String,
    creatorId: String,
    invitedId: String): Thread = Thread(
    id = Random nextStringUppercase idSize,
    name = name,
    createdAt = DateTime.now,
    updatedAt = DateTime.now,
    posts = List(Post.make(
      text = text,
      isByCreator = true
    )),
    creatorId = creatorId,
    invitedId = invitedId,
    visibleByUserIds = List(creatorId, invitedId))
  import lila.db.JsTube
  import JsTube.Helpers._
  import play.api.libs.json._
  // JSON (de)serializer for Thread; pipes createdAt/updatedAt through the
  // read/writeDate helpers on both directions. Requires Post's tube in scope.
  private[message] lazy val tube = Post.tube |> { implicit pt =>
    JsTube(
      (__.json update (
        readDate('createdAt) andThen readDate('updatedAt)
      )) andThen Json.reads[Thread],
      Json.writes[Thread] andThen (__.json update (
        writeDate('createdAt) andThen writeDate('updatedAt)
      ))
    )
  }
}
| bjhaid/lila | modules/message/src/main/Thread.scala | Scala | mit | 2,469 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
// Added: Sat Oct 7 16:08:21 2006
//todo: use inherited type info also for vars and values
// Added: Thu Apr 12 18:23:58 2007
//todo: disallow C#D in superclass
//todo: treat :::= correctly
package scala
package tools.nsc
package typechecker
import scala.collection.{mutable, immutable}
import scala.reflect.internal.util.{ Statistics, ListOfNil }
import mutable.ListBuffer
import symtab.Flags._
import Mode._
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
/** This trait provides methods to assign types to trees.
*
* @author Martin Odersky
* @version 1.0
*/
trait Typers extends Adaptations with Tags with TypersTracking with PatternTypers {
self: Analyzer =>
import global._
import definitions._
import TypersStats._
  /** Mode for typing the arguments of `fun`: adds SCCmode inside a self/super constructor call. */
  final def forArgMode(fun: Tree, mode: Mode) =
    if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode else mode
  // namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
  // is cached here and re-used in typedDefDef / typedValDef
  // Also used to cache imports type-checked by namer.
  val transformed = new mutable.AnyRefMap[Tree, Tree]
  final val shortenImports = false
// allows override of the behavior of the resetTyper method w.r.t comments
def resetDocComments() = {
clearDocComments()
}
def resetTyper() {
//println("resetTyper called")
resetContexts()
resetImplicits()
resetDocComments()
}
  /** Outcome of a silent (error-buffering) typing attempt: either a value
    * (SilentResultValue) or the buffered errors (SilentTypeError).
    */
  sealed abstract class SilentResult[+T] {
    def isEmpty: Boolean
    def nonEmpty = !isEmpty
    @inline final def fold[U](none: => U)(f: T => U): U = this match {
      case SilentResultValue(value) => f(value)
      case _ => none
    }
    @inline final def map[U](f: T => U): SilentResult[U] = this match {
      case SilentResultValue(value) => SilentResultValue(f(value))
      case x: SilentTypeError => x
    }
    // a failed predicate is encoded as a synthetic "!p" type error at NoPosition
    @inline final def filter(p: T => Boolean): SilentResult[T] = this match {
      case SilentResultValue(value) if !p(value) => SilentTypeError(TypeErrorWrapper(new TypeError(NoPosition, "!p")))
      case _ => this
    }
    @inline final def orElse[T1 >: T](f: Seq[AbsTypeError] => T1): T1 = this match {
      case SilentResultValue(value) => value
      case s : SilentTypeError => f(s.reportableErrors)
    }
  }
  /** Failure case: carries the errors and the warnings buffered during the silent attempt. */
  class SilentTypeError private(val errors: List[AbsTypeError], val warnings: List[(Position, String)]) extends SilentResult[Nothing] {
    override def isEmpty = true
    def err: AbsTypeError = errors.head
    // when the first error is an ambiguous implicit, report only that one
    def reportableErrors = errors match {
      case (e1: AmbiguousImplicitTypeError) +: _ =>
        List(e1) // DRYer error reporting for neg/t6436b.scala
      case all =>
        all
    }
  }
  object SilentTypeError {
    def apply(errors: AbsTypeError*): SilentTypeError = apply(errors.toList, Nil)
    def apply(errors: List[AbsTypeError], warnings: List[(Position, String)]): SilentTypeError = new SilentTypeError(errors, warnings)
    // todo: this extracts only one error, should be a separate extractor.
    def unapply(error: SilentTypeError): Option[AbsTypeError] = error.errors.headOption
  }
  // todo: should include reporter warnings in SilentResultValue.
  // e.g. tryTypedApply could print warnings on arguments when the typing succeeds.
  case class SilentResultValue[+T](value: T) extends SilentResult[T] { override def isEmpty = false }
  /** Factory for the typer used during normal compilation; overridden by other tools. */
  def newTyper(context: Context): Typer = new NormalTyper(context)
  private class NormalTyper(context : Context) extends Typer(context)
  // A transient flag to mark members of anonymous classes
  // that are turned private by typedBlock
  private final val SYNTHETIC_PRIVATE = TRANS_FLAG
  // recognize `${ ... }` and `$ident` shapes inside string literals (interpolation linting)
  private final val InterpolatorCodeRegex = """\\$\\{\\s*(.*?)\\s*\\}""".r
  private final val InterpolatorIdentRegex = """\\$[$\\w]+""".r // note that \\w doesn't include $
abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with PatternTyper with TyperContextErrors {
    import context0.unit
    import typeDebug.ptTree
    import TyperErrorGen._
    val runDefinitions = currentRun.runDefinitions
    import runDefinitions._
    // per-compilation-unit cache of already-typed trees (see the trait-level `transformed`)
    private val transformed: mutable.Map[Tree, Tree] = unit.transformed
    val infer = new Inferencer {
      def context = Typer.this.context
      // See SI-3281 re undoLog
      override def isCoercible(tp: Type, pt: Type) = undoLog undo viewExists(tp, pt)
    }
    /** Overridden to false in scaladoc and/or interactive. */
    def canAdaptConstantTypeToLiteral = true
    def canTranslateEmptyListToNil = true
    def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree
    // used to exempt synthetic accessors (i.e. those that are synthesized by the compiler to access a field)
    // from skolemization because there's a weird bug that causes spurious type mismatches
    // (it seems to have something to do with existential abstraction over values
    // https://github.com/scala/scala-dev/issues/165
    // when we're past typer, lazy accessors are synthetic, but before they are user-defined
    // to make this hack less hacky, we could rework our flag assignment to allow for
    // requiring both the ACCESSOR and the SYNTHETIC bits to trigger the exemption
    private def isSyntheticAccessor(sym: Symbol) = sym.isAccessor && (!sym.isLazy || isPastTyper)
    // when type checking during erasure, generate erased types in spots that aren't transformed by erasure
    // (it erases in TypeTrees, but not in, e.g., the type a Function node)
    def phasedAppliedType(sym: Symbol, args: List[Type]) = {
      val tp = appliedType(sym, args)
      if (phase.erasedTypes) erasure.specialScalaErasure(tp) else tp
    }
    /** Type a DocDef by typing its underlying definition with the same mode/pt. */
    def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree =
      typed(docDef.definition, mode, pt)
    /** Find implicit arguments and pass them to given tree.
     *
     *  For a `fun` of MethodType, searches an implicit for each parameter,
     *  substituting type variables fixed by earlier arguments into later
     *  parameter types. After a failure, switches to named arguments so that
     *  defaults can still apply; reports only the first divergent error.
     *  Returns `fun` unchanged when its type is ErrorType.
     */
    def applyImplicitArgs(fun: Tree): Tree = fun.tpe match {
      case MethodType(params, _) =>
        val argResultsBuff = new ListBuffer[SearchResult]()
        val argBuff = new ListBuffer[Tree]()
        // paramFailed cannot be initialized with params.exists(_.tpe.isError) because that would
        // hide some valid errors for params preceding the erroneous one.
        var paramFailed = false
        var mkArg: (Name, Tree) => Tree = (_, tree) => tree
        // DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1))
        //
        // apply the substitutions (undet type param -> type) that were determined
        // by implicit resolution of implicit arguments on the left of this argument
        for(param <- params) {
          var paramTp = param.tpe
          for(ar <- argResultsBuff)
            paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
          val res =
            if (paramFailed || (paramTp.isErroneous && {paramFailed = true; true})) SearchFailure
            else inferImplicitFor(paramTp, fun, context, reportAmbiguous = context.reportErrors)
          argResultsBuff += res
          if (res.isSuccess) {
            argBuff += mkArg(param.name, res.tree)
          } else {
            mkArg = gen.mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
            if (!param.hasDefault && !paramFailed) {
              context.reporter.reportFirstDivergentError(fun, param, paramTp)(context)
              paramFailed = true
            }
            /* else {
             TODO: alternative (to expose implicit search failure more) -->
             resolve argument, do type inference, keep emitting positional args, infer type params based on default value for arg
             for (ar <- argResultsBuff) ar.subst traverse defaultVal
             val targs = exprTypeArgs(context.undetparams, defaultVal.tpe, paramTp)
             substExpr(tree, tparams, targs, pt)
            }*/
          }
        }
        val args = argBuff.toList
        // propagate the inferred substitutions into fun and all argument trees
        for (ar <- argResultsBuff) {
          ar.subst traverse fun
          for (arg <- args) ar.subst traverse arg
        }
        new ApplyToImplicitArgs(fun, args) setPos fun.pos
      case ErrorType =>
        fun
    }
    /** Does an implicit conversion exist from `from` to `to`? Ambiguity is not reported. */
    def viewExists(from: Type, to: Type): Boolean = (
         !from.isError
      && !to.isError
      && context.implicitsEnabled
      && (inferView(context.tree, from, to, reportAmbiguous = false) != EmptyTree)
      // SI-8230 / SI-8463 We'd like to change this to `saveErrors = false`, but can't.
      // For now, we can at least pass in `context.tree` rather then `EmptyTree` so as
      // to avoid unpositioned type errors.
    )
    /** Infer an implicit conversion (`view`) between two types.
     *  @param tree             The tree which needs to be converted.
     *  @param from             The source type of the conversion
     *  @param to               The target type of the conversion
     *  @param reportAmbiguous  Should ambiguous implicit errors be reported?
     *                          False iff we search for a view to find out
     *                          whether one type is coercible to another.
     *  @param saveErrors       Should ambiguous and divergent implicit errors that were buffered
     *                          during the inference of a view be put into the original buffer.
     *                          False iff we don't care about them.
     */
    def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree =
      // no views past typer, and never from method/overloaded/poly types
      if (isPastTyper || from.isInstanceOf[MethodType] || from.isInstanceOf[OverloadedType] || from.isInstanceOf[PolyType]) EmptyTree
      else {
        debuglog(s"Inferring view from $from to $to for $tree (reportAmbiguous= $reportAmbiguous, saveErrors=$saveErrors)")
        val fromNoAnnot = from.withoutAnnotations
        // retry with a by-name source type if the direct search fails
        val result = inferImplicitView(fromNoAnnot, to, tree, context, reportAmbiguous, saveErrors) match {
          case fail if fail.isFailure => inferImplicitView(byNameType(fromNoAnnot), to, tree, context, reportAmbiguous, saveErrors)
          case ok => ok
        }
        if (result.subst != EmptyTreeTypeSubstituter) {
          result.subst traverse tree
          notifyUndetparamsInferred(result.subst.from, result.subst.to)
        }
        result.tree
      }
    import infer._
    private var namerCache: Namer = null
    // a namer tied to the current context, lazily rebuilt whenever the context changes
    def namer = {
      if ((namerCache eq null) || namerCache.context != context)
        namerCache = newNamer(context)
      namerCache
    }
    var context = context0
    def context1 = context
    // for use with silent type checking to when we can't have results with undetermined type params
    // note that this captures the context var
    val isMonoContext = (_: Any) => context.undetparams.isEmpty
    /** Replace existentially bound type params in `tp` with wildcards,
     *  dealiasing first when `tp` is an alias typeref (SI-7126 guard against
     *  dealiasing that makes no progress).
     */
    def dropExistential(tp: Type): Type = tp match {
      case ExistentialType(tparams, tpe) =>
        new SubstWildcardMap(tparams).apply(tp)
      case TypeRef(_, sym, _) if sym.isAliasType =>
        val tp0 = tp.dealias
        if (tp eq tp0) {
          devWarning(s"dropExistential did not progress dealiasing $tp, see SI-7126")
          tp
        } else {
          val tp1 = dropExistential(tp0)
          if (tp1 eq tp0) tp else tp1
        }
      case _ => tp
    }
    private def errorNotClass(tpt: Tree, found: Type)  = { ClassTypeRequiredError(tpt, found); false }
    private def errorNotStable(tpt: Tree, found: Type) = { TypeNotAStablePrefixError(tpt, found); false }
    /** Check that `tpt` refers to a non-refinement class type */
    def checkClassType(tpt: Tree): Boolean = {
      val tpe = unwrapToClass(tpt.tpe)
      isNonRefinementClassType(tpe) || errorNotClass(tpt, tpe)
    }
    /** Check that `tpt` refers to a class type with a stable prefix.
     *  Past typer only the class-type check applies; the prefix stability
     *  check also consults the tree shape for singleton-bounded projections.
     */
    def checkStablePrefixClassType(tpt: Tree): Boolean = {
      val tpe = unwrapToStableClass(tpt.tpe)
      def prefixIsStable = {
        def checkPre = tpe match {
          case TypeRef(pre, _, _) => pre.isStable || errorNotStable(tpt, pre)
          case _ => false
        }
        // A type projection like X#Y can get by the stable check if the
        // prefix is singleton-bounded, so peek at the tree too.
        def checkTree = tpt match {
          case SelectFromTypeTree(qual, _) => isSingleType(qual.tpe) || errorNotClass(tpt, tpe)
          case _ => true
        }
        checkPre && checkTree
      }
      ( (isNonRefinementClassType(tpe) || errorNotClass(tpt, tpe))
        && (isPastTyper || prefixIsStable)
      )
    }
    /** Check that type `tp` is not a subtype of itself.
     *  Recurses through type refs, singletons, sub- and compound types,
     *  reporting a cyclic aliasing/subtyping error on a locked symbol.
     */
    def checkNonCyclic(pos: Position, tp: Type): Boolean = {
      def checkNotLocked(sym: Symbol) = {
        sym.initialize.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false }
      }
      tp match {
        case TypeRef(pre, sym, args) =>
          checkNotLocked(sym) &&
          ((!sym.isNonClassType) || checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), sym))
          // @M! info for a type ref to a type parameter now returns a polytype
          // @M was: checkNonCyclic(pos, pre.memberInfo(sym).subst(sym.typeParams, args), sym)
        case SingleType(pre, sym) =>
          checkNotLocked(sym)
        case st: SubType =>
          checkNonCyclic(pos, st.supertype)
        case ct: CompoundType =>
          ct.parents forall (x => checkNonCyclic(pos, x))
        case _ =>
          true
      }
    }
    /** As above, but locks `lockedSym` while checking to detect re-entry. */
    def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try {
      if (!lockedSym.lock(CyclicReferenceError(pos, tp, lockedSym))) false
      else checkNonCyclic(pos, tp)
    } finally {
      lockedSym.unlock()
    }
    /** Sets the symbol's info to ErrorType when its type is cyclic. */
    def checkNonCyclic(sym: Symbol) {
      if (!checkNonCyclic(sym.pos, sym.tpe_*)) sym.setInfo(ErrorType)
    }
    /** Tree variant: poisons both the tpt and the defined symbol on a cycle. */
    def checkNonCyclic(defn: Tree, tpt: Tree) {
      if (!checkNonCyclic(defn.pos, tpt.tpe, defn.symbol)) {
        tpt setType ErrorType
        defn.symbol.setInfo(ErrorType)
      }
    }
}
    /** Reject conversion of dependent method types to function values,
     *  recursing through curried result types.
     */
    def checkParamsConvertible(tree: Tree, tpe0: Type) {
      def checkParamsConvertible0(tpe: Type) =
        tpe match {
          case MethodType(formals, restpe) =>
            /*
            if (formals.exists(_.typeSymbol == ByNameParamClass) && formals.length != 1)
              error(pos, "methods with `=>`-parameter can be converted to function values only if they take no other parameters")
            if (formals exists (isRepeatedParamType(_)))
              error(pos, "methods with `*`-parameters cannot be converted to function values");
            */
            if (tpe.isDependentMethodType)
              DependentMethodTpeConversionToFunctionError(tree, tpe)
            checkParamsConvertible(tree, restpe)
          case _ =>
        }
      checkParamsConvertible0(tpe0)
    }
    /** Check that type of given tree does not contain local or private
     *  components.
     */
    object checkNoEscaping extends TypeMap {
      private var owner: Symbol = _
      private var scope: Scope = _
      // private/local symbols found so far while mapping over the type
      private var hiddenSymbols: List[Symbol] = _
      /** Check that type `tree` does not refer to private
       *  components unless itself is wrapped in something private
       *  (`owner` tells where the type occurs).
       */
      def privates[T <: Tree](owner: Symbol, tree: T): T =
        check(owner, EmptyScope, WildcardType, tree)
      private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = {
        this.owner = owner
        this.scope = scope
        hiddenSymbols = List()
        val tp1 = apply(tree.tpe)
        // no hidden symbols: keep the mapped type; otherwise try, in order:
        // error propagation, the expected type, the anonymous class bound,
        // existential packing, or report the escaping symbol
        if (hiddenSymbols.isEmpty) tree setType tp1
        else if (hiddenSymbols exists (_.isErroneous)) HiddenSymbolWithError(tree)
        else if (isFullyDefined(pt)) tree setType pt
        else if (tp1.typeSymbol.isAnonymousClass)
          check(owner, scope, pt, tree setType tp1.typeSymbol.classBound)
        else if (owner == NoSymbol)
          tree setType packSymbols(hiddenSymbols.reverse, tp1)
        else if (!isPastTyper) { // privates
          val badSymbol = hiddenSymbols.head
          SymbolEscapesScopeError(tree, badSymbol)
        } else tree
      }
      def addHidden(sym: Symbol) =
        if (!(hiddenSymbols contains sym)) hiddenSymbols = sym :: hiddenSymbols
      override def apply(t: Type): Type = {
        def checkNoEscape(sym: Symbol) {
          if (sym.isPrivate && !sym.hasFlag(SYNTHETIC_PRIVATE)) {
            // walk up the owner chain; the symbol is hidden if its owner (or the
            // owner's linked class) is reached without crossing a private boundary
            var o = owner
            while (o != NoSymbol && o != sym.owner && o != sym.owner.linkedClassOfClass &&
                   !o.isLocalToBlock && !o.isPrivate &&
                   !o.privateWithin.hasTransOwner(sym.owner))
              o = o.owner
            if (o == sym.owner || o == sym.owner.linkedClassOfClass)
              addHidden(sym)
          } else if (sym.owner.isTerm && !sym.isTypeParameterOrSkolem) {
            // term-owned symbols escape if they are defined in the local scope being checked
            var e = scope.lookupEntry(sym.name)
            var found = false
            while (!found && (e ne null) && e.owner == scope) {
              if (e.sym == sym) {
                found = true
                addHidden(sym)
              } else {
                e = scope.lookupNextEntry(e)
              }
            }
          }
        }
        mapOver(
          t match {
            case TypeRef(_, sym, args) =>
              checkNoEscape(sym)
              if (!hiddenSymbols.isEmpty && hiddenSymbols.head == sym &&
                  sym.isAliasType && sameLength(sym.typeParams, args)) {
                hiddenSymbols = hiddenSymbols.tail
                t.dealias
              } else t
            case SingleType(_, sym) =>
              checkNoEscape(sym)
              t
            case _ =>
              t
          })
      }
    }
    /** Re-enter all value parameter symbols into the current scope. */
    def reenterValueParams(vparamss: List[List[ValDef]]) {
      for (vparams <- vparamss)
        for (vparam <- vparams)
          context.scope enter vparam.symbol
    }
    /** Re-enter type parameter symbols into scope, returning their de-skolemized forms. */
    def reenterTypeParams(tparams: List[TypeDef]): List[Symbol] =
      for (tparam <- tparams) yield {
        context.scope enter tparam.symbol
        tparam.symbol.deSkolemize
      }
    /** The qualifying class of a this or super with prefix `qual`:
     *  the innermost enclosing class named `qual` (or any class when `qual`
     *  is empty). When `packageOK` is false, package classes do not qualify;
     *  on failure an error is reported and NoSymbol returned.
     */
    def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean) =
      context.enclClass.owner.ownerChain.find(o => qual.isEmpty || o.isClass && o.name == qual) match {
        case Some(c) if packageOK || !c.isPackageClass => c
        case _ => QualifyingClassError(tree, qual) ; NoSymbol
      }
    /** The typer for an expression, depending on where we are. If we are before a superclass
     *  call, this is a typer over a constructor context; otherwise it is the current typer.
     */
    final def constrTyperIf(inConstr: Boolean): Typer =
      if (inConstr) {
        assert(context.undetparams.isEmpty, context.undetparams)
        newTyper(context.makeConstructorContext)
      } else this
    /** Run `f` with a constructor-context typer when `inConstr`, else with `this`. */
    @inline
    final def withCondConstrTyper[T](inConstr: Boolean)(f: Typer => T): T =
      if (inConstr) {
        assert(context.undetparams.isEmpty, context.undetparams)
        val c = context.makeConstructorContext
        typerWithLocalContext(c)(f)
      } else {
        f(this)
      }
    @inline
    final def typerWithCondLocalContext[T](c: => Context)(cond: Boolean)(f: Typer => T): T =
      if (cond) typerWithLocalContext(c)(f) else f(this)
    /** Run `f` with a typer over `c`, propagating any buffered errors back to `context`. */
    @inline
    final def typerWithLocalContext[T](c: Context)(f: Typer => T): T =
      c.reporter.propagatingErrorsTo(context.reporter)(f(newTyper(c)))
    /** The typer for a label definition. If this is part of a template we
     *  first have to enter the label definition.
     */
    def labelTyper(ldef: LabelDef): Typer =
      if (ldef.symbol == NoSymbol) { // labeldef is part of template
        val typer1 = newTyper(context.makeNewScope(ldef, context.owner))
        typer1.enterLabelDef(ldef)
        typer1
      } else this
    /** Is symbol defined and not stale?
     */
    def reallyExists(sym: Symbol) = {
      if (isStale(sym)) sym.setInfo(NoType)
      sym.exists
    }
    /** A symbol is stale if it is toplevel, to be loaded from a classfile, and
     *  the classfile is produced from a sourcefile which is compiled in the current run.
     */
    def isStale(sym: Symbol): Boolean = {
      sym.rawInfo.isInstanceOf[loaders.ClassfileLoader] && {
        sym.rawInfo.load(sym)
        (sym.sourceFile ne null) &&
        (currentRun.compiledFiles contains sym.sourceFile.path)
      }
    }
    /** Does the context of tree `tree` require a stable type?
     *  True when typing an expression (not an lhs) whose type is narrowable and
     *  whose expected type calls for stability (stable pt, qualifier position,
     *  or a stably-bounded abstract/refinement expected type).
     */
    private def isStableContext(tree: Tree, mode: Mode, pt: Type) = {
      def ptSym = pt.typeSymbol
      def expectsStable = (
           pt.isStable
        || mode.inQualMode && !tree.symbol.isConstant
        || !(tree.tpe <:< pt) && (ptSym.isAbstractType && pt.bounds.lo.isStable || ptSym.isRefinementClass)
      )
      (    isNarrowable(tree.tpe)
        && mode.typingExprNotLhs
        && expectsStable
      )
    }
    /** Make symbol accessible. This means:
     *  If symbol refers to package object, insert `.package` as second to last selector.
     *  (exception for some symbols in scala package which are dealiased immediately)
     *  Call checkAccessible, which sets tree's attributes.
     *  Also note that checkAccessible looks up sym on pre without checking that pre is well-formed
     *  (illegal type applications in pre will be skipped -- that's why typedSelect wraps the resulting tree in a TreeWithDeferredChecks)
     *  @return modified tree and new prefix type
     */
    private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) =
      if (context.isInPackageObject(sym, pre.typeSymbol)) {
        if (pre.typeSymbol == ScalaPackageClass && sym.isTerm) {
          // short cut some aliases. It seems pattern matching needs this
          // to notice exhaustiveness and to generate good code when
          // List extractors are mixed with :: patterns. See Test5 in lists.scala.
          //
          // TODO SI-6609 Eliminate this special case once the old pattern matcher is removed.
          def dealias(sym: Symbol) =
            (atPos(tree.pos.makeTransparent) {gen.mkAttributedRef(sym)} setPos tree.pos, sym.owner.thisType)
          sym.name match {
            case nme.List => return dealias(ListModule)
            case nme.Seq => return dealias(SeqModule)
            case nme.Nil => return dealias(NilModule)
            case _ =>
          }
        }
        // rewrite the qualifier to go through the package object
        val qual = typedQualifier { atPos(tree.pos.makeTransparent) {
          tree match {
            case Ident(_) =>
              val packageObject =
                if (!sym.isOverloaded && sym.owner.isModuleClass) sym.owner.sourceModule // historical optimization, perhaps no longer needed
                else pre.typeSymbol.packageObject
              Ident(packageObject)
            case Select(qual, _) => Select(qual, nme.PACKAGEkw)
            case SelectFromTypeTree(qual, _) => Select(qual, nme.PACKAGEkw)
          }
        }}
        val tree1 = atPos(tree.pos) {
          tree match {
            case Ident(name) => Select(qual, name)
            case Select(_, name) => Select(qual, name)
            case SelectFromTypeTree(_, name) => SelectFromTypeTree(qual, name)
          }
        }
        (checkAccessible(tree1, sym, qual.tpe, qual), qual.tpe)
      } else {
        (checkAccessible(tree, sym, pre, site), pre)
      }
    /** Post-process an identifier or selection node, performing the following:
     *  1. Check that non-function pattern expressions are stable (ignoring volatility concerns -- SI-6815)
     *       (and narrow the type of modules: a module reference in a pattern has type Foo.type, not "object Foo")
     *  2. Check that packages and static modules are not used as values
     *  3. Turn tree type into stable type if possible and required by context.
     *  4. Give getClass calls a more precise type based on the type of the target of the call.
     */
    protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
      // Side effect time! Don't be an idiot like me and think you
      // can move "val sym = tree.symbol" before this line, because
      // inferExprAlternative side-effects the tree's symbol.
      if (tree.symbol.isOverloaded && !mode.inFunMode)
        inferExprAlternative(tree, pt)
      val sym = tree.symbol
      val isStableIdPattern = mode.typingPatternNotConstructor && tree.isTerm
      def isModuleTypedExpr = (
           treeInfo.admitsTypeSelection(tree)
        && (isStableContext(tree, mode, pt) || sym.isModuleNotMethod)
      )
      def isStableValueRequired = (
           isStableIdPattern
        || mode.in(all = EXPRmode, none = QUALmode) && !phase.erasedTypes
      )
      // To fully benefit from special casing the return type of
      // getClass, we have to catch it immediately so expressions like
      // x.getClass().newInstance() are typed with the type of x. TODO: If the
      // type of the qualifier is inaccessible, we can cause private types to
      // escape scope here, e.g. pos/t1107. I'm not sure how to properly handle
      // this so for now it requires the type symbol be public.
      def isGetClassCall = isGetClass(sym) && pre.typeSymbol.isPublic
      def narrowIf(tree: Tree, condition: Boolean) =
        if (condition) tree setType singleType(pre, sym) else tree
      def checkStable(tree: Tree): Tree =
        if (treeInfo.isStableIdentifierPattern(tree)) tree
        else UnstableTreeError(tree)
      if (tree.isErrorTyped)
        tree
      else if (!sym.isValue && isStableValueRequired) // (2)
        NotAValueError(tree, sym)
      else if (isStableIdPattern) // (1)
        // A module reference in a pattern has type Foo.type, not "object Foo"
        narrowIf(checkStable(tree), sym.isModuleNotMethod)
      else if (isModuleTypedExpr) // (3)
        narrowIf(tree, true)
      else if (isGetClassCall) // (4)
        tree setType MethodType(Nil, getClassReturnType(pre))
      else
        tree
    }
    /** Can this type be narrowed to a singleton? (Always true past erasure for wrapped types.) */
    private def isNarrowable(tpe: Type): Boolean = unwrapWrapperTypes(tpe) match {
      case TypeRef(_, _, _) | RefinedType(_, _) => true
      case _ => !phase.erasedTypes
    }
    /** Narrow a parameterless method reference to a singleton result type
     *  when its prefix is stable and the context requires stability.
     */
    def stabilizeFun(tree: Tree, mode: Mode, pt: Type): Tree = {
      val sym = tree.symbol
      val pre = tree match {
        case Select(qual, _) => qual.tpe
        case _ => NoPrefix
      }
      def stabilizable = (
           pre.isStable
        && sym.tpe.params.isEmpty
        && (isStableContext(tree, mode, pt) || sym.isModule)
      )
      tree.tpe match {
        case MethodType(_, _) if stabilizable => tree setType MethodType(Nil, singleType(pre, sym)) // TODO: should this be a NullaryMethodType?
        case _ => tree
      }
    }
    /** The member with given name of given qualifier tree.
     *  Local (private/this-qualified) members are visible only from within the
     *  class itself, or once erasure has run.
     */
    def member(qual: Tree, name: Name) = {
      def callSiteWithinClass(clazz: Symbol) = context.enclClass.owner hasTransOwner clazz
      val includeLocals = qual.tpe match {
        case ThisType(clazz) if callSiteWithinClass(clazz) => true
        case SuperType(clazz, _) if callSiteWithinClass(clazz.typeSymbol) => true
        case _ => phase.next.erasedTypes
      }
      if (includeLocals) qual.tpe member name
      else qual.tpe nonLocalMember name
    }
    /** Run `op` with errors buffered instead of reported. On success returns
     *  SilentResultValue (emitting any warnings); on failure, a SilentTypeError
     *  carrying the buffered errors/warnings. A fresh silent context is only
     *  created when the current one reports errors or differs in ambiguity
     *  handling or tree; undetparams, type bounds and named-apply info are
     *  threaded back into the original context.
     */
    def silent[T](op: Typer => T,
                  reportAmbiguousErrors: Boolean = context.ambiguousErrors,
                  newtree: Tree = context.tree): SilentResult[T] = {
      val rawTypeStart = if (Statistics.canEnable) Statistics.startCounter(rawTypeFailed) else null
      val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberFailed) else null
      val subtypeStart = if (Statistics.canEnable) Statistics.startCounter(subtypeFailed) else null
      val failedSilentStart = if (Statistics.canEnable) Statistics.startTimer(failedSilentNanos) else null
      def stopStats() = {
        if (Statistics.canEnable) Statistics.stopCounter(rawTypeFailed, rawTypeStart)
        if (Statistics.canEnable) Statistics.stopCounter(findMemberFailed, findMemberStart)
        if (Statistics.canEnable) Statistics.stopCounter(subtypeFailed, subtypeStart)
        if (Statistics.canEnable) Statistics.stopTimer(failedSilentNanos, failedSilentStart)
      }
      @inline def wrapResult(reporter: ContextReporter, result: T) =
        if (reporter.hasErrors) {
          stopStats()
          SilentTypeError(reporter.errors.toList, reporter.warnings.toList)
        } else SilentResultValue(result)
      try {
        if (context.reportErrors ||
            reportAmbiguousErrors != context.ambiguousErrors ||
            newtree != context.tree) {
          val context1 = context.makeSilent(reportAmbiguousErrors, newtree)
          context1.undetparams = context.undetparams
          context1.savedTypeBounds = context.savedTypeBounds
          context1.namedApplyBlockInfo = context.namedApplyBlockInfo
          val typer1 = newTyper(context1)
          val result = op(typer1)
          context.undetparams = context1.undetparams
          context.savedTypeBounds = context1.savedTypeBounds
          context.namedApplyBlockInfo = context1.namedApplyBlockInfo
          // If we have a successful result, emit any warnings it created.
          if (!context1.reporter.hasErrors)
            context1.reporter.emitWarnings()
          wrapResult(context1.reporter, result)
        } else {
          assert(context.bufferErrors || isPastTyper, "silent mode is not available past typer")
          context.reporter.withFreshErrorBuffer {
            wrapResult(context.reporter, op(this))
          }
        }
      } catch {
        case ex: CyclicReference => throw ex
        case ex: TypeError =>
          // fallback in case TypeError is still thrown
          // @H this happens for example in cps annotation checker
          stopStats()
          SilentTypeError(TypeErrorWrapper(ex))
      }
    }
    /** Check whether feature given by `featureTrait` is enabled.
     *  If it is not, issue an error or a warning depending on whether the feature is required.
     *  @param construct  A string expression that is substituted for "#" in the feature description string
     *  @param immediate  When set, feature check is run immediately, otherwise it is run
     *                    at the end of the typechecking run for the enclosing unit.  This
     *                    is done to avoid potential cyclic reference errors by implicits
     *                    that are forced too early.
     *  @return if feature check is run immediately: true if feature is enabled, false otherwise
     *          if feature check is delayed or suppressed because we are past typer: true
     */
    def checkFeature(pos: Position, featureTrait: Symbol, construct: => String = "", immediate: Boolean = false): Boolean =
      if (isPastTyper) true
      else {
        val nestedOwners =
          featureTrait.owner.ownerChain.takeWhile(_ != languageFeatureModule.moduleClass).reverse
        // Dotted name as used in `-language:...` and `import scala.language....`,
        // e.g. "experimental.macros" for a feature nested under `experimental`.
        val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name
        def action(): Boolean = {
          // Enabled either via an implicit feature import or via the -language option.
          def hasImport = inferImplicitByType(featureTrait.tpe, context).isSuccess
          def hasOption = settings.language contains featureName
          val OK = hasImport || hasOption
          if (!OK) {
            // Every feature trait carries a LanguageFeatureAnnot with (description, required).
            val Some(AnnotationInfo(_, List(Literal(Constant(featureDesc: String)), Literal(Constant(required: Boolean))), _)) =
              featureTrait getAnnotation LanguageFeatureAnnot
            context.featureWarning(pos, featureName, featureDesc, featureTrait, construct, required)
          }
          OK
        }
        if (immediate) {
          action()
        } else {
          // Defer to the end of the unit's typechecking (see scaladoc above).
          unit.toCheck += action
          true
        }
      }
def checkExistentialsFeature(pos: Position, tpe: Type, prefix: String) = tpe match {
case extp: ExistentialType if !extp.isRepresentableWithWildcards =>
checkFeature(pos, ExistentialsFeature, prefix+" "+tpe)
case _ =>
}
    /**
     * Convert a SAM type to the corresponding FunctionType,
     * extrapolating BoundedWildcardTypes in the process
     * (no type precision is lost by the extrapolation,
     * but this facilitates dealing with the types arising from Java's use-site variance).
     *
     * Returns `NoType` when no SAM symbol is found, or when `tp` already is a
     * FunctionN type (Functions must not be treated as SAMs).
     */
    def samToFunctionType(tp: Type, sam: Symbol = NoSymbol): Type = {
      // Use the caller-supplied SAM symbol if given, otherwise locate it in `tp`.
      val samSym = sam orElse samOf(tp)
      // The FunctionN class matching the SAM's arity, or NoSymbol beyond MaxFunctionArity.
      def correspondingFunctionSymbol = {
        val numVparams = samSym.info.params.length
        if (numVparams > definitions.MaxFunctionArity) NoSymbol
        else FunctionClass(numVparams)
      }
      if (samSym.exists && tp.typeSymbol != correspondingFunctionSymbol) // don't treat Functions as SAMs
        wildcardExtrapolation(normalize(tp memberInfo samSym))
      else NoType
    }
    /** Perform the following adaptations of expression, pattern or type `tree` wrt to
     *  given mode `mode` and given prototype `pt`:
     *  (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
     *  (0) Convert expressions with constant types to literals (unless in interactive/scaladoc mode)
     *  (1) Resolve overloading, unless mode contains FUNmode
     *  (2) Apply parameterless functions
     *  (3) Apply polymorphic types to fresh instances of their type parameters and
     *      store these instances in context.undetparams,
     *      unless followed by explicit type application.
     *  (4) Do the following to unapplied methods used as values:
     *  (4.1) If the method has only implicit parameters pass implicit arguments
     *  (4.2) otherwise, if `pt` is a function type and method is not a constructor,
     *        convert to function by eta-expansion,
     *  (4.3) otherwise, if the method is nullary with a result type compatible to `pt`
     *        and it is not a constructor, apply it to ()
     *  otherwise issue an error
     *  (5) Convert constructors in a pattern as follows:
     *  (5.1) If constructor refers to a case class factory, set tree's type to the unique
     *        instance of its primary constructor that is a subtype of the expected type.
     *  (5.2) If constructor refers to an extractor, convert to application of
     *        unapply or unapplySeq method.
     *
     *  (6) Convert all other types to TypeTree nodes.
     *  (7) When in TYPEmode but not FUNmode or HKmode, check that types are fully parameterized
     *      (7.1) In HKmode, higher-kinded types are allowed, but they must have the expected kind-arity
     *  (8) When in both EXPRmode and FUNmode, add apply method calls to values of object type.
     *  (9) If there are undetermined type variables and not POLYmode, infer expression instance
     *  Then, if tree's type is not a subtype of expected type, try the following adaptations:
     *  (10) If the expected type is Byte, Short or Char, and the expression
     *       is an integer fitting in the range of that type, convert it to that type.
     *  (11) Widen numeric literals to their expected type, if necessary
     *  (12) When in mode EXPRmode, convert E to { E; () } if expected type is scala.Unit.
     *  (13) When in mode EXPRmode, apply AnnotationChecker conversion if expected type is annotated.
     *  (14) When in mode EXPRmode, do SAM conversion
     *  (15) When in mode EXPRmode, apply a view
     *  If all this fails, error
     */
    protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree = {
      // Are there type parameters in the current context still awaiting inference?
      def hasUndets = context.undetparams.nonEmpty
      // ...and are we in a mode where they must be instantiated now (i.e. not POLYmode)?
      def hasUndetsInMonoMode = hasUndets && !mode.inPolyMode
      /** (4.1) The tree is an unapplied method whose parameters are all implicit:
       *  infer and apply the implicit arguments.  If that fails and an untyped
       *  `original` tree is available, fall back to re-typechecking the original.
       */
      def adaptToImplicitMethod(mt: MethodType): Tree = {
        if (hasUndets) { // (9) -- should revisit dropped condition `hasUndetsInMonoMode`
          // dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
          // needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
          context.undetparams = inferExprInstance(tree, context.extractUndetparams(), pt,
            // approximate types that depend on arguments since dependency on implicit argument is like dependency on type parameter
            mt.approximate,
            keepNothings = false,
            useWeaklyCompatible = true) // #3808
        }
        // avoid throwing spurious DivergentImplicit errors
        if (context.reporter.hasErrors)
          setError(tree)
        else
          withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree))(typer1 =>
            if (original != EmptyTree && pt != WildcardType) (
              // First try: supply implicit args and typecheck against the expected type.
              typer1 silent { tpr =>
                val withImplicitArgs = tpr.applyImplicitArgs(tree)
                if (tpr.context.reporter.hasErrors) tree // silent will wrap it in SilentTypeError anyway
                else tpr.typed(withImplicitArgs, mode, pt)
              }
                orElse { _ =>
                  // Second try: strip attributes from the original tree and re-typecheck it.
                  val resetTree = resetAttrs(original)
                  resetTree match {
                    case treeInfo.Applied(fun, targs, args) =>
                      if (fun.symbol != null && fun.symbol.isError)
                        // SI-9041 Without this, we leak error symbols past the typer!
                        // because the fallback typechecking notices the error-symbol,
                        // refuses to re-attempt typechecking, and presumes that someone
                        // else was responsible for issuing the related type error!
                        fun.setSymbol(NoSymbol)
                    case _ =>
                  }
                  debuglog(s"fallback on implicits: ${tree}/$resetTree")
                  val tree1 = typed(resetTree, mode)
                  // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
                  // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
                  tree1 setType pluginsTyped(tree1.tpe, this, tree1, mode, pt)
                  if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
                }
            )
            else
              typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
          )
      }
      /** (4.2)/(4.3) Adapt an unapplied (non-implicit) method used as a value:
       *  eta-expand when a function (or SAM) type is expected, apply a nullary
       *  method to an empty argument list, otherwise issue a missing-args error.
       */
      def instantiateToMethodType(mt: MethodType): Tree = {
        val meth = tree match {
          // a partial named application is a block (see comment in EtaExpansion)
          case Block(_, tree1) => tree1.symbol
          case _ => tree.symbol
        }
        // No adaptation applies: report only when implicits are enabled, else just mark the error.
        def cantAdapt =
          if (context.implicitsEnabled) MissingArgsForMethodTpeError(tree, meth)
          else setError(tree)
        // constructors do not eta-expand
        if (meth.isConstructor) cantAdapt
        // (4.2) eta-expand method value when function or sam type is expected
        else if (isFunctionType(pt) || (!mt.params.isEmpty && samOf(pt).exists)) {
          // SI-9536 `!mt.params.isEmpty &&`: for backwards compatiblity with 2.11,
          // we don't adapt a zero-arg method value to a SAM
          // In 2.13, we won't do any eta-expansion for zero-arg method values, but we should deprecate first
          debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt")
          checkParamsConvertible(tree, tree.tpe)
          // SI-7187 eta-expansion of zero-arg method value is deprecated, switch order of (4.3) and (4.2) in 2.13
          def isExplicitEtaExpansion = original match {
            case Typed(_, Function(Nil, EmptyTree)) => true // tree shape for `f _`
            case _ => false
          }
          if (mt.params.isEmpty && !isExplicitEtaExpansion) {
            currentRun.reporting.deprecationWarning(tree.pos, NoSymbol,
              s"Eta-expansion of zero-argument method values is deprecated. Did you intend to write ${Apply(tree, Nil)}?", "2.12.0")
          }
          val tree0 = etaExpand(context.unit, tree, this)
          // #2624: need to infer type arguments for eta expansion of a polymorphic method
          // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
          // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
          // can't type with the expected type, as we can't recreate the setup in (3) without calling typed
          // (note that (3) does not call typed to do the polymorphic type instantiation --
          // it is called after the tree has been typed with a polymorphic expected result type)
          if (hasUndets)
            instantiate(typed(tree0, mode), mode, pt)
          else
            typed(tree0, mode, pt)
        }
        // (4.3) apply to empty argument list -- TODO 2.13: move this one case up to avoid eta-expanding at arity 0
        else if (mt.params.isEmpty) adapt(typed(Apply(tree, Nil) setPos tree.pos), mode, pt, original)
        else cantAdapt
      }
      /** (6)/(7) Adapt a tree that denotes a type: enforce proper-type and
       *  kind-arity requirements and wrap non-TypeTree results in a TypeTree.
       */
      def adaptType(): Tree = {
        // @M When not typing a type constructor (!context.inTypeConstructorAllowed)
        // or raw type, types must be of kind *,
        // and thus parameterized types must be applied to their type arguments
        // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
        def properTypeRequired = (
          tree.hasSymbolField
            && !context.inTypeConstructorAllowed
            && !context.unit.isJava
        )
        // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
        // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
        // @M: removed check for tree.hasSymbolField and replace tree.symbol by tree.tpe.symbol
        // (TypeTree's must also be checked here, and they don't directly have a symbol)
        def kindArityMismatch = (
          context.inTypeConstructorAllowed
            && !sameLength(tree.tpe.typeParams, pt.typeParams)
        )
        // Note that we treat Any and Nothing as kind-polymorphic.
        // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
        // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
        def kindArityMismatchOk = tree.tpe.typeSymbol match {
          case NothingClass | AnyClass => true
          case _ => pt == WildcardType
        }
        // todo. It would make sense when mode.inFunMode to instead use
        //    tree setType tree.tpe.normalize
        // when typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
        // because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
        // but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
        if (mode.inFunMode)
          tree
        else if (properTypeRequired && tree.symbol.typeParams.nonEmpty) // (7)
          MissingTypeParametersError(tree)
        else if (kindArityMismatch && !kindArityMismatchOk) // (7.1) @M: check kind-arity
          KindArityMismatchError(tree, pt)
        else tree match { // (6)
          case TypeTree() => tree
          case _ => TypeTree(tree.tpe) setOriginal tree
        }
      }
      /** (8) Rewrite `tree` to `tree.apply`, possibly after an implicit conversion
       *  that provides an `apply` member, then typecheck the resulting selection.
       */
      def insertApply(): Tree = {
        assert(!context.inTypeConstructorAllowed, mode) //@M
        val adapted = adaptToName(tree, nme.apply)
        // Stabilize the qualifier so the selection of `apply` typechecks on a stable prefix.
        val qual = gen.stabilize(adapted)
        typedPos(tree.pos, mode, pt) {
          Select(qual setPos tree.pos.makeTransparent, nme.apply)
        }
      }
      /** (0) Replace a constant-typed expression with the literal constant `value`. */
      def adaptConstant(value: Constant): Tree = {
        val sym = tree.symbol
        // Folding a reference away still counts as a use of a deprecated symbol.
        if (sym != null && sym.isDeprecated)
          context.deprecationWarning(tree.pos, sym)
        treeCopy.Literal(tree, value)
      }
      // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
      // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
      // Here's my hypothesis why this happens. The pattern matcher defines a variable of type
      //
      //   val x: T = expr
      //
      // where T is the type of expr, but T contains existential skolems ts.
      // In that case, this value definition does not typecheck.
      // The value definition
      //
      //   val x: T forSome { ts } = expr
      //
      // would typecheck. Or one can simply leave out the type of the `val`:
      //
      //   val x = expr
      //
      // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
      // type is an existential type.
      //
      // The reason for both failures have to do with the way we (don't) transform
      // skolem types along with the trees that contain them. We'd need a
      // radically different approach to do it. But before investing a lot of time to
      // to do this (I have already sunk 3 full days with in the end futile attempts
      // to consistently transform skolems and fix 6029), I'd like to
      // investigate ways to avoid skolems completely.
      //
      // upd. The same problem happens when we try to typecheck the result of macro expansion against its expected type
      // (which is the return type of the macro definition instantiated in the context of expandee):
      //
      //   Test.scala:2: error: type mismatch;
      //     found   : $u.Expr[Class[_ <: Object]]
      //     required: reflect.runtime.universe.Expr[Class[?0(in value <local Test>)]] where type ?0(in value <local Test>) <: Object
      //     scala.reflect.runtime.universe.reify(new Object().getClass)
      //                                         ^
      // Therefore following Martin's advice I use this logic to recover from skolem errors after macro expansions
      // (by adding the ` || tree.attachments.get[MacroExpansionAttachment].isDefined` clause to the conditional above).
      //
      def adaptMismatchedSkolems() = {
        // See the long rationale above: past typer (or after macro expansion) a
        // skolem/existential mismatch may be recovered from instead of reported.
        def canIgnoreMismatch = (
          !context.reportErrors && isPastTyper
            || tree.hasAttachment[MacroExpansionAttachment]
        )
        // The quantified symbols of an existential expected type.
        def bound = pt match {
          case ExistentialType(qs, _) => qs
          case _ => Nil
        }
        def msg = sm"""
          |Recovering from existential or skolem type error in
          |  $tree
          |with type: ${tree.tpe}
          |       pt: $pt
          |  context: ${context.tree}
          |  adapted
          """.trim
        val boundOrSkolems = if (canIgnoreMismatch) bound ++ pt.skolemsExceptMethodTypeParams else Nil
        boundOrSkolems match {
          // Nothing to recover with: this is a genuine type error.
          case Nil => AdaptTypeError(tree, tree.tpe, pt) ; setError(tree)
          // Retry adaptation against `pt` with the offending symbols replaced by wildcards.
          case _ => logResult(msg)(adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt)))
        }
      }
      /** (10)--(15) Last-resort adaptations for an expression whose type does not
       *  conform to `pt`: value discard, numeric widening, annotation adaptation,
       *  SAM conversion and implicit views; failing all of those, report the error.
       */
      def adaptExprNotFunMode(): Tree = {
        // Issue the (optional) pending error and either mark the tree erroneous
        // or attempt skolem-mismatch recovery.
        def lastTry(err: AbsTypeError = null): Tree = {
          debuglog("error tree = " + tree)
          if (settings.debug && settings.explaintypes) explainTypes(tree.tpe, pt)
          if (err ne null) context.issue(err)
          if (tree.tpe.isErroneous || pt.isErroneous) setError(tree)
          else adaptMismatchedSkolems()
        }
        // TODO: should we even get to fallbackAfterVanillaAdapt for an ill-typed tree?
        if (mode.typingExprNotFun && !tree.tpe.isErroneous) {
          @inline def tpdPos(transformed: Tree) = typedPos(tree.pos, mode, pt)(transformed)
          @inline def tpd(transformed: Tree) = typed(transformed, mode, pt)
          @inline def warnValueDiscard(): Unit = if (!isPastTyper && settings.warnValueDiscard) {
            // Suppress the warning for fluent APIs that return `this` (this.type results).
            def isThisTypeResult = (tree, tree.tpe) match {
              case (Apply(Select(receiver, _), _), SingleType(_, sym)) => sym == receiver.symbol
              case _ => false
            }
            if (!isThisTypeResult) context.warning(tree.pos, "discarded non-Unit value")
          }
          @inline def warnNumericWiden(): Unit =
            if (!isPastTyper && settings.warnNumericWiden) context.warning(tree.pos, "implicit numeric widening")
          // The <: Any requirement inhibits attempts to adapt continuation types to non-continuation types.
          val anyTyped = tree.tpe <:< AnyTpe
          pt.dealias match {
            case TypeRef(_, UnitClass, _) if anyTyped => // (12)
              warnValueDiscard() ; tpdPos(gen.mkUnitBlock(tree))
            case TypeRef(_, numValueCls, _) if anyTyped && isNumericValueClass(numValueCls) && isNumericSubType(tree.tpe, pt) => // (10) (11)
              warnNumericWiden() ; tpdPos(Select(tree, s"to${numValueCls.name}"))
            case dealiased if dealiased.annotations.nonEmpty && canAdaptAnnotations(tree, this, mode, pt) => // (13)
              tpd(adaptAnnotations(tree, this, mode, pt))
            case _ =>
              if (hasUndets) instantiate(tree, mode, pt)
              else {
                // (14) sam conversion
                // TODO: figure out how to avoid partially duplicating typedFunction (samMatchingFunction)
                // Could we infer the SAM type, assign it to the tree and add the attachment,
                // all in one fell swoop at the end of typedFunction?
                val samAttach = inferSamType(tree, pt, mode)
                if (samAttach.samTp ne NoType) tree.setType(samAttach.samTp).updateAttachment(samAttach)
                else { // (15) implicit view application
                  val coercion =
                    if (context.implicitsEnabled) inferView(tree, tree.tpe, pt)
                    else EmptyTree
                  if (coercion ne EmptyTree) {
                    def msg = s"inferred view from ${tree.tpe} to $pt via $coercion: ${coercion.tpe}"
                    if (settings.logImplicitConv) context.echo(tree.pos, msg)
                    else debuglog(msg)
                    // Typecheck the view application in an implicit (silent) context so
                    // its failure can fall through to lastTry with the original error.
                    val viewApplied = new ApplyImplicitView(coercion, List(tree)) setPos tree.pos
                    val silentContext = context.makeImplicit(context.ambiguousErrors)
                    val typedView = newTyper(silentContext).typed(viewApplied, mode, pt)
                    silentContext.reporter.firstError match {
                      case None => typedView
                      case Some(err) => lastTry(err)
                    }
                  } else lastTry()
                }
              }
          }
        } else lastTry()
      }
      /** Default adaptation for trees not matched by the earlier special cases:
       *  type trees, macro applications, constructor patterns, `apply` insertion,
       *  type-parameter instantiation, and finally the conformance checks.
       */
      def vanillaAdapt(tree: Tree) = {
        // Could inserting `.apply` make this tree applicable (possibly after a view)?
        def applyPossible = {
          def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
          def hasPolymorphicApply = applyMeth.alternatives exists (_.tpe.typeParams.nonEmpty)
          def hasMonomorphicApply = applyMeth.alternatives exists (_.tpe.paramSectionCount > 0)
          dyna.acceptsApplyDynamic(tree.tpe) || (
            if (mode.inTappMode)
              tree.tpe.typeParams.isEmpty && hasPolymorphicApply
            else
              hasMonomorphicApply
          )
        }
        def shouldInsertApply(tree: Tree) = mode.typingExprFun && {
          tree.tpe match {
            case _: MethodType | _: OverloadedType | _: PolyType => false
            case _ => applyPossible
          }
        }
        if (tree.isType)
          adaptType()
        else if (mode.typingExprNotFun && treeInfo.isMacroApplication(tree) && !isMacroExpansionSuppressed(tree))
          macroExpand(this, tree, mode, pt)
        else if (mode.typingConstructorPattern)
          typedConstructorPattern(tree, pt)
        else if (shouldInsertApply(tree))
          insertApply()
        else if (hasUndetsInMonoMode) { // (9)
          assert(!context.inTypeConstructorAllowed, context) //@M
          instantiatePossiblyExpectingUnit(tree, mode, pt)
        }
        else if (tree.tpe <:< pt)
          tree
        else if (mode.inPatternMode && { inferModulePattern(tree, pt); isPopulated(tree.tpe, approximateAbstracts(pt)) })
          tree
        else {
          // Constant folding may make the tree conform where the unfolded tree did not.
          val constFolded = constfold(tree, pt)
          if (constFolded.tpe <:< pt) adapt(constFolded, mode, pt, original) // set stage for (0)
          else adaptExprNotFunMode() // (10) -- (15)
        }
      }
      // begin adapt
      // Macro impl references are normalized first; otherwise dispatch on the
      // tree's type following the numbered cases documented on `adapt` above.
      if (isMacroImplRef(tree)) {
        if (treeInfo.isMacroApplication(tree)) adapt(unmarkMacroImplRef(tree), mode, pt, original)
        else tree
      } else tree.tpe match {
        case atp @ AnnotatedType(_, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
          adaptAnnotations(tree, this, mode, pt)
        case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && canAdaptConstantTypeToLiteral => // (0)
          adaptConstant(value)
        case OverloadedType(pre, alts) if !mode.inFunMode => // (1)
          inferExprAlternative(tree, pt)
          adaptAfterOverloadResolution(tree, mode, pt, original)
        case NullaryMethodType(restpe) => // (2)
          adapt(tree setType restpe, mode, pt, original)
        case TypeRef(_, ByNameParamClass, arg :: Nil) if mode.inExprMode => // (2)
          adapt(tree setType arg, mode, pt, original)
        case tp if mode.typingExprNotLhs && isExistentialType(tp) && !isSyntheticAccessor(context.owner) =>
          // Open (skolemize) an existential type before further adaptation.
          adapt(tree setType tp.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
        case PolyType(tparams, restpe) if mode.inNone(TAPPmode | PATTERNmode) && !context.inTypeConstructorAllowed => // (3)
          // assert((mode & HKmode) == 0) //@M a PolyType in HKmode represents an anonymous type function,
          // we're in HKmode since a higher-kinded type is expected --> hence, don't implicitly apply it to type params!
          // ticket #2197 triggered turning the assert into a guard
          // I guess this assert wasn't violated before because type aliases weren't expanded as eagerly
          //  (the only way to get a PolyType for an anonymous type function is by normalisation, which applies eta-expansion)
          // -- are we sure we want to expand aliases this early?
          // -- what caused this change in behaviour??
          val tparams1 = cloneSymbols(tparams)
          val tree1 = (
            if (tree.isType) tree
            else TypeApply(tree, tparams1 map (tparam => TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos
          )
          // Record the fresh type parameters as undetermined, to be inferred later.
          context.undetparams ++= tparams1
          notifyUndetparamsAdded(tparams1)
          adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
        case mt: MethodType if mode.typingExprNotFunNotLhs && mt.isImplicit => // (4.1)
          adaptToImplicitMethod(mt)
        case mt: MethodType if mode.typingExprNotFunNotLhs && !hasUndetsInMonoMode && !treeInfo.isMacroApplicationOrBlock(tree) =>
          instantiateToMethodType(mt)
        case _ =>
          vanillaAdapt(tree)
      }
    }
    // This just exists to help keep track of the spots where we have to adapt a tree after
    // overload resolution. These proved hard to find during the fix for SI-8267.
    // (Deliberately a plain forwarder to `adapt`; grep for callers to locate those spots.)
    def adaptAfterOverloadResolution(tree: Tree, mode: Mode, pt: Type = WildcardType, original: Tree = EmptyTree): Tree = {
      adapt(tree, mode, pt, original)
    }
    /** (9) Infer instances for the context's undetermined type parameters from
     *  `tree` against expected type `pt`, then re-adapt the instantiated tree.
     */
    def instantiate(tree: Tree, mode: Mode, pt: Type): Tree = {
      inferExprInstance(tree, context.extractUndetparams(), pt)
      adapt(tree, mode, pt)
    }
    /** If the expected type is Unit: try instantiating type arguments
     *  with expected type Unit, but if that fails, try again with pt = WildcardType
     *  and discard the expression.
     */
    def instantiateExpectingUnit(tree: Tree, mode: Mode): Tree = {
      val savedUndetparams = context.undetparams
      silent(_.instantiate(tree, mode, UnitTpe)) orElse { _ =>
        // Instantiation against Unit failed: restore the inference state, instantiate
        // without an expected type, and wrap the result in a value-discarding block.
        context.undetparams = savedUndetparams
        val valueDiscard = atPos(tree.pos)(gen.mkUnitBlock(instantiate(tree, mode, WildcardType)))
        typed(valueDiscard, mode, UnitTpe)
      }
    }
def instantiatePossiblyExpectingUnit(tree: Tree, mode: Mode, pt: Type): Tree = {
if (mode.typingExprNotFun && pt.typeSymbol == UnitClass && !tree.tpe.isInstanceOf[MethodType])
instantiateExpectingUnit(tree, mode)
else
instantiate(tree, mode, pt)
}
    /** May an implicit view legitimately be applied to `qual`?  Requires a term
     *  of a proper, non-error, non-bottom type, not already a view application,
     *  with implicits or enrichment enabled, and only before the typer phase ends.
     */
    private def isAdaptableWithView(qual: Tree) = {
      val qtpe = qual.tpe.widen
      (  !isPastTyper
        && qual.isTerm
        && !qual.isInstanceOf[Super]
        && ((qual.symbol eq null) || !qual.symbol.isTerm || qual.symbol.isValue)
        && !qtpe.isError
        && !qtpe.typeSymbol.isBottomClass
        && qtpe != WildcardType
        && !qual.isInstanceOf[ApplyImplicitView] // don't chain views
        && (context.implicitsEnabled || context.enrichmentEnabled)
        // Elaborating `context.implicitsEnabled`:
        // don't try to adapt a top-level type that's the subject of an implicit search
        // this happens because, if isView, typedImplicit tries to apply the "current" implicit value to
        // a value that needs to be coerced, so we check whether the implicit value has an `apply` method.
        // (If we allow this, we get divergence, e.g., starting at `conforms` during ant quick.bin)
        // Note: implicit arguments are still inferred (this kind of "chaining" is allowed)
      )
    }
    /** Try to apply an implicit view to `qual` so that it conforms to
     *  `searchTemplate`.  Returns `qual` unchanged if no view applies or `qual`
     *  is not adaptable with a view at all.
     */
    def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = {
      if (isAdaptableWithView(qual)) {
        qual.tpe.dealiasWiden match {
          case et: ExistentialType =>
            qual setType et.skolemizeExistential(context.owner, qual) // open the existential
          case _ =>
        }
        inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) match {
          case EmptyTree => qual
          case coercion =>
            if (settings.logImplicitConv)
              context.echo(qual.pos,
                "applied implicit conversion from %s to %s = %s".format(
                  qual.tpe, searchTemplate, coercion.symbol.defString))
            // Re-typecheck the coerced qualifier so the result is usable as a prefix.
            typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual))))
        }
      }
      else qual
    }
    /** Try to apply an implicit conversion to `qual` so that it contains
     *  a method `name` which can be applied to arguments `args` with expected type `pt`.
     *  If `pt` is defined, there is a fallback to try again with pt = ?.
     *  This helps avoiding propagating result information too far and solves
     *  #1756.
     *  If no conversion is found, return `qual` unchanged.
     *
     */
    def adaptToArguments(qual: Tree, name: Name, args: List[Tree], pt: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = {
      def doAdapt(restpe: Type) =
        //util.trace("adaptToArgs "+qual+", name = "+name+", argtpes = "+(args map (_.tpe))+", pt = "+pt+" = ")
        adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe), reportAmbiguous, saveErrors)
      if (pt == WildcardType)
        doAdapt(pt)
      else silent(_ => doAdapt(pt)) filter (_ != qual) orElse (_ =>
        // Adaptation with the precise result type failed (or found only the identity):
        // retry with WildcardType as the expected result type.
        logResult(s"fallback on implicits in adaptToArguments: $qual.$name")(doAdapt(WildcardType))
      )
    }
    /** Try to apply an implicit conversion to `qual` so that it contains
     *  a method `name`. If that's ambiguous try taking arguments into
     *  account using `adaptToArguments`.
     */
    def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Mode, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = {
      // If the plain HasMember search failed and `tree` is being applied to arguments,
      // retry the search taking the (independently typechecked) argument types into account.
      def onError(reportError: => Tree): Tree = context.tree match {
        case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
          ( silent (_.typedArgs(args.map(_.duplicate), mode))
              filter (xs => !(xs exists (_.isErrorTyped)))
              map (xs => adaptToArguments(qual, name, xs, WildcardType, reportAmbiguous, saveErrors))
              orElse ( _ => reportError)
          )
        case _ =>
          reportError
      }
      silent(_.adaptToMember(qual, HasMember(name), reportAmbiguous = false)) orElse (errs =>
        onError {
          if (reportAmbiguous) errs foreach (context issue _)
          setError(tree)
        }
      )
    }
/** Try to apply an implicit conversion to `qual` to that it contains a
* member `name` of arbitrary type.
* If no conversion is found, return `qual` unchanged.
*/
def adaptToName(qual: Tree, name: Name) =
if (member(qual, name) != NoSymbol) qual
else adaptToMember(qual, HasMember(name))
private def validateNoCaseAncestor(clazz: Symbol) = {
if (!phase.erasedTypes) {
for (ancestor <- clazz.ancestors find (_.isCase)) {
context.error(clazz.pos, (
"case %s has case ancestor %s, but case-to-case inheritance is prohibited."+
" To overcome this limitation, use extractors to pattern match on non-leaf nodes."
).format(clazz, ancestor.fullName))
}
}
}
    /** Check the body of a value class (or universal trait) for constructs that
     *  the current implementation of value classes cannot support, issuing
     *  errors for each offending statement.
     */
    private def checkEphemeral(clazz: Symbol, body: List[Tree]) = {
      // NOTE: Code appears to be messy in this method for good reason: it clearly
      // communicates the fact that it implements rather ad-hoc, arbitrary and
      // non-regular set of rules that identify features that interact badly with
      // value classes. This code can be cleaned up a lot once implementation
      // restrictions are addressed.
      val isValueClass = !clazz.isTrait
      def where = if (isValueClass) "value class" else "universal trait extending from class Any"
      def implRestriction(tree: Tree, what: String) =
        context.error(tree.pos, s"implementation restriction: $what is not allowed in $where" +
          "\\nThis restriction is planned to be removed in subsequent releases.")
      /**
       * Deeply traverses the tree in search of constructs that are not allowed
       * in value classes (at any nesting level).
       *
       * All restrictions this object imposes are probably not fundamental but require
       * fair amount of work and testing. We are conservative for now when it comes
       * to allowing language features to interact with value classes.
       * */
      object checkEphemeralDeep extends Traverser {
        override def traverse(tree: Tree): Unit = if (isValueClass) {
          tree match {
            case _: ModuleDef =>
              //see https://issues.scala-lang.org/browse/SI-6359
              implRestriction(tree, "nested object")
            //see https://issues.scala-lang.org/browse/SI-6444
            //see https://issues.scala-lang.org/browse/SI-6463
            case cd: ClassDef if !cd.symbol.isAnonymousClass => // Don't warn about partial functions, etc. SI-7571
              implRestriction(tree, "nested class") // avoiding Type Tests that might check the $outer pointer.
            case Select(sup @ Super(qual, mix), selector) if selector != nme.CONSTRUCTOR && qual.symbol == clazz && mix != tpnme.EMPTY =>
              //see https://issues.scala-lang.org/browse/SI-6483
              implRestriction(sup, "qualified super reference")
            case _ =>
          }
          super.traverse(tree)
        }
      }
      // Shallow per-statement checks; the deep traversal above handles nested trees in DefDef bodies.
      for (stat <- body) {
        def notAllowed(what: String) = context.error(stat.pos, s"$what is not allowed in $where")
        stat match {
          // see https://issues.scala-lang.org/browse/SI-6444
          // see https://issues.scala-lang.org/browse/SI-6463
          case ClassDef(mods, _, _, _) if isValueClass =>
            implRestriction(stat, s"nested ${ if (mods.isTrait) "trait" else "class" }")
          case _: Import | _: ClassDef | _: TypeDef | EmptyTree => // OK
          case DefDef(_, name, _, _, _, rhs) =>
            if (stat.symbol.isAuxiliaryConstructor)
              notAllowed("secondary constructor")
            else if (isValueClass && (name == nme.equals_ || name == nme.hashCode_) && !stat.symbol.isSynthetic)
              notAllowed(s"redefinition of $name method. See SIP-15, criterion 4.")
            else if (stat.symbol != null && stat.symbol.isParamAccessor)
              notAllowed("additional parameter")
            checkEphemeralDeep.traverse(rhs)
          case _: ValDef =>
            notAllowed("field definition")
          case _: ModuleDef =>
            //see https://issues.scala-lang.org/browse/SI-6359
            implRestriction(stat, "nested object")
          case _ =>
            notAllowed("this statement")
        }
      }
    }
    /** Enforce the SIP-15 restrictions on classes extending AnyVal: must be a
     *  static, non-trait class with exactly one public, immutable val parameter
     *  that is not itself a value class, and with no specialized type parameters.
     */
    private def validateDerivedValueClass(clazz: Symbol, body: List[Tree]) = {
      if (clazz.isTrait)
        context.error(clazz.pos, "only classes (not traits) are allowed to extend AnyVal")
      if (!clazz.isStatic)
        context.error(clazz.pos, "value class may not be a "+
          (if (clazz.owner.isTerm) "local class" else "member of another class"))
      if (!clazz.isPrimitiveValueClass) {
        clazz.primaryConstructor.paramss match {
          // Exactly one parameter list with exactly one parameter.
          case List(List(param)) =>
            val decls = clazz.info.decls
            val paramAccessor = clazz.constrParamAccessors.head
            if (paramAccessor.isMutable)
              context.error(paramAccessor.pos, "value class parameter must not be a var")
            val accessor = decls.toList.find(x => x.isMethod && x.accessedOrSelf == paramAccessor)
            accessor match {
              case None =>
                context.error(paramAccessor.pos, "value class parameter must be a val and not be private[this]")
              case Some(acc) if acc.isProtectedLocal =>
                context.error(paramAccessor.pos, "value class parameter must not be protected[this]")
              case Some(acc) =>
                if (acc.tpe.typeSymbol.isDerivedValueClass)
                  context.error(acc.pos, "value class may not wrap another user-defined value class")
                // The parameter accessor itself is exempt from the ephemerality check below.
                checkEphemeral(clazz, body filterNot (stat => stat.symbol != null && stat.symbol.accessedOrSelf == paramAccessor))
            }
          case _ =>
            context.error(clazz.pos, "value class needs to have exactly one val parameter")
        }
      }
      for (tparam <- clazz.typeParams)
        if (tparam hasAnnotation definitions.SpecializedClass)
          context.error(tparam.pos, "type parameter of value class may not be specialized")
    }
/** Typechecks a parent type reference.
*
* This typecheck is harder than it might look, because it should honor early
* definitions and also perform type argument inference with the help of super call
* arguments provided in `encodedtpt`.
*
* The method is called in batches (batch = 1 time per each parent type referenced),
* two batches per definition: once from namer, when entering a ClassDef or a ModuleDef
* and once from typer, when typechecking the definition.
*
* ***Arguments***
*
* `encodedtpt` represents the parent type reference wrapped in an `Apply` node
* which indicates value arguments (i.e. type macro arguments or super constructor call arguments)
* If no value arguments are provided by the user, the `Apply` node is still
* there, but its `args` will be set to `Nil`.
* This argument is synthesized by `tools.nsc.ast.Parsers.templateParents`.
*
* `templ` is an enclosing template, which contains a primary constructor synthesized by the parser.
* Such a constructor is a DefDef which contains early initializers and maybe a super constructor call
* (I wrote "maybe" because trait constructors don't call super constructors).
* This argument is synthesized by `tools.nsc.ast.Trees.Template`.
*
* `inMixinPosition` indicates whether the reference is not the first in the
* list of parents (and therefore cannot be a class) or the opposite.
*
* ***Return value and side effects***
*
* Returns a `TypeTree` representing a resolved parent type.
* If the typechecked parent reference implies non-nullary and non-empty argument list,
* this argument list is attached to the returned value in SuperArgsAttachment.
* The attachment is necessary for the subsequent typecheck to fixup a super constructor call
* in the body of the primary constructor (see `typedTemplate` for details).
*
* This method might invoke `typedPrimaryConstrBody`, hence it might cause the side effects
* described in the docs of that method. It might also attribute the Super(_, _) reference
* (if present) inside the primary constructor of `templ`.
*
* ***Example***
*
* For the following definition:
*
* class D extends {
* val x = 2
* val y = 4
* } with B(x)(3) with C(y) with T
*
* this method will be called six times:
*
* (3 times from the namer)
* typedParentType(Apply(Apply(Ident(B), List(Ident(x))), List(3)), templ, inMixinPosition = false)
* typedParentType(Apply(Ident(C), List(Ident(y))), templ, inMixinPosition = true)
* typedParentType(Apply(Ident(T), List()), templ, inMixinPosition = true)
*
* (3 times from the typer)
* <the same three calls>
*/
    private def typedParentType(encodedtpt: Tree, templ: Template, inMixinPosition: Boolean): Tree = {
      // peel the (possibly nested) Apply wrappers off the parent reference
      val app = treeInfo.dissectApplied(encodedtpt)
      val (treeInfo.Applied(core, _, argss), decodedtpt) = ((app, app.callee))
      // "trivial" argss = the user wrote no constructor arguments at all (Nil or a single empty list)
      val argssAreTrivial = argss == Nil || argss == ListOfNil
      // we cannot avoid cyclic references with `initialize` here, because when type macros arrive,
      // we'll have to check the probe for isTypeMacro anyways.
      // therefore I think it's reasonable to trade a more specific "inherits itself" error
      // for a generic, yet understandable "cyclic reference" error
      var probe = typedTypeConstructor(core.duplicate).tpe.typeSymbol
      if (probe == null) probe = NoSymbol
      probe.initialize
      if (probe.isTrait || inMixinPosition) {
        // traits (and anything in mixin position) cannot take constructor arguments
        if (!argssAreTrivial) {
          if (probe.isTrait) ConstrArgsInParentWhichIsTraitError(encodedtpt, probe)
          else () // a class in a mixin position - this warrants an error in `validateParentClasses`
                  // therefore here we do nothing, e.g. don't check that the # of ctor arguments
                  // matches the # of ctor parameters or stuff like that
        }
        typedType(decodedtpt)
      } else {
        val supertpt = typedTypeConstructor(decodedtpt)
        val supertparams = if (supertpt.hasSymbolField) supertpt.symbol.typeParams else Nil
        // infers missing type arguments of the parent by typechecking a synthetic super call
        // (`New(...)`) inside the primary constructor; see `typedPrimaryConstrBody`
        def inferParentTypeArgs: Tree = {
          typedPrimaryConstrBody(templ) {
            val supertpe = PolyType(supertparams, appliedType(supertpt.tpe, supertparams map (_.tpeHK)))
            val supercall = New(supertpe, mmap(argss)(_.duplicate))
            val treeInfo.Applied(Select(ctor, nme.CONSTRUCTOR), _, _) = supercall
            ctor setType supertpe // this is an essential hack, otherwise it will occasionally fail to typecheck
            atPos(supertpt.pos.focus)(supercall)
          } match {
            case EmptyTree => MissingTypeArgumentsParentTpeError(supertpt); supertpt
            case tpt => TypeTree(tpt.tpe) setPos supertpt.pos // SI-7224: don't .focus positions of the TypeTree of a parent that exists in source
          }
        }
        val supertptWithTargs = if (supertparams.isEmpty || context.unit.isJava) supertpt else inferParentTypeArgs
        // this is the place where we tell the typer what argss should be used for the super call
        // if argss are nullary or empty, then (see the docs for `typedPrimaryConstrBody`)
        // the super call dummy is already good enough, so we don't need to do anything
        if (argssAreTrivial) supertptWithTargs else supertptWithTargs updateAttachment SuperArgsAttachment(argss)
      }
    }
    /** Typechecks the mishmash of trees that happen to be stuffed into the primary constructor of a given template.
     * Before commencing the typecheck, replaces the `pendingSuperCall` dummy with the result of `actualSuperCall`.
     * `actualSuperCall` can return `EmptyTree`, in which case the dummy is replaced with a literal unit.
     *
     * ***Return value and side effects***
     *
     * If a super call is present in the primary constructor and is not erased by the transform, returns it typechecked.
     * Otherwise (e.g. if the primary constructor is missing or the super call isn't there) returns `EmptyTree`.
     *
     * As a side effect, this method attributes the underlying fields of early vals.
     * Early vals aren't typechecked anywhere else, so it's essential to call `typedPrimaryConstrBody`
     * at least once per definition. It'd be great to disentangle this logic at some point.
     *
     * ***Example***
     *
     * For the following definition:
     *
     *   class D extends {
     *     val x = 2
     *     val y = 4
     *   } with B(x)(3) with C(y) with T
     *
     * the primary constructor of `templ` will be:
     *
     *   Block(List(
     *     ValDef(NoMods, x, TypeTree(), 2)
     *     ValDef(NoMods, y, TypeTree(), 4)
     *     global.pendingSuperCall,
     *     Literal(Constant(())))
     *
     * Note the `pendingSuperCall` part. This is the representation of a fill-me-in-later supercall dummy,
     * which encodes the fact that supercall argss are unknown during parsing and need to be transplanted
     * from one of the parent types. Read more about why the argss are unknown in `tools.nsc.ast.Trees.Template`.
     */
    private def typedPrimaryConstrBody(templ: Template)(actualSuperCall: => Tree): Tree =
      treeInfo.firstConstructor(templ.body) match {
        case ctor @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
          // split the ctor body into the early-definition prefix and the super call (if any)
          val (preSuperStats, superCall) = {
            val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x))
            (stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate)
          }
          // substitute the parser's pendingSuperCall dummy; fall back to the unit literal when empty
          val superCall1 = (superCall match {
            case global.pendingSuperCall => actualSuperCall
            case EmptyTree => EmptyTree
          }) orElse cunit
          val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1)
          val clazz = context.owner
          assert(clazz != NoSymbol, templ)
          // SI-9086 The position of this symbol is material: implicit search will avoid triggering
          // cyclic errors in an implicit search in argument to the super constructor call on
          // account of the "ignore symbols without complete info that succeed the implicit search"
          // in this source file. See `ImplicitSearch#isValid` and `ImplicitInfo#isCyclicOrErroneous`.
          val dummy = context.outer.owner.newLocalDummy(context.owner.pos)
          val cscope = context.outer.makeNewScope(ctor, dummy)
          if (dummy.isTopLevel) currentRun.symSource(dummy) = currentUnit.source.file
          val cbody2 = { // called both during completion AND typing.
            val typer1 = newTyper(cscope)
            // XXX: see about using the class's symbol....
            clazz.unsafeTypeParams foreach (sym => typer1.context.scope.enter(sym))
            typer1.namer.enterValueParams(vparamss map (_.map(_.duplicate)))
            typer1.typed(cbody1)
          }
          // side effect: copy the inferred types of early vals onto their underlying fields —
          // this is the only place those fields get their type trees assigned
          val preSuperVals = treeInfo.preSuperFields(templ.body)
          if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
            devWarning("Wanted to zip empty presuper val list with " + preSuperStats)
          else
            map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt setType ldef.symbol.tpe)
          if (superCall1 == cunit) EmptyTree
          else cbody2 match { // extract the typed super call from the typed block
            case Block(_, expr) => expr
            case tree => tree
          }
        case _ =>
          EmptyTree
      }
    /** Makes sure that the first type tree in the list of parent types is always a class.
     * If the first parent is a trait, prepend its supertype to the list until it's a class.
     */
    private def normalizeFirstParent(parents: List[Tree]): List[Tree] = {
      @annotation.tailrec
      def explode0(parents: List[Tree]): List[Tree] = {
        val supertpt :: rest = parents // parents is always non-empty here - it only grows
        if (supertpt.tpe.typeSymbol == AnyClass) {
          // Any as first parent is rewritten to AnyRef in place
          supertpt setType AnyRefTpe
          parents
        } else if (treeInfo isTraitRef supertpt) {
          // keep prepending the trait's first parent until a class surfaces at the head
          val supertpt1 = typedType(supertpt)
          def supersuper = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
          if (supertpt1.isErrorTyped) rest
          else explode0(supersuper :: supertpt1 :: rest)
        } else parents
      }
      def explode(parents: List[Tree]) =
        if (treeInfo isTraitRef parents.head) explode0(parents)
        else parents
      if (parents.isEmpty) Nil else explode(parents)
    }
/** Certain parents are added in the parser before it is known whether
* that class also declared them as parents. For instance, this is an
* error unless we take corrective action here:
*
* case class Foo() extends Serializable
*
* So we strip the duplicates before typer.
*/
private def fixDuplicateSyntheticParents(parents: List[Tree]): List[Tree] = parents match {
case Nil => Nil
case x :: xs =>
val sym = x.symbol
x :: fixDuplicateSyntheticParents(
if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
else xs
)
}
    /** Typechecks the parent references of a template.
     *
     *  An empty parent list becomes `AnyRef`. Otherwise each reference is typed
     *  via `typedParentType`, the first parent is normalized to a class, and
     *  duplicated synthetic parents are stripped. Falls back to `AnyRef` when a
     *  TypeError occurs (e.g. a definition inheriting itself).
     */
    def typedParentTypes(templ: Template): List[Tree] = templ.parents match {
      case Nil => List(atPos(templ.pos)(TypeTree(AnyRefTpe)))
      case first :: rest =>
        try {
          val supertpts = fixDuplicateSyntheticParents(normalizeFirstParent(
            typedParentType(first, templ, inMixinPosition = false) +:
            (rest map (typedParentType(_, templ, inMixinPosition = true)))))
          // if that is required to infer the targs of a super call
          // typedParentType calls typedPrimaryConstrBody to do the inferring typecheck
          // as a side effect, that typecheck also assigns types to the fields underlying early vals
          // however if inference is not required, the typecheck doesn't happen
          // and therefore early fields have their type trees not assigned
          // here we detect this situation and take preventive measures
          if (treeInfo.hasUntypedPreSuperFields(templ.body))
            typedPrimaryConstrBody(templ)(EmptyTree)
          supertpts mapConserve (tpt => checkNoEscaping.privates(context.owner, tpt))
        }
        catch {
          case ex: TypeError =>
            // fallback in case of cyclic errors
            // @H none of the tests enter here but I couldn't rule it out
            // upd. @E when a definition inherits itself, we end up here
            // because `typedParentType` triggers `initialize` for parent types symbols
            log("Type error calculating parents in template " + templ)
            log("Error: " + ex)
            ParentTypesError(templ, ex)
            List(TypeTree(AnyRefTpe))
        }
    }
    /** <p>Check that</p>
     * <ul>
     *   <li>all parents are class types,</li>
     *   <li>first parent class is not a mixin; following classes are mixins,</li>
     *   <li>final classes are not inherited,</li>
     *   <li>
     *     sealed classes are only inherited by classes which are
     *     nested within definition of base class, or that occur within same
     *     statement sequence,
     *   </li>
     *   <li>self-type of current class is a subtype of self-type of each parent class.</li>
     *   <li>no two parents define same symbol.</li>
     * </ul>
     *
     * Errors are buffered in `pending` and issued together at the end, so a single
     * pass reports every parent problem rather than stopping at the first one.
     */
    def validateParentClasses(parents: List[Tree], selfType: Type) {
      val pending = ListBuffer[AbsTypeError]()
      def validateDynamicParent(parent: Symbol, parentPos: Position) =
        if (parent == DynamicClass) checkFeature(parentPos, DynamicsFeature)
      def validateParentClass(parent: Tree, superclazz: Symbol) =
        if (!parent.isErrorTyped) {
          val psym = parent.tpe.typeSymbol.initialize
          checkStablePrefixClassType(parent)
          if (psym != superclazz) {
            if (psym.isTrait) {
              // a mixed-in trait's own superclass must be a superclass of the first parent
              val ps = psym.info.parents
              if (!ps.isEmpty && !superclazz.isSubClass(ps.head.typeSymbol))
                pending += ParentSuperSubclassError(parent, superclazz, ps.head.typeSymbol, psym)
            } else {
              pending += ParentNotATraitMixinError(parent, psym)
            }
          }
          if (psym.isFinal)
            pending += ParentFinalInheritanceError(parent, psym)
          val sameSourceFile = context.unit.source.file == psym.sourceFile
          if (!isPastTyper && psym.hasDeprecatedInheritanceAnnotation &&
            !sameSourceFile && !context.owner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) {
            val version = psym.deprecatedInheritanceVersion.getOrElse("")
            val since = if (version.isEmpty) version else s" (since $version)"
            val message = psym.deprecatedInheritanceMessage.map(msg => s": $msg").getOrElse("")
            val report = s"inheritance from ${psym.fullLocationString} is deprecated$since$message"
            context.deprecationWarning(parent.pos, psym, report, version)
          }
          if (psym.isSealed && !phase.erasedTypes)
            if (sameSourceFile)
              psym addChild context.owner // record the subclass for exhaustiveness checking
            else
              pending += ParentSealedInheritanceError(parent, psym)
          if (psym.isLocalToBlock && !phase.erasedTypes)
            psym addChild context.owner
          val parentTypeOfThis = parent.tpe.dealias.typeOfThis
          if (!(selfType <:< parentTypeOfThis) &&
            !phase.erasedTypes &&
            !context.owner.isSynthetic && // don't check synthetic concrete classes for virtuals (part of DEVIRTUALIZE)
            !selfType.isErroneous &&
            !parent.tpe.isErroneous)
          {
            pending += ParentSelfTypeConformanceError(parent, selfType)
            if (settings.explaintypes) explainTypes(selfType, parentTypeOfThis)
          }
          if (parents exists (p => p != parent && p.tpe.typeSymbol == psym && !psym.isError))
            pending += ParentInheritedTwiceError(parent, psym)
          validateDynamicParent(psym, parent.pos)
        }
      if (!parents.isEmpty && parents.forall(!_.isErrorTyped)) {
        val superclazz = parents.head.tpe.typeSymbol
        for (p <- parents) validateParentClass(p, superclazz)
      }
      pending.foreach(ErrorUtils.issueTypeError)
    }
    /** Checks the "finitary" restriction: no type parameter of the class may be
     *  contained in its own set of expansive references. For each offending type
     *  parameter, the class info is patched (instantiating that parameter to
     *  `AnyRef` in the parents) so typechecking can proceed, and a FinitaryError
     *  is issued.
     */
    def checkFinitary(classinfo: ClassInfoType) {
      val clazz = classinfo.typeSymbol
      for (tparam <- clazz.typeParams) {
        if (classinfo.expansiveRefs(tparam) contains tparam) {
          // patch the parents so the expansive self-reference disappears, then report
          val newinfo = ClassInfoType(
            classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefTpe))),
            classinfo.decls,
            clazz)
          updatePolyClassInfo(clazz, newinfo)
          FinitaryError(tparam)
        }
      }
    }
private def updatePolyClassInfo(clazz: Symbol, newinfo: ClassInfoType): clazz.type = {
clazz.setInfo {
clazz.info match {
case PolyType(tparams, _) => PolyType(tparams, newinfo)
case _ => newinfo
}
}
}
    /** Typechecks a ClassDef: modifiers, type parameters, and the template (in a
     *  fresh scope), then runs method synthesis and class-level checks — ephemerality
     *  for universal traits, classfile-annotation restrictions, type-parameter
     *  shadowing, and mirroring of @deprecated onto the companion module.
     */
    def typedClassDef(cdef: ClassDef): Tree = {
      val clazz = cdef.symbol
      val typedMods = typedModifiers(cdef.mods)
      assert(clazz != NoSymbol, cdef)
      reenterTypeParams(cdef.tparams)
      val tparams1 = cdef.tparams mapConserve (typedTypeDef)
      val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl))
      val impl2 = finishMethodSynthesis(impl1, clazz, context)
      // universal traits (first parent Any) must satisfy the ephemerality restrictions
      if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
        checkEphemeral(clazz, impl2.body)
      if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) {
        if (!clazz.owner.isPackageClass)
          context.error(clazz.pos, "inner classes cannot be classfile annotations")
        // Ignore @SerialVersionUID, because it is special-cased and handled completely differently.
        // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement
        // of constant argument values "for free". Related to SI-7041.
        else if (clazz != SerialVersionUIDAttr) restrictionWarning(cdef.pos, unit,
          """|subclassing Classfile does not
             |make your annotation visible at runtime. If that is what
             |you want, you must write the annotation class in Java.""".stripMargin)
      }
      warnTypeParameterShadow(tparams1, clazz)
      if (!isPastTyper) {
        // propagate @deprecated from the class onto the companion module's class
        for (ann <- clazz.getAnnotation(DeprecatedAttr)) {
          val m = companionSymbolOf(clazz, context)
          if (m != NoSymbol)
            m.moduleClass.addAnnotation(AnnotationInfo(ann.atp, ann.args, List()))
        }
      }
      treeCopy.ClassDef(cdef, typedMods, cdef.name, tparams1, impl2)
        .setType(NoType)
    }
    /** Typechecks a ModuleDef. The companion class's constructors are initialized
     *  first (their type completers may add default getters to this object); the
     *  template is typed with `Serializable` appended as an extra parent when the
     *  companion class is serializable but the module is not; and when compiling
     *  under 2.11 semantics, Predef's parents are checked to live in Predef's file.
     */
    def typedModuleDef(mdef: ModuleDef): Tree = {
      // initialize all constructors of the linked class: the type completer (Namer.methodSig)
      // might add default getters to this object. example: "object T; class T(x: Int = 1)"
      val linkedClass = companionSymbolOf(mdef.symbol, context)
      if (linkedClass != NoSymbol)
        linkedClass.info.decl(nme.CONSTRUCTOR).alternatives foreach (_.initialize)
      val clazz = mdef.symbol.moduleClass
      val typedMods = typedModifiers(mdef.mods)
      assert(clazz != NoSymbol, mdef)
      // true when no synthetic Serializable parent needs to be added
      val noSerializable = (
        (linkedClass eq NoSymbol)
          || linkedClass.isErroneous
          || !linkedClass.isSerializable
          || clazz.isSerializable
      )
      val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, {
        typedParentTypes(mdef.impl) ++ (
          if (noSerializable) Nil
          else {
            clazz.makeSerializable()
            List(TypeTree(SerializableTpe) setPos clazz.pos.focus)
          }
        )
      })
      val impl2 = finishMethodSynthesis(impl1, clazz, context)
      if (settings.isScala211 && mdef.symbol == PredefModule)
        ensurePredefParentsAreInSameSourceFile(impl2)
      treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
    }
private def ensurePredefParentsAreInSameSourceFile(template: Template) = {
val parentSyms = template.parents map (_.symbol) filterNot (_ == AnyRefClass)
if (parentSyms exists (_.associatedFile != PredefModule.associatedFile))
context.error(template.pos, s"All parents of Predef must be defined in ${PredefModule.associatedFile}.")
}
    /** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
     * all the time, it is exposed here the module/class typing methods go through it.
     * ...but it turns out it's also the ideal spot for namer/typer coordination for
     * the tricky method synthesis scenarios, so we'll make it that.
     *
     * @return `templ` after `addSyntheticMethods` has added synthetic members for `clazz`
     */
    protected def finishMethodSynthesis(templ: Template, clazz: Symbol, context: Context): Template = {
      addSyntheticMethods(templ, clazz, context)
    }
/** For flatMapping a list of trees when you want the DocDefs and Annotated
* to be transparent.
*/
def rewrappingWrapperTrees(f: Tree => List[Tree]): Tree => List[Tree] = {
case dd @ DocDef(comment, defn) => f(defn) map (stat => DocDef(comment, stat) setPos dd.pos)
case Annotated(annot, defn) => f(defn) map (stat => Annotated(annot, stat))
case tree => f(tree)
}
protected def enterSyms(txt: Context, trees: List[Tree]) = {
var txt0 = txt
for (tree <- trees) txt0 = enterSym(txt0, tree)
}
protected def enterSym(txt: Context, tree: Tree): Context =
if (txt eq context) namer enterSym tree
else newNamer(txt) enterSym tree
    /** Typechecks a template against its already-typed parents `parents1`:
     *  rewires anonymous-class infos whose parents were normalized, types the
     *  self type, enters member symbols, validates parents, materializes the
     *  pending super call inside the primary constructor, and typechecks the
     *  body. Also checks that inner classes do not inherit from Annotation.
     */
    def typedTemplate(templ0: Template, parents1: List[Tree]): Template = {
      val templ = templ0
      // please FIXME: uncommenting this line breaks everything
      // val templ = treeCopy.Template(templ0, templ0.body, templ0.self, templ0.parents)
      val clazz = context.owner
      val parentTypes = parents1.map(_.tpe)
      // The parents may have been normalized by typedParentTypes.
      // We must update the info as well, or we won't find the super constructor for our now-first parent class
      // Consider `class C ; trait T extends C ; trait U extends T`
      // `U`'s info will start with parent `T`, but `typedParentTypes` will return `List(C, T)` (`== parents1`)
      // now, the super call in the primary ctor will fail to find `C`'s ctor, since it bases its search on
      // `U`'s info, not the trees.
      //
      // For correctness and performance, we restrict this rewrite to anonymous classes,
      // as others have their parents in order already (it seems!), and we certainly
      // don't want to accidentally rewire superclasses for e.g. the primitive value classes.
      //
      // TODO: Find an example of a named class needing this rewrite, I tried but couldn't find one.
      if (clazz.isAnonymousClass && clazz.info.parents != parentTypes) {
        // println(s"updating parents of $clazz from ${clazz.info.parents} to $parentTypes")
        updatePolyClassInfo(clazz, ClassInfoType(parentTypes, clazz.info.decls, clazz))
      }
      clazz.annotations.map(_.completeInfo())
      if (templ.symbol == NoSymbol)
        templ setSymbol clazz.newLocalDummy(templ.pos)
      // @unchecked: the parser only produces a ValDef with an empty rhs here
      val self1 = (templ.self: @unchecked) match {
        case vd @ ValDef(_, _, tpt, EmptyTree) =>
          val tpt1 = checkNoEscaping.privates(
            clazz.thisSym,
            treeCopy.TypeTree(tpt).setOriginal(tpt) setType vd.symbol.tpe
          )
          copyValDef(vd)(tpt = tpt1, rhs = EmptyTree) setType NoType
      }
      // was:
      //          val tpt1 = checkNoEscaping.privates(clazz.thisSym, typedType(tpt))
      //          treeCopy.ValDef(vd, mods, name, tpt1, EmptyTree) setType NoType
      // but this leads to cycles for existential self types ==> #2545
      if (self1.name != nme.WILDCARD)
        context.scope enter self1.symbol
      val selfType = (
        if (clazz.isAnonymousClass && !phase.erasedTypes)
          intersectionType(clazz.info.parents, clazz.owner)
        else
          clazz.typeOfThis
      )
      // the following is necessary for templates generated later
      assert(clazz.info.decls != EmptyScope, clazz)
      val body1 = pluginsEnterStats(this, templ.body)
      enterSyms(context.outer.make(templ, clazz, clazz.info.decls), body1)
      if (!templ.isErrorTyped) // if `parentTypes` has invalidated the template, don't validate it anymore
        validateParentClasses(parents1, selfType)
      if (clazz.isCase)
        validateNoCaseAncestor(clazz)
      if (clazz.isTrait && hasSuperArgs(parents1.head))
        ConstrArgsInParentOfTraitError(parents1.head, clazz)
      if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel)
        context.error(clazz.pos, "inner classes cannot be classfile annotations")
      if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members
        checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType])
      // replace the parser's pendingSuperCall dummy with the actual super call,
      // using the argss attached to the first parent by typedParentType
      val bodyWithPrimaryCtor = {
        val primaryCtor = treeInfo.firstConstructor(body1)
        val primaryCtor1 = primaryCtor match {
          case DefDef(_, _, _, _, _, Block(earlyVals :+ global.pendingSuperCall, unit)) =>
            val argss = superArgs(parents1.head) getOrElse Nil
            val pos = wrappingPos(parents1.head.pos, primaryCtor :: argss.flatten).makeTransparent
            val superCall = atPos(pos)(PrimarySuperCall(argss))
            deriveDefDef(primaryCtor)(block => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos
          case _ => primaryCtor
        }
        body1 mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat }
      }
      val body3 = typedStats(bodyWithPrimaryCtor, templ.symbol)
      if (clazz.info.firstParent.typeSymbol == AnyValClass)
        validateDerivedValueClass(clazz, body3)
      if (clazz.isTrait) {
        for (decl <- clazz.info.decls if decl.isTerm && decl.isEarlyInitialized) {
          context.warning(decl.pos, "Implementation restriction: early definitions in traits are not initialized before the super class is initialized.")
        }
      }
      treeCopy.Template(templ, parents1, self1, body3) setType clazz.tpe_*
    }
/** Remove definition annotations from modifiers (they have been saved
* into the symbol's `annotations` in the type completer / namer)
*
* However reification does need annotation definitions to proceed.
* Unfortunately, AnnotationInfo doesn't provide enough info to reify it in general case.
* The biggest problem is with the "atp: Type" field, which cannot be reified in some situations
* that involve locally defined annotations. See more about that in Reifiers.scala.
*
* That's why the original tree gets saved into `original` field of AnnotationInfo (happens elsewhere).
* The field doesn't get pickled/unpickled and exists only during a single compilation run.
* This simultaneously allows us to reify annotations and to preserve backward compatibility.
*/
def typedModifiers(mods: Modifiers): Modifiers =
mods.copy(annotations = Nil) setPositions mods.positions
def typedValDef(vdef: ValDef): ValDef = {
val sym = vdef.symbol
val valDefTyper = {
val maybeConstrCtx =
if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext
else context
newTyper(maybeConstrCtx.makeNewScope(vdef, sym))
}
valDefTyper.typedValDefImpl(vdef)
}
    // use typedValDef instead. this version is called after creating a new context for the ValDef
    /** Typechecks the parts of a ValDef: modifiers, the type tree (with escape and
     *  cycle checks), and the right-hand side. For default parameter values, the
     *  owner's type parameters in the expected type are replaced by wildcards so a
     *  default need not mention them (e.g. `def foo[T](a: T = 1)`).
     */
    private def typedValDefImpl(vdef: ValDef) = {
      val sym = vdef.symbol.initialize
      val typedMods = typedModifiers(vdef.mods)
      sym.annotations.map(_.completeInfo())
      val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
      checkNonCyclic(vdef, tpt1)
      // allow trait accessors: it's the only vehicle we have to hang on to annotations that must be passed down to
      // the field that's mixed into a subclass
      if (sym.hasAnnotation(definitions.VolatileAttr) && !((sym hasFlag MUTABLE) || (sym hasFlag ACCESSOR) && sym.owner.isTrait))
        VolatileValueError(vdef)
      val rhs1 =
        if (vdef.rhs.isEmpty) {
          // `var x: T = _` is only meaningful for fields, not local variables
          if (sym.isVariable && sym.owner.isTerm && !sym.isLazy && !isPastTyper)
            LocalVarUninitializedError(vdef)
          vdef.rhs
        } else {
          val tpt2 = if (sym.hasDefault) {
            // When typechecking default parameter, replace all type parameters in the expected type by Wildcard.
            // This allows defining "def foo[T](a: T = 1)"
            val tparams = sym.owner.skipConstructor.info.typeParams
            val subst = new SubstTypeMap(tparams, tparams map (_ => WildcardType)) {
              // also match (de-)skolemized versions of the type parameters
              override def matches(sym: Symbol, sym1: Symbol) =
                if (sym.isSkolem) matches(sym.deSkolemize, sym1)
                else if (sym1.isSkolem) matches(sym, sym1.deSkolemize)
                else super.matches(sym, sym1)
            }
            // allow defaults on by-name parameters
            if (sym hasFlag BYNAMEPARAM)
              if (tpt1.tpe.typeArgs.isEmpty) WildcardType // during erasure tpt1 is Function0
              else subst(tpt1.tpe.typeArgs(0))
            else subst(tpt1.tpe)
          } else tpt1.tpe
          transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
        }
      treeCopy.ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
    }
    /** Enter all aliases of local parameter accessors.
     *
     * Decomposes the primary constructor's super call `rhs`, pairs the superclass's
     * constructor param accessors with the supplied argument trees, and — when an
     * argument simply forwards one of this constructor's own parameters — records
     * the superclass accessor as the alias of this class's accessor. Along the way
     * it flags illegal references to `this` or to the module itself inside super
     * constructor arguments (SI-473, SI-3913, SI-6928). Errors are buffered and
     * issued together at the end.
     */
    def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) {
      debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs")
      val pending = ListBuffer[AbsTypeError]()
      // !!! This method is redundant with other, less buggy ones.
      def decompose(call: Tree): (Tree, List[Tree]) = call match {
        case _ if call.isErrorTyped => // e.g. SI-7636
          (call, Nil)
        case Apply(fn, args) =>
          // an object cannot be allowed to pass a reference to itself to a superconstructor
          // because of initialization issues; SI-473, SI-3913, SI-6928.
          foreachSubTreeBoundTo(args, clazz) { tree =>
            if (tree.symbol.isModule)
              pending += SuperConstrReferenceError(tree)
            tree match {
              case This(qual) =>
                pending += SuperConstrArgsThisReferenceError(tree)
              case _ => ()
            }
          }
          val (superConstr, preArgs) = decompose(fn)
          val params = fn.tpe.params
          // appending a dummy tree to represent Nil for an empty varargs (is this really necessary?)
          val applyArgs = if (args.length < params.length) args :+ EmptyTree else args take params.length
          assert(sameLength(applyArgs, params) || call.isErrorTyped,
            s"arity mismatch but call is not error typed: $clazz (params=$params, args=$applyArgs)")
          (superConstr, preArgs ::: applyArgs)
        case Block(_ :+ superCall, _) =>
          decompose(superCall)
        case _ =>
          (call, Nil)
      }
      // associate superclass paramaccessors with their aliases
      val (superConstr, superArgs) = decompose(rhs)
      if (superConstr.symbol.isPrimaryConstructor) {
        val superClazz = superConstr.symbol.owner
        if (!superClazz.isJavaDefined) {
          val superParamAccessors = superClazz.constrParamAccessors
          if (sameLength(superParamAccessors, superArgs)) {
            for ((superAcc, superArg@Ident(name)) <- superParamAccessors zip superArgs) {
              // only a bare forwarded constructor parameter counts as an alias
              if (mexists(vparamss)(_.symbol == superArg.symbol)) {
                // the superclass accessor (or its getter) must be visible to subclasses
                val alias = (
                  superAcc.initialize.alias
                    orElse (superAcc getterIn superAcc.owner)
                    filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias)
                )
                if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) {
                  val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match {
                    case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed
                    case acc => acc
                  }
                  ownAcc match {
                    case acc: TermSymbol if !acc.isVariable && !isByNameParamType(acc.info) =>
                      debuglog(s"$acc has alias ${alias.fullLocationString}")
                      acc setAlias alias
                    case _ =>
                  }
                }
              }
            }
          }
        }
      }
      pending.foreach(ErrorUtils.issueTypeError)
    }
// Check for SI-4842.
private def checkSelfConstructorArgs(ddef: DefDef, clazz: Symbol) {
val pending = ListBuffer[AbsTypeError]()
ddef.rhs match {
case Block(stats, expr) =>
val selfConstructorCall = stats.headOption.getOrElse(expr)
foreachSubTreeBoundTo(List(selfConstructorCall), clazz) {
case tree @ This(qual) =>
pending += SelfConstrArgsThisReferenceError(tree)
case _ => ()
}
case _ =>
}
pending.foreach(ErrorUtils.issueTypeError)
}
/**
* Run the provided function for each sub tree of `trees` that
* are bound to a symbol with `clazz` as a base class.
*
* @param f This function can assume that `tree.symbol` is non null
*/
private def foreachSubTreeBoundTo[A](trees: List[Tree], clazz: Symbol)(f: Tree => Unit): Unit =
for {
tree <- trees
subTree <- tree
} {
val sym = subTree.symbol
if (sym != null && sym.info.baseClasses.contains(clazz))
f(subTree)
}
    /** Check if a structurally defined method violates implementation restrictions.
     * A method cannot be called if it is a non-private member of a refinement type
     * and if its parameter's types are any of:
     *   - the self-type of the refinement
     *   - a type member of the refinement
     *   - an abstract type declared outside of the refinement.
     *   - an instance of a value class
     * Furthermore, the result type may not be a value class either
     *
     * Violations are reported via `context.error`; nothing is returned.
     */
    def checkMethodStructuralCompatible(ddef: DefDef): Unit = {
      val meth = ddef.symbol
      def parentString = meth.owner.parentSymbols filterNot (_ == ObjectClass) match {
        case Nil => ""
        case xs => xs.map(_.nameString).mkString(" (of ", " with ", ")")
      }
      def fail(pos: Position, msg: String): Boolean = {
        context.error(pos, msg)
        false
      }
      /* Have to examine all parameters in all lists.
       */
      def paramssTypes(tp: Type): List[List[Type]] = tp match {
        case mt @ MethodType(_, restpe) => mt.paramTypes :: paramssTypes(restpe)
        case PolyType(_, restpe) => paramssTypes(restpe)
        case _ => Nil
      }
      def resultType = meth.tpe_*.finalResultType
      def nthParamPos(n1: Int, n2: Int) =
        try ddef.vparamss(n1)(n2).pos catch { case _: IndexOutOfBoundsException => meth.pos }
      def failStruct(pos: Position, what: String, where: String = "Parameter type") =
        fail(pos, s"$where in structural refinement may not refer to $what")
      // check every parameter type of every parameter list
      foreachWithIndex(paramssTypes(meth.tpe)) { (paramList, listIdx) =>
        foreachWithIndex(paramList) { (paramType, paramIdx) =>
          val sym = paramType.typeSymbol
          def paramPos = nthParamPos(listIdx, paramIdx)
          /* Not enough to look for abstract types; have to recursively check the bounds
           * of each abstract type for more abstract types. Almost certainly there are other
           * exploitable type soundness bugs which can be seen by bounding a type parameter
           * by an abstract type which itself is bounded by an abstract type.
           */
          def checkAbstract(tp0: Type, what: String): Boolean = {
            def check(sym: Symbol): Boolean = !sym.isAbstractType || {
              log(s"""checking $tp0 in refinement$parentString at ${meth.owner.owner.fullLocationString}""")
              ( (!sym.hasTransOwner(meth.owner) && failStruct(paramPos, "an abstract type defined outside that refinement", what))
                || (!sym.hasTransOwner(meth) && failStruct(paramPos, "a type member of that refinement", what))
                || checkAbstract(sym.info.bounds.hi, "Type bound")
              )
            }
            tp0.dealiasWidenChain forall (t => check(t.typeSymbol))
          }
          checkAbstract(paramType, "Parameter type")
          if (sym.isDerivedValueClass)
            failStruct(paramPos, "a user-defined value class")
          if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
            failStruct(paramPos, "the type of that refinement (self type)")
        }
      }
      if (resultType.typeSymbol.isDerivedValueClass)
        failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type")
    }
    /** Typechecks a DefDef: type parameters, value parameters, result type tree,
     *  and the right-hand side (with special handling for constructors and macro
     *  bodies). For primary constructors, also computes parameter aliases; for
     *  auxiliary constructors, checks the self-constructor call. Performs assorted
     *  restriction checks (varargs position, deprecated param names, structural
     *  refinement compatibility, implicit-conversion feature warning).
     */
    def typedDefDef(ddef: DefDef): DefDef = {
      // an accessor's type completer may mutate a type inside `ddef` (`== context.unit.synthetics(ddef.symbol)`)
      // concretely: it sets the setter's parameter type or the getter's return type (when derived from a valdef with empty tpt)
      val meth = ddef.symbol.initialize
      reenterTypeParams(ddef.tparams)
      reenterValueParams(ddef.vparamss)
      // for `val` and `var` parameter, look at `target` meta-annotation
      if (!isPastTyper && meth.isPrimaryConstructor) {
        for (vparams <- ddef.vparamss; vd <- vparams) {
          if (vd.mods.isParamAccessor) {
            vd.symbol setAnnotations (vd.symbol.annotations filter AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = true))
          }
        }
      }
      val tparams1 = ddef.tparams mapConserve typedTypeDef
      val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef)
      warnTypeParameterShadow(tparams1, meth)
      meth.annotations.map(_.completeInfo())
      // a repeated parameter is only allowed in last position of its parameter list
      for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1)
        if (isRepeatedParamType(vparam1.symbol.tpe))
          StarParamNotLastError(vparam1)
      val tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
      checkNonCyclic(ddef, tpt1)
      ddef.tpt.setType(tpt1.tpe)
      val typedMods = typedModifiers(ddef.mods)
      var rhs1 =
        if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasStaticFlag) { // need this to make it possible to generate static ctors
          if (!meth.isPrimaryConstructor &&
            (!meth.owner.isClass ||
              meth.owner.isModuleClass ||
              meth.owner.isAnonOrRefinementClass))
            InvalidConstructorDefError(ddef)
          typed(ddef.rhs)
        } else if (meth.isMacro) {
          // typechecking macro bodies is sort of unconventional
          // that's why we employ our custom typing scheme orchestrated outside of the typer
          transformedOr(ddef.rhs, typedMacroBody(this, ddef))
        } else {
          transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe)
        }
      if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass) && !meth.isJava) {
        // There are no supercalls for AnyVal or constructors from Java sources, which
        // would blow up in computeParamAliases; there's nothing to be computed for them
        // anyway.
        if (meth.isPrimaryConstructor)
          computeParamAliases(meth.owner, vparamss1, rhs1)
        else
          checkSelfConstructorArgs(ddef, meth.owner)
      }
      if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass)
        rhs1 = checkDead(rhs1)
      // a default on a parameter preceding a repeated parameter can never be used
      if (!isPastTyper && meth.owner.isClass &&
        meth.paramss.exists(ps => ps.exists(_.hasDefault) && isRepeatedParamType(ps.last.tpe)))
        StarWithDefaultError(meth)
      if (!isPastTyper) {
        val allParams = meth.paramss.flatten
        for (p <- allParams) {
          // a @deprecatedName must not collide with another parameter's current or deprecated name
          for (n <- p.deprecatedParamName) {
            if (allParams.exists(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.exists(_ == n))))
              DeprecatedParamNameError(p, n)
          }
        }
        if (meth.isStructuralRefinementMember)
          checkMethodStructuralCompatible(ddef)
        if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
          case List(param) :: _ if !param.isImplicit =>
            checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString)
          case _ =>
        }
      }
      treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType
    }
def typedTypeDef(tdef: TypeDef): TypeDef =
typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty) {
_.typedTypeDefImpl(tdef)
}
    // use typedTypeDef instead. this version is called after creating a new context for the TypeDef
    private def typedTypeDefImpl(tdef: TypeDef): TypeDef = {
      // Force the symbol's info before touching its annotations / type params.
      tdef.symbol.initialize
      // Skolemized type params must be back in scope before typing tparams and rhs.
      reenterTypeParams(tdef.tparams)
      val tparams1 = tdef.tparams mapConserve typedTypeDef
      val typedMods = typedModifiers(tdef.mods)
      tdef.symbol.annotations.map(_.completeInfo())
      warnTypeParameterShadow(tparams1, tdef.symbol)
      // @specialized should not be pickled when compiling with -no-specialize
      if (settings.nospecialization && currentRun.compiles(tdef.symbol)) {
        tdef.symbol.removeAnnotation(definitions.SpecializedClass)
        // also strip the annotation from the unskolemized symbol, which is what gets pickled
        tdef.symbol.deSkolemize.removeAnnotation(definitions.SpecializedClass)
      }
      // Type the rhs and make sure no private types escape through it.
      val rhs1 = checkNoEscaping.privates(tdef.symbol, typedType(tdef.rhs))
      checkNonCyclic(tdef.symbol)
      if (tdef.symbol.owner.isType)
        rhs1.tpe match {
          // a member type's declared bounds must be consistent (lo <:< hi)
          case TypeBounds(lo1, hi1) if (!(lo1 <:< hi1)) => LowerBoundError(tdef, lo1, hi1)
          case _ => ()
        }
      if (tdef.symbol.isDeferred && tdef.symbol.info.isHigherKinded)
        checkFeature(tdef.pos, HigherKindsFeature)
      treeCopy.TypeDef(tdef, typedMods, tdef.name, tparams1, rhs1) setType NoType
    }
private def enterLabelDef(stat: Tree) {
stat match {
case ldef @ LabelDef(_, _, _) =>
if (ldef.symbol == NoSymbol)
ldef.symbol = namer.enterInScope(
context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitTpe))
case _ =>
}
}
    /** Type check a label definition.
     *
     *  For loop-header labels during the typer phase the result type may change
     *  after typing the rhs, in which case the label symbol is replaced by a
     *  fresh one with the refined result type and the rhs is retypechecked.
     */
    def typedLabelDef(ldef: LabelDef): LabelDef = {
      if (!nme.isLoopHeaderLabel(ldef.symbol.name) || isPastTyper) {
        // Ordinary label: type the rhs against the label's declared result type.
        val restpe = ldef.symbol.tpe.resultType
        val rhs1 = typed(ldef.rhs, restpe)
        ldef.params foreach (param => param setType param.symbol.tpe)
        deriveLabelDef(ldef)(_ => rhs1) setType restpe
      }
      else {
        // Loop-header label during typer: infer the rhs type without an expected type.
        val initpe = ldef.symbol.tpe.resultType
        val rhs1 = typed(ldef.rhs)
        val restpe = rhs1.tpe
        if (restpe == initpe) { // stable result, no need to check again
          ldef.params foreach (param => param setType param.symbol.tpe)
          treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
        } else {
          // Result type changed: replace the label symbol with one carrying the
          // new result type, strip attributes from the rhs, and retypecheck it.
          context.scope.unlink(ldef.symbol)
          val sym2 = namer.enterInScope(
            context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe))
          val LabelDef(_, _, rhs1) = resetAttrs(ldef)
          val rhs2 = typed(brutallyResetAttrs(rhs1), restpe)
          ldef.params foreach (param => param setType param.symbol.tpe)
          deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe
        }
      }
    }
    /** Type check a block expression.
     *
     *  Enters the block's statements, types them and the result expression,
     *  and — for a block that is an anonymous class definition/instantiation
     *  pair — temporarily makes members not visible through the expected type
     *  private (see the long comment below), restoring them in `finally`.
     */
    def typedBlock(block0: Block, mode: Mode, pt: Type): Block = {
      val syntheticPrivates = new ListBuffer[Symbol]
      try {
        namer.enterSyms(block0.stats)
        val block = treeCopy.Block(block0, pluginsEnterStats(this, block0.stats), block0.expr)
        for (stat <- block.stats) enterLabelDef(stat)
        if (phaseId(currentPeriod) <= currentRun.typerPhase.id) {
          // This is very tricky stuff, because we are navigating the Scylla and Charybdis of
          // anonymous classes and what to return from them here. On the one hand, we cannot admit
          // every non-private member of an anonymous class as a part of the structural type of the
          // enclosing block. This runs afoul of the restriction that a structural type may not
          // refer to an enclosing type parameter or abstract types (which in turn is necessitated
          // by what can be done in Java reflection). On the other hand, making every term member
          // private conflicts with private escape checking - see ticket #3174 for an example.
          //
          // The cleanest way forward is if we would find a way to suppress structural type checking
          // for these members and maybe defer type errors to the places where members are called.
          // But that would be a big refactoring and also a big departure from existing code. The
          // probably safest fix for 2.8 is to keep members of an anonymous class that are not
          // mentioned in a parent type private (as before) but to disable escape checking for code
          // that's in the same anonymous class. That's what's done here.
          //
          // We really should go back and think hard whether we find a better way to address the
          // problem of escaping idents on the one hand and well-formed structural types on the
          // other.
          block match {
            case Block(List(classDef @ ClassDef(_, _, _, _)), Apply(Select(New(_), _), _)) =>
              val classDecls = classDef.symbol.info.decls
              // the members that pt makes visible; anything else can be hidden
              val visibleMembers = pt match {
                case WildcardType => classDecls.toList
                case BoundedWildcardType(TypeBounds(lo, _)) => lo.members
                case _ => pt.members
              }
              def matchesVisibleMember(member: Symbol) = visibleMembers exists { vis =>
                (member.name == vis.name) &&
                (member.tpe <:< vis.tpe.substThis(vis.owner, classDef.symbol))
              }
              // The block is an anonymous class definitions/instantiation pair
              //   -> members that are hidden by the type of the block are made private
              classDecls foreach { toHide =>
                if (toHide.isTerm
                    && toHide.isPossibleInRefinement
                    && toHide.isPublic
                    && !matchesVisibleMember(toHide)) {
                  (toHide
                    resetFlag (PROTECTED | LOCAL)
                    setFlag (PRIVATE | SYNTHETIC_PRIVATE)
                    setPrivateWithin NoSymbol)
                  // remember so we can undo the privatization in `finally`
                  syntheticPrivates += toHide
                }
              }
            case _ =>
          }
        }
        val statsTyped = typedStats(block.stats, context.owner, warnPure = false)
        val expr1 = typed(block.expr, mode &~ (FUNmode | QUALmode), pt)
        // sanity check block for unintended expr placement
        if (!isPastTyper) {
          val (count, result0, adapted) =
            expr1 match {
              case Block(expr :: Nil, Literal(Constant(()))) => (1, expr, true)
              case Literal(Constant(())) => (0, EmptyTree, false)
              case _ => (1, EmptyTree, false)
            }
          def checkPure(t: Tree, supple: Boolean): Unit =
            if (treeInfo.isPureExprForWarningPurposes(t)) {
              val msg = "a pure expression does nothing in statement position"
              val parens = if (statsTyped.length + count > 1) "multiline expressions might require enclosing parentheses" else ""
              val discard = if (adapted) "; a value can be silently discarded when Unit is expected" else ""
              val text =
                if (supple) s"${parens}${discard}"
                else if (!parens.isEmpty) s"${msg}; ${parens}" else msg
              context.warning(t.pos, text)
            }
          statsTyped.foreach(checkPure(_, supple = false))
          if (result0.nonEmpty) checkPure(result0, supple = true)
        }
        treeCopy.Block(block, statsTyped, expr1)
          .setType(if (treeInfo.isExprSafeToInline(block)) expr1.tpe else expr1.tpe.deconst)
      } finally {
        // enable escaping privates checking from the outside and recycle
        // transient flag
        syntheticPrivates foreach (_ resetFlag SYNTHETIC_PRIVATE)
      }
    }
    /** Type check a single case of a match: pattern against `pattpe`, body
     *  against `pt`. Restores any GADT type bounds recorded while typing the
     *  body, inserting a cast if the body only conformed under those bounds.
     */
    def typedCase(cdef: CaseDef, pattpe: Type, pt: Type): CaseDef = {
      // verify no _* except in last position
      for (Apply(_, xs) <- cdef.pat ; x <- xs dropRight 1 ; if treeInfo isStar x)
        StarPositionInPatternError(x)
      // withoutAnnotations - see continuations-run/z1673.scala
      // This adjustment is awfully specific to continuations, but AFAICS the
      // whole AnnotationChecker framework is.
      val pat1 = typedPattern(cdef.pat, pattpe.withoutAnnotations)
      // When case classes have more than two parameter lists, the pattern ends
      // up typed as a method. We only pattern match on the first parameter
      // list, so substitute the final result type of the method, i.e. the type
      // of the case class.
      if (pat1.tpe.paramSectionCount > 0)
        pat1 modifyType (_.finalResultType)
      // Make sure every pattern binder has an entry in the current scope,
      // creating error symbols for binders that failed to get one.
      for (bind @ Bind(name, _) <- cdef.pat) {
        val sym = bind.symbol
        if (name.toTermName != nme.WILDCARD && sym != null) {
          if (sym == NoSymbol) {
            if (context.scope.lookup(name) == NoSymbol)
              namer.enterInScope(context.owner.newErrorSymbol(name))
          } else
            namer.enterIfNotThere(sym)
        }
      }
      val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree
      else typed(cdef.guard, BooleanTpe)
      var body1: Tree = typed(cdef.body, pt)
      if (context.enclosingCaseDef.savedTypeBounds.nonEmpty) {
        body1 modifyType context.enclosingCaseDef.restoreTypeBounds
        // insert a cast if something typechecked under the GADT constraints,
        // but not in real life (i.e., now that's we've reset the method's type skolems'
        // infos back to their pre-GADT-constraint state)
        if (isFullyDefined(pt) && !(body1.tpe <:< pt)) {
          log(s"Adding cast to pattern because ${body1.tpe} does not conform to expected type $pt")
          body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.dealiasWiden))
        }
      }
      //    body1 = checkNoEscaping.locals(context.scope, pt, body1)
      treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
    }
def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
cases mapConserve { cdef =>
newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt)
}
def adaptCase(cdef: CaseDef, mode: Mode, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
def packedTypes(trees: List[Tree]): List[Type] = trees map (c => packedType(c, context.owner).deconst)
    // takes untyped sub-trees of a match and type checks them
    def typedMatch(selector: Tree, cases: List[CaseDef], mode: Mode, pt: Type, tree: Tree = EmptyTree): Match = {
      val selector1 = checkDead(typedByValueExpr(selector))
      // widen, pack captured locals and skolemize existentials in the scrutinee type
      val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector)
      val casesTyped = typedCases(cases, selectorTp, pt)
      def finish(cases: List[CaseDef], matchType: Type) =
        treeCopy.Match(tree, selector1, cases) setType matchType
      if (isFullyDefined(pt))
        finish(casesTyped, pt)
      else packedTypes(casesTyped) match {
        // no case body needs adaptation when the weak lub equals the lub
        case packed if sameWeakLubAsLub(packed) => finish(casesTyped, lub(packed))
        case packed =>
          val lub = weakLub(packed)
          // adapt each case body to the weak lub (e.g. inserting numeric widenings)
          finish(casesTyped map (adaptCase(_, mode, lub)), lub)
      }
    }
    // match has been typed -- virtualize it during type checking so the full context is available
    def virtualizedMatch(match_ : Match, mode: Mode, pt: Type) = {
      import patmat.{ vpmName, PureMatchTranslator }
      // TODO: add fallback __match sentinel to predef
      // Only look for a __match in scope under -Xexperimental; otherwise skip
      // the (potentially expensive) silent typing of Ident(__match) entirely.
      val matchStrategy: Tree =
        if (!(settings.Xexperimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
        else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match)), reportAmbiguousErrors = false) orElse (_ => null)
      if (matchStrategy ne null) // virtualize
        typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy)).translateMatch(match_), mode, pt)
      else
        match_ // will be translated in phase `patmat`
    }
/** synthesize and type check a PartialFunction implementation based on the match in `tree`
*
* `param => sel match { cases }` becomes:
*
* new AbstractPartialFunction[$argTp, $matchResTp] {
* def applyOrElse[A1 <: $argTp, B1 >: $matchResTp]($param: A1, default: A1 => B1): B1 =
* $selector match { $cases }
* def isDefinedAt(x: $argTp): Boolean =
* $selector match { $casesTrue }
* }
*
* TODO: it would be nicer to generate the tree specified above at once and type it as a whole,
* there are two gotchas:
* - matchResTp may not be known until we've typed the match (can only use resTp when it's fully defined),
* - if we typed the match in isolation first, you'd know its result type, but would have to re-jig the owner structure
* - could we use a type variable for matchResTp and backpatch it?
* - occurrences of `this` in `cases` or `sel` must resolve to the this of the class originally enclosing the match,
* not of the anonymous partial function subclass
*
* an alternative TODO: add partial function AST node or equivalent and get rid of this synthesis --> do everything in uncurry (or later)
* however, note that pattern matching codegen is designed to run *before* uncurry
*/
    def synthesizePartialFunction(paramName: TermName, paramPos: Position, paramSynthetic: Boolean,
                                  tree: Tree, mode: Mode, pt: Type): Tree = {
      assert(pt.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt.")
      val targs = pt.dealiasWiden.typeArgs
      // if targs.head isn't fully defined, we can't translate --> error
      targs match {
        case argTp :: _ if isFullyDefined(argTp) => // ok
        case _ => // uh-oh
          MissingParameterTypeAnonMatchError(tree, pt)
          return setError(tree)
      }
      // NOTE: resTp still might not be fully defined
      val argTp :: resTp :: Nil = targs
      // targs must conform to Any for us to synthesize an applyOrElse (fallback to apply otherwise -- typically for @cps annotated targs)
      val targsValidParams = targs forall (_ <:< AnyTpe)
      val anonClass = context.owner newAnonymousFunctionClass tree.pos addAnnotation SerialVersionUIDAnnotation
      import CODE._
      val Match(sel, cases) = tree
      // need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up
      // (casesTrue: same patterns/guards as `cases`, but every body replaced by `true` -- used by isDefinedAt)
      val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE)).duplicate.asInstanceOf[CaseDef])
      // must generate a new tree every time
      def selector(paramSym: Symbol): Tree = gen.mkUnchecked(
        if (sel != EmptyTree) sel.duplicate
        else atPos(tree.pos.focusStart)(
          // SI-6925: subsume type of the selector to `argTp`
          // we don't want/need the match to see the `A1` type that we must use for variance reasons in the method signature
          //
          // this failed: replace `selector` by `Typed(selector, TypeTree(argTp))` -- as it's an upcast, this should never fail,
          //   `(x: A1): A` doesn't always type check, even though `A1 <: A`, due to singleton types (test/files/pos/t4269.scala)
          // hence the cast, which will be erased in posterasure
          // (the cast originally caused  extremely weird types to show up
          //  in test/scaladoc/run/SI-5933.scala because `variantToSkolem` was missing `tpSym.initialize`)
          gen.mkCastPreservingAnnotations(Ident(paramSym), argTp)
        ))
      // fresh value parameter for one of the synthesized methods
      def mkParam(methodSym: Symbol, tp: Type = argTp) =
        methodSym.newValueParameter(paramName, paramPos.focus, SYNTHETIC) setInfo tp
      // `case _ => body`, transparently positioned at the match tree
      def mkDefaultCase(body: Tree) =
        atPos(tree.pos.makeTransparent) {
          CaseDef(Bind(nme.DEFAULT_CASE, Ident(nme.WILDCARD)), body)
        }
      // `def applyOrElse[A1 <: $argTp, B1 >: $matchResTp](x: A1, default: A1 => B1): B1 =
      //  ${`$selector match { $cases; case default$ => default(x) }`
      def applyOrElseMethodDef = {
        val methodSym = anonClass.newMethod(nme.applyOrElse, tree.pos, FINAL | OVERRIDE)
        // create the parameter that corresponds to the function's parameter
        val A1 = methodSym newTypeParameter (newTypeName("A1")) setInfo TypeBounds.upper(argTp)
        val x = mkParam(methodSym, A1.tpe)
        // applyOrElse's default parameter:
        val B1 = methodSym newTypeParameter (newTypeName("B1")) setInfo TypeBounds.empty
        val default = methodSym newValueParameter (newTermName("default"), tree.pos.focus, SYNTHETIC) setInfo functionType(List(A1.tpe), B1.tpe)
        val paramSyms = List(x, default)
        methodSym setInfo genPolyType(List(A1, B1), MethodType(paramSyms, B1.tpe))
        val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
        if (!paramSynthetic) methodBodyTyper.context.scope enter x
        // First, type without the default case; only the cases provided
        // by the user are typed. The LUB of these becomes `B`, the lower
        // bound of `B1`, which in turn is the result type of the default
        // case
        val match0 = methodBodyTyper.typedMatch(selector(x), cases, mode, resTp)
        val matchResTp = match0.tpe
        B1 setInfo TypeBounds.lower(matchResTp) // patch info
        // the default uses applyOrElse's first parameter since the scrut's type has been widened
        val match_ = {
          val cdef = mkDefaultCase(methodBodyTyper.typed1(REF(default) APPLY (REF(x)), mode, B1.tpe).setType(B1.tpe))
          val List(defaultCase) = methodBodyTyper.typedCases(List(cdef), argTp, B1.tpe)
          treeCopy.Match(match0, match0.selector, match0.cases :+ defaultCase)
        }
        match_ setType B1.tpe
        // SI-6187 Do you really want to know? Okay, here's what's going on here.
        //
        //         Well behaved trees satisfy the property:
        //
        //         typed(tree) == typed(resetAttrs(typed(tree))
        //
        //         Trees constructed without low-level symbol manipulation get this for free;
        //         references to local symbols are cleared by `ResetAttrs`, but bind to the
        //         corresponding symbol in the re-typechecked tree. But PartialFunction synthesis
        //         doesn't play by these rules.
        //
        //         During typechecking of method bodies, references to method type parameter from
        //         the declared types of the value parameters should bind to a fresh set of skolems,
        //         which have been entered into scope by `Namer#methodSig`. A comment therein:
        //
        //         "since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams"
        //
        //         But, if we retypecheck the reset `applyOrElse`, the TypeTree of the `default`
        //         parameter contains no type. Somehow (where?!) it recovers a type that is _almost_ okay:
        //         `A1 => B1`. But it should really be `A1&0 => B1&0`. In the test, run/t6187.scala, this
        //         difference results in a type error, as `default.apply(x)` types as `B1`, which doesn't
        //         conform to the required `B1&0`
        //
        //         I see three courses of action.
        //
        //         1) synthesize a `asInstanceOf[B1]` below (I tried this first. But... ewwww.)
        //         2) install an 'original' TypeTree that will used after ResetAttrs (the solution below)
        //         3) Figure out how the almost-correct type is recovered on re-typechecking, and
        //            substitute in the skolems.
        //
        //         For 2.11, we'll probably shift this transformation back a phase or two, so macros
        //         won't be affected. But in any case, we should satisfy retypecheckability.
        //
        val originals: Map[Symbol, Tree] = {
          def typedIdent(sym: Symbol) = methodBodyTyper.typedType(Ident(sym), mode)
          val A1Tpt = typedIdent(A1)
          val B1Tpt = typedIdent(B1)
          Map(
            x -> A1Tpt,
            default -> gen.scalaFunctionConstr(List(A1Tpt), B1Tpt)
          )
        }
        def newParam(param: Symbol): ValDef = {
          val vd = ValDef(param, EmptyTree)
          val tt @ TypeTree() = vd.tpt
          tt setOriginal (originals(param) setPos param.pos.focus)
          vd
        }
        val rhs = methodBodyTyper.virtualizedMatch(match_, mode, B1.tpe)
        val defdef = newDefDef(methodSym, rhs)(vparamss = mapParamss(methodSym)(newParam), tpt = TypeTree(B1.tpe))
        (defdef, matchResTp)
      }
      // `def isDefinedAt(x: $argTp): Boolean = ${`$selector match { $casesTrue; case default$ => false } }`
      def isDefinedAtMethod = {
        val methodSym = anonClass.newMethod(nme.isDefinedAt, tree.pos.makeTransparent, FINAL)
        val paramSym = mkParam(methodSym)
        val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
        if (!paramSynthetic) methodBodyTyper.context.scope enter paramSym
        methodSym setInfo MethodType(List(paramSym), BooleanTpe)
        val defaultCase = mkDefaultCase(FALSE)
        val match_ = methodBodyTyper.typedMatch(selector(paramSym), casesTrue :+ defaultCase, mode, BooleanTpe)
        DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanTpe))
      }
      // only used for @cps annotated partial functions
      // `def apply(x: $argTp): $matchResTp = $selector match { $cases }`
      def applyMethod = {
        val methodSym = anonClass.newMethod(nme.apply, tree.pos, FINAL | OVERRIDE)
        val paramSym = mkParam(methodSym)
        methodSym setInfo MethodType(List(paramSym), AnyTpe)
        val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
        if (!paramSynthetic) methodBodyTyper.context.scope enter paramSym
        val match_ = methodBodyTyper.typedMatch(selector(paramSym), cases, mode, resTp)
        val matchResTp = match_.tpe
        methodSym setInfo MethodType(List(paramSym), matchResTp) // patch info
        (DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, matchResTp)), matchResTp)
      }
      def parents(resTp: Type) = addSerializable(appliedType(AbstractPartialFunctionClass.typeConstructor, List(argTp, resTp)))
      val members = {
        val (applyMeth, matchResTp) = {
          // rig the show so we can get started typing the method body -- later we'll correct the infos...
          // targs were type arguments for PartialFunction, so we know they will work for AbstractPartialFunction as well
          anonClass setInfo ClassInfoType(parents(resTp), newScope, anonClass)
          // somehow @cps annotations upset the typer when looking at applyOrElse's signature, but not apply's
          // TODO: figure out the details (T @cps[U] is not a subtype of Any, but then why does it work for the apply method?)
          if (targsValidParams) applyOrElseMethodDef
          else applyMethod
        }
        // patch info to the class's definitive info
        anonClass setInfo ClassInfoType(parents(matchResTp), newScope, anonClass)
        List(applyMeth, isDefinedAtMethod)
      }
      members foreach (m => anonClass.info.decls enter m.symbol)
      val typedBlock = typedPos(tree.pos, mode, pt) {
        Block(ClassDef(anonClass, NoMods, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
          Apply(Select(New(Ident(anonClass.name).setSymbol(anonClass)), nme.CONSTRUCTOR), List())
        ))
      }
      if (typedBlock.isErrorTyped) typedBlock
      else // Don't leak implementation details into the type, see SI-6575
        typedPos(tree.pos, mode, pt) {
          Typed(typedBlock, TypeTree(typedBlock.tpe baseType PartialFunctionClass))
        }
    }
/** Synthesize and type check the implementation of a type with a Single Abstract Method.
*
* Based on a type checked Function node `{ (p1: T1, ..., pN: TN) => body } : S`
* where `S` is the expected type that defines a single abstract method (call it `apply` for the example),
* that has signature `(p1: T1', ..., pN: TN'): T'`, synthesize the instantiation of the following anonymous class
*
* ```
* new S {
* def apply$body(p1: T1, ..., pN: TN): T = body
* def apply(p1: T1', ..., pN: TN'): T' = apply$body(p1,..., pN)
* }
* ```
*
* The `apply` method is identified by the argument `sam`; `S` corresponds to the argument `pt`,
* If `pt` is not fully defined, we derive `samClassTpFullyDefined` by inferring any unknown type parameters.
*
* The types T1' ... TN' and T' are derived from the method signature of the sam method,
* as seen from the fully defined `samClassTpFullyDefined`.
*
* The function's body is put in a (static) method in the class definition to enforce scoping.
* S's members should not be in scope in `body`. (Putting it in the block outside the class runs into implementation problems described below)
*
* The restriction on implicit arguments (neither S's constructor, nor sam may take an implicit argument list),
* is to keep the implementation of type inference (the computation of `samClassTpFullyDefined`) simple.
*
* Impl notes:
* - `fun` has a FunctionType, but the expected type `pt` is some SAM type -- let's remedy that
* - `fun` is fully attributed, so we'll have to wrangle some symbols into shape (owner change, vparam syms)
* - after experimentation, it works best to type check function literals fully first and then adapt to a sam type,
     *    as opposed to sam-specific code paths earlier on in type checking (in typedFunction).
* For one, we want to emit the same bytecode regardless of whether the expected
* function type is a built-in FunctionN or some SAM type
*
*/
    /** If `fun` is a Function literal and `pt` is a SAM type of matching arity,
     *  infer a fully defined SAM type for it (solving for the SAM class's
     *  unknown type parameters if needed); returns `SAMFunction(NoType, ...)`
     *  when no suitable SAM applies or inference fails.
     */
    def inferSamType(fun: Tree, pt: Type, mode: Mode): SAMFunction = {
      val sam =
        if (fun.isInstanceOf[Function] && !isFunctionType(pt)) {
          // TODO: can we ensure there's always a SAMFunction attachment, instead of looking up the sam again???
          // seems like overloading complicates things?
          val sam = samOf(pt)
          if (samMatchesFunctionBasedOnArity(sam, fun.asInstanceOf[Function].vparams)) sam
          else NoSymbol
        } else NoSymbol
      // does `pt` fully determine the sam method's signature, and does `fun` conform to it?
      def fullyDefinedMeetsExpectedFunTp(pt: Type): Boolean = isFullyDefined(pt) && {
        val samMethType = pt memberInfo sam
        fun.tpe <:< functionType(samMethType.paramTypes, samMethType.resultType)
      }
      SAMFunction(
        if (!sam.exists) NoType
        else if (fullyDefinedMeetsExpectedFunTp(pt)) pt
        else try {
          val samClassSym = pt.typeSymbol
          // we're trying to fully define the type arguments for this type constructor
          val samTyCon = samClassSym.typeConstructor
          // the unknowns
          val tparams = samClassSym.typeParams
          // ... as typevars
          val tvars = tparams map freshVar
          val ptVars = appliedType(samTyCon, tvars)
          // carry over info from pt
          ptVars <:< pt
          val samInfoWithTVars = ptVars.memberInfo(sam)
          // use function type subtyping, not method type subtyping (the latter is invariant in argument types)
          fun.tpe <:< functionType(samInfoWithTVars.paramTypes, samInfoWithTVars.finalResultType)
          val variances = tparams map varianceInType(sam.info)
          // solve constraints tracked by tvars
          val targs = solvedTypes(tvars, tparams, variances, upper = false, lubDepth(sam.info :: Nil))
          debuglog(s"sam infer: $pt --> ${appliedType(samTyCon, targs)} by ${fun.tpe} <:< $samInfoWithTVars --> $targs for $tparams")
          val ptFullyDefined = appliedType(samTyCon, targs)
          if (ptFullyDefined <:< pt && fullyDefinedMeetsExpectedFunTp(ptFullyDefined)) {
            debuglog(s"sam fully defined expected type: $ptFullyDefined from $pt for ${fun.tpe}")
            ptFullyDefined
          } else {
            debuglog(s"Could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)")
            NoType
          }
        } catch {
          // inference may fail with NoInstance or a TypeError; fall back to NoType
          case e@(_: NoInstance | _: TypeError) =>
            debuglog(s"Error during SAM synthesis: could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)\n$e")
            NoType
        }, sam)
    }
/** Type check a function literal.
*
* Based on the expected type pt, potentially synthesize an instance of
* - PartialFunction,
* - a type with a Single Abstract Method (under -Xexperimental for now).
*/
    private def typedFunction(fun: Function, mode: Mode, pt: Type): Tree = {
      val vparams = fun.vparams
      val numVparams = vparams.length
      val FunctionSymbol =
        if (numVparams > definitions.MaxFunctionArity) NoSymbol
        else FunctionClass(numVparams)
      val ptSym = pt.typeSymbol
      /* The Single Abstract Member of pt, unless pt is the built-in function type of the expected arity,
       * as `(a => a): Int => Int` should not (yet) get the sam treatment.
       */
      val sam =
        if (ptSym == NoSymbol || ptSym == FunctionSymbol || ptSym == PartialFunctionClass) NoSymbol
        else samOf(pt)
      /* The SAM case comes first so that this works:
       *   abstract class MyFun extends (Int => Int)
       *   (a => a): MyFun
       *
       * Note that the arity of the sam must correspond to the arity of the function.
       * TODO: handle vararg sams?
       */
      val ptNorm =
        if (samMatchesFunctionBasedOnArity(sam, vparams)) samToFunctionType(pt, sam)
        else pt
      // expected argument types and result type, derived from the (normalized) expected function type
      val (argpts, respt) =
        ptNorm baseType FunctionSymbol match {
          case TypeRef(_, FunctionSymbol, args :+ res) => (args, res)
          case _ => (vparams map (if (pt == ErrorType) (_ => ErrorType) else (_ => NoType)), WildcardType)
        }
      if (!FunctionSymbol.exists) MaxFunctionArityError(fun)
      else if (argpts.lengthCompare(numVparams) != 0) WrongNumberOfParametersError(fun, argpts)
      else {
        val paramsMissingType = mutable.ArrayBuffer.empty[ValDef] //.sizeHint(numVparams) probably useless, since initial size is 16 and max fun arity is 22
        // first, try to define param types from expected function's arg types if needed
        foreach2(vparams, argpts) { (vparam, argpt) =>
          if (vparam.tpt.isEmpty) {
            if (isFullyDefined(argpt)) vparam.tpt setType argpt
            else paramsMissingType += vparam
            if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus
          }
        }
        // If we're typing `(a1: T1, ..., aN: TN) => m(a1,..., aN)`, where some Ti are not fully defined,
        // type `m` directly (undoing eta-expansion of method m) to determine the argument types.
        // This tree is the result from one of:
        //   - manual eta-expansion with named arguments (x => f(x));
        //   - wildcard-style eta expansion (`m(_, _,)`);
        //   - instantiateToMethodType adapting a tree of method type to a function type using etaExpand.
        //
        // Note that method values are a separate thing (`m _`): they have the idiosyncratic shape
        // of `Typed(expr, Function(Nil, EmptyTree))`
        val ptUnrollingEtaExpansion =
          if (paramsMissingType.nonEmpty && pt != ErrorType) fun.body match {
            // we can compare arguments and parameters by name because there cannot be a binder between
            // the function's valdefs and the Apply's arguments
            case Apply(meth, args) if (vparams corresponds args) { case (p, Ident(name)) => p.name == name case _ => false } =>
              // We're looking for a method (as indicated by FUNmode in the silent typed below),
              // so let's make sure our expected type is a MethodType
              val methArgs = NoSymbol.newSyntheticValueParams(argpts map { case NoType => WildcardType case tp => tp })
              silent(_.typed(meth, mode.forFunMode, MethodType(methArgs, respt))) filter (isMonoContext) map { methTyped =>
                // if context.undetparams is not empty, the method was polymorphic,
                // so we need the missing arguments to infer its type. See #871
                val funPt = normalize(methTyped.tpe) baseType FunctionClass(numVparams)
                // println(s"typeUnEtaExpanded $meth : ${methTyped.tpe} --> normalized: $funPt")
                // If we are sure this function type provides all the necessary info, so that we won't have
                // any undetermined argument types, go ahead and recurse below (`typedFunction(fun, mode, ptUnrollingEtaExpansion)`)
                // and rest assured we won't end up right back here (and keep recursing)
                if (isFunctionType(funPt) && funPt.typeArgs.iterator.take(numVparams).forall(isFullyDefined)) funPt
                else null
              } orElse { _ => null }
            case _ => null
          } else null
        if (ptUnrollingEtaExpansion ne null) typedFunction(fun, mode, ptUnrollingEtaExpansion)
        else {
          // we ran out of things to try, missing parameter types are an irrevocable error
          var issuedMissingParameterTypeError = false
          paramsMissingType.foreach { vparam =>
            vparam.tpt setType ErrorType
            MissingParameterTypeError(fun, vparam, pt, withTupleAddendum = !issuedMissingParameterTypeError)
            issuedMissingParameterTypeError = true
          }
          fun.body match {
            // translate `x => x match { <cases> }` : PartialFunction to
            // `new PartialFunction { def applyOrElse(x, default) = x match { <cases> } def isDefinedAt(x) = ... }`
            case Match(sel, cases) if (sel ne EmptyTree) && (pt.typeSymbol == PartialFunctionClass) =>
              // go to outer context -- must discard the context that was created for the Function since we're discarding the function
              // thus, its symbol, which serves as the current context.owner, is not the right owner
              // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner)
              val outerTyper = newTyper(context.outer)
              val p = vparams.head
              if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe
              outerTyper.synthesizePartialFunction(p.name, p.pos, paramSynthetic = false, fun.body, mode, pt)
            case _ =>
              // regular function literal: enter params, type the body, assemble FunctionN type
              val vparamSyms = vparams map { vparam =>
                enterSym(context, vparam)
                if (context.retyping) context.scope enter vparam.symbol
                vparam.symbol
              }
              val vparamsTyped = vparams mapConserve typedValDef
              val formals = vparamSyms map (_.tpe)
              val body1 = typed(fun.body, respt)
              val restpe = packedType(body1, fun.symbol).deconst.resultType
              val funtpe = phasedAppliedType(FunctionSymbol, formals :+ restpe)
              treeCopy.Function(fun, vparamsTyped, body1) setType funtpe
          }
        }
      }
    }
    /** Type check a refinement template.
     *
     *  The statements are entered now, but their typing is deferred via
     *  `unit.toCheck` to avoid cyclic reference errors; the typed statements
     *  are stashed in a CompoundTypeTreeOriginalAttachment for later reification.
     */
    def typedRefinement(templ: Template) {
      val stats = templ.body
      namer.enterSyms(stats)
      // need to delay rest of typedRefinement to avoid cyclic reference errors
      unit.toCheck += { () =>
        val stats1 = typedStats(stats, NoSymbol)
        // this code kicks in only after typer, so `stats` will never be filled in time
        // as a result, most of compound type trees with non-empty stats will fail to reify
        // todo. investigate whether something can be done about this
        val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil))
        templ.removeAttachment[CompoundTypeTreeOriginalAttachment]
        templ updateAttachment att.copy(stats = stats1)
        // flag overriding members so later phases see the OVERRIDE bit
        for (stat <- stats1 if stat.isDef && stat.symbol.isOverridingSymbol)
          stat.symbol setFlag OVERRIDE
      }
    }
def typedImport(imp : Import) : Import = (transformed remove imp) match {
case Some(imp1: Import) => imp1
case _ => log("unhandled import: "+imp+" in "+unit); imp
}
    /** Type check a sequence of statements appearing in a block, template,
     *  or refinement.
     *
     *  After typing, double definitions are rejected (except inside package
     *  classes, whose decls are open) and synthetic trees (default getters,
     *  companion objects, ...) are spliced in next to the definitions they
     *  belong to.
     *
     *  @param stats     the statements to type check
     *  @param exprOwner the owner of expression statements (== context.owner when typing a block)
     *  @param warnPure  if true, warn when a pure expression appears in statement position
     *  @return the typed statements, with synthetics added
     */
    def typedStats(stats: List[Tree], exprOwner: Symbol, warnPure: Boolean = true): List[Tree] = {
      val inBlock = exprOwner == context.owner
      def includesTargetPos(tree: Tree) =
        tree.pos.isRange && context.unit.exists && (tree.pos includes context.unit.targetPos)
      val localTarget = stats exists includesTargetPos
      def typedStat(stat: Tree): Tree = stat match {
        case s if context.owner.isRefinementClass && !treeInfo.isDeclarationOrTypeDef(s) => OnlyDeclarationsError(s)
        case imp @ Import(_, _) =>
          imp.symbol.initialize
          if (!imp.symbol.isError) {
            // subsequent statements are typed in a context that sees this import
            context = context.make(imp)
            typedImport(imp)
          } else EmptyTree
        // skip typechecking of statements in a sequence where some other statement includes the targetposition
        case s if localTarget && !includesTargetPos(s) => s
        case _ =>
          val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) this
                           else newTyper(context.make(stat, exprOwner))
          // XXX this creates a spurious dead code warning if an exception is thrown
          // in a constructor, even if it is the only thing in the constructor.
          val result = checkDead(localTyper.typedByValueExpr(stat))
          if (treeInfo.isSelfOrSuperConstrCall(result)) {
            context.inConstructorSuffix = true
            if (treeInfo.isSelfConstrCall(result)) {
              if (result.symbol == exprOwner.enclMethod)
                ConstructorRecursesError(stat)
              else if (result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0))
                ConstructorsOrderError(stat)
            }
          }
          if (warnPure && !isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) {
            val msg = "a pure expression does nothing in statement position"
            val clause = if (stats.lengthCompare(1) > 0) "; multiline expressions may require enclosing parentheses" else ""
            context.warning(stat.pos, s"${msg}${clause}")
          }
          result
      }
      // TODO: adapt to new trait field encoding, figure out why this exemption is made
      // 'accessor' and 'accessed' are so similar it becomes very difficult to
      //follow the logic, so I renamed one to something distinct.
      def accesses(looker: Symbol, accessed: Symbol) = accessed.isLocalToThis && (
           (accessed.isParamAccessor)
        || (looker.hasAccessorFlag && !accessed.hasAccessorFlag && accessed.isPrivate)
      )
      // Reject two matching member definitions in the same scope (spec 5.1.4).
      def checkNoDoubleDefs: Unit = {
        val scope = if (inBlock) context.scope else context.owner.info.decls
        var e = scope.elems
        while ((e ne null) && e.owner == scope) {
          var e1 = scope.lookupNextEntry(e)
          while ((e1 ne null) && e1.owner == scope) {
            val sym = e.sym
            val sym1 = e1.sym
            /** From the spec (refchecks checks other conditions regarding erasing to the same type and default arguments):
             *
             *  A block expression [... its] statement sequence may not contain two definitions or
             *  declarations that bind the same name --> `inBlock`
             *
             *  It is an error if a template directly defines two matching members.
             *
             *  A member definition $M$ _matches_ a member definition $M'$, if $M$ and $M'$ bind the same name,
             *  and one of following holds:
             *    1. Neither $M$ nor $M'$ is a method definition.
             *    2. $M$ and $M'$ define both monomorphic methods with equivalent argument types.
             *    3. $M$ defines a parameterless method and $M'$ defines a method with an empty parameter list `()` or _vice versa_.
             *    4. $M$ and $M'$ define both polymorphic methods with equal number of argument types $\\overline T$, $\\overline T'$
             *       and equal numbers of type parameters $\\overline t$, $\\overline t'$, say,
             *       and $\\overline T' = [\\overline t'/\\overline t]\\overline T$.
             */
            if (!(accesses(sym, sym1) || accesses(sym1, sym))  // TODO: does this purely defer errors until later?
                && (inBlock || !(sym.isMethod || sym1.isMethod) || (sym.tpe matches sym1.tpe))
                // default getters are defined twice when multiple overloads have defaults.
                // The error for this is deferred until RefChecks.checkDefaultsInOverloaded
                && (!sym.isErroneous && !sym1.isErroneous && !sym.hasDefault &&
                    !sym.hasAnnotation(BridgeClass) && !sym1.hasAnnotation(BridgeClass))) {
              log("Double definition detected:\\n  " +
                  ((sym.getClass, sym.info, sym.ownerChain)) + "\\n  " +
                  ((sym1.getClass, sym1.info, sym1.ownerChain)))
              DefDefinedTwiceError(sym, sym1)
              scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779
            }
            e1 = scope.lookupNextEntry(e1)
          }
          e = e.next
        }
      }
      // Type and splice in pending synthetic trees; typing a synthetic may
      // generate further synthetics, hence the fixed-point loop.
      def addSynthetics(stats: List[Tree]): List[Tree] = {
        val scope = if (inBlock) context.scope else context.owner.info.decls
        var newStats = new ListBuffer[Tree]
        var moreToAdd = true
        while (moreToAdd) {
          val initElems = scope.elems
          // SI-5877 The decls of a package include decls of the package object. But we don't want to add
          // the corresponding synthetics to the package class, only to the package object class.
          def shouldAdd(sym: Symbol) =
            inBlock || !context.isInPackageObject(sym, context.owner)
          for (sym <- scope)
            for (tree <- context.unit.synthetics get sym if shouldAdd(sym)) { // OPT: shouldAdd is usually true. Call it here, rather than in the outer loop
              newStats += typedStat(tree) // might add even more synthetics to the scope
              context.unit.synthetics -= sym
            }
          // the type completer of a synthetic might add more synthetics. example: if the
          // factory method of a case class (i.e. the constructor) has a default.
          moreToAdd = scope.elems ne initElems
        }
        if (newStats.isEmpty) stats
        else {
          // put default getters next to the method they belong to,
          // same for companion objects. fixes #2489 and #4036.
          // [Martin] This is pretty ugly. I think we could avoid
          // this code by associating defaults and companion objects
          // with the original tree instead of the new symbol.
          def matches(stat: Tree, synt: Tree) = (stat, synt) match {
            // synt is default arg for stat
            case (DefDef(_, statName, _, _, _, _), DefDef(mods, syntName, _, _, _, _)) =>
              mods.hasDefault && syntName.toString.startsWith(statName.toString)
            // synt is companion module
            case (ClassDef(_, className, _, _), ModuleDef(_, moduleName, _)) =>
              className.toTermName == moduleName
            // synt is implicit def for implicit class (#6278)
            case (ClassDef(cmods, cname, _, _), DefDef(dmods, dname, _, _, _, _)) =>
              cmods.isImplicit && dmods.isImplicit && cname.toTermName == dname
            // ValDef and Accessor
            case (ValDef(_, cname, _, _), DefDef(_, dname, _, _, _, _)) =>
              cname.getterName == dname.getterName
            case _ => false
          }
          def matching(stat: Tree): List[Tree] = {
            val (pos, neg) = newStats.partition(synt => matches(stat, synt))
            newStats = neg
            pos.toList
          }
          (stats foldRight List[Tree]())((stat, res) => {
            stat :: matching(stat) ::: res
          }) ::: newStats.toList
        }
      }
      val stats1 = stats mapConserve typedStat
      if (phase.erasedTypes) stats1
      else {
        // As packages are open, it doesn't make sense to check double definitions here. Furthermore,
        // it is expensive if the package is large. Instead, such double definitions are checked in `Namers.enterInScope`
        if (!context.owner.isPackageClass)
          checkNoDoubleDefs
        addSynthetics(stats1)
      }
    }
def typedArg(arg: Tree, mode: Mode, newmode: Mode, pt: Type): Tree = {
val typedMode = mode.onlySticky | newmode
val t = withCondConstrTyper(mode.inSccMode)(_.typed(arg, typedMode, pt))
checkDead.inMode(typedMode, t)
}
def typedArgs(args: List[Tree], mode: Mode) =
args mapConserve (arg => typedArg(arg, mode, NOmode, WildcardType))
/** Does function need to be instantiated, because a missing parameter
* in an argument closure overlaps with an uninstantiated formal?
*/
def needsInstantiation(tparams: List[Symbol], formals: List[Type], args: List[Tree]) = {
def isLowerBounded(tparam: Symbol) = !tparam.info.bounds.lo.typeSymbol.isBottomClass
exists2(formals, args) {
case (formal, Function(vparams, _)) =>
(vparams exists (_.tpt.isEmpty)) &&
vparams.length <= MaxFunctionArity &&
(formal baseType FunctionClass(vparams.length) match {
case TypeRef(_, _, formalargs) =>
( exists2(formalargs, vparams)((formal, vparam) =>
vparam.tpt.isEmpty && (tparams exists formal.contains))
&& (tparams forall isLowerBounded)
)
case _ =>
false
})
case _ =>
false
}
}
/** Is `tree` a block created by a named application?
*/
def isNamedApplyBlock(tree: Tree) =
context.namedApplyBlockInfo exists (_._1 == tree)
def callToCompanionConstr(context: Context, calledFun: Symbol) = {
calledFun.isConstructor && {
val methCtx = context.enclMethod
(methCtx != NoContext) && {
val contextFun = methCtx.tree.symbol
contextFun.isPrimaryConstructor && contextFun.owner.isModuleClass &&
companionSymbolOf(calledFun.owner, context).moduleClass == contextFun.owner
}
}
}
    /** Type check the application `fun0(args)` represented by `tree`.
     *
     *  Dispatches on the type of the (possibly pre-filtered) function:
     *  overloaded alternatives, polymorphic-signature methods, plain method
     *  types (including named/default arguments and tuple conversion),
     *  stable types (widened and retried), and unapply patterns.
     *
     *  @param tree the full application tree (for position and copying)
     *  @param fun0 the function part, prior to overload pre-selection
     *  @param args the untyped argument trees
     *  @param mode the type-checking mode (pattern vs expression, etc.)
     *  @param pt   the expected type of the application
     */
    def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
      // TODO_NMT: check the assumption that args nonEmpty
      def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
      def duplErrorTree(err: AbsTypeError) = { context.issue(err); duplErrTree }
      def preSelectOverloaded(fun: Tree): Tree = {
        if (fun.hasSymbolField && fun.symbol.isOverloaded) {
          // remove alternatives with wrong number of parameters without looking at types.
          // less expensive than including them in inferMethodAlternative (see below).
          def shapeType(arg: Tree): Type = arg match {
            case Function(vparams, body) =>
              // No need for phasedAppliedType, as we don't get here during erasure --
              // overloading resolution happens during type checking.
              // During erasure, the condition above (fun.symbol.isOverloaded) is false.
              functionType(vparams map (_ => AnyTpe), shapeType(body))
            case AssignOrNamedArg(Ident(name), rhs) =>
              NamedType(name, shapeType(rhs))
            case _ =>
              NothingTpe
          }
          val argtypes = args map shapeType
          val pre = fun.symbol.tpe.prefix
          var sym = fun.symbol filter { alt =>
            // must use pt as expected type, not WildcardType (a tempting quick fix to #2665)
            // now fixed by using isWeaklyCompatible in exprTypeArgs
            // TODO: understand why exactly -- some types were not inferred anymore (`ant clean quick.bin` failed)
            // (I had expected inferMethodAlternative to pick up the slack introduced by using WildcardType here)
            //
            // @PP responds: I changed it to pass WildcardType instead of pt and only one line in
            // trunk (excluding scalacheck, which had another) failed to compile. It was this line in
            // Types: "refs = Array(Map(), Map())".  I determined that inference fails if there are at
            // least two invariant type parameters. See the test case I checked in to help backstop:
            // pos/isApplicableSafe.scala.
            isApplicableSafe(context.undetparams, followApply(pre memberType alt), argtypes, pt)
          }
          if (sym.isOverloaded) {
            // eliminate functions that would result from tupling transforms
            // keeps alternatives with repeated params
            val sym1 = sym filter (alt =>
                 isApplicableBasedOnArity(pre memberType alt, argtypes.length, varargsStar = false, tuplingAllowed = false)
              || alt.tpe.params.exists(_.hasDefault)
            )
            if (sym1 != NoSymbol) sym = sym1
          }
          if (sym == NoSymbol) fun
          else adaptAfterOverloadResolution(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode)
        } else fun
      }
      val fun = preSelectOverloaded(fun0)
      val argslen = args.length
      fun.tpe match {
        case OverloadedType(pre, alts) =>
          def handleOverloaded = {
            val undetparams = context.undetparams
            def funArgTypes(tps: List[Type]) = tps.map { tp =>
              val relTp = tp.asSeenFrom(pre, fun.symbol.owner)
              val argTps = functionOrSamArgTypes(relTp)
              //println(s"funArgTypes $argTps from $relTp")
              argTps.map(approximateAbstracts)
            }
            def functionProto(argTps: List[Type]): Type =
              try functionType(funArgTypes(argTps).transpose.map(lub), WildcardType)
              catch { case _: IllegalArgumentException => WildcardType }
            // To propagate as much information as possible to typedFunction, which uses the expected type to
            // infer missing parameter types for Function trees that we're typing as arguments here,
            // we expand the parameter types for all alternatives to the expected argument length,
            // then transpose to get a list of alternative argument types (push down the overloading to the arguments).
            // Thus, for each `arg` in `args`, the corresponding `argPts` in `altArgPts` is a list of expected types
            // for `arg`. Depending on which overload is picked, only one of those expected types must be met, but
            // we're in the process of figuring that out, so we'll approximate below by normalizing them to function types
            // and lubbing the argument types (we treat SAM and FunctionN types equally, but non-function arguments
            // do not receive special treatment: they are typed under WildcardType.)
            val altArgPts =
              if (settings.isScala212 && args.exists(treeInfo.isFunctionMissingParamType))
                try alts.map(alt => formalTypes(alt.info.paramTypes, argslen)).transpose // do least amount of work up front
                catch { case _: IllegalArgumentException => args.map(_ => Nil) } // fail safe in case formalTypes fails to align to argslen
              else args.map(_ => Nil) // will type under argPt == WildcardType
            val (args1, argTpes) = context.savingUndeterminedTypeParams() {
              val amode = forArgMode(fun, mode)
              map2(args, altArgPts) { (arg, argPts) =>
                def typedArg0(tree: Tree) = {
                  // if we have an overloaded HOF such as `(f: Int => Int)Int <and> (f: Char => Char)Char`,
                  // and we're typing a function like `x => x` for the argument, try to collapse
                  // the overloaded type into a single function type from which `typedFunction`
                  // can derive the argument type for `x` in the function literal above
                  val argPt =
                    if (argPts.nonEmpty && treeInfo.isFunctionMissingParamType(tree)) functionProto(argPts)
                    else WildcardType
                  val argTyped = typedArg(tree, amode, BYVALmode, argPt)
                  (argTyped, argTyped.tpe.deconst)
                }
                arg match {
                  // SI-8197/SI-4592 call for checking whether this named argument could be interpreted as an assign
                  // infer.checkNames must not use UnitType: it may not be a valid assignment, or the setter may return another type from Unit
                  // TODO: just make it an error to refer to a non-existent named arg, as it's far more likely to be
                  //       a typo than an assignment passed as an argument
                  case AssignOrNamedArg(lhs @ Ident(name), rhs) =>
                    // named args: only type the righthand sides ("unknown identifier" errors otherwise)
                    // the assign is untyped; that's ok because we call doTypedApply
                    typedArg0(rhs) match {
                      case (rhsTyped, tp) => (treeCopy.AssignOrNamedArg(arg, lhs, rhsTyped), NamedType(name, tp))
                    }
                  case treeInfo.WildcardStarArg(_) =>
                    typedArg0(arg) match {
                      case (argTyped, tp) => (argTyped, RepeatedType(tp))
                    }
                  case _ =>
                    typedArg0(arg)
                }
              }.unzip
            }
            if (context.reporter.hasErrors)
              setError(tree)
            else {
              inferMethodAlternative(fun, undetparams, argTpes, pt)
              doTypedApply(tree, adaptAfterOverloadResolution(fun, mode.forFunMode, WildcardType), args1, mode, pt)
            }
          }
          handleOverloaded
        case _ if isPolymorphicSignature(fun.symbol) =>
          // Mimic's Java's treatment of polymorphic signatures as described in
          // https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.3
          //
          // One can think of these methods as being infinitely overloaded. We create
          // a fictitious new cloned method symbol for each call site that takes on a signature
          // governed by a) the argument types and b) the expected type
          val args1 = typedArgs(args, forArgMode(fun, mode))
          val pts = args1.map(_.tpe.deconst)
          val clone = fun.symbol.cloneSymbol
          val cloneParams = pts map (pt => clone.newValueParameter(currentUnit.freshTermName()).setInfo(pt))
          val resultType = if (isFullyDefined(pt)) pt else ObjectTpe
          clone.modifyInfo(mt => copyMethodType(mt, cloneParams, resultType))
          val fun1 = fun.setSymbol(clone).setType(clone.info)
          doTypedApply(tree, fun1, args1, mode, resultType).setType(resultType)
        case mt @ MethodType(params, _) =>
          val paramTypes = mt.paramTypes
          // repeat vararg as often as needed, remove by-name
          val formals = formalTypes(paramTypes, argslen)
          /* Try packing all arguments into a Tuple and apply `fun`
           * to that. This is the last thing which is tried (after
           * default arguments)
           */
          def tryTupleApply: Tree = {
            if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) {
              val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args)))
              // expected one argument, but got 0 or >1 ==> try applying to tuple
              // the inner "doTypedApply" does "extractUndetparams" => restore when it fails
              val savedUndetparams = context.undetparams
              silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) map { t =>
                // Depending on user options, may warn or error here if
                // a Unit or tuple was inserted.
                val keepTree = (
                     !mode.typingExprNotFun // why? introduced in 4e488a60, doc welcome
                  || t.symbol == null       // ditto
                  || checkValidAdaptation(t, args)
                )
                if (keepTree) t else EmptyTree
              } orElse { _ => context.undetparams = savedUndetparams ; EmptyTree }
            }
            else EmptyTree
          }
          /* Treats an application which uses named or default arguments.
           * Also works if names + a vararg used: when names are used, the vararg
           * parameter has to be specified exactly once. Note that combining varargs
           * and defaults is ruled out by typedDefDef.
           */
          def tryNamesDefaults: Tree = {
            val lencmp = compareLengths(args, formals)
            def checkNotMacro() = {
              if (treeInfo.isMacroApplication(fun))
                tryTupleApply orElse duplErrorTree(NamedAndDefaultArgumentsNotSupportedForMacros(tree, fun))
            }
            if (mt.isErroneous) duplErrTree
            else if (mode.inPatternMode) {
              // #2064
              duplErrorTree(WrongNumberOfArgsError(tree, fun))
            } else if (lencmp > 0) {
              tryTupleApply orElse duplErrorTree {
                val (namelessArgs, argPos) = removeNames(Typer.this)(args, params)
                TooManyArgsNamesDefaultsError(tree, fun, formals, args, namelessArgs, argPos)
              }
            } else if (lencmp == 0) {
              // we don't need defaults. names were used, so this application is transformed
              // into a block (@see transformNamedApplication in NamesDefaults)
              val (namelessArgs, argPos) = removeNames(Typer.this)(args, params)
              if (namelessArgs exists (_.isErroneous)) {
                duplErrTree
              } else if (!allArgsArePositional(argPos) && !sameLength(formals, params))
                // !allArgsArePositional indicates that named arguments are used to re-order arguments
                duplErrorTree(MultipleVarargError(tree))
              else if (allArgsArePositional(argPos) && !isNamedApplyBlock(fun)) {
                // if there's no re-ordering, and fun is not transformed, no need to transform
                // more than an optimization, e.g. important in "synchronized { x = update-x }"
                checkNotMacro()
                doTypedApply(tree, fun, namelessArgs, mode, pt)
              } else {
                checkNotMacro()
                transformNamedApplication(Typer.this, mode, pt)(
                  treeCopy.Apply(tree, fun, namelessArgs), argPos)
              }
            } else {
              // defaults are needed. they are added to the argument list in named style as
              // calls to the default getters. Example:
              //  foo[Int](a)()  ==>  foo[Int](a)(b = foo$qual.foo$default$2[Int](a))
              // SI-8111 transformNamedApplication eagerly shuffles around the application to preserve
              //         evaluation order. During this process, it calls `changeOwner` on symbols that
              //         are transplanted underneath synthetic temporary vals.
              //
              //         Here, we keep track of the symbols owned by `context.owner` to enable us to
              //         rollback, so that we don't end up with "orphaned" symbols.
              //
              //         TODO: Find a better way!
              //
              //         Note that duplicating trees would not be enough to fix this problem, we would also need to
              //         clone local symbols in the duplicated tree to truly isolate things (in the spirit of BodyDuplicator),
              //         or, better yet, disentangle the logic in `transformNamedApplication` so that we could
              //         determine whether names/defaults is viable *before* transforming trees.
              def ownerOf(sym: Symbol) = if (sym == null || sym == NoSymbol) NoSymbol else sym.owner
              val symsOwnedByContextOwner = tree.collect {
                case t @ (_: DefTree | _: Function) if ownerOf(t.symbol) == context.owner => t.symbol
              }
              def rollbackNamesDefaultsOwnerChanges() {
                symsOwnedByContextOwner foreach (_.owner = context.owner)
              }
              val fun1 = transformNamedApplication(Typer.this, mode, pt)(fun, x => x)
              if (fun1.isErroneous) duplErrTree
              else {
                assert(isNamedApplyBlock(fun1), fun1)
                val NamedApplyInfo(qual, targs, previousArgss, _) = context.namedApplyBlockInfo.get._2
                val blockIsEmpty = fun1 match {
                  case Block(Nil, _) =>
                    // if the block does not have any ValDef we can remove it. Note that the call to
                    // "transformNamedApplication" is always needed in order to obtain targs/previousArgss
                    context.namedApplyBlockInfo = None
                    true
                  case _ => false
                }
                val (allArgs, missing) = addDefaults(args, qual, targs, previousArgss, params, fun.pos.focus, context)
                val funSym = fun1 match { case Block(_, expr) => expr.symbol }
                val lencmp2 = compareLengths(allArgs, formals)
                if (!sameLength(allArgs, args) && callToCompanionConstr(context, funSym)) {
                  duplErrorTree(ModuleUsingCompanionClassDefaultArgsError(tree))
                } else if (lencmp2 > 0) {
                  removeNames(Typer.this)(allArgs, params) // #3818
                  duplErrTree
                } else if (lencmp2 == 0) {
                  // useful when a default doesn't match parameter type, e.g. def f[T](x:T="a"); f[Int]()
                  checkNotMacro()
                  context.diagUsedDefaults = true
                  doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt)
                } else {
                  rollbackNamesDefaultsOwnerChanges()
                  tryTupleApply orElse duplErrorTree(NotEnoughArgsError(tree, fun, missing))
                }
              }
            }
          }
          if (!sameLength(formals, args) ||   // wrong nb of arguments
              (args exists isNamedArg) ||     // uses a named argument
              isNamedApplyBlock(fun)) {       // fun was transformed to a named apply block =>
                                              // integrate this application into the block
            if (dyna.isApplyDynamicNamed(fun) && isDynamicRewrite(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt)
            else tryNamesDefaults
          } else {
            val tparams = context.extractUndetparams()
            if (tparams.isEmpty) { // all type params are defined
              def handleMonomorphicCall: Tree = {
                // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
                // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
                // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
                // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
                //  casting breaks SI-6145,
                //  not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
                def noExpectedType = !phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol)
                val args1 = (
                  if (noExpectedType)
                    typedArgs(args, forArgMode(fun, mode))
                  else
                    typedArgsForFormals(args, paramTypes, forArgMode(fun, mode))
                )
                // instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
                // val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
                // precise(foo) : foo.type => foo.type
                val restpe = mt.resultType(mapList(args1)(arg => gen stableTypeFor arg orElse arg.tpe))
                def ifPatternSkipFormals(tp: Type) = tp match {
                  case MethodType(_, rtp) if (mode.inPatternMode) => rtp
                  case _ => tp
                }
                /*
                 * This is translating uses of List() into Nil.  This is less
                 *  than ideal from a consistency standpoint, but it shouldn't be
                 *  altered without due caution.
                 *  ... this also causes bootstrapping cycles if List_apply is
                 *  forced during kind-arity checking, so it is guarded by additional
                 *  tests to ensure we're sufficiently far along.
                 */
                if (args.isEmpty && canTranslateEmptyListToNil && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
                  atPos(tree.pos)(gen.mkNil setType restpe)
                else
                  constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
              }
              checkDead.updateExpr(fun) {
                handleMonomorphicCall
              }
            } else if (needsInstantiation(tparams, formals, args)) {
              //println("needs inst "+fun+" "+tparams+"/"+(tparams map (_.info)))
              inferExprInstance(fun, tparams)
              doTypedApply(tree, fun, args, mode, pt)
            } else {
              def handlePolymorphicCall = {
                assert(!mode.inPatternMode, mode) // this case cannot arise for patterns
                val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
                val strictTargs = map2(lenientTargs, tparams)((targ, tparam) =>
                  if (targ == WildcardType) tparam.tpeHK else targ)
                var remainingParams = paramTypes
                def typedArgToPoly(arg: Tree, formal: Type): Tree = { //TR TODO: cleanup
                  val lenientPt = formal.instantiateTypeParams(tparams, lenientTargs)
                  val newmode =
                    if (isByNameParamType(remainingParams.head)) POLYmode
                    else POLYmode | BYVALmode
                  if (remainingParams.tail.nonEmpty) remainingParams = remainingParams.tail
                  val arg1 = typedArg(arg, forArgMode(fun, mode), newmode, lenientPt)
                  val argtparams = context.extractUndetparams()
                  if (!argtparams.isEmpty) {
                    val strictPt = formal.instantiateTypeParams(tparams, strictTargs)
                    inferArgumentInstance(arg1, argtparams, strictPt, lenientPt)
                    arg1
                  } else arg1
                }
                val args1 = map2(args, formals)(typedArgToPoly)
                if (args1 exists { _.isErrorTyped }) duplErrTree
                else {
                  debuglog("infer method inst " + fun + ", tparams = " + tparams + ", args = " + args1.map(_.tpe) + ", pt = " + pt + ", lobounds = " + tparams.map(_.tpe.bounds.lo) + ", parambounds = " + tparams.map(_.info)) //debug
                  // define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun"
                  // returns those undetparams which have not been instantiated.
                  val undetparams = inferMethodInstance(fun, tparams, args1, pt)
                  try doTypedApply(tree, fun, args1, mode, pt)
                  finally context.undetparams = undetparams
                }
              }
              handlePolymorphicCall
            }
          }
        case SingleType(_, _) =>
          doTypedApply(tree, fun setType fun.tpe.widen, args, mode, pt)
        case ErrorType =>
          if (!tree.isErrorTyped) setError(tree) else tree
          // @H change to setError(treeCopy.Apply(tree, fun, args))
        // SI-7877 `isTerm` needed to exclude `class T[A] { def unapply(..) }; ... case T[X] =>`
        case HasUnapply(unapply) if mode.inPatternMode && fun.isTerm =>
          doTypedUnapply(tree, fun0, fun, args, mode, pt)
        case _ =>
          if (treeInfo.isMacroApplication(tree)) duplErrorTree(MacroTooManyArgumentListsError(tree, fun.symbol))
          else duplErrorTree(ApplyWithoutArgsError(tree, fun))
      }
    }
    /** Convert an annotation constructor call into an AnnotationInfo.
     *
     *  For classfile (Java) annotations, arguments must be constants or
     *  nested annotations and are collected as name/value pairs; for Scala
     *  annotations, the constructor call itself is type checked and its
     *  argument trees are stored. Errors are accumulated in `pending` and
     *  issued at the end, yielding an ErroneousAnnotation.
     *
     *  @param ann  the annotation tree, e.g. the `new ann(args)` constructor call
     *  @param mode the type-checking mode for the constructor call
     */
    def typedAnnotation(ann: Tree, mode: Mode = EXPRmode): AnnotationInfo = {
      var hasError: Boolean = false
      val pending = ListBuffer[AbsTypeError]()
      def ErroneousAnnotation = new ErroneousAnnotation().setOriginal(ann)
      def finish(res: AnnotationInfo): AnnotationInfo = {
        if (hasError) {
          pending.foreach(ErrorUtils.issueTypeError)
          ErroneousAnnotation
        }
        else res
      }
      def reportAnnotationError(err: AbsTypeError) = {
        pending += err
        hasError = true
        ErroneousAnnotation
      }
      /* Calling constfold right here is necessary because some trees (negated
       * floats and literals in particular) are not yet folded.
       */
      def tryConst(tr: Tree, pt: Type): Option[LiteralAnnotArg] = {
        // The typed tree may be relevantly different than the tree `tr`,
        // e.g. it may have encountered an implicit conversion.
        val ttree = typed(constfold(tr), pt)
        val const: Constant = ttree match {
          case l @ Literal(c) if !l.isErroneous => c
          case tree => tree.tpe match {
            case ConstantType(c) => c
            case tpe => null
          }
        }
        if (const == null) {
          reportAnnotationError(AnnotationNotAConstantError(ttree)); None
        } else if (const.value == null) {
          reportAnnotationError(AnnotationArgNullError(tr)); None
        } else
          Some(LiteralAnnotArg(const))
      }
      /* Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails,
       * an error message is reported and None is returned.
       */
      def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match {
        case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) =>
          reportAnnotationError(ArrayConstantsError(tree)); None
        case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
          val annInfo = typedAnnotation(ann, mode)
          val annType = annInfo.tpe
          if (!annType.typeSymbol.isSubClass(pt.typeSymbol))
            reportAnnotationError(AnnotationTypeMismatchError(tpt, annType, annType))
          else if (!annType.typeSymbol.isSubClass(ClassfileAnnotationClass))
            reportAnnotationError(NestedAnnotationError(ann, annType))
          if (annInfo.atp.isErroneous) { hasError = true; None }
          else Some(NestedAnnotArg(annInfo))
        // use of Array.apply[T: ClassTag](xs: T*): Array[T]
        // and    Array.apply(x: Int, xs: Int*): Array[Int]  (and similar)
        case Apply(fun, args) =>
          val typedFun = typed(fun, mode.forFunMode)
          if (typedFun.symbol.owner == ArrayModule.moduleClass && typedFun.symbol.name == nme.apply)
            pt match {
              case TypeRef(_, ArrayClass, targ :: _) =>
                trees2ConstArg(args, targ)
              case _ =>
                // For classfile annotations, pt can only be T:
                //   BT = Int, .., String, Class[_], JavaAnnotClass
                //   T = BT | Array[BT]
                // So an array literal as argument can only be valid if pt is Array[_]
                reportAnnotationError(ArrayConstantsTypeMismatchError(tree, pt))
                None
            }
          else tryConst(tree, pt)
        case Typed(t, _) =>
          tree2ConstArg(t, pt)
        case tree =>
          tryConst(tree, pt)
      }
      def trees2ConstArg(trees: List[Tree], pt: Type): Option[ArrayAnnotArg] = {
        val args = trees.map(tree2ConstArg(_, pt))
        if (args.exists(_.isEmpty)) None
        else Some(ArrayAnnotArg(args.flatten.toArray))
      }
      // begin typedAnnotation
      val treeInfo.Applied(fun0, targs, argss) = ann
      if (fun0.isErroneous)
        return finish(ErroneousAnnotation)
      val typedFun0 = typed(fun0, mode.forFunMode)
      val typedFunPart = (
        // If there are dummy type arguments in typeFun part, it suggests we
        // must type the actual constructor call, not only the select. The value
        // arguments are how the type arguments will be inferred.
        if (targs.isEmpty && typedFun0.exists(t => t.tpe != null && isDummyAppliedType(t.tpe)))
          logResult(s"Retyped $typedFun0 to find type args")(typed(argss.foldLeft(fun0)(Apply(_, _))))
        else
          typedFun0
      )
      val treeInfo.Applied(typedFun @ Select(New(annTpt), _), _, _) = typedFunPart
      val annType = annTpt.tpe
      finish(
        if (typedFun.isErroneous || annType == null)
          ErroneousAnnotation
        else if (annType.typeSymbol isNonBottomSubClass ClassfileAnnotationClass) {
          // annotation to be saved as java classfile annotation
          val isJava = typedFun.symbol.owner.isJavaDefined
          if (argss.length > 1) {
            reportAnnotationError(MultipleArgumentListForAnnotationError(ann))
          }
          else {
            val annScopeJava =
              if (isJava) annType.decls.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
              else EmptyScope // annScopeJava is only used if isJava
            val names = mutable.Set[Symbol]()
            names ++= (if (isJava) annScopeJava.iterator
                       else typedFun.tpe.params.iterator)
            def hasValue = names exists (_.name == nme.value)
            // a single positional argument is treated as `value = arg` when the annotation has a `value` member
            val args = argss match {
              case (arg :: Nil) :: Nil if !isNamedArg(arg) && hasValue => gen.mkNamedArg(nme.value, arg) :: Nil
              case args :: Nil => args
            }
            val nvPairs = args map {
              case arg @ AssignOrNamedArg(Ident(name), rhs) =>
                val sym = if (isJava) annScopeJava.lookup(name)
                          else findSymbol(typedFun.tpe.params)(_.name == name)
                if (sym == NoSymbol) {
                  reportAnnotationError(UnknownAnnotationNameError(arg, name))
                  (nme.ERROR, None)
                } else if (!names.contains(sym)) {
                  reportAnnotationError(DuplicateValueAnnotationError(arg, name))
                  (nme.ERROR, None)
                } else {
                  names -= sym
                  if (isJava) sym.cookJavaRawInfo() // #3429
                  val annArg = tree2ConstArg(rhs, sym.tpe.resultType)
                  (sym.name, annArg)
                }
              case arg =>
                reportAnnotationError(ClassfileAnnotationsAsNamedArgsError(arg))
                (nme.ERROR, None)
            }
            for (sym <- names) {
              // make sure the flags are up to date before erroring (jvm/t3415 fails otherwise)
              sym.initialize
              if (!sym.hasAnnotation(AnnotationDefaultAttr) && !sym.hasDefault)
                reportAnnotationError(AnnotationMissingArgError(ann, annType, sym))
            }
            if (hasError) ErroneousAnnotation
            else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos))
          }
        }
        else {
          val typedAnn: Tree = {
            // local dummy fixes SI-5544
            val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos)))
            localTyper.typed(ann, mode, annType)
          }
          def annInfo(t: Tree): AnnotationInfo = t match {
            case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
              AnnotationInfo(annType, args, List()).setOriginal(typedAnn).setPos(t.pos)
            case Block(stats, expr) =>
              context.warning(t.pos, "Usage of named or default arguments transformed this annotation\\n"+
                                "constructor call into a block. The corresponding AnnotationInfo\\n"+
                                "will contain references to local values and default getters instead\\n"+
                                "of the actual argument trees")
              annInfo(expr)
            case Apply(fun, args) =>
              context.warning(t.pos, "Implementation limitation: multiple argument lists on annotations are\\n"+
                                     "currently not supported; ignoring arguments "+ args)
              annInfo(fun)
            case _ =>
              reportAnnotationError(UnexpectedTreeAnnotationError(t, typedAnn))
          }
          if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2)
            context.deprecationWarning(ann.pos, DeprecatedAttr, "@deprecated now takes two arguments; see the scaladoc.", "2.11.0")
          if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation
          else annInfo(typedAnn)
        }
      )
    }
    /** Compute an existential type from raw hidden symbols `hidden` and type `tp`,
     *  abstracting over the hidden symbols relative to this typer's context owner.
     */
    def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, context0.owner)
def isReferencedFrom(ctx: Context, sym: Symbol): Boolean = (
ctx.owner.isTerm && (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) || {
var ctx1 = ctx.outer
while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope))
ctx1 = ctx1.outer
(ctx1 != NoContext) && isReferencedFrom(ctx1, sym)
}
)
def isCapturedExistential(sym: Symbol) = (
(sym hasAllFlags EXISTENTIAL | CAPTURED) && {
val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null
try !isReferencedFrom(context, sym)
finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start)
}
)
/** Existentially abstract over every captured existential type symbol
 *  occurring anywhere in `tpe` (see `isCapturedExistential`).
 */
def packCaptured(tpe: Type): Type = {
val captured = mutable.Set[Symbol]()
// collect the type symbols of all sub-types that are captured existentials
for (tp <- tpe)
if (isCapturedExistential(tp.typeSymbol))
captured += tp.typeSymbol
existentialAbstraction(captured.toList, tpe)
}
/** Convert local symbols and skolems in `tree.tpe` to existentials:
 *  computes the type of `tree` as seen from outside `owner` by
 *  existentially abstracting over all symbols that are local to `owner`
 *  (and hence not visible to external observers of the type).
 *  Local alias types are dealiased first so their local references surface.
 */
def packedType(tree: Tree, owner: Symbol): Type = {
// does `tree` define `sym`, either as an existential skolem unpacked at
// this tree, or as an ordinary definition?
def defines(tree: Tree, sym: Symbol) = (
sym.isExistentialSkolem && sym.unpackLocation == tree
|| tree.isDef && tree.symbol == sym
)
// parameters of `owner` remain visible in the packed type (except value
// params of anonymous functions)
def isVisibleParameter(sym: Symbol) = (
sym.isParameter
&& (sym.owner == owner)
&& (sym.isType || !owner.isAnonymousFunction)
)
// is `sym` (transitively) owned by `owner`, without crossing a package?
def containsDef(owner: Symbol, sym: Symbol): Boolean =
(!sym.hasPackageFlag) && {
var o = sym.owner
while (o != owner && o != NoSymbol && !o.hasPackageFlag) o = o.owner
o == owner && !isVisibleParameter(sym)
}
var localSyms = immutable.Set[Symbol]()
var boundSyms = immutable.Set[Symbol]()
// is `sym` local to `owner` (or, when owner is NoSymbol, defined in `tree`)?
def isLocal(sym: Symbol): Boolean =
if (sym == NoSymbol || sym.isRefinementClass || sym.isLocalDummy) false
else if (owner == NoSymbol) tree exists (defines(_, sym))
else containsDef(owner, sym) || isRawParameter(sym) || isCapturedExistential(sym)
// does `tp` mention any local symbol (as type or term)?
def containsLocal(tp: Type): Boolean =
tp exists (t => isLocal(t.typeSymbol) || isLocal(t.termSymbol))
// dealias alias types that mention locals, so the locals can be packed;
// also reports volatile type selections in prefixes (except synthetic
// pattern-matcher prefixes after typer, see pos/t7459c.scala)
val dealiasLocals = new TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType && containsLocal(tp) && (tp.dealias ne tp)) apply(tp.dealias)
else {
if (pre.isVolatile) pre match {
case SingleType(_, sym) if sym.isSynthetic && isPastTyper =>
debuglog(s"ignoring volatility of prefix in pattern matcher generated inferred type: $tp") // See pos/t7459c.scala
case _ =>
InferTypeWithVolatileTypeSelectionError(tree, pre)
}
mapOver(tp)
}
case _ =>
mapOver(tp)
}
}
// add all local symbols of `tp` to `localSyms`, recursing into their
// existential bounds; symbols already bound by an enclosing existential
// are recorded in `boundSyms` and skipped
// TODO: expand higher-kinded types into individual copies for each instance.
def addLocals(tp: Type) {
val remainingSyms = new ListBuffer[Symbol]
def addIfLocal(sym: Symbol, tp: Type) {
if (isLocal(sym) && !localSyms(sym) && !boundSyms(sym)) {
if (sym.typeParams.isEmpty) {
localSyms += sym
remainingSyms += sym
} else {
// cannot existentially abstract over a parameterized type
AbstractExistentiallyOverParamerizedTpeError(tree, tp)
}
}
}
for (t <- tp) {
t match {
case ExistentialType(tparams, _) =>
boundSyms ++= tparams
case AnnotatedType(annots, _) =>
for (annot <- annots; arg <- annot.args) {
arg match {
case Ident(_) =>
// Check the symbol of an Ident, unless the
// Ident's type is already over an existential.
// (If the type is already over an existential,
// then remap the type, not the core symbol.)
if (!arg.tpe.typeSymbol.hasFlag(EXISTENTIAL))
addIfLocal(arg.symbol, arg.tpe)
case _ => ()
}
}
case _ =>
}
addIfLocal(t.termSymbol, t)
addIfLocal(t.typeSymbol, t)
}
for (sym <- remainingSyms) addLocals(sym.existentialBound)
}
val dealiasedType = dealiasLocals(tree.tpe)
addLocals(dealiasedType)
packSymbols(localSyms.toList, dealiasedType)
}
/** Type a `classOf[T]` reference for type tree `tpt`: checks that `tpt`
 *  denotes a class type and, if so, generates the `classOf` literal at
 *  `tree`'s position. With `noGen = true`, a failed class-type check
 *  returns `tpt` itself instead of generating.
 */
def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) =
if (!checkClassType(tpt) && noGen) tpt
else atPos(tree.pos)(gen.mkClassOf(tpt.tpe))
/** Type an existential type tree `T forSome { whereClauses }`: enters the
 *  where-clause symbols (flagging freshly-entered ones EXISTENTIAL), types
 *  the clauses and the underlying type, rejects abstraction over volatile
 *  types, and produces a TypeTree of the resulting existential type with
 *  a reconstructed original (needed for reification).
 */
protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Mode): Tree = {
for (wc <- tree.whereClauses)
if (wc.symbol == NoSymbol) { namer enterSym wc; wc.symbol setFlag EXISTENTIAL }
else context.scope enter wc.symbol
val whereClauses1 = typedStats(tree.whereClauses, context.owner)
// abstracting over a volatile type is unsound; report it
for (vd @ ValDef(_, _, _, _) <- whereClauses1)
if (vd.symbol.tpe.isVolatile)
AbstractionFromVolatileTypeError(vd)
val tpt1 = typedType(tree.tpt, mode)
existentialTransform(whereClauses1 map (_.symbol), tpt1.tpe)((tparams, tp) => {
// keep a symbol-ful original tree so reification can recover the source form
val original = tpt1 match {
case tpt : TypeTree => atPos(tree.pos)(ExistentialTypeTree(tpt.original, tree.whereClauses))
case _ => {
debuglog(s"cannot reconstruct the original for $tree, because $tpt1 is not a TypeTree")
tree
}
}
TypeTree(newExistentialType(tparams, tp)) setOriginal original
}
)
}
// lifted out of typed1 because it's needed in typedImplicit0
/** Type a type application `fun[args]`, dispatching on the type of `fun`:
 *  resolves overloaded alternatives, widens singleton types, instantiates
 *  polymorphic types (with kind and bounds checking), and rejects type
 *  arguments applied to non-polymorphic functions. Also handles the
 *  special cases `classOf[T]` and checkability of `isInstanceOf[T]`.
 */
protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
case OverloadedType(pre, alts) =>
inferPolyAlternatives(fun, mapList(args)(treeTpe))
// SI-8267 `memberType` can introduce existentials *around* a PolyType/MethodType, see AsSeenFromMap#captureThis.
// If we had selected a non-overloaded symbol, `memberType` would have been called in `makeAccessible`
// and the resulting existential type would have been skolemized in `adapt` *before* we typechecked
// the enclosing type-/ value- application.
//
// However, if the selection is overloaded, we defer calling `memberType` until we can select a single
// alternative here. It is therefore necessary to skolemize the existential here.
//
val fun1 = adaptAfterOverloadResolution(fun, mode.forFunMode | TAPPmode)
val tparams = fun1.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
val args1 = if (sameLength(args, tparams)) {
//@M: in case TypeApply we can't check the kind-arities of the type arguments,
// as we don't know which alternative to choose... here we do
map2Conserve(args, tparams) {
//@M! the polytype denotes the expected kind
(arg, tparam) => typedHigherKindedType(arg, mode, Kind.FromParams(tparam.typeParams))
}
} else // @M: there's probably something wrong when args.length != tparams.length... (triggered by bug #320)
// Martin, I'm using fake trees, because, if you use args or arg.map(typedType),
// inferPolyAlternatives loops... -- I have no idea why :-(
// ...actually this was looping anyway, see bug #278.
return TypedApplyWrongNumberOfTpeParametersError(fun, fun)
// retry with the resolved, adapted alternative
typedTypeApply(tree, mode, fun1, args1)
case SingleType(_, _) =>
// widen a singleton type so the underlying (possibly polymorphic) type is seen
typedTypeApply(tree, mode, fun setType fun.tpe.widen, args)
case PolyType(tparams, restpe) if tparams.nonEmpty =>
if (sameLength(tparams, args)) {
val targs = mapList(args)(treeTpe)
checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "")
if (isPredefClassOf(fun.symbol))
typedClassOf(tree, args.head, noGen = true)
else {
// warn/error on uncheckable isInstanceOf targets while still in typer
if (!isPastTyper && fun.symbol == Any_isInstanceOf && targs.nonEmpty) {
val scrutineeType = fun match {
case Select(qual, _) => qual.tpe
case _ => AnyTpe
}
checkCheckable(tree, targs.head, scrutineeType, inPattern = false)
}
val resultpe = restpe.instantiateTypeParams(tparams, targs)
//@M substitution in instantiateParams needs to be careful!
//@M example: class Foo[a] { def foo[m[x]]: m[a] = error("") } (new Foo[Int]).foo[List] : List[Int]
//@M --> first, m[a] gets changed to m[Int], then m gets substituted for List,
// this must preserve m's type argument, so that we end up with List[Int], and not List[a]
//@M related bug: #1438
//println("instantiating type params "+restpe+" "+tparams+" "+targs+" = "+resultpe)
treeCopy.TypeApply(tree, fun, args) setType resultpe
}
}
else {
TypedApplyWrongNumberOfTpeParametersError(tree, fun)
}
case ErrorType =>
setError(treeCopy.TypeApply(tree, fun, args))
case _ =>
fun match {
// drop the application for an applyDynamic or selectDynamic call since it has been pushed down
case treeInfo.DynamicApplication(_, _) => fun
case _ => TypedApplyDoesNotTakeTpeParametersError(tree, fun)
}
}
/** Support for rewriting failed selections on `scala.Dynamic` receivers
 *  into `selectDynamic` / `applyDynamic` / `applyDynamicNamed` /
 *  `updateDynamic` calls.
 */
object dyna {
import treeInfo.{isApplyDynamicName, DynamicUpdate, DynamicApplicationNamed}
// is `tp` a (non-bottom) subtype of scala.Dynamic?
def acceptsApplyDynamic(tp: Type) = tp.typeSymbol isNonBottomSubClass DynamicClass
/** Returns `Some(t)` if `name` can be selected dynamically on `qual`, `None` if not.
 * `t` specifies the type to be passed to the applyDynamic/selectDynamic call (unless it is NoType)
 * NOTE: currently either returns None or Some(NoType) (scala-virtualized extends this to Some(t) for selections on staged Structs)
 */
def acceptsApplyDynamicWithType(qual: Tree, name: Name): Option[Type] =
// don't selectDynamic selectDynamic, do select dynamic at unknown type,
// in scala-virtualized, we may return a Some(tp) where tp ne NoType
if (!isApplyDynamicName(name) && acceptsApplyDynamic(qual.tpe.widen)) Some(NoType)
else None
// can this assignment's left-hand side be rewritten to updateDynamic?
def isDynamicallyUpdatable(tree: Tree) = tree match {
case DynamicUpdate(qual, name) =>
// if the qualifier is a Dynamic, that's all we need to know
acceptsApplyDynamic(qual.tpe)
case _ => false
}
// is `fun` an applyDynamicNamed call on a Dynamic receiver?
def isApplyDynamicNamed(fun: Tree): Boolean = fun match {
case DynamicApplicationNamed(qual, _) if acceptsApplyDynamic(qual.tpe.widen) => true
case _ => false
// look deeper?
// val treeInfo.Applied(methPart, _, _) = fun
// println("methPart of "+ fun +" is "+ methPart)
// if (methPart ne fun) isApplyDynamicNamed(methPart)
// else false
}
/** Type an `applyDynamicNamed` call: each argument becomes a
 *  `(name, value)` tuple — the empty string for positional arguments.
 */
def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
def argToBinding(arg: Tree): Tree = arg match {
case AssignOrNamedArg(i @ Ident(name), rhs) =>
atPos(i.pos.withEnd(rhs.pos.end)) {
gen.mkTuple(List(atPos(i.pos)(CODE.LIT(name.toString)), rhs))
}
case _ =>
gen.mkTuple(List(CODE.LIT(""), arg))
}
val t = treeCopy.Apply(orig, unmarkDynamicRewrite(fun), args map argToBinding)
wrapErrors(t, _.typed(t, mode, pt))
}
/** Translate selection that does not typecheck according to the normal rules into a selectDynamic/applyDynamic.
 *
 * foo.method("blah") ~~> foo.applyDynamic("method")("blah")
 * foo.method(x = "blah") ~~> foo.applyDynamicNamed("method")(("x", "blah"))
 * foo.varia = 10 ~~> foo.updateDynamic("varia")(10)
 * foo.field ~~> foo.selectDynamic("field")
 * foo.arr(10) = 13 ~~> foo.selectDynamic("arr").update(10, 13)
 *
 * what if we want foo.field == foo.selectDynamic("field") == 1, but `foo.field = 10` == `foo.selectDynamic("field").update(10)` == ()
 * what would the signature for selectDynamic be? (hint: it needs to depend on whether an update call is coming or not)
 *
 * need to distinguish selectDynamic and applyDynamic somehow: the former must return the selected value, the latter must accept an apply or an update
 * - could have only selectDynamic and pass it a boolean whether more is to come,
 * so that it can either return the bare value or something that can handle the apply/update
 * HOWEVER that makes it hard to return unrelated values for the two cases
 * --> selectDynamic's return type is now dependent on the boolean flag whether more is to come
 * - simplest solution: have two method calls
 *
 */
def mkInvoke(context: Context, tree: Tree, qual: Tree, name: Name): Option[Tree] = {
val cxTree = context.enclosingNonImportContext.tree // SI-8364
debuglog(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)")
val treeInfo.Applied(treeSelection, _, _) = tree
// is `tree` a compiler-desugared `qual.apply(...)` (possibly via macro expansion)?
def isDesugaredApply = {
val protoQual = macroExpandee(qual) orElse qual
treeSelection match {
case Select(`protoQual`, nme.apply) => true
case _ => false
}
}
acceptsApplyDynamicWithType(qual, name) map { tp =>
// If tp == NoType, pass only explicit type arguments to applyXXX. Not used at all
// here - it is for scala-virtualized, where tp will be passed as an argument (for
// selection on a staged Struct)
def hasNamed(args: List[Tree]): Boolean = args exists (_.isInstanceOf[AssignOrNamedArg])
// not supported: foo.bar(a1,..., an: _*)
def hasStar(args: List[Tree]) = treeInfo.isWildcardStarArgList(args)
def applyOp(args: List[Tree]) = if (hasNamed(args)) nme.applyDynamicNamed else nme.applyDynamic
def matches(t: Tree) = isDesugaredApply || treeInfo.dissectApplied(t).core == treeSelection
/* Note that the trees which arrive here are potentially some distance from
 * the trees of direct interest. `cxTree` is some enclosing expression which
 * may apparently be arbitrarily larger than `tree`; and `tree` itself is
 * too small, having at least in some cases lost its explicit type parameters.
 * This logic is designed to use `tree` to pinpoint the immediately surrounding
 * Apply/TypeApply/Select node, and only then creates the dynamic call.
 * See SI-6731 among others.
 */
def findSelection(t: Tree): Option[(TermName, Tree)] = t match {
case Apply(fn, args) if hasStar(args) => DynamicVarArgUnsupported(tree, applyOp(args)) ; None
case Apply(fn, args) if matches(fn) => Some((applyOp(args), fn))
case Assign(lhs, _) if matches(lhs) => Some((nme.updateDynamic, lhs))
case _ if matches(t) => Some((nme.selectDynamic, t))
case _ => (t.children flatMap findSelection).headOption
}
findSelection(cxTree) match {
case Some((opName, treeInfo.Applied(_, targs, _))) =>
val fun = gen.mkTypeApply(Select(qual, opName), targs)
if (opName == nme.updateDynamic) suppressMacroExpansion(fun) // SI-7617
val nameStringLit = atPos(treeSelection.pos.withStart(treeSelection.pos.point).makeTransparent) {
Literal(Constant(name.decode))
}
markDynamicRewrite(atPos(qual.pos)(Apply(fun, List(nameStringLit))))
case _ =>
setError(tree)
}
}
}
// type `tree` silently; a failure is reported as a DynamicRewriteError
def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = silent(typeTree) orElse (err => DynamicRewriteError(tree, err.head))
}
def typed1(tree: Tree, mode: Mode, pt: Type): Tree = {
// Lookup in the given class using the root mirror.
// Last-ditch symbol lookups used by typedIdent and typedSelect; the
// missingHook gives the root mirror a chance to materialize a symbol
// (only attempted in qualifier mode).
def lookupInOwner(owner: Symbol, name: Name): Symbol =
if (mode.inQualMode) rootMirror.missingHook(owner, name) else NoSymbol
// Lookup in the given qualifier. Used in last-ditch efforts by typedIdent and typedSelect.
def lookupInRoot(name: Name): Symbol = lookupInOwner(rootMirror.RootClass, name)
def lookupInEmpty(name: Name): Symbol = rootMirror.EmptyPackageClass.info member name
// Look `name` up in `qual`'s type; on failure, report NotAMemberError
// (unless the qualifier is already erroneous, to avoid cascading errors).
def lookupInQualifier(qual: Tree, name: Name): Symbol = (
if (name == nme.ERROR || qual.tpe.widen.isErroneous)
NoSymbol
else lookupInOwner(qual.tpe.typeSymbol, name) orElse {
NotAMemberError(tree, qual, name)
NoSymbol
}
)
/** Type an annotated expression or type `arg: @ann`. For a type argument,
 *  the annotation is attached to the type (as an AnnotatedType); for a term,
 *  the result is a `Typed` tree whose type carries the annotation. The
 *  annotation itself is typechecked at most once (guarded by `ann.tpe`).
 */
def typedAnnotated(atd: Annotated): Tree = {
val ann = atd.annot
val arg1 = typed(atd.arg, mode, pt)
/* mode for typing the annotation itself */
val annotMode = (mode &~ TYPEmode) | EXPRmode
def resultingTypeTree(tpe: Type) = {
// we need symbol-ful originals for reification
// hence we go the extra mile to hand-craft this guy
val original = arg1 match {
case tt @ TypeTree() if tt.original != null => Annotated(ann, tt.original)
// this clause is needed to correctly compile stuff like "new C @D" or "@(inline @getter)"
case _ => Annotated(ann, arg1)
}
original setType ann.tpe
TypeTree(tpe) setOriginal original setPos tree.pos.focus
}
if (arg1.isType) {
// make sure the annotation is only typechecked once
if (ann.tpe == null) {
val ainfo = typedAnnotation(ann, annotMode)
val atype = arg1.tpe.withAnnotation(ainfo)
if (ainfo.isErroneous)
// Erroneous annotations were already reported in typedAnnotation
arg1 // simply drop erroneous annotations
else {
ann setType atype
resultingTypeTree(atype)
}
} else {
// the annotation was typechecked before
resultingTypeTree(ann.tpe)
}
}
else {
if (ann.tpe == null) {
val annotInfo = typedAnnotation(ann, annotMode)
ann setType arg1.tpe.withAnnotation(annotInfo)
}
val atype = ann.tpe
// For `f(): @inline/noinline` callsites, add the InlineAnnotatedAttachment. TypeApplys
// are eliminated by erasure, so add it to the underlying function in this case.
def setInlineAttachment(t: Tree, att: InlineAnnotatedAttachment): Unit = t match {
case TypeApply(fun, _) => setInlineAttachment(fun, att)
case _ => t.updateAttachment(att)
}
if (atype.hasAnnotation(definitions.ScalaNoInlineClass)) setInlineAttachment(arg1, NoInlineCallsiteAttachment)
else if (atype.hasAnnotation(definitions.ScalaInlineClass)) setInlineAttachment(arg1, InlineCallsiteAttachment)
Typed(arg1, resultingTypeTree(atype)) setPos tree.pos setType atype
}
}
/** Type a pattern binder `name @ body`. A type-name binder (which must
 *  have an empty body) introduces an alias or abstract type symbol; a
 *  term-name binder introduces a value symbol whose type is implied by
 *  the typed body pattern.
 */
def typedBind(tree: Bind) = {
val name = tree.name
val body = tree.body
name match {
case name: TypeName => assert(body == EmptyTree, context.unit + " typedBind: " + name.debugString + " " + body + " " + body.getClass)
val sym =
if (tree.symbol != NoSymbol) tree.symbol
else {
// with a fully defined expected type, bind as an alias; otherwise abstract
if (isFullyDefined(pt))
context.owner.newAliasType(name, tree.pos) setInfo pt
else
context.owner.newAbstractType(name, tree.pos) setInfo TypeBounds.empty
}
if (name != tpnme.WILDCARD) namer.enterInScope(sym)
else context.scope.enter(sym)
tree setSymbol sym setType sym.tpeHK
case name: TermName =>
val sym =
if (tree.symbol != NoSymbol) tree.symbol
else context.owner.newValue(name, tree.pos)
if (name != nme.WILDCARD) {
// a named binder is illegal inside a pattern alternative
if (context.inPatAlternative)
VariableInPatternAlternativeError(tree)
namer.enterInScope(sym)
}
val body1 = typed(body, mode, pt)
val impliedType = patmat.binderTypeImpliedByPattern(body1, pt, sym) // SI-1503, SI-5204
val symTp =
if (treeInfo.isSequenceValued(body)) seqType(impliedType)
else impliedType
sym setInfo symTp
// have to imperatively set the symbol for this bind to keep it in sync with the symbols used in the body of a case
// when type checking a case we imperatively update the symbols in the body of the case
// those symbols are bound by the symbols in the Binds in the pattern of the case,
// so, if we set the symbols in the case body, but not in the patterns,
// then re-type check the casedef (for a second try in typedApply for example -- SI-1832),
// we are no longer in sync: the body has symbols set that do not appear in the patterns
// since body1 is not necessarily equal to body, we must return a copied tree,
// but we must still mutate the original bind
tree setSymbol sym
treeCopy.Bind(tree, name, body1) setSymbol sym setType body1.tpe
}
}
/** Type an ArrayValue tree (array literal): types the element type tree
 *  and each element against it, then assigns the expected type when fully
 *  defined (pre-erasure) or an array type of the element type otherwise.
 */
def typedArrayValue(tree: ArrayValue) = {
val elemtpt1 = typedType(tree.elemtpt, mode)
val elems1 = tree.elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe))
// see run/t6126 for an example where `pt` does not suffice (tagged types)
val tpe1 = if (isFullyDefined(pt) && !phase.erasedTypes) pt else arrayType(elemtpt1.tpe)
treeCopy.ArrayValue(tree, elemtpt1, elems1) setType tpe1
}
/** Type an assignment `lhs = rhs`. Rewrites assignments to getter calls
 *  into setter calls, assigns directly to variables (or values during
 *  field-assignment phases), rewrites Dynamic updates, and reports an
 *  AssignmentError otherwise.
 */
def typedAssign(lhs: Tree, rhs: Tree): Tree = {
// see SI-7617 for an explanation of why macro expansion is suppressed
def typedLhs(lhs: Tree) = typed(lhs, EXPRmode | LHSmode)
val lhs1 = unsuppressMacroExpansion(typedLhs(suppressMacroExpansion(lhs)))
val varsym = lhs1.symbol
// see #2494 for double error message example
def fail() =
if (lhs1.isErrorTyped) lhs1
else AssignmentError(tree, varsym)
if (varsym == null)
return fail()
// `x.prop = v` where prop may be a getter: retype as `x.prop_=(v)`
if (treeInfo.mayBeVarGetter(varsym)) {
lhs1 match {
case treeInfo.Applied(Select(qual, name), _, _) =>
val sel = Select(qual, name.setterName) setPos lhs.pos
val app = Apply(sel, List(rhs)) setPos tree.pos
return typed(app, mode, pt)
case _ =>
}
}
// if (varsym.isVariable ||
// // setter-rewrite has been done above, so rule out methods here, but, wait a minute, why are we assigning to non-variables after erasure?!
// (phase.erasedTypes && varsym.isValue && !varsym.isMethod)) {
if (varsym.isVariable || varsym.isValue && phase.assignsFields) {
val rhs1 = typedByValueExpr(rhs, lhs1.tpe)
treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitTpe
}
else if(dyna.isDynamicallyUpdatable(lhs1)) {
// Dynamic receiver: lhs1 is already the updateDynamic call; apply it to rhs
val rhs1 = typedByValueExpr(rhs)
val t = atPos(lhs1.pos.withEnd(rhs1.pos.end)) {
Apply(lhs1, List(rhs1))
}
dyna.wrapErrors(t, _.typed1(t, mode, pt))
}
else fail()
}
/** Type an `if` expression. A one-legged `if` gets type Unit; otherwise
 *  both branches are typed against `pt` and the result type is `pt` when
 *  fully defined, the shared packed type when both branches agree, or the
 *  (weak) lub of the branch types with branches adapted as needed.
 */
def typedIf(tree: If): If = {
val cond1 = checkDead(typedByValueExpr(tree.cond, BooleanTpe))
// One-legged ifs don't need a lot of analysis
if (tree.elsep.isEmpty)
return treeCopy.If(tree, cond1, typed(tree.thenp, UnitTpe), tree.elsep) setType UnitTpe
val thenp1 = typed(tree.thenp, pt)
val elsep1 = typed(tree.elsep, pt)
// in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
// in the special (though common) case where the types are equal, it pays to pack before comparing
// especially virtpatmat needs more aggressive unification of skolemized types
// this breaks src/library/scala/collection/immutable/TrieIterator.scala
// annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this)
def samePackedTypes = (
!isPastTyper
&& thenp1.tpe.annotations.isEmpty
&& elsep1.tpe.annotations.isEmpty
&& packedType(thenp1, context.owner) =:= packedType(elsep1, context.owner)
)
def finish(ownType: Type) = treeCopy.If(tree, cond1, thenp1, elsep1) setType ownType
// TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
// @PP: This was doing the samePackedTypes check BEFORE the isFullyDefined check,
// which based on everything I see everywhere else was a bug. I reordered it.
if (isFullyDefined(pt))
finish(pt)
// Important to deconst, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
else thenp1.tpe.deconst :: elsep1.tpe.deconst :: Nil match {
case tp :: _ if samePackedTypes => finish(tp)
case tpes if sameWeakLubAsLub(tpes) => finish(lub(tpes))
case tpes =>
val lub = weakLub(tpes)
treeCopy.If(tree, cond1, adapt(thenp1, mode, lub), adapt(elsep1, mode, lub)) setType lub
}
}
// When there's a suitable __match in scope, virtualize the pattern match
// otherwise, type the Match and leave it until phase `patmat` (immediately after typer)
// empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it
/** Type a `match` expression. An empty-selector match (`{ case ... }`) is
 *  turned into a synthetic PartialFunction when `pt` is PartialFunction,
 *  or into a Function of the arity demanded by `pt` otherwise; a regular
 *  match is typed and (when applicable) virtualized.
 */
def typedVirtualizedMatch(tree: Match): Tree = {
val selector = tree.selector
val cases = tree.cases
if (selector == EmptyTree) {
if (pt.typeSymbol == PartialFunctionClass)
synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, paramSynthetic = true, tree, mode, pt)
else {
val arity = functionArityFromType(pt) match { case -1 => 1 case arity => arity } // SI-8429: consider sam and function type equally in determining function arity
// synthesize fresh parameters x0$, x1$, ... and a (tupled) selector for them
val params = for (i <- List.range(0, arity)) yield
atPos(tree.pos.focusStart) {
ValDef(Modifiers(PARAM | SYNTHETIC),
unit.freshTermName("x" + i + "$"), TypeTree(), EmptyTree)
}
val ids = for (p <- params) yield Ident(p.name)
val selector1 = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
// SI-8120 If we don't duplicate the cases, the original Match node will share trees with ones that
// receive symbols owned by this function. However if, after a silent mode session, we discard
// this Function and try a different approach (e.g. applying a view to the receiver) we end up
// with orphaned symbols which blows up far down the pipeline (or can be detected with -Ycheck:typer).
val body = treeCopy.Match(tree, selector1, (cases map duplicateAndKeepPositions).asInstanceOf[List[CaseDef]])
typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
}
} else
virtualizedMatch(typedMatch(selector, cases, mode, pt, tree), mode, pt)
}
/** Type a `return` expression: rejects returns outside a method (or from
 *  a constructor / local-class constructor context), requires the
 *  enclosing method's result type to be known, types the returned
 *  expression against it, and gives the Return tree type Nothing (via
 *  the typer plugin hook).
 */
def typedReturn(tree: Return) = {
val expr = tree.expr
val enclMethod = context.enclMethod
if (enclMethod == NoContext ||
enclMethod.owner.isConstructor ||
context.enclClass.enclMethod == enclMethod // i.e., we are in a constructor of a local class
) {
ReturnOutsideOfDefError(tree)
} else {
val DefDef(_, name, _, _, restpt, _) = enclMethod.tree
if (restpt.tpe eq null) {
// method result type not yet known (e.g. inferred); cannot type the return
ReturnWithoutTypeError(tree, enclMethod.owner)
}
else {
val expr1 = context withinReturnExpr typedByValueExpr(expr, restpt.tpe)
// Warn about returning a value if no value can be returned.
if (restpt.tpe.typeSymbol == UnitClass) {
// The typing in expr1 says expr is Unit (it has already been coerced if
// it is non-Unit) so we have to retype it. Fortunately it won't come up much
// unless the warning is legitimate.
if (typed(expr).tpe.typeSymbol != UnitClass)
context.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
}
val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
val tp = pluginsTypedReturn(NothingTpe, this, res, restpt.tpe)
res.setType(tp)
}
}
}
/** Type a `new T` expression: types the (dealiased) type constructor,
 *  introduces undetermined type parameters for a parameterized class
 *  without explicit type args, and checks that the class is concrete,
 *  not a primitive value class, and that the instantiated type conforms
 *  to the class's self type.
 */
def typedNew(tree: New) = {
val tpt = tree.tpt
val tpt1 = {
// This way typedNew always returns a dealiased type. This used to happen by accident
// for instantiations without type arguments due to ad hoc code in typedTypeConstructor,
// and annotations depended on it (to the extent that they worked, which they did
// not when given a parameterized type alias which dealiased to an annotation.)
// typedTypeConstructor dealiases nothing now, but it makes sense for a "new" to always be
// given a dealiased type.
val tpt0 = typedTypeConstructor(tpt) modifyType (_.dealias)
if (checkStablePrefixClassType(tpt0))
if (tpt0.hasSymbolField && !tpt0.symbol.typeParams.isEmpty) {
// no explicit type args: record fresh undetermined type params for inference
context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
notifyUndetparamsAdded(context.undetparams)
TypeTree().setOriginal(tpt0)
.setType(appliedType(tpt0.tpe, context.undetparams map (_.tpeHK))) // @PP: tpeHK! #3343, #4018, #4347.
} else tpt0
else tpt0
}
/* If current tree <tree> appears in <val x(: T)? = <tree>>
 * return `tp with x.type' else return `tp`.
 */
def narrowRhs(tp: Type) = { val sym = context.tree.symbol
context.tree match {
case ValDef(mods, _, _, Apply(Select(`tree`, _), _)) if !mods.isMutable && sym != null && sym != NoSymbol =>
val sym1 =
if (sym.owner.isClass && sym.getterIn(sym.owner) != NoSymbol) sym.getterIn(sym.owner)
else sym
val pre = if (sym1.owner.isClass) sym1.owner.thisType else NoPrefix
intersectionType(List(tp, singleType(pre, sym1)))
case _ => tp
}}
val tp = tpt1.tpe
val sym = tp.typeSymbol.initialize
if (sym.isAbstractType || sym.hasAbstractFlag)
IsAbstractError(tree, sym)
else if (isPrimitiveValueClass(sym)) {
// primitive value classes have no constructors
NotAMemberError(tpt, TypeTree(tp), nme.CONSTRUCTOR)
setError(tpt)
}
else if (!( tp == sym.typeOfThis // when there's no explicit self type -- with (#3612) or without self variable
// sym.thisSym.tpe == tp.typeOfThis (except for objects)
|| narrowRhs(tp) <:< tp.typeOfThis
|| phase.erasedTypes
)) {
DoesNotConformToSelfTypeError(tree, sym, tp.typeOfThis)
} else
treeCopy.New(tree, tpt1).setType(tp)
}
/** A FunctionN type of the given arity whose parameter types and result
 *  type are all WildcardType.
 */
def functionTypeWildcard(arity: Int): Type = {
val paramTypes = List.tabulate(arity)(_ => WildcardType)
functionType(paramTypes, WildcardType)
}
/** Report MaxFunctionArityError at `tree` if `tp` is NoType (the sentinel
 *  for an unrepresentable function arity); returns `tp` unchanged either way.
 */
def checkArity(tree: Tree)(tp: Type): tp.type = tp match {
case NoType => MaxFunctionArityError(tree); tp
case _ => tp
}
/** Eta expand an expression like `m _`, where `m` denotes a method or a by-name argument
 *
 * The spec says:
 * The expression `$e$ _` is well-formed if $e$ is of method type or if $e$ is a call-by-name parameter.
 * (1) If $e$ is a method with parameters, `$e$ _` represents $e$ converted to a function type
 * by [eta expansion](#eta-expansion).
 * (2) If $e$ is a parameterless method or call-by-name parameter of type `=>$T$`, `$e$ _` represents
 * the function of type `() => $T$`, which evaluates $e$ when it is applied to the empty parameterlist `()`.
 */
def typedEta(methodValue: Tree, original: Tree): Tree = methodValue.tpe match {
case tp@(MethodType(_, _) | PolyType(_, MethodType(_, _))) => // (1)
val formals = tp.params
// if the expected type already accepts the method value (function/SAM of
// matching arity), keep it; otherwise adapt to a wildcard function type
if (isFunctionType(pt) || samMatchesFunctionBasedOnArity(samOf(pt), formals)) methodValue
else adapt(methodValue, mode, checkArity(methodValue)(functionTypeWildcard(formals.length)), original)
case TypeRef(_, ByNameParamClass, _) | NullaryMethodType(_) => // (2)
val pos = methodValue.pos
// must create it here to change owner (normally done by typed's typedFunction)
val funSym = context.owner.newAnonymousFunctionValue(pos)
new ChangeOwnerTraverser(context.owner, funSym) traverse methodValue
typed(Function(List(), methodValue) setSymbol funSym setPos pos, mode, pt)
case ErrorType =>
methodValue
case _ =>
UnderscoreEtaError(methodValue)
}
/** Speculatively type `args` in a silent, retyping context: returns the
 *  typed arguments on success, None if errors were reported or a
 *  TypeError escaped. CyclicReference is rethrown rather than swallowed.
 */
def tryTypedArgs(args: List[Tree], mode: Mode): Option[List[Tree]] = {
val c = context.makeSilent(reportAmbiguousErrors = false)
c.retyping = true
try {
val res = newTyper(c).typedArgs(args, mode)
if (c.reporter.hasErrors) None else Some(res)
} catch {
case ex: CyclicReference =>
throw ex
case te: TypeError =>
// @H some of typer errors can still leak,
// for instance in continuations
None
}
}
/* Try to apply function to arguments; if it does not work, try to convert Java raw to existentials, or try to
 * insert an implicit conversion.
 */
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
val start = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
// fallback on failure: retry with raw types cooked to existentials, then
// with the qualifier adapted (implicit conversion) to fit the arguments;
// otherwise issue the collected errors and warnings
def onError(typeErrors: Seq[AbsTypeError], warnings: Seq[(Position, String)]): Tree = {
if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start)
// If the problem is with raw types, convert to existentials and try again.
// See #4712 for a case where this situation arises,
if ((fun.symbol ne null) && fun.symbol.isJavaDefined) {
val newtpe = rawToExistential(fun.tpe)
if (fun.tpe ne newtpe) {
// println("late cooking: "+fun+":"+fun.tpe) // DEBUG
return tryTypedApply(fun setType newtpe, args)
}
}
// trees whose positions may legitimately carry the reported errors
def treesInResult(tree: Tree): List[Tree] = tree :: (tree match {
case Block(_, r) => treesInResult(r)
case Match(_, cases) => cases
case CaseDef(_, _, r) => treesInResult(r)
case Annotated(_, r) => treesInResult(r)
case If(_, t, e) => treesInResult(t) ++ treesInResult(e)
case Try(b, catches, _) => treesInResult(b) ++ catches
case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r) // a method value
case Select(qual, name) => treesInResult(qual)
case Apply(fun, args) => treesInResult(fun) ++ args.flatMap(treesInResult)
case TypeApply(fun, args) => treesInResult(fun) ++ args.flatMap(treesInResult)
case _ => Nil
})
def errorInResult(tree: Tree) = treesInResult(tree) exists (err => typeErrors.exists(_.errPos == err.pos))
// only retry when every error has a position and at least one lies in the result
val retry = (typeErrors.forall(_.errPos != null)) && (fun :: tree :: args exists errorInResult)
typingStack.printTyping({
val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ")
if (retry) "second try: " + funStr
else "no second try: " + funStr + " because error not in result: " + typeErrors.head.errPos+"!="+tree.pos
})
if (retry) {
val Select(qual, name) = fun
tryTypedArgs(args, forArgMode(fun, mode)) match {
case Some(args1) if !args1.exists(arg => arg.exists(_.isErroneous)) =>
val qual1 =
if (!pt.isError) adaptToArguments(qual, name, args1, pt)
else qual
if (qual1 ne qual) {
val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
return context withinSecondTry typed1(tree1, mode, pt)
}
case _ => ()
}
}
// no fallback succeeded: surface the original errors and warnings
typeErrors foreach context.issue
warnings foreach { case (p, m) => context.warning(p, m) }
setError(treeCopy.Apply(tree, fun, args))
}
silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
case SilentResultValue(value) => value
case e: SilentTypeError => onError(e.errors, e.warnings)
}
}
/** The normal path for typing an application `fun(args)`: silently types
 *  the function part, then applies it. On failure of an op-assignment
 *  selection (`x += y` style names) against a variable-or-getter
 *  qualifier, falls back to rewriting it as an assignment.
 */
def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
// TODO: replace `fun.symbol.isStable` by `treeInfo.isStableIdentifierPattern(fun)`
val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
val funpt = if (mode.inPatternMode) pt else WildcardType
val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
// on failure: try the op-assignment rewrite for `qual.op=`-style names,
// otherwise report via `reportError`
def onError(reportError: => Tree): Tree = fun match {
case Select(qual, name)
if !mode.inPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) && !qual.exists(_.isErroneous) =>
val qual1 = typedQualifier(qual)
if (treeInfo.isVariableOrGetter(qual1)) {
if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
convertToAssignment(fun, qual1, name, args)
}
else {
if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
reportError
}
case _ =>
if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
reportError
}
val silentResult = silent(
op = _.typed(fun, mode.forFunMode, funpt),
reportAmbiguousErrors = !mode.inExprMode && context.ambiguousErrors,
newtree = if (mode.inExprMode) tree else context.tree
)
silentResult match {
case SilentResultValue(fun1) =>
val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
// a "second try" (adapting the qualifier to the args) is only worthwhile
// for expression-mode selections, and never for constructors, implicit
// methods, past-typer phases, or when already inside a second try
val noSecondTry = (
isPastTyper
|| context.inSecondTry
|| (fun2.symbol ne null) && fun2.symbol.isConstructor
|| isImplicitMethodType(fun2.tpe)
)
val isFirstTry = fun2 match {
case Select(_, _) => !noSecondTry && mode.inExprMode
case _ => false
}
if (isFirstTry)
tryTypedApply(fun2, args)
else
doTypedApply(tree, fun2, args, mode, pt)
case err: SilentTypeError =>
onError({
err.reportableErrors foreach context.issue
err.warnings foreach { case (p, m) => context.warning(p, m) }
// still type the args (against ErrorType) so their errors are reported
args foreach (arg => typed(arg, mode, ErrorType))
setError(tree)
})
}
}
// convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
// convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len)
// where Array HK gets applied (N-1) times
/** Extractor matching `new Array[T](len)` instantiations of generic arrays;
 *  yields the ClassTag-based `newArray` rewrite (or a MissingClassTagError
 *  tree when no tag can be resolved).
 */
object ArrayInstantiation {
def unapply(tree: Apply) = tree match {
case Apply(Select(New(tpt), name), arg :: Nil) if tpt.tpe != null && tpt.tpe.typeSymbol == ArrayClass =>
Some(tpt.tpe) collect {
case erasure.GenericArray(level, componentType) =>
// for Array^N, the tag is for Array[...Array[T]...] with N-1 Array layers
val tagType = (1 until level).foldLeft(componentType)((res, _) => arrayType(res))
resolveClassTag(tree.pos, tagType) match {
case EmptyTree => MissingClassTagError(tree, tagType)
case tag => atPos(tree.pos)(new ApplyToImplicitArgs(Select(tag, nme.newArray), arg :: Nil))
}
}
case _ => None
}
}
      // Types an Apply node. A block in function position is rewritten so the
      // application happens inside the block; otherwise defer to normalTypedApply
      // and post-process generic-array instantiations and over-applied super calls.
      def typedApply(tree: Apply) = tree match {
        case Apply(Block(stats, expr), args) =>
          // { stats; expr }(args)  ==>  { stats; expr(args) }
          typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
        case Apply(fun, args) =>
          normalTypedApply(tree, fun, args) match {
            case ArrayInstantiation(tree1) => if (tree1.isErrorTyped) tree1 else typed(tree1, mode, pt)
            case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => TooManyArgumentListsForConstructor(tree) //SI-5696
            case tree1 => tree1
          }
      }
      /** Rewrites a failed application of an op-assignment selection (`x += 1`,
       *  `a(i) *= 2`, `o.f ::= y`, ...) into the equivalent explicit assignment
       *  (`x = x + 1`, `a.update(i, a.apply(i) * 2)`, ...), taking care to
       *  evaluate the qualifier and any indices only once, then types the result.
       */
      def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree]): Tree = {
        // e.g. `+=` --> `+`
        val prefix = name.toTermName stripSuffix nme.EQL
        // vble op= args  ==>  vble = vble.op(args)
        def mkAssign(vble: Tree): Tree =
          Assign(
            vble,
            Apply(
              Select(vble.duplicate, prefix) setPos fun.pos.focus, args) setPos tree.pos.makeTransparent
          ) setPos tree.pos
        // table(indices) op= args  ==>  table.update(indices, table.apply(indices).op(args)),
        // with `table` and each index evaluated exactly once.
        def mkUpdate(table: Tree, indices: List[Tree]) = {
          gen.evalOnceAll(table :: indices, context.owner, context.unit) {
            case tab :: is =>
              def mkCall(name: Name, extraArgs: Tree*) = (
                Apply(
                  Select(tab(), name) setPos table.pos,
                  is.map(i => i()) ++ extraArgs
                ) setPos tree.pos
              )
              mkCall(
                nme.update,
                Apply(Select(mkCall(nme.apply), prefix) setPos fun.pos, args) setPos tree.pos
              )
            case _ => EmptyTree
          }
        }
        val tree1 = qual match {
          case Ident(_) =>
            mkAssign(qual)
          case Select(qualqual, vname) =>
            // Evaluate the inner qualifier once so its side effects are not duplicated.
            gen.evalOnce(qualqual, context.owner, context.unit) { qq =>
              val qq1 = qq()
              mkAssign(Select(qq1, vname) setPos qual.pos)
            }
          case Apply(fn, indices) =>
            fn match {
              case treeInfo.Applied(Select(table, nme.apply), _, _) => mkUpdate(table, indices)
              case _ => UnexpectedTreeAssignmentConversionError(qual)
            }
        }
        typed1(tree1, mode, pt)
      }
def typedSuper(tree: Super) = {
val mix = tree.mix
val qual1 = typed(tree.qual)
val clazz = qual1 match {
case This(_) => qual1.symbol
case _ => qual1.tpe.typeSymbol
}
def findMixinSuper(site: Type): Type = {
var ps = site.parents filter (_.typeSymbol.name == mix)
if (ps.isEmpty)
ps = site.parents filter (_.typeSymbol.name == mix)
if (ps.isEmpty) {
debuglog("Fatal: couldn't find site " + site + " in " + site.parents.map(_.typeSymbol.name))
if (phase.erasedTypes && context.enclClass.owner.isTrait) {
// the reference to super class got lost during erasure
restrictionError(tree.pos, unit, "traits may not select fields or methods from super[C] where C is a class")
ErrorType
} else {
MixinMissingParentClassNameError(tree, mix, clazz)
ErrorType
}
} else if (!ps.tail.isEmpty) {
AmbiguousParentClassError(tree)
ErrorType
} else {
ps.head
}
}
val owntype = (
if (!mix.isEmpty) findMixinSuper(clazz.tpe)
else if (context.inSuperInit) clazz.info.firstParent
else intersectionType(clazz.info.parents)
)
treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
}
def typedThis(tree: This) =
tree.symbol orElse qualifyingClass(tree, tree.qual, packageOK = false) match {
case NoSymbol => tree
case clazz =>
tree setSymbol clazz setType clazz.thisType.underlying
if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree
}
// For Java, instance and static members are in the same scope, but we put the static ones in the companion object
// so, when we can't find a member in the class scope, check the companion
def inCompanionForJavaStatic(pre: Type, cls: Symbol, name: Name): Symbol =
if (!(context.unit.isJava && cls.isClass && !cls.isModuleClass)) NoSymbol else {
val companion = companionSymbolOf(cls, context)
if (!companion.exists) NoSymbol
else member(gen.mkAttributedRef(pre, companion), name) // assert(res.isStatic, s"inCompanionJavaStatic($pre, $cls, $name) = $res ${res.debugFlagString}")
}
/* Attribute a selection where `tree` is `qual.name`.
* `qual` is already attributed.
*/
      // Wrapper around typedSelectInternal that, after the typer phase is over,
      // sanity-checks that no OverloadedType escapes (such a type reaching the
      // backend would be fatal); the check only logs under -Ydebug.
      def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = {
        val t = typedSelectInternal(tree, qual, name)
        // Checking for OverloadedTypes being handed out after overloading
        // resolution has already happened.
        if (isPastTyper) t.tpe match {
          case OverloadedType(pre, alts) =>
            // Overloads owned by Object/Any/primitive value classes are tolerated.
            if (alts forall (s => (s.owner == ObjectClass) || (s.owner == AnyClass) || isPrimitiveValueClass(s.owner))) ()
            else if (settings.debug) printCaller(
              s"""|Select received overloaded type during $phase, but typer is over.
                  |If this type reaches the backend, we are likely doomed to crash.
                  |$t has these overloads:
                  |${alts map (s => " " + s.defStringSeenAs(pre memberType s)) mkString "\\n"}
                  |""".stripMargin
            )("")
          case _ =>
        }
        t
      }
      // Attributes the selection `qual.name` (qual already typed). Resolution order:
      // the symbol already on the tree, a member of the qualifier, a Java static in
      // the companion, then an implicit view on the qualifier; failing all of those,
      // a java type-selection, a Dynamic rewrite, or a qualifier-scope lookup is
      // attempted before reporting an error. Note the early non-local `return`s.
      def typedSelectInternal(tree: Tree, qual: Tree, name: Name): Tree = {
        // Fallback: rewrite the selection as a Dynamic invocation, if applicable.
        def asDynamicCall = dyna.mkInvoke(context, tree, qual, name) map { t =>
          dyna.wrapErrors(t, (_.typed1(t, mode, pt)))
        }
        val sym = tree.symbol orElse member(qual, name) orElse inCompanionForJavaStatic(qual.tpe.prefix, qual.symbol, name) orElse {
          // symbol not found? --> try to convert implicitly to a type that does have the required
          // member. Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an
          // xml member to StringContext, which in turn has an unapply[Seq] method)
          if (name != nme.CONSTRUCTOR && mode.inAny(EXPRmode | PATTERNmode)) {
            val qual1 = adaptToMemberWithArgs(tree, qual, name, mode)
            if ((qual1 ne qual) && !qual1.isErrorTyped)
              return typed(treeCopy.Select(tree, qual1, name), mode, pt)
          }
          NoSymbol
        }
        // After erasure, fix the type of a `super` qualifier to the member's owner.
        if (phase.erasedTypes && qual.isInstanceOf[Super] && tree.symbol != NoSymbol)
          qual setType tree.symbol.owner.tpe
        if (!reallyExists(sym)) {
          def handleMissing: Tree = {
            def errorTree = missingSelectErrorTree(tree, qual, name)
            def asTypeSelection = (
              if (context.unit.isJava && name.isTypeName) {
                // SI-3120 Java uses the same syntax, A.B, to express selection from the
                // value A and from the type A. We have to try both.
                atPos(tree.pos)(gen.convertToSelectFromType(qual, name)) match {
                  case EmptyTree => None
                  case tree1 => Some(typed1(tree1, mode, pt))
                }
              }
              else None
            )
            debuglog(s"""
              |qual=$qual:${qual.tpe}
              |symbol=${qual.tpe.termSymbol.defString}
              |scope-id=${qual.tpe.termSymbol.info.decls.hashCode}
              |members=${qual.tpe.members mkString ", "}
              |name=$name
              |found=$sym
              |owner=${context.enclClass.owner}
              """.stripMargin)
            // 1) Try converting a term selection on a java class into a type selection.
            // 2) Try expanding according to Dynamic rules.
            // 3) Try looking up the name in the qualifier.
            asTypeSelection orElse asDynamicCall getOrElse (lookupInQualifier(qual, name) match {
              case NoSymbol => setError(errorTree)
              case found => typed1(tree setSymbol found, mode, pt)
            })
          }
          handleMissing
        }
        else {
          val tree1 = tree match {
            case Select(_, _) => treeCopy.Select(tree, qual, name)
            case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
          }
          // Access errors are held back (Some(err)) so adapted-qualifier and Dynamic
          // retries can be attempted below before the error is finally issued.
          val (result, accessibleError) = silent(_.makeAccessible(tree1, sym, qual.tpe, qual)) match {
            case SilentTypeError(err: AccessTypeError) =>
              (tree1, Some(err))
            case SilentTypeError(err) =>
              SelectWithUnderlyingError(tree, err)
              return tree
            case SilentResultValue(treeAndPre) =>
              (stabilize(treeAndPre._1, treeAndPre._2, mode, pt), None)
          }
          result match {
            // could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual?
            case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs.nonEmpty => // TODO: somehow the new qual is not checked in refchecks
              treeCopy.SelectFromTypeTree(
                result,
                (TypeTreeWithDeferredRefCheck(){ () => val tp = qual.tpe; val sym = tp.typeSymbolDirect
                  // will execute during refchecks -- TODO: make private checkTypeRef in refchecks public and call that one?
                  checkBounds(qual, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "")
                  qual // you only get to see the wrapped tree after running this check :-p
                }) setType qual.tpe setPos qual.pos,
                name)
            case _ if accessibleError.isDefined =>
              // don't adapt constructor, SI-6074
              val qual1 = if (name == nme.CONSTRUCTOR) qual
                          else adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = false, saveErrors = false)
              if (!qual1.isErrorTyped && (qual1 ne qual))
                typed(Select(qual1, name) setPos tree.pos, mode, pt)
              else
                // before failing due to access, try a dynamic call.
                asDynamicCall getOrElse {
                  context.issue(accessibleError.get)
                  setError(tree)
                }
            case _ =>
              result
          }
        }
      }
      // Dispatches a Select: super-constructor selections get a specially typed
      // qualifier; all others go through typedSelect with a stability check on
      // the qualifier when a type member is being selected.
      def typedSelectOrSuperCall(tree: Select) = tree match {
        case Select(qual @ Super(_, _), nme.CONSTRUCTOR) =>
          // the qualifier type of a supercall constructor is its first parent class
          typedSelect(tree, typedSelectOrSuperQualifier(qual), nme.CONSTRUCTOR)
        case Select(qual, name) =>
          if (Statistics.canEnable) Statistics.incCounter(typedSelectCount)
          val qualTyped = checkDead(typedQualifier(qual, mode))
          // Selecting a type member requires a stable (path) qualifier.
          val qualStableOrError = (
            if (qualTyped.isErrorTyped || !name.isTypeName || treeInfo.admitsTypeSelection(qualTyped))
              qualTyped
            else
              UnstableTreeError(qualTyped)
          )
          val tree1 = typedSelect(tree, qualStableOrError, name)
          def sym = tree1.symbol
          // Postfix operators and reflective (refinement-only) calls are feature-gated.
          if (tree.isInstanceOf[PostfixSelect])
            checkFeature(tree.pos, PostfixOpsFeature, name.decode)
          if (sym != null && sym.isOnlyRefinementMember && !sym.isMacro)
            checkFeature(tree1.pos, ReflectiveCallsFeature, sym.toString)
          qualStableOrError.symbol match {
            // Selections from the root package collapse to a bare Ident.
            case s: Symbol if s.isRootPackage => treeCopy.Ident(tree1, name)
            case _ => tree1
          }
      }
/* A symbol qualifies if:
* - it exists
* - it is not stale (stale symbols are made to disappear here)
* - if we are in a constructor pattern, method definitions do not qualify
* unless they are stable. Otherwise, 'case x :: xs' would find the :: method.
*/
def qualifies(sym: Symbol) = (
sym.hasRawInfo
&& reallyExists(sym)
&& !(mode.typingConstructorPattern && sym.isMethod && !sym.isStable)
)
/* Attribute an identifier consisting of a simple name or an outer reference.
*
* @param tree The tree representing the identifier.
* @param name The name of the identifier.
* Transformations: (1) Prefix class members with this.
* (2) Change imported symbols to selections
*/
      def typedIdent(tree: Tree, name: Name): Tree = {
        // setting to enable unqualified idents in empty package (used by the repl)
        def inEmptyPackage = if (settings.exposeEmptyPackage) lookupInEmpty(name) else NoSymbol
        def issue(err: AbsTypeError) = {
          // Avoiding some spurious error messages: see SI-2388.
          val suppress = reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)
          if (!suppress)
            ErrorUtils.issueTypeError(err)
          setError(tree)
        }
        // ignore current variable scope in patterns to enforce linearity
        val startContext = if (mode.typingPatternOrTypePat) context.outer else context
        // An already-attributed tree short-circuits the lookup.
        val nameLookup = tree.symbol match {
          case NoSymbol => startContext.lookupSymbol(name, qualifies)
          case sym => LookupSucceeded(EmptyTree, sym)
        }
        import InferErrorGen._
        nameLookup match {
          case LookupAmbiguous(msg) => issue(AmbiguousIdentError(tree, name, msg))
          case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg))
          case LookupNotFound =>
            // Last resorts: the empty package (REPL) and the root package.
            inEmptyPackage orElse lookupInRoot(name) match {
              case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext))
              case sym => typed1(tree setSymbol sym, mode, pt)
            }
          case LookupSucceeded(qual, sym) =>
            (// this -> Foo.this
            if (sym.isThisSym)
              typed1(This(sym.owner) setPos tree.pos, mode, pt)
            else if (isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) {
              // Inferring classOf type parameter from expected type. Otherwise an
              // actual call to the stubbed classOf method is generated, returning null.
              typedClassOf(tree, TypeTree(pt.typeArgs.head).setPos(tree.pos.focus))
            }
            else {
              // Rebuild the reference with the correct prefix; imported symbols
              // become explicit selections on the (typed) import qualifier.
              val pre1 = if (sym.isTopLevel) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe
              val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name))
              val (tree2, pre2) = makeAccessible(tree1, sym, pre1, qual)
              // SI-5967 Important to replace param type A* with Seq[A] when seen from from a reference, to avoid
              // inference errors in pattern matching.
              stabilize(tree2, pre2, mode, pt) modifyType dropIllegalStarTypes
            }) setAttachments tree.attachments
        }
      }
def typedIdentOrWildcard(tree: Ident) = {
val name = tree.name
if (Statistics.canEnable) Statistics.incCounter(typedIdentCount)
if ((name == nme.WILDCARD && mode.typingPatternNotConstructor) ||
(name == tpnme.WILDCARD && mode.inTypeMode))
tree setType makeFullyDefined(pt)
else
typedIdent(tree, name)
}
      // Types a compound type tree `P1 with P2 { decls }`: types the parents,
      // rejects non-declaration statements in the refinement, and builds the
      // refined type with a fresh scope for the refinement members.
      def typedCompoundTypeTree(tree: CompoundTypeTree) = {
        val templ = tree.templ
        val parents1 = templ.parents mapConserve (typedType(_, mode))
        // This is also checked later in typedStats, but that is too late for SI-5361, so
        // we eagerly check this here.
        for (stat <- templ.body if !treeInfo.isDeclarationOrTypeDef(stat))
          OnlyDeclarationsError(stat)
        if ((parents1 ++ templ.body) exists (_.isErrorTyped)) tree setType ErrorType
        else {
          val decls = newScope
          //Console.println("Owner: " + context.enclClass.owner + " " + context.enclClass.owner.id)
          val self = refinedType(parents1 map (_.tpe), context.enclClass.owner, decls, templ.pos)
          newTyper(context.make(templ, self.typeSymbol, decls)).typedRefinement(templ)
          templ updateAttachment CompoundTypeTreeOriginalAttachment(parents1, Nil) // stats are set elsewhere
          tree setType (if (templ.exists(_.isErroneous)) ErrorType else self) // Being conservative to avoid SI-5361
        }
      }
      // Types an applied type `C[T1, ..., Tn]`: types the constructor, kind-checks
      // and types each argument, tightens bounds of Bind-introduced type variables,
      // and defers the bounds check of beta-reduced poly types to refchecks.
      def typedAppliedTypeTree(tree: AppliedTypeTree) = {
        val tpt = tree.tpt
        val args = tree.args
        // The constructor position is typed in FUNmode | TAPPmode: it is being applied.
        val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
        def isPoly = tpt1.tpe.isInstanceOf[PolyType]
        def isComplete = tpt1.symbol.rawInfo.isComplete
        if (tpt1.isErrorTyped) {
          tpt1
        } else if (!tpt1.hasSymbolField) {
          AppliedTypeNoParametersError(tree, tpt1.tpe)
        } else {
          val tparams = tpt1.symbol.typeParams
          if (sameLength(tparams, args)) {
            // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
            val args1 = map2Conserve(args, tparams) { (arg, tparam) =>
              def ptParams = Kind.FromParams(tparam.typeParams)
              // if symbol hasn't been fully loaded, can't check kind-arity except when we're in a pattern,
              // where we can (we can't take part in F-Bounds) and must (SI-8023)
              val pt = if (mode.typingPatternOrTypePat) {
                tparam.initialize; ptParams
              }
                else if (isComplete) ptParams
                else Kind.Wildcard
              typedHigherKindedType(arg, mode, pt)
            }
            val argtypes = mapList(args1)(treeTpe)
            foreach2(args, tparams) { (arg, tparam) =>
              // note: can't use args1 in selector, because Binds got replaced
              val asym = arg.symbol
              def abounds = asym.info.bounds
              def tbounds = tparam.info.bounds
              // Intersect the type variable's own bounds with the (instantiated)
              // bounds of the corresponding type parameter.
              def enhanceBounds(): Unit = {
                val TypeBounds(lo0, hi0) = abounds
                val TypeBounds(lo1, hi1) = tbounds.subst(tparams, argtypes)
                val lo = lub(List(lo0, lo1))
                val hi = glb(List(hi0, hi1))
                if (!(lo =:= lo0 && hi =:= hi0))
                  asym setInfo logResult(s"Updating bounds of ${asym.fullLocationString} in $tree from '$abounds' to")(TypeBounds(lo, hi))
              }
              if (asym != null && asym.isAbstractType) {
                arg match {
                  // I removed the Ident() case that partially fixed SI-1786,
                  // because the stricter bounds being inferred broke e.g., slick
                  // worse, the fix was compilation order-dependent
                  // sharpenQuantifierBounds (used in skolemizeExistential) has an alternative fix (SI-6169) that's less invasive
                  case Bind(_, _) => enhanceBounds()
                  case _ =>
                }
              }
            }
            val original = treeCopy.AppliedTypeTree(tree, tpt1, args1)
            val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original
            if (isPoly) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
              TypeTreeWithDeferredRefCheck(){ () =>
                // wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
                // we can't simply use original in refchecks because it does not contains types
                // (and the only typed trees we have been mangled so they're not quite the original tree anymore)
                checkBounds(result, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "")
                result // you only get to see the wrapped tree after running this check :-p
              } setType (result.tpe) setPos(result.pos)
            else result
          } else if (tparams.isEmpty) {
            AppliedTypeNoParametersError(tree, tpt1.tpe)
          } else {
            //Console.println("\\{tpt1}:\\{tpt1.symbol}:\\{tpt1.symbol.info}")
            if (settings.debug) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug
            AppliedTypeWrongNumberOfArgsError(tree, tpt1, tparams)
          }
        }
      }
      // Complete the info of the tree's (already-entered) symbol, if any, before
      // typing its definition below.
      val sym: Symbol = tree.symbol
      if ((sym ne null) && (sym ne NoSymbol)) sym.initialize
      // Types a package definition: the package id as a qualifier, then the member
      // stats in a fresh typer scoped to the package's module class.
      def typedPackageDef(pdef0: PackageDef) = {
        val pdef = treeCopy.PackageDef(pdef0, pdef0.pid, pluginsEnterStats(this, pdef0.stats))
        val pid1 = typedQualifier(pdef.pid).asInstanceOf[RefTree]
        assert(sym.moduleClass ne NoSymbol, sym)
        val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls))
          .typedStats(pdef.stats, NoSymbol)
        treeCopy.PackageDef(tree, pid1, stats1) setType NoType
      }
/*
* The typer with the correct context for a method definition. If the method is a default getter for
* a constructor default, the resulting typer has a constructor context (fixes SI-5543).
*/
      def defDefTyper(ddef: DefDef) = {
        // A default getter for a constructor default lives in the companion module,
        // but must be typed as if inside the constructor (SI-5543).
        val isConstrDefaultGetter = ddef.mods.hasDefault && sym.owner.isModuleClass &&
            nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
        newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(isConstrDefaultGetter)
      }
def typedAlternative(alt: Alternative) = {
context withinPatAlternative (
treeCopy.Alternative(tree, alt.trees mapConserve (alt => typed(alt, mode, pt))) setType pt
)
}
      // Types a `_*` sequence-wildcard pattern; it is only legal against a
      // varargs parameter, which the context records via starPatterns.
      def typedStar(tree: Star) = {
        if (!context.starPatterns && !isPastTyper)
          StarPatternWithVarargParametersError(tree)
        treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
      }
      // Emits lint warnings for a typed Try: a try with neither catch nor finally,
      // and unguarded catch-all patterns that would swallow every Throwable.
      def issueTryWarnings(tree: Try): Try = {
        def checkForCatchAll(cdef: CaseDef) {
          // An Ident/Bind pattern with no symbol matches anything.
          def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
          def warn(name: Name) = {
            val msg = s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning."
            context.warning(cdef.pat.pos, msg)
          }
          // A guard makes the catch selective, so only unguarded cases are flagged.
          if (cdef.guard.isEmpty) cdef.pat match {
            case Bind(name, i @ Ident(_)) if unbound(i) => warn(name)
            case i @ Ident(name) if unbound(i) => warn(name)
            case _ =>
          }
        }
        if (!isPastTyper) tree match {
          case Try(_, Nil, fin) =>
            if (fin eq EmptyTree)
              context.warning(tree.pos, "A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.")
          case Try(_, catches, _) =>
            catches foreach checkForCatchAll
        }
        tree
      }
      // Types a try/catch/finally. The result type is the expected type when fully
      // defined; otherwise the (weak) lub of the body and all catch cases, with the
      // branches adapted to it when the weak lub differs from the plain lub.
      def typedTry(tree: Try) = {
        val Try(block, catches, fin) = tree
        val block1 = typed(block, pt)
        val catches1 = typedCases(catches, ThrowableTpe, pt)
        // The finalizer is typed for effect only (Unit).
        val fin1 = if (fin.isEmpty) fin else typed(fin, UnitTpe)
        def finish(ownType: Type) = treeCopy.Try(tree, block1, catches1, fin1) setType ownType
        issueTryWarnings(
          if (isFullyDefined(pt))
            finish(pt)
          else block1 :: catches1 map (_.tpe.deconst) match {
            case tpes if sameWeakLubAsLub(tpes) => finish(lub(tpes))
            case tpes =>
              // Weak lub differs (numeric widening involved): adapt every branch.
              val lub = weakLub(tpes)
              val block2 = adapt(block1, mode, lub)
              val catches2 = catches1 map (adaptCase(_, mode, lub))
              treeCopy.Try(tree, block2, catches2, fin1) setType lub
          }
        )
      }
def typedThrow(tree: Throw) = {
val expr1 = typedByValueExpr(tree.expr, ThrowableTpe)
treeCopy.Throw(tree, expr1) setType NothingTpe
}
      // Types an ascription `expr: T`, dispatching on context: a `_*` ascription in
      // a pattern, a pattern-mode ascription, the trailing-underscore method-value
      // marker, or an ordinary type ascription.
      def typedTyped(tree: Typed) = {
        if (treeInfo isWildcardStarType tree.tpt)
          typedStarInPattern(tree, mode.onlySticky, pt)
        else if (mode.inPatternMode)
          typedInPattern(tree, mode.onlySticky, pt)
        else tree match {
          // find out whether the programmer is trying to eta-expand a macro def
          // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
          // that typecheck must not trigger macro expansions, so we explicitly prohibit them
          // however we cannot do `context.withMacrosDisabled`
          // because `expr` might contain nested macro calls (see SI-6673)
          //
          // Note: apparently `Function(Nil, EmptyTree)` is the secret parser marker
          // which means trailing underscore -- denoting a method value. See makeMethodValue in TreeBuilder.
          case Typed(expr, Function(Nil, EmptyTree)) =>
            typed1(suppressMacroExpansion(expr), mode, pt) match {
              case macroDef if treeInfo.isMacroApplication(macroDef) => MacroEtaError(macroDef)
              case methodValue => typedEta(checkDead(methodValue), tree)
            }
          case Typed(expr, tpt) =>
            val tpt1 = typedType(tpt, mode) // type the ascribed type first
            val expr1 = typed(expr, mode.onlySticky, tpt1.tpe.deconst) // then type the expression with tpt1 as the expected type
            treeCopy.Typed(tree, expr1, tpt1) setType tpt1.tpe
        }
      }
      // Types an explicit type application `fun[T1, ..., Tn]`: the function is typed
      // first (its type parameters' kinds are needed to type the arguments), then
      // the arguments are kind-checked against those parameters.
      def typedTypeApply(tree: TypeApply) = {
        val fun = tree.fun
        val args = tree.args
        // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
        //@M! we must type fun in order to type the args, as that requires the kinds of fun's type parameters.
        // However, args should apparently be done first, to save context.undetparams. Unfortunately, the args
        // *really* have to be typed *after* fun. We escape from this classic Catch-22 by simply saving&restoring undetparams.
        // @M TODO: the compiler still bootstraps&all tests pass when this is commented out..
        //val undets = context.undetparams
        // @M: fun is typed in TAPPmode because it is being applied to its actual type parameters
        val fun1 = typed(fun, mode.forFunMode | TAPPmode)
        val tparams = if (fun1.symbol == null) Nil else fun1.symbol.typeParams
        //@M TODO: val undets_fun = context.undetparams  ?
        // "do args first" (by restoring the context.undetparams) in order to maintain context.undetparams on the function side.
        // @M TODO: the compiler still bootstraps when this is commented out.. TODO: run tests
        //context.undetparams = undets
        // @M maybe the well-kindedness check should be done when checking the type arguments conform to the type parameters' bounds?
        val args1 = if (sameLength(args, tparams)) map2Conserve(args, tparams) {
          (arg, tparam) => typedHigherKindedType(arg, mode, Kind.FromParams(tparam.typeParams))
        }
          else {
            //@M this branch is correctly hit for an overloaded polymorphic type. It also has to handle erroneous cases.
            // Until the right alternative for an overloaded method is known, be very liberal,
            // typedTypeApply will find the right alternative and then do the same check as
            // in the then-branch above. (see pos/tcpoly_overloaded.scala)
            // this assert is too strict: be tolerant for errors like trait A { def foo[m[x], g]=error(""); def x[g] = foo[g/*ERR: missing argument type*/] }
            //assert(fun1.symbol.info.isInstanceOf[OverloadedType] || fun1.symbol.isError) //, (fun1.symbol,fun1.symbol.info,fun1.symbol.info.getClass,args,tparams))
            args mapConserve (typedHigherKindedType(_, mode))
          }
        //@M TODO: context.undetparams = undets_fun ?
        Typer.this.typedTypeApply(tree, mode, fun1, args1)
      }
def typedApplyDynamic(tree: ApplyDynamic) = {
assert(phase.erasedTypes)
val qual1 = typed(tree.qual, AnyRefTpe)
val args1 = tree.args mapConserve (arg => typed(arg, AnyRefTpe))
treeCopy.ApplyDynamic(tree, qual1, args1) setType AnyRefTpe
}
      // Types a ReferenceToBoxed node (a reference to a captured variable's box),
      // assigning the captured-variable type appropriate for the current phase.
      def typedReferenceToBoxed(tree: ReferenceToBoxed) = {
        val id = tree.ident
        val id1 = typed1(id, mode, pt) match { case id: Ident => id }
        // [Eugene] am I doing it right?
        val erasedTypes = phaseId(currentPeriod) >= currentRun.erasurePhase.id
        val tpe = capturedVariableType(id.symbol, erasedTypes = erasedTypes)
        treeCopy.ReferenceToBoxed(tree, id1) setType tpe
      }
// Warn about likely interpolated strings which are missing their interpolators
      def warnMissingInterpolator(lit: Literal): Unit = if (!isPastTyper) {
        // attempt to avoid warning about trees munged by macros
        def isMacroExpansion = {
          // context.tree is not the expandee; it is plain new SC(ps).m(args)
          //context.tree exists (t => (t.pos includes lit.pos) && hasMacroExpansionAttachment(t))
          // testing pos works and may suffice
          //openMacros exists (_.macroApplication.pos includes lit.pos)
          // tests whether the lit belongs to the expandee of an open macro
          openMacros exists (_.macroApplication.attachments.get[MacroExpansionAttachment] match {
            case Some(MacroExpansionAttachment(_, t: Tree)) => t exists (_ == lit)
            case _ => false
          })
        }
        // attempt to avoid warning about the special interpolated message string
        // for implicitNotFound or any standard interpolation (with embedded $$).
        def isRecognizablyNotForInterpolation = context.enclosingApply.tree match {
          case Apply(Select(Apply(RefTree(_, nme.StringContext), _), _), _) => true
          case Apply(Select(New(RefTree(_, tpnme.implicitNotFound)), _), _) => true
          case _ => isMacroExpansion
        }
        // True when `tp` could be invoked with no (explicit) argument.
        def requiresNoArgs(tp: Type): Boolean = tp match {
          case PolyType(_, restpe) => requiresNoArgs(restpe)
          case MethodType(Nil, restpe) => requiresNoArgs(restpe) // may be a curried method - can't tell yet
          case MethodType(p :: _, _) => p.isImplicit // implicit method requires no args
          case _ => true // catches all others including NullaryMethodType
        }
        // A candidate symbol is plausible if some alternative could be referenced bare.
        def isPlausible(m: Symbol) = !m.isPackage && m.alternatives.exists(x => requiresNoArgs(x.info))
        def maybeWarn(s: String): Unit = {
          def warn(message: String) = context.warning(lit.pos, s"possible missing interpolator: $message")
          def suspiciousSym(name: TermName) = context.lookupSymbol(name, _ => true).symbol
          // ${...} expressions and bare $idents found in the literal.
          val suspiciousExprs = InterpolatorCodeRegex findAllMatchIn s
          def suspiciousIdents = InterpolatorIdentRegex findAllIn s map (s => suspiciousSym(TermName(s drop 1)))
          def isCheapIdent(expr: String) = (Character.isJavaIdentifierStart(expr.charAt(0)) &&
              expr.tail.forall(Character.isJavaIdentifierPart))
          def warnableExpr(expr: String) = !expr.isEmpty && (!isCheapIdent(expr) || isPlausible(suspiciousSym(TermName(expr))))
          if (suspiciousExprs.nonEmpty) {
            val exprs = (suspiciousExprs map (_ group 1)).toList
            // short-circuit on leading ${}
            if (!exprs.head.isEmpty && exprs.exists(warnableExpr))
              warn("detected an interpolated expression") // "${...}"
          } else
            suspiciousIdents find isPlausible foreach (sym => warn(s"detected interpolated identifier `$$${sym.name}`")) // "$id"
        }
        lit match {
          case Literal(Constant(s: String)) if !isRecognizablyNotForInterpolation => maybeWarn(s)
          case _ =>
        }
      }
def typedLiteral(tree: Literal) = {
if (settings.warnMissingInterpolator) warnMissingInterpolator(tree)
tree setType (if (tree.value.tag == UnitTag) UnitTpe else ConstantType(tree.value))
}
      // Types a singleton type `p.type`: the reference is typed without implicits
      // and must denote a stable path.
      def typedSingletonTypeTree(tree: SingletonTypeTree) = {
        val refTyped =
          context.withImplicitsDisabled {
            typed(tree.ref, MonoQualifierModes | mode.onlyTypePat, AnyRefTpe)
          }
        if (refTyped.isErrorTyped) {
          setError(tree)
        } else {
          tree setType refTyped.tpe.resultType.deconst
          // NOTE(review): isErrorTyped is false on this branch, so the first disjunct
          // below looks redundant; the effective check is admitsTypeSelection.
          if (refTyped.isErrorTyped || treeInfo.admitsTypeSelection(refTyped)) tree
          else UnstableTreeError(tree)
        }
      }
      // Types a type projection `T#A`: the qualifier type must not be volatile.
      def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
        val qual1 = typedType(tree.qualifier, mode)
        if (qual1.isErrorTyped) setError(treeCopy.SelectFromTypeTree(tree, qual1, tree.name))
        else if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
        else typedSelect(tree, qual1, tree.name)
      }
def typedTypeBoundsTree(tree: TypeBoundsTree) = {
val lo1 = if (tree.lo.isEmpty) TypeTree(NothingTpe) else typedType(tree.lo, mode)
val hi1 = if (tree.hi.isEmpty) TypeTree(AnyTpe) else typedType(tree.hi, mode)
treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
}
      // Types an existential type `T forSome { ... }` in a fresh scope, preserving
      // the type-constructor-allowed flag, and feature-gates existentials.
      def typedExistentialTypeTree(tree: ExistentialTypeTree) = {
        val tree1 = typerWithLocalContext(context.makeNewScope(tree, context.owner)){
          typer =>
            if (context.inTypeConstructorAllowed)
              typer.context.withinTypeConstructorAllowed(typer.typedExistentialTypeTree(tree, mode))
            else
              typer.typedExistentialTypeTree(tree, mode)
        }
        checkExistentialsFeature(tree1.pos, tree1.tpe, "the existential type")
        tree1
      }
      // Types a TypeTree by re-typing its original (pre-typer) tree when present;
      // an original-less TypeTree only occurs on error-recovery paths.
      def typedTypeTree(tree: TypeTree) = {
        if (tree.original != null) {
          val newTpt = typedType(tree.original, mode)
          tree setType newTpt.tpe
          newTpt match {
            case tt @ TypeTree() => tree setOriginal tt.original
            case _ => tree
          }
        }
        else {
          // we should get here only when something before failed
          // and we try again (@see tryTypedApply). In that case we can assign
          // whatever type to tree; we just have to survive until a real error message is issued.
          devWarning(tree.pos, s"Assigning Any type to TypeTree because tree.original is null: tree is $tree/${System.identityHashCode(tree)}, sym=${tree.symbol}, tpe=${tree.tpe}")
          tree setType AnyTpe
        }
      }
      // Types a function literal in a fresh scope, creating an anonymous-function
      // symbol for it if it does not already have one.
      def typedFunction(fun: Function) = {
        if (fun.symbol == NoSymbol)
          fun.symbol = context.owner.newAnonymousFunctionValue(fun.pos)
        typerWithLocalContext(context.makeNewScope(fun, fun.symbol))(_.typedFunction(fun, mode, pt))
      }
      // Trees only allowed during pattern mode.
      def typedInPatternMode(tree: Tree): Tree = tree match {
        case tree: Alternative => typedAlternative(tree)
        case tree: Star => typedStar(tree)
        case _ => abort(s"unexpected tree in pattern mode: ${tree.getClass}\\n$tree")
      }
      // Dispatch table for type-representing trees.
      def typedTypTree(tree: TypTree): Tree = tree match {
        case tree: TypeTree => typedTypeTree(tree)
        case tree: AppliedTypeTree => typedAppliedTypeTree(tree)
        case tree: TypeBoundsTree => typedTypeBoundsTree(tree)
        case tree: SingletonTypeTree => typedSingletonTypeTree(tree)
        case tree: SelectFromTypeTree => typedSelectFromTypeTree(tree)
        case tree: CompoundTypeTree => typedCompoundTypeTree(tree)
        case tree: ExistentialTypeTree => typedExistentialTypeTree(tree)
        case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
        case _ => abort(s"unexpected type-representing tree: ${tree.getClass}\\n$tree")
      }
      // Dispatch table for member definitions; class/module bodies get fresh scopes.
      def typedMemberDef(tree: MemberDef): Tree = tree match {
        case tree: ValDef => typedValDef(tree)
        case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
        case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
        case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
        case tree: TypeDef => typedTypeDef(tree)
        case tree: PackageDef => typedPackageDef(tree)
        case _ => abort(s"unexpected member def: ${tree.getClass}\\n$tree")
      }
      // Trees not allowed during pattern mode.
      def typedOutsidePatternMode(tree: Tree): Tree = tree match {
        case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
        case tree: If => typedIf(tree)
        case tree: TypeApply => typedTypeApply(tree)
        case tree: Function => typedFunction(tree)
        case tree: Match => typedVirtualizedMatch(tree)
        case tree: New => typedNew(tree)
        case tree: Assign => typedAssign(tree.lhs, tree.rhs)
        case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
        case tree: Super => typedSuper(tree)
        case tree: Annotated => typedAnnotated(tree)
        case tree: Return => typedReturn(tree)
        case tree: Try => typedTry(tree)
        case tree: Throw => typedThrow(tree)
        case tree: ArrayValue => typedArrayValue(tree)
        case tree: ApplyDynamic => typedApplyDynamic(tree)
        case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
        case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
        case tree: DocDef => typedDocDef(tree, mode, pt)
        case _ => abort(s"unexpected tree: ${tree.getClass}\\n$tree")
      }
      // Trees allowed in or out of pattern mode; anything else falls through to the
      // mode-specific dispatchers.
      def typedInAnyMode(tree: Tree): Tree = tree match {
        case tree: Ident => typedIdentOrWildcard(tree)
        case tree: Bind => typedBind(tree)
        case tree: Apply => typedApply(tree)
        case tree: Select => typedSelectOrSuperCall(tree)
        case tree: Literal => typedLiteral(tree)
        case tree: Typed => typedTyped(tree)
        case tree: This => typedThis(tree) // SI-6104
        case tree: UnApply => abort(s"unexpected UnApply $tree") // turns out UnApply never reaches here
        case _ =>
          if (mode.inPatternMode)
            typedInPatternMode(tree)
          else
            typedOutsidePatternMode(tree)
      }
      // begin typed1
      // Top-level dispatch: type trees, member definitions, then everything else.
      tree match {
        case tree: TypTree => typedTypTree(tree)
        case tree: MemberDef => typedMemberDef(tree)
        case _ => typedInAnyMode(tree)
      }
}
    // Main entry point: types `tree` in `mode` against expected type `pt`,
    // with optional typing-stack tracing and per-tree-class statistics.
    def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
      lastTreeToTyper = tree
      def body = (
        if (printTypings && !phase.erasedTypes && !noPrintTyping(tree))
          typingStack.nextTyped(tree, mode, pt, context)(typedInternal(tree, mode, pt))
        else
          typedInternal(tree, mode, pt)
      )
      // Statistics bracket the call per tree class.
      val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null
      if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass)
      try body
      finally if (Statistics.canEnable) Statistics.popTimer(byTypeStack, startByType)
    }
    /** Does the actual work of `typed`: computes the plugin-adjusted expected
     *  type, runs `typed1` (unless the tree is already typed), adapts the
     *  result to the expected type, and converts recoverable `TypeError`s
     *  into error trees.
     */
    private def typedInternal(tree: Tree, mode: Mode, pt: Type): Tree = {
      // Give compiler plugins a chance to adjust the expected type.
      val ptPlugins = pluginsPt(pt, this, tree, mode)
      // When retyping, an already-assigned type is discarded if it is
      // erroneous or does not conform to the (plugin-adjusted) expected type.
      def retypingOk = (
        context.retyping
        && (tree.tpe ne null)
        && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))
      )
      def runTyper(): Tree = {
        if (retypingOk) {
          tree.setType(null)
          if (tree.hasSymbolField) tree.symbol = NoSymbol
        }
        val alreadyTyped = tree.tpe ne null
        val shouldPrint = !alreadyTyped && !phase.erasedTypes
        val ptWild = if (mode.inPatternMode)
          ptPlugins // SI-5022 don't widen pt for patterns as types flow from it to the case body.
        else
          dropExistential(ptPlugins) // FIXME: document why this is done.
        val tree1: Tree = if (alreadyTyped) tree else typed1(tree, mode, ptWild)
        if (shouldPrint)
          typingStack.showTyped(tree1)
        // Can happen during erroneous compilation - error(s) have been
        // reported, but we need to avoid causing an NPE with this tree
        if (tree1.tpe eq null)
          return setError(tree)
        // Let plugins rewrite the computed type before adaptation.
        tree1 modifyType (pluginsTyped(_, this, tree1, mode, ptPlugins))
        val result =
          if (tree1.isEmpty) tree1
          else {
            val result = adapt(tree1, mode, ptPlugins, tree)
            // Expand any macro applications queued up during adaptation.
            if (hasPendingMacroExpansions) macroExpandAll(this, result) else result
          }
        if (shouldPrint)
          typingStack.showAdapt(tree1, result, ptPlugins, context)
        if (!isPastTyper)
          signalDone(context.asInstanceOf[analyzer.Context], tree, result)
        if (mode.inPatternMode && !mode.inPolyMode && result.isType)
          PatternMustBeValue(result, pt)
        result
      }
      try runTyper() catch {
        case ex: TypeError =>
          tree.clearType()
          // The only problematic case are (recoverable) cyclic reference errors which can pop up almost anywhere.
          typingStack.printTyping(tree, "caught %s: while typing %s".format(ex, tree)) //DEBUG
          reportTypeError(context, tree.pos, ex)
          setError(tree)
        case ex: Exception =>
          // @M causes cyclic reference error
          devWarning(s"exception when typing $tree, pt=$ptPlugins")
          if (context != null && context.unit.exists && tree != null)
            logError("AT: " + tree.pos, ex)
          throw ex
      }
    }
    /** Returns a new typer whose context is owned by `owner`. */
    def atOwner(owner: Symbol): Typer =
      newTyper(context.make(owner = owner))

    /** Returns a new typer for `tree` whose context is owned by `owner`. */
    def atOwner(tree: Tree, owner: Symbol): Typer =
      newTyper(context.make(tree, owner))
/** Types expression or definition `tree`.
*/
def typed(tree: Tree): Tree = {
val ret = typed(tree, context.defaultModeForTyped, WildcardType)
ret
}
    /** Types `tree` as a by-value expression (forces eta-expansion/value conversion). */
    def typedByValueExpr(tree: Tree, pt: Type = WildcardType): Tree = typed(tree, EXPRmode | BYVALmode, pt)

    /** Positions `tree` at `pos` before typing it with the given mode and expected type. */
    def typedPos(pos: Position, mode: Mode, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt)
    /** Positions `tree` at `pos` before typing it with the default mode. */
    def typedPos(pos: Position)(tree: Tree) = typed(atPos(pos)(tree))
    // TODO: see if this formulation would impose any penalty, since
    // it makes for a lot less casting.
    // def typedPos[T <: Tree](pos: Position)(tree: T): T = typed(atPos(pos)(tree)).asInstanceOf[T]

    /** Types expression `tree` with given prototype `pt`.
     */
    def typed(tree: Tree, pt: Type): Tree =
      typed(tree, context.defaultModeForTyped, pt)

    /** Types expression `tree` with the given mode and no expected type. */
    def typed(tree: Tree, mode: Mode): Tree =
      typed(tree, mode, WildcardType)

    /** Types qualifier `tree` of a select node.
     *  E.g. if tree occurs in a context like `tree.m`.
     */
    def typedQualifier(tree: Tree, mode: Mode, pt: Type): Tree =
      typed(tree, PolyQualifierModes | mode.onlyTypePat, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit

    /** Types qualifier `tree` of a select node.
     *  E.g. if tree occurs in a context like `tree.m`.
     */
    def typedQualifier(tree: Tree, mode: Mode): Tree =
      typedQualifier(tree, mode, WildcardType)

    /** Types qualifier `tree` of a select node with no mode or expected type. */
    def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType)

    /** Types function part of an application */
    def typedOperator(tree: Tree): Tree = typed(tree, OperatorModes)

    // the qualifier type of a supercall constructor is its first parent class
    private def typedSelectOrSuperQualifier(qual: Tree) =
      context withinSuperInit typed(qual, PolyQualifierModes)
    /** Types a pattern with prototype `pt`.
     *  Runs in pattern mode with arbitrary implicit conversions disabled
     *  (but enrichment enabled) — see the rationale below.
     */
    def typedPattern(tree: Tree, pt: Type): Tree = {
      // We disable implicits because otherwise some constructs will
      // type check which should not. The pattern matcher does not
      // perform implicit conversions in an attempt to consummate a match.

      // on the one hand,
      //   "abc" match { case Seq('a', 'b', 'c') => true }
      // should be ruled out statically, otherwise this is a runtime
      // error both because there is an implicit from String to Seq
      // (even though such implicits are not used by the matcher) and
      // because the typer is fine with concluding that "abc" might
      // be of type "String with Seq[T]" and thus eligible for a call
      // to unapplySeq.

      // on the other hand, we want to be able to use implicits to add members retro-actively (e.g., add xml to StringContext)

      // as a compromise, context.enrichmentEnabled tells adaptToMember to go ahead and enrich,
      // but arbitrary conversions (in adapt) are disabled
      // TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
      typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt)))
    }
    /** Types a (fully parameterized) type tree */
    def typedType(tree: Tree, mode: Mode): Tree =
      typed(tree, mode.forTypeMode, WildcardType)

    /** Types a (fully parameterized) type tree */
    def typedType(tree: Tree): Tree = typedType(tree, NOmode)

    /** Types a higher-kinded type tree -- pt denotes the expected kind and must be one of `Kind.WildCard` and `Kind.FromParams` */
    def typedHigherKindedType(tree: Tree, mode: Mode, pt: Type): Tree =
      if (pt != Kind.Wildcard && pt.typeParams.isEmpty) typedType(tree, mode) // kind is known and it's *
      else context withinTypeConstructorAllowed typed(tree, NOmode, pt)

    // NOTE(review): `mode` is accepted but not passed on here (the 3-arg
    // overload does use it) — looks intentional in historical scalac, but
    // worth confirming against the callers.
    def typedHigherKindedType(tree: Tree, mode: Mode): Tree =
      context withinTypeConstructorAllowed typed(tree)
    /** Types a type constructor tree used in a new or supertype.
     *  Rejects type constructors whose (dealiased) prefix is unstable,
     *  since those would require a unique prefix object at runtime.
     */
    def typedTypeConstructor(tree: Tree, mode: Mode): Tree = {
      val result = typed(tree, mode.forTypeMode | FUNmode, WildcardType)

      // get rid of type aliases for the following check (#1241)
      result.tpe.dealias match {
        case restpe @ TypeRef(pre, _, _) if !phase.erasedTypes && !pre.isStable && !context.unit.isJava =>
          // The isJava exception if OK only because the only type constructors scalac gets
          // to see are those in the signatures. These do not need a unique object as a prefix.
          // The situation is different for new's and super's, but scalac does not look deep
          // enough to see those. See #3938
          ConstructorPrefixError(tree, restpe)
        case _ =>
          // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208
          // during uncurry (after refchecks), all types are normalized
          result
      }
    }

    /** Types a type constructor tree with no mode. */
    def typedTypeConstructor(tree: Tree): Tree = typedTypeConstructor(tree, NOmode)
    /** Types `tree` against `pt`, caches the typed tree in `transformed`,
     *  and returns its packed type (existentially abstracting over symbols
     *  local to `tree` that would otherwise escape).
     */
    def computeType(tree: Tree, pt: Type): Type = {
      // macros employ different logic of `computeType`
      assert(!context.owner.isMacro, context.owner)
      val tree1 = typed(tree, pt)
      transformed(tree) = tree1
      val tpe = packedType(tree1, context.owner)
      checkExistentialsFeature(tree.pos, tpe, "inferred existential type")
      tpe
    }
    /** Computes the return type of a macro def from its macro impl reference.
     *  Type checks the macro body (or reuses a cached result), and — when the
     *  macro def has no explicit return type — either infers one from the impl
     *  (with a deprecation warning) or reports an error. Returns `AnyTpe` when
     *  the return type cannot or need not be inferred.
     */
    def computeMacroDefType(ddef: DefDef, pt: Type): Type = {
      assert(context.owner.isMacro, context.owner)
      assert(ddef.symbol.isMacro, ddef.symbol)

      val rhs1 =
        if (transformed contains ddef.rhs) {
          // macro defs are typechecked in `methodSig` (by calling this method) in order to establish their link to macro implementation asap
          // if a macro def doesn't have explicitly specified return type, this method will be called again by `assignTypeToTree`
          // here we guard against this case
          transformed(ddef.rhs)
        } else {
          val rhs1 = typedMacroBody(this, ddef)
          transformed(ddef.rhs) = rhs1
          rhs1
        }

      val isMacroBodyOkay = !ddef.symbol.isErroneous && !(rhs1 exists (_.isErroneous)) && rhs1 != EmptyTree
      val shouldInheritMacroImplReturnType = ddef.tpt.isEmpty
      if (isMacroBodyOkay && shouldInheritMacroImplReturnType) {
        val commonMessage = "macro defs must have explicitly specified return types"
        def reportFailure() = {
          ddef.symbol.setFlag(IS_ERROR)
          context.error(ddef.pos, commonMessage)
        }
        def reportWarning(inferredType: Type) = {
          val explanation = s"inference of $inferredType from macro impl's c.Expr[$inferredType] is deprecated and is going to stop working in 2.12"
          context.deprecationWarning(ddef.pos, ddef.symbol, s"$commonMessage ($explanation)", "2.12.0")
        }
        computeMacroDefTypeFromMacroImplRef(ddef, rhs1) match {
          case ErrorType => ErrorType
          case NothingTpe => NothingTpe
          case NoType => reportFailure(); AnyTpe
          case tpe => reportWarning(tpe); tpe
        }
      } else AnyTpe
    }
def transformedOr(tree: Tree, op: => Tree): Tree = transformed remove tree match {
case Some(tree1) => tree1
case _ => op
}
def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = transformed remove tree match {
case Some(tree1) => tree1
case _ => typed(tree, mode, pt)
}
}
}
// Statistics counters/timers describing typer activity; only collected when
// Statistics.canEnable is on.
object TypersStats {
  import scala.reflect.internal.TypesStats._

  val typedIdentCount = Statistics.newCounter("#typechecked identifiers")
  val typedSelectCount = Statistics.newCounter("#typechecked selections")
  val typedApplyCount = Statistics.newCounter("#typechecked applications")
  val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount)
  val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount)
  val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount)
  val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos)
  val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos)
  val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos)
  val isReferencedNanos = Statistics.newSubTimer("time spent ref scanning", typerNanos)
  // Per-tree-node-class breakdowns used by Typer.typed.
  val visitsByType = Statistics.newByClass("#visits by tree node", "typer")(Statistics.newCounter(""))
  val byTypeNanos = Statistics.newByClass("time spent by tree node", "typer")(Statistics.newStackableTimer("", typerNanos))
  val byTypeStack = Statistics.newTimerStack()
}
| slothspot/scala | src/compiler/scala/tools/nsc/typechecker/Typers.scala | Scala | bsd-3-clause | 280,186 |
package gitbucket.core.api
import gitbucket.core.util.JGitUtil.CommitInfo
import java.util.Date
case class ApiPersonIdent(name: String, email: String, date: Date)
object ApiPersonIdent {
  /** Builds the identity of the commit's author (who wrote the change). */
  def author(commit: CommitInfo): ApiPersonIdent =
    ApiPersonIdent(commit.authorName, commit.authorEmailAddress, commit.authorTime)

  /** Builds the identity of the commit's committer (who applied the change). */
  def committer(commit: CommitInfo): ApiPersonIdent =
    ApiPersonIdent(commit.committerName, commit.committerEmailAddress, commit.commitTime)
}
| McFoggy/gitbucket | src/main/scala/gitbucket/core/api/ApiPersonIdent.scala | Scala | apache-2.0 | 516 |
/*
* Copyright 2010 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.logging
import java.util.concurrent.atomic.AtomicReference
import java.util.{logging => javalog}
import scala.annotation.tailrec
import scala.collection.mutable
import com.twitter.conversions.time._
import com.twitter.util.{Duration, Time}
object ThrottledHandler {
  /**
   * Generates a HandlerFactory that returns a ThrottledHandler
   *
   * @param handler
   * Wrapped handler.
   *
   * @param duration
   * Timespan to consider duplicates. After this amount of time, duplicate entries will be logged
   * again.
   *
   * @param maxToDisplay
   * Maximum duplicate log entries to pass before suppressing them.
   */
  def apply(
    handler: HandlerFactory,
    duration: Duration = 0.seconds,
    maxToDisplay: Int = Int.MaxValue
  ) = () => new ThrottledHandler(handler(), duration, maxToDisplay) // each call of the factory wraps a fresh inner handler
}
/**
* @param handler
* Wrapped handler.
*
* @param duration
* Timespan to consider duplicates. After this amount of time, duplicate entries will be logged
* again.
*
* @param maxToDisplay
* Maximum duplicate log entries to pass before suppressing them.
*/
/**
 * A log handler that suppresses repeated messages: after `maxToDisplay`
 * occurrences of the same message within `duration`, further duplicates are
 * swallowed and summarized as a single "(swallowed N repeating messages)"
 * record once the throttle window expires.
 *
 * @param handler
 * Wrapped handler.
 *
 * @param duration
 * Timespan to consider duplicates. After this amount of time, duplicate entries will be logged
 * again.
 *
 * @param maxToDisplay
 * Maximum duplicate log entries to pass before suppressing them.
 */
class ThrottledHandler(
  handler: Handler,
  val duration: Duration,
  val maxToDisplay: Int
) extends ProxyHandler(handler) {

  // Per-message throttle window: counts occurrences since `startTime` and
  // publishes up to `maxToDisplay` of them.
  private class Throttle(startTime: Time, name: String, level: javalog.Level) {
    private[this] var expired = false
    private[this] var count = 0

    override def toString = "Throttle: startTime=" + startTime + " count=" + count

    // Returns false when this throttle has already expired (the caller must
    // retry with a fresh Throttle); publishes the record while under the cap.
    final def add(record: javalog.LogRecord, now: Time): Boolean = {
      val (shouldPublish, added) = synchronized {
        if (!expired) {
          count += 1
          (count <= maxToDisplay, true)
        } else {
          (false, false)
        }
      }
      if (shouldPublish) doPublish(record)
      added
    }

    // Marks the throttle expired once `duration` has elapsed; on expiry,
    // emits the "(swallowed ...)" summary if anything was suppressed.
    final def removeIfExpired(now: Time): Boolean = {
      val didExpire = synchronized {
        expired = (now - startTime >= duration)
        expired
      }
      // NOTE(review): `count` is read here outside `synchronized`; appears
      // benign because no more adds succeed after `expired` is set, but
      // worth confirming the intended memory-visibility guarantees.
      if (didExpire && count > maxToDisplay) publishSwallowed()
      didExpire
    }

    private[this] def publishSwallowed() {
      val throttledRecord = new javalog.LogRecord(
        level, "(swallowed %d repeating messages)".format(count - maxToDisplay))
      throttledRecord.setLoggerName(name)
      doPublish(throttledRecord)
    }
  }

  // Last time expired throttles were flushed (checked at most once per second).
  private val lastFlushCheck = new AtomicReference(Time.epoch)
  // Message text -> active throttle window.
  private val throttleMap = new mutable.HashMap[String, Throttle]

  @deprecated("Use flushThrottled() instead", "5.3.13")
  def reset() {
    flushThrottled()
  }

  /**
   * Force printing any "swallowed" messages.
   */
  def flushThrottled() {
    synchronized {
      val now = Time.now
      throttleMap retain {
        case (_, throttle) => !throttle.removeIfExpired(now)
      }
    }
  }

  /**
   * Log a message, with sprintf formatting, at the desired level, and
   * attach an exception and stack trace.
   */
  override def publish(record: javalog.LogRecord) {
    val now = Time.now
    val last = lastFlushCheck.get
    if (now - last > 1.second && lastFlushCheck.compareAndSet(last, now)) {
      flushThrottled()
    }

    // Key on the unformatted message so duplicates with identical templates throttle together.
    val key = record match {
      case r: LazyLogRecordUnformatted => r.preformatted
      case _ => record.getMessage
    }

    @tailrec def tryPublish() {
      val throttle = synchronized {
        throttleMap.getOrElseUpdate(
          key,
          new Throttle(now, record.getLoggerName(), record.getLevel())
        )
      }
      // catch the case where throttle is removed before we had a chance to add
      if (!throttle.add(record, now)) tryPublish()
    }
    tryPublish()
  }

  // Forwards to the wrapped handler; pulled out so Throttle can publish too.
  private def doPublish(record: javalog.LogRecord) = {
    super.publish(record)
  }
}
| travisbrown/util | util-logging/src/main/scala/com/twitter/logging/ThrottledHandler.scala | Scala | apache-2.0 | 4,287 |
/*
* Copyright (c) 2017 Richard Hull
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package byok3.primitives
import byok3.annonation.Documentation
import byok3.data_structures.Context._
import byok3.data_structures.Error
import byok3.data_structures.Stack._
import byok3.implicits._
import byok3.types.Stack
import cats.data.StateT._
import cats.instances.try_._
import scala.util.Try
/** Forth stack-manipulation primitives (DUP, DROP, SWAP, ROT, PICK, ...)
 *  expressed as monadic actions over the interpreter's data and return
 *  stacks. Each word's stack effect is given in its @Documentation
 *  annotation; `dataStack`/`returnStack` lift a stack-state action into the
 *  machine context.
 */
object StackManipulation {

  @Documentation("the number of single-cell values contained in the data stack before n was placed on the stack", stackEffect = "( -- n )")
  val DEPTH = dataStack {
    for {
      stack <- get[Try, Stack[Int]]
      n = stack.length
      _ <- push(n)
    } yield ()
  }

  @Documentation("drop top stack element", stackEffect = "( x -- )")
  val DROP = dataStack {
    pop.map(_ => ())
  }

  @Documentation("drop cell pair x1 x2 from the stack", stackEffect = "( x1 x2 -- )")
  val `2DROP` = dataStack {
    for {
      _ <- pop
      _ <- pop
    } yield ()
  }

  @Documentation("copy NOS (next of stack) to top of stack", stackEffect = "( x1 x2 -- x1 x2 x1)")
  val OVER = dataStack {
    for {
      x2 <- pop
      x1 <- pop
      _ <- push(x1)
      _ <- push(x2)
      _ <- push(x1)
    } yield ()
  }

  @Documentation("copy cell pair x1 x2 to the top of the stack", stackEffect = "( x1 x2 x3 x4 -- x1 x2 x3 x4 x1 x2)")
  val `2OVER` = dataStack {
    for {
      x4 <- pop
      x3 <- pop
      x2 <- pop
      x1 <- pop
      _ <- push(x1)
      _ <- push(x2)
      _ <- push(x3)
      _ <- push(x4)
      _ <- push(x1)
      _ <- push(x2)
    } yield ()
  }

  @Documentation("remove NOS", stackEffect = "( x1 x2 -- x2 )")
  val NIP = dataStack {
    for {
      x2 <- pop
      _ <- pop
      _ <- push(x2)
    } yield ()
  }

  @Documentation("copy the first (top) stack item below the second stack item", stackEffect = "( x1 x2 -- x2 x1 x2 )")
  val TUCK = dataStack {
    for {
      x2 <- pop
      x1 <- pop
      _ <- push(x2)
      _ <- push(x1)
      _ <- push(x2)
    } yield ()
  }

  @Documentation("rotate the top three stack entries", stackEffect = "( x1 x2 x3 -- x2 x3 x1 )")
  val ROT = dataStack {
    for {
      x3 <- pop
      x2 <- pop
      x1 <- pop
      _ <- push(x2)
      _ <- push(x3)
      _ <- push(x1)
    } yield ()
  }

  @Documentation("rotate the top three stack entries", stackEffect = "( x1 x2 x3 -- x3 x1 x2 )")
  val `-ROT` = dataStack {
    for {
      x3 <- pop
      x2 <- pop
      x1 <- pop
      _ <- push(x3)
      _ <- push(x1)
      _ <- push(x2)
    } yield ()
  }

  @Documentation("swap top two stack elements", stackEffect = "( x1 x2 -- x2 x1)")
  val SWAP = dataStack {
    for {
      x2 <- pop
      x1 <- pop
      _ <- push(x2)
      _ <- push(x1)
    } yield ()
  }

  @Documentation("exchange the top two cell pairs", stackEffect = "( x1 x2 x3 x4 -- x3 x4 x1 x2)")
  val `2SWAP` = dataStack {
    for {
      x4 <- pop
      x3 <- pop
      x2 <- pop
      x1 <- pop
      _ <- push(x3)
      _ <- push(x4)
      _ <- push(x1)
      _ <- push(x2)
    } yield ()
  }

  @Documentation("duplicate top stack element", stackEffect = "( x -- x x )")
  val DUP = dataStack {
    for {
      x <- peek
      _ <- push(x)
    } yield ()
  }

  @Documentation("duplicate cell pair x1 x2", stackEffect = "( x1 x2 -- x1 x2 x1 x2 )")
  val `2DUP` = dataStack {
    for {
      x2 <- pop
      x1 <- pop
      _ <- push(x1)
      _ <- push(x2)
      _ <- push(x1)
      _ <- push(x2)
    } yield ()
  }

  @Documentation("duplicate top stack element if it is non-zero", stackEffect = "( x -- 0 | x x )")
  val `?DUP` = dataStack {
    peek.flatMap(x => conditional(x != 0, push(x)))
  }

  @Documentation("remove u. Copy the xu to the top of the stack", stackEffect = "( xu ... x1 x0 u -- xu ... x1 x0 xu )")
  val PICK = dataStack {
    for {
      u <- pop
      stack <- get[Try, Stack[Int]]
      // Error(-11) is the Forth "result out of range" error code.
      xu = Try(stack(u)).getOrElse(throw Error(-11)) // result out of range
      _ <- push(xu)
    } yield ()
  }

  @Documentation("Remove u. Rotate u+1 items on the top of the stack. An ambiguous condition exists if there are less than u+2 items on the stack before ROLL is executed", stackEffect = "( xu xu-1 ... x0 u -- xu-1 ... x0 xu )")
  val ROLL = dataStack {
    for {
      u <- pop
      stack <- get[Try, Stack[Int]]
      xu = Try(stack(u)).getOrElse(throw Error(-11)) // result out of range
      _ <- set(stack.remove(u))
      _ <- push(xu)
    } yield ()
  }

  @Documentation("the number of single-cell values contained in the return stack", stackEffect = "( -- n )")
  val RDEPTH = for {
    stack <- returnStack(get[Try, Stack[Int]])
    n = stack.length
    _ <- dataStack(push(n))
  } yield ()

  @Documentation("drop top return stack element", stackEffect = "( -- ) ( R: x -- )")
  val RDROP = for {
    _ <- returnStackNotEmpty
    _ <- returnStack(pop)
  } yield ()

  @Documentation("move x to the return stack", stackEffect = "( x -- ) ( R: -- x)")
  val `>R` = for {
    x <- dataStack(pop)
    _ <- returnStack(push(x))
  } yield ()

  @Documentation("move x from the return stack to the data stack", stackEffect = "( -- x ) ( R: x -- )")
  val `R>` = for {
    _ <- returnStackNotEmpty
    x <- returnStack(pop)
    _ <- dataStack(push(x))
  } yield ()

  @Documentation("copy x from the return stack to the data stack", stackEffect = "( -- x ) ( R: x -- x)")
  val `R@` = for {
    _ <- returnStackNotEmpty
    x <- returnStack(peek)
    _ <- dataStack(push(x))
  } yield ()
}
package org.aprsdroid.app
import _root_.android.Manifest
import _root_.android.os.Bundle
import _root_.android.content.{Context, Intent, SharedPreferences}
import _root_.android.content.SharedPreferences.OnSharedPreferenceChangeListener
import _root_.android.preference.{CheckBoxPreference, Preference, PreferenceActivity, PreferenceManager}
import android.location.LocationManager
import android.preference.Preference.OnPreferenceClickListener
import android.widget.Toast
/** Preference screen for configuring the APRS backend: loads the XML
 *  fragments matching the currently selected protocol/link/backend and
 *  rebuilds itself whenever one of those selections changes.
 */
class BackendPrefs extends PreferenceActivity
		with OnSharedPreferenceChangeListener
		with PermissionHelper {

	// Builds the preference screen from the base backend XML plus the
	// protocol- and backend-specific fragments for the current selection.
	def loadXml() {
		val prefs = new PrefsWrapper(this)
		addPreferencesFromResource(R.xml.backend)
		addPreferencesFromResource(AprsBackend.prefxml_proto(prefs))
		val additional_xml = AprsBackend.prefxml_backend(prefs)
		if (additional_xml != 0) {
			addPreferencesFromResource(additional_xml)
			hookPasscode()
			hookGpsPermission()
		}
	}

	// Opens the passcode dialog when the "passcode" preference is tapped
	// (if that preference exists in the loaded XML).
	def hookPasscode(): Unit = {
		val p = findPreference("passcode")
		if (p != null) {
			p.setOnPreferenceClickListener(new OnPreferenceClickListener() {
				def onPreferenceClick(preference: Preference) = {
					new PasscodeDialog(BackendPrefs.this, false).show()
					true
				}
			});
		}
	}

	// Intercepts enabling of "kenwood.gps": immediately unchecks the box and
	// requests location permission first; the box is re-checked in
	// onAllPermissionsGranted once the permission is granted.
	def hookGpsPermission(): Unit = {
		val p = findPreference("kenwood.gps")
		if (p != null) {
			p.setOnPreferenceClickListener(new OnPreferenceClickListener() {
				def onPreferenceClick(preference: Preference) = {
					if (preference.asInstanceOf[CheckBoxPreference].isChecked) {
						preference.asInstanceOf[CheckBoxPreference].setChecked(false)
						checkPermissions(Array(Manifest.permission.ACCESS_FINE_LOCATION), REQUEST_GPS)
					}
					true
				}
			});
		}
	}

	override def onCreate(savedInstanceState: Bundle) {
		super.onCreate(savedInstanceState)
		loadXml()
		getPreferenceScreen().getSharedPreferences().registerOnSharedPreferenceChangeListener(this)
	}

	override def onDestroy() {
		super.onDestroy()
		getPreferenceScreen().getSharedPreferences().unregisterOnSharedPreferenceChangeListener(this)
	}

	// Rebuild the whole screen when the backend selection changes, since the
	// applicable XML fragments depend on these keys.
	override def onSharedPreferenceChanged(sp: SharedPreferences, key : String) {
		if (key == "proto" || key == "link" || key == "aprsis") {
			setPreferenceScreen(null)
			loadXml()
		}
	}

	val REQUEST_GPS = 1010
	override def getActionName(action: Int): Int = R.string.p_conn_kwd_gps
	override def onAllPermissionsGranted(action: Int): Unit = {
		findPreference("kenwood.gps").asInstanceOf[CheckBoxPreference].setChecked(true)
	}
}
| ge0rg/aprsdroid | src/BackendPrefs.scala | Scala | gpl-2.0 | 2,479 |
/*
* Copyright (c) 2012, 2013, 2014, 2015, 2016 SURFnet BV
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
* * Neither the name of the SURFnet BV nor the names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package models
import java.net.URI
/** NSI terminate request for an existing connection.
 *
 *  @param connectionId  id of the connection to terminate
 *  @param correlationId correlation id echoed back in the asynchronous reply
 *  @param replyTo       optional endpoint the provider should send its reply to
 *  @param requesterNsa  NSA identifier of the requester
 *  @param provider      provider agent this request is addressed to
 */
case class Terminate(
  connectionId: String,
  correlationId: String,
  replyTo: Option[URI],
  requesterNsa: String,
  provider: Provider) extends NsiRequest(correlationId, replyTo, requesterNsa, provider) {

  // Suffix appended to the SOAP action for this message type.
  override def soapActionSuffix = "terminate"

  // NSI v2 body: a <type:terminate> element carrying the connection id.
  override def nsiV2Body =
    <type:terminate>
      <connectionId>{ connectionId }</connectionId>
    </type:terminate>
}
| BandwidthOnDemand/nsi-requester | app/models/Terminate.scala | Scala | bsd-3-clause | 1,994 |
package mesosphere.marathon
package core.storage.store.impl.cache
import java.util.UUID
import akka.Done
import akka.http.scaladsl.marshalling.Marshaller
import akka.http.scaladsl.unmarshalling.Unmarshaller
import akka.stream.scaladsl.Sink
import mesosphere.AkkaUnitTest
import mesosphere.marathon.core.storage.store.impl.InMemoryTestClass1Serialization
import mesosphere.marathon.core.storage.store.impl.memory.InMemoryPersistenceStore
import mesosphere.marathon.core.storage.store.impl.zk.{ ZkPersistenceStore, ZkTestClass1Serialization }
import mesosphere.marathon.core.storage.store.{ IdResolver, PersistenceStoreTest, TestClass1 }
import mesosphere.marathon.integration.setup.ZookeeperServerTest
import mesosphere.marathon.storage.store.InMemoryStoreSerialization
import mesosphere.marathon.test.SettableClock
import scala.concurrent.duration._
/** Exercises the lazily-caching persistence store wrappers against in-memory
 *  and ZooKeeper backends, plus white-box checks of the version caches.
 */
class LazyCachingPersistenceStoreTest extends AkkaUnitTest
    with PersistenceStoreTest with ZkTestClass1Serialization with ZookeeperServerTest
    with InMemoryStoreSerialization with InMemoryTestClass1Serialization {

  /** Lazily-caching store over a fresh in-memory backend. */
  private def cachedInMemory = {
    val store = LazyCachingPersistenceStore(new InMemoryPersistenceStore())
    store.markOpen()
    store
  }

  /** Version-caching store over a fresh in-memory backend. */
  private def withLazyVersionCaching = {
    val store = LazyVersionCachingPersistentStore(new InMemoryPersistenceStore())
    store.markOpen()
    store
  }

  /** Lazily-caching store over a ZooKeeper backend rooted at a random znode. */
  private def cachedZk = {
    val root = UUID.randomUUID().toString
    val client = zkClient(namespace = Some(root))
    val store = LazyCachingPersistenceStore(new ZkPersistenceStore(client, Duration.Inf, 8))
    store.markOpen()
    store
  }

  behave like basicPersistenceStore("LazyCache(InMemory)", cachedInMemory)
  behave like basicPersistenceStore("LazyCache(Zk)", cachedZk)
  behave like basicPersistenceStore("LazyVersionedCache(Zk)", withLazyVersionCaching)
  // TODO: Mock out the backing store.
  behave like cachingPersistenceStore("cache internals(InMemory)", withLazyVersionCaching)

  /** White-box behaviors of the version caches inside the store. */
  def cachingPersistenceStore[K, C, Serialized](
    name: String,
    newStore: => LazyVersionCachingPersistentStore[K, C, Serialized])(
    implicit
    ir: IdResolver[String, TestClass1, C, K],
    m: Marshaller[TestClass1, Serialized],
    um: Unmarshaller[Serialized, TestClass1]): Unit = {
    name should {
      "purge the cache appropriately" in {
        implicit val clock = new SettableClock()
        val store = newStore
        1.to(100).foreach { i =>
          val obj = TestClass1("abc", i)
          clock.plus(1.second)
          store.store("task-1", obj).futureValue should be(Done)
        }

        store.versionedValueCache.size should be(100) // sanity
        store.maybePurgeCachedVersions(maxEntries = 50, purgeCount = 10)
        store.versionedValueCache.size > 40 should be(true)
        store.versionedValueCache.size <= 50 should be(true)
      }
      "caches versions independently" in {
        implicit val clock = new SettableClock()
        val store = newStore
        val original = TestClass1("abc", 1)
        clock.plus(1.minute)
        val updated = TestClass1("def", 2)
        store.store("task-1", original).futureValue should be(Done)
        store.store("task-1", updated).futureValue should be(Done)
        store.store("task-1", updated).futureValue should be(Done) // redundant store should not lead to dup data

        val storageId = ir.toStorageId("task-1", None)
        store.versionedValueCache.size should be(2)
        store.versionedValueCache((storageId, original.version)) should be(Some(original))
        store.versionedValueCache((storageId, updated.version)) should be(Some(updated))
      }
      "invalidates all cached versions upon deletion" in {
        implicit val clock = new SettableClock()
        val store = newStore
        val original = TestClass1("abc", 1)
        clock.plus(1.minute)
        val updated = TestClass1("def", 2)
        store.store("task-1", original).futureValue should be(Done)
        store.store("task-1", updated).futureValue should be(Done)
        store.deleteVersion("task-1", original.version).futureValue should be(Done)

        store.versionCache.size should be(0)
        store.versionedValueCache.size should be(0)
      }
      "reload versionCache upon versions request" in {
        implicit val clock = new SettableClock()
        val store = newStore
        val original = TestClass1("abc", 1)
        clock.plus(1.minute)
        val updated = TestClass1("def", 2)
        store.store("task-1", original).futureValue should be(Done)
        store.store("task-1", updated).futureValue should be(Done)
        store.versionCache.clear()
        store.versionedValueCache.clear()

        // Fixed: the previous dangling `should contain` (with
        // `theSameElementsAs(...)` on the following line) compiled to a
        // discarded expression and asserted nothing.
        store.versions("task-1").runWith(Sink.seq).futureValue should contain theSameElementsAs
          Seq(original.version, updated.version)
        store.versionedValueCache.size should be(0)
      }
      "reload versionedValueCache upon versioned get requests" in {
        implicit val clock = new SettableClock()
        val store = newStore
        val original = TestClass1("abc", 1)
        clock.plus(1.minute)
        val updated = TestClass1("def", 2)
        store.store("task-1", original).futureValue should be(Done)
        store.store("task-1", updated).futureValue should be(Done)
        store.versionCache.clear()
        store.versionedValueCache.clear()

        store.get("task-1", original.version).futureValue should be(Some(original)) // sanity check
        val storageId = ir.toStorageId("task-1", None)
        store.versionedValueCache.size should be(1)
        store.versionedValueCache.contains((storageId, original.version)) should be(true)
        store.versionCache.size should be(0)
      }
      "reload versionedValueCache upon unversioned get requests" in {
        implicit val clock = new SettableClock()
        val store = newStore
        val original = TestClass1("abc", 1)
        clock.plus(1.minute)
        val updated = TestClass1("def", 2)
        store.store("task-1", original).futureValue should be(Done)
        store.store("task-1", updated).futureValue should be(Done)
        store.versionCache.clear()
        store.versionedValueCache.clear()

        store.get("task-1").futureValue should be(Some(updated)) // sanity check
        val storageId = ir.toStorageId("task-1", None)
        store.versionedValueCache.size should be(1)
        store.versionedValueCache.contains((storageId, updated.version)) should be(true)
      }
      "versions available in the persistence store are cached correctly" in {
        implicit val clock = new SettableClock()
        val store = newStore
        val underlying = store.store

        // 1 version available in the cache and 2 in the underlying store
        store.store("test", TestClass1("abc", 1)).futureValue should be(Done)
        clock.plus(1.minute)
        underlying.store("test", TestClass1("abc", 2)).futureValue should be(Done)
        clock.plus(1.minute)
        underlying.store("test", TestClass1("abc", 3)).futureValue should be(Done)

        store.versionCache.size should be(0)

        // a call to versions will update the cache
        store.versions("test").runWith(Sink.seq).futureValue should have size 3
        store.versionCache should have size 1
        store.versionCache((ir.category, ir.toStorageId("test", None))) should have size 3
      }
    }
  }
}
| janisz/marathon | src/test/scala/mesosphere/marathon/core/storage/store/impl/cache/LazyCachingPersistenceStoreTest.scala | Scala | apache-2.0 | 7,548 |
import java.io.{ OutputStream, PrintStream }
// Regression setup: a PrintStream whose println(Any) override is a trap
// (???). Writing single bytes must resolve to the underlying stream, not
// the Any override — see the Test entry point below.
trait T {
  val text: String
  val stream = new PrintStream(new OutputStream {
    def write(b: Int) =
      if b != 13 then // this filters out \r on Windows
        Console.println(s"text: $b")
  }) {
    override def println(x: Any) = ???
  }
}
@main def Test =
val t = new T { val text = "hello" }
t.stream.write(22)
t.stream.println('A')
| dotty-staging/dotty | tests/run/i8425.scala | Scala | apache-2.0 | 396 |
package ml.sparkling.graph.api.operators.algorithms.community
import org.apache.spark.graphx.Graph
import scala.reflect.ClassTag
/**
* Created by Roman Bartusiak (roman.bartusiak@pwr.edu.pl http://riomus.github.io).
*/
/**
 * Type aliases and the algorithm contract for community-detection
 * implementations operating on GraphX graphs.
 */
object CommunityDetection {
  /** Identifier of a detected community/component. */
  type ComponentID=Long

  /** Contract for algorithms that label every vertex with its community id. */
  trait CommunityDetectionAlgorithm {
    def detectCommunities[VD:ClassTag,ED:ClassTag](graph:Graph[VD,ED]):Graph[ComponentID,ED]
  }

  /** Function form of a community-detection algorithm (graph in, labelled graph out). */
  type CommunityDetectionMethod[VD,ED]=(Graph[VD,ED])=>Graph[ComponentID,ED]
}
| sparkling-graph/sparkling-graph | api/src/main/scala/ml/sparkling/graph/api/operators/algorithms/community/CommunityDetection.scala | Scala | bsd-2-clause | 493 |
// Solution-9.scala
// Solution to Exercise 9 in "Vectors"
import com.atomicscala.AtomicTest._
// Sum of the inclusive integer range 0..10, verified against the expected value.
val rangeSum = (0 to 10).sum
rangeSum is 55
/* OUTPUT_SHOULD_BE
55
*/
| P7h/ScalaPlayground | Atomic Scala/atomic-scala-solutions/18_Vectors/Solution-9.scala | Scala | apache-2.0 | 180 |
/*
* Copyright (c) 2014, Brook 'redattack34' Heisler
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the ModularRayguns team nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.castlebravostudios.theguide.text
import net.minecraft.util.ResourceLocation
import net.minecraft.client.gui.FontRenderer
import org.lwjgl.opengl.GL11
import com.castlebravostudios.theguide.gui.TheGuideGui
case class Link( target : ResourceLocation, startX : Int, endX : Int )
trait RenderableElement {
def height( calc: TextSizeCalculator ) : Int = calc.textHeight + 1
def render( x : Int, y : Int, renderer : FontRenderer ) : Unit
def clicked( x : Int, y : Int, gui : TheGuideGui ) : Unit = ()
val color = 0x404040
}
case class RenderableHeader( text: String, level : Int ) extends RenderableElement {
val formattedText = level match {
case 1 => "§l§n" + text + "§r"
case _ => "§n" + text + "§r"
}
def render( x: Int, y : Int, renderer: FontRenderer ) : Unit = {
val textWidth = renderer.getStringWidth(formattedText)
val horizOffset = ( 190 - textWidth ) / 2
renderer.drawString(formattedText, x + horizOffset, y, color)
}
}
case class TextLine( text : String, links: Set[Link] ) extends RenderableElement {
def render(x : Int, y : Int, renderer : FontRenderer ) : Unit =
renderer.drawString(text, x, y, color)
override def clicked( x : Int, y : Int, gui : TheGuideGui ) : Unit =
links.find( link => link.startX < x && x < link.endX )
.foreach( link => gui.loadPage( link.target ) )
}
case object BlankLine extends RenderableElement {
def render(x : Int, y : Int, renderer : FontRenderer ) : Unit = ()
}
| Redattack34/TheGuide | src/main/scala/com/castlebravostudios/theguide/text/RenderableElement.scala | Scala | bsd-3-clause | 3,085 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.engine.core.output.types.jdbc
import java.sql.PreparedStatement
import com.bwsw.sj.common.engine.core.output.EntityBuilder
import com.bwsw.sj.engine.core.output.types.jdbc._
import org.scalatest.{FlatSpec, Matchers}
/**
* Created by diryavkin_dn on 07.03.17.
*/
/**
 * Tests for [[JdbcCommandBuilder]]: verifies that insert statements and
 * existence checks are built as [[PreparedStatement]]s from an entity
 * definition plus a row of field values.
 */
class JdbcCommandBuilderTests extends FlatSpec with Matchers {

  it should "work properly" in {
    val jdbcMock = new JdbcMock()
    val eb = new EntityBuilder[(PreparedStatement, Int) => Unit]()
    // Entity schema mirroring the keys of `data` below.
    val e = eb
      .field(new LongField("id"))
      .field(new JavaStringField("name"))
      .field(new IntegerField("age"))
      .field(new BooleanField("married", false))
      .build()
    val jdbccb = new JdbcCommandBuilder(jdbcMock, "txn", e)
    // Boxed values via valueOf/constants: the boxed-primitive constructors
    // (new java.lang.Long(...) etc.) are deprecated since Java 9.
    val data = Map[String, Object](
      "id" -> java.lang.Long.valueOf(0L),
      "name" -> "John Smith",
      "age" -> java.lang.Integer.valueOf(32),
      "married" -> java.lang.Boolean.TRUE)
    jdbccb.buildInsert(1, data).isInstanceOf[PreparedStatement] shouldBe true
  }

  it should "exists work" in {
    val jdbcMock = new JdbcMock()
    val eb = new EntityBuilder[(PreparedStatement, Int) => Unit]()
    val e = eb
      .field(new LongField("id"))
      .field(new JavaStringField("name"))
      .field(new IntegerField("age"))
      .field(new BooleanField("married", false))
      .build()
    val jdbccb = new JdbcCommandBuilder(jdbcMock, "txn", e)
    // An existence check by transaction id should also yield a PreparedStatement.
    jdbccb.exists(1).isInstanceOf[PreparedStatement] shouldBe true
  }
}
| bwsw/sj-platform | core/sj-engine-core/src/test/scala-2.12/com/bwsw/sj/engine/core/output/types/jdbc/JdbcCommandBuilderTests.scala | Scala | apache-2.0 | 2,307 |
package one.lockstep.monolock
import one.lockstep.monolock.protocol._
import one.lockstep.util._
import one.lockstep.util.crypto._
import one.lockstep.util.streams._
import one.lockstep.util.protocol._
import akka.NotUsed
import akka.stream.scaladsl._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
/**
 * Akka Streams BidiFlow that transparently decrypts incoming requests and
 * encrypts outgoing responses, correlating the ephemeral key of each request
 * (by its Long id) with the response that must be encrypted under it.
 * Failures are passed through untouched in either direction.
 */
object DecapsulationStage {

  // Per-exchange state carried from request decryption to response encryption:
  // the ephemeral session key and the protocol version of the request.
  private case class ExchangeState(ephemeralKey: SecretKey, version: Version)

  def apply(transportSecurity: TransportSecurityModule,
            decapsulationParallelism: Int = 1, encapsulationParallelism: Int = 1)
           (implicit executionContext: ExecutionContext)
  : BidiFlow[(Long, Try[Bytes]), (Long, Try[Bytes]), (Long, Try[Bytes]), (Long, Try[Bytes]), NotUsed] = {

    implicit val ciphersuite = transportSecurity.transportCiphersuite

    // Decrypt a request: recover the ephemeral key from the encapsulated key
    // material, then authenticated-decrypt the payload under it.
    def decapsulateRequest(request: Bytes): (Bytes, ExchangeState) = {
      val version = Preface.decode(request).version
      val encrypted: EncryptedRequest = Protocol.decode[EncryptedRequest](request)
      val ephemeralKey = transportSecurity.decap(encrypted.publicKeyFingerprint, encrypted.encapsulatedSecretKey)
      val state = ExchangeState(ephemeralKey, version)
      val decapsulated = ciphersuite.authCipher.decrypt(ephemeralKey)(Bytes.empty, encrypted.iv, encrypted.ciphertext, encrypted.tag)
      (decapsulated, state)
    }

    // Encrypt a response with the ephemeral key of its originating request,
    // encoding at the same protocol version the request used.
    def encapsulateResponse(response: Bytes, state: ExchangeState): Bytes = {
      val iv = ciphersuite.authCipher.iv()
      val (ciphertext, tag) = ciphersuite.authCipher.encrypt(state.ephemeralKey)(Bytes.empty, iv, response)
      val encrypted = EncryptedResponse(iv, ciphertext, tag)
      Protocol.encode[EncryptedResponse](state.version, encrypted)
    }

    // Incoming side: decrypt in parallel; already-failed elements pass through.
    val incoming: Flow[(Long, Try[Bytes]), (Long, Try[(Bytes, ExchangeState)]), NotUsed] =
      Flow[(Long, Try[Bytes])].mapAsync(decapsulationParallelism) {
        case (id, Failure(cause)) => Future.successful(id, Failure(cause))
        case (id, Success(input)) => Future((id, Try(decapsulateRequest(input))))
      }

    // Outgoing side: encrypt in parallel using the correlated ExchangeState.
    val outgoing: Flow[(Long, Try[(Bytes, ExchangeState)]), (Long, Try[Bytes]), NotUsed] =
      Flow[(Long, Try[(Bytes, ExchangeState)])].mapAsync(encapsulationParallelism) {
        case (id, Failure(cause)) => Future.successful(id, Failure(cause))
        case (id, Success((output, state))) => Future((id, Try(encapsulateResponse(output, state))))
      }

    // StateCorrelator threads each request's ExchangeState to its response by id.
    StateCorrelator.fromFlowsTry(incoming, outgoing)
  }
}
| lockstep-one/vault | monolock-server/src/main/scala/one/lockstep/monolock/DecapsulationStage.scala | Scala | agpl-3.0 | 2,480 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.types.{DataType, IntegerType}
/**
 * Tests for `EquivalentExpressions`, the bookkeeping structure behind
 * subexpression elimination: it groups semantically-equal expressions so
 * that common subexpressions can be computed once during codegen.
 */
class SubexpressionEliminationSuite extends SparkFunSuite {
  test("Semantic equals and hash") {
    val a: AttributeReference = AttributeReference("name", IntegerType)()
    val id = {
      // Make sure we use a "ExprId" different from "a.exprId"
      val _id = ExprId(1)
      if (a.exprId == _id) ExprId(2) else _id
    }
    val b1 = a.withName("name2").withExprId(id)
    val b2 = a.withExprId(id)
    val b3 = a.withQualifier(Seq("qualifierName"))

    // Structural equality differs, but semantic equality ignores names...
    assert(b1 != b2)
    assert(a != b1)
    assert(b1.semanticEquals(b2))
    assert(!b1.semanticEquals(a))
    assert(a.hashCode != b1.hashCode)
    assert(b1.hashCode != b2.hashCode)
    assert(b1.semanticHash() == b2.semanticHash())
    // ...and qualifiers.
    assert(a != b3)
    assert(a.hashCode != b3.hashCode)
    assert(a.semanticEquals(b3))
  }

  test("Expression Equivalence - basic") {
    val equivalence = new EquivalentExpressions
    assert(equivalence.getAllEquivalentExprs.isEmpty)

    val oneA = Literal(1)
    val oneB = Literal(1)
    val twoA = Literal(2)
    // NOTE(review): twoB is declared but never used below.
    var twoB = Literal(2)

    assert(equivalence.getEquivalentExprs(oneA).isEmpty)
    assert(equivalence.getEquivalentExprs(twoA).isEmpty)

    // Add oneA and test if it is returned. Since it is a group of one, it does not.
    assert(!equivalence.addExpr(oneA))
    assert(equivalence.getEquivalentExprs(oneA).size == 1)
    assert(equivalence.getEquivalentExprs(twoA).isEmpty)
    assert(equivalence.addExpr((oneA)))
    assert(equivalence.getEquivalentExprs(oneA).size == 2)

    // Add B and make sure they can see each other.
    assert(equivalence.addExpr(oneB))
    // Use exists and reference equality because of how equals is defined.
    assert(equivalence.getEquivalentExprs(oneA).exists(_ eq oneB))
    assert(equivalence.getEquivalentExprs(oneA).exists(_ eq oneA))
    assert(equivalence.getEquivalentExprs(oneB).exists(_ eq oneA))
    assert(equivalence.getEquivalentExprs(oneB).exists(_ eq oneB))
    assert(equivalence.getEquivalentExprs(twoA).isEmpty)
    assert(equivalence.getAllEquivalentExprs.size == 1)
    assert(equivalence.getAllEquivalentExprs.head.size == 3)
    assert(equivalence.getAllEquivalentExprs.head.contains(oneA))
    assert(equivalence.getAllEquivalentExprs.head.contains(oneB))

    // Semantically-equal composite expressions form a second group.
    val add1 = Add(oneA, oneB)
    val add2 = Add(oneA, oneB)

    equivalence.addExpr(add1)
    equivalence.addExpr(add2)

    assert(equivalence.getAllEquivalentExprs.size == 2)
    assert(equivalence.getEquivalentExprs(add2).exists(_ eq add1))
    assert(equivalence.getEquivalentExprs(add2).size == 2)
    assert(equivalence.getEquivalentExprs(add1).exists(_ eq add2))
  }

  test("Expression Equivalence - Trees") {
    val one = Literal(1)
    val two = Literal(2)

    val add = Add(one, two)
    val abs = Abs(add)
    val add2 = Add(add, add)

    var equivalence = new EquivalentExpressions
    equivalence.addExprTree(add)
    equivalence.addExprTree(abs)
    equivalence.addExprTree(add2)

    // Should only have one equivalence for `one + two`
    assert(equivalence.getAllEquivalentExprs.count(_.size > 1) == 1)
    assert(equivalence.getAllEquivalentExprs.filter(_.size > 1).head.size == 4)

    // Set up the expressions
    //   one * two,
    //   (one * two) * (one * two)
    //   sqrt( (one * two) * (one * two) )
    //   (one * two) + sqrt( (one * two) * (one * two) )
    equivalence = new EquivalentExpressions
    val mul = Multiply(one, two)
    val mul2 = Multiply(mul, mul)
    val sqrt = Sqrt(mul2)
    val sum = Add(mul2, sqrt)
    equivalence.addExprTree(mul)
    equivalence.addExprTree(mul2)
    equivalence.addExprTree(sqrt)
    equivalence.addExprTree(sum)

    // (one * two), (one * two) * (one * two) and sqrt( (one * two) * (one * two) ) should be found
    assert(equivalence.getAllEquivalentExprs.count(_.size > 1) == 3)
    assert(equivalence.getEquivalentExprs(mul).size == 3)
    assert(equivalence.getEquivalentExprs(mul2).size == 3)
    assert(equivalence.getEquivalentExprs(sqrt).size == 2)
    assert(equivalence.getEquivalentExprs(sum).size == 1)
  }

  test("Expression equivalence - non deterministic") {
    // Non-deterministic expressions (Rand) must never be deduplicated.
    val sum = Add(Rand(0), Rand(0))
    val equivalence = new EquivalentExpressions
    equivalence.addExpr(sum)
    equivalence.addExpr(sum)
    assert(equivalence.getAllEquivalentExprs.isEmpty)
  }

  test("Children of CodegenFallback") {
    val one = Literal(1)
    val two = Add(one, one)
    val fallback = CodegenFallbackExpression(two)
    val add = Add(two, fallback)

    val equivalence = new EquivalentExpressions
    equivalence.addExprTree(add)
    // the `two` inside `fallback` should not be added
    assert(equivalence.getAllEquivalentExprs.count(_.size > 1) == 0)
    assert(equivalence.getAllEquivalentExprs.count(_.size == 1) == 3)  // add, two, explode
  }

  test("Children of conditional expressions: If") {
    val add = Add(Literal(1), Literal(2))
    val condition = GreaterThan(add, Literal(3))

    val ifExpr1 = If(condition, add, add)
    val equivalence1 = new EquivalentExpressions
    equivalence1.addExprTree(ifExpr1)

    // `add` is in both two branches of `If` and predicate.
    assert(equivalence1.getAllEquivalentExprs.count(_.size == 2) == 1)
    assert(equivalence1.getAllEquivalentExprs.filter(_.size == 2).head == Seq(add, add))
    // one-time expressions: only ifExpr and its predicate expression
    assert(equivalence1.getAllEquivalentExprs.count(_.size == 1) == 2)
    assert(equivalence1.getAllEquivalentExprs.filter(_.size == 1).contains(Seq(ifExpr1)))
    assert(equivalence1.getAllEquivalentExprs.filter(_.size == 1).contains(Seq(condition)))

    // Repeated `add` is only in one branch, so we don't count it.
    val ifExpr2 = If(condition, Add(Literal(1), Literal(3)), Add(add, add))
    val equivalence2 = new EquivalentExpressions
    equivalence2.addExprTree(ifExpr2)

    assert(equivalence2.getAllEquivalentExprs.count(_.size > 1) == 0)
    assert(equivalence2.getAllEquivalentExprs.count(_.size == 1) == 3)

    val ifExpr3 = If(condition, ifExpr1, ifExpr1)
    val equivalence3 = new EquivalentExpressions
    equivalence3.addExprTree(ifExpr3)

    // `add`: 2, `condition`: 2
    assert(equivalence3.getAllEquivalentExprs.count(_.size == 2) == 2)
    assert(equivalence3.getAllEquivalentExprs.filter(_.size == 2).contains(Seq(add, add)))
    assert(
      equivalence3.getAllEquivalentExprs.filter(_.size == 2).contains(Seq(condition, condition)))

    // `ifExpr1`, `ifExpr3`
    assert(equivalence3.getAllEquivalentExprs.count(_.size == 1) == 2)
    assert(equivalence3.getAllEquivalentExprs.filter(_.size == 1).contains(Seq(ifExpr1)))
    assert(equivalence3.getAllEquivalentExprs.filter(_.size == 1).contains(Seq(ifExpr3)))
  }

  test("Children of conditional expressions: CaseWhen") {
    val add1 = Add(Literal(1), Literal(2))
    val add2 = Add(Literal(2), Literal(3))
    val conditions1 = (GreaterThan(add2, Literal(3)), add1) ::
      (GreaterThan(add2, Literal(4)), add1) ::
      (GreaterThan(add2, Literal(5)), add1) :: Nil

    val caseWhenExpr1 = CaseWhen(conditions1, None)
    val equivalence1 = new EquivalentExpressions
    equivalence1.addExprTree(caseWhenExpr1)

    // `add2` is repeatedly in all conditions.
    assert(equivalence1.getAllEquivalentExprs.count(_.size == 2) == 1)
    assert(equivalence1.getAllEquivalentExprs.filter(_.size == 2).head == Seq(add2, add2))

    val conditions2 = (GreaterThan(add1, Literal(3)), add1) ::
      (GreaterThan(add2, Literal(4)), add1) ::
      (GreaterThan(add2, Literal(5)), add1) :: Nil

    val caseWhenExpr2 = CaseWhen(conditions2, None)
    val equivalence2 = new EquivalentExpressions
    equivalence2.addExprTree(caseWhenExpr2)

    // `add1` is repeatedly in all branch values, and first predicate.
    assert(equivalence2.getAllEquivalentExprs.count(_.size == 2) == 1)
    assert(equivalence2.getAllEquivalentExprs.filter(_.size == 2).head == Seq(add1, add1))

    // Negative case. `add1` or `add2` is not commonly used in all predicates/branch values.
    val conditions3 = (GreaterThan(add1, Literal(3)), add2) ::
      (GreaterThan(add2, Literal(4)), add1) ::
      (GreaterThan(add2, Literal(5)), add1) :: Nil

    val caseWhenExpr3 = CaseWhen(conditions3, None)
    val equivalence3 = new EquivalentExpressions
    equivalence3.addExprTree(caseWhenExpr3)
    assert(equivalence3.getAllEquivalentExprs.count(_.size == 2) == 0)
  }

  test("Children of conditional expressions: Coalesce") {
    val add1 = Add(Literal(1), Literal(2))
    val add2 = Add(Literal(2), Literal(3))
    val conditions1 = GreaterThan(add2, Literal(3)) ::
      GreaterThan(add2, Literal(4)) ::
      GreaterThan(add2, Literal(5)) :: Nil

    val coalesceExpr1 = Coalesce(conditions1)
    val equivalence1 = new EquivalentExpressions
    equivalence1.addExprTree(coalesceExpr1)

    // `add2` is repeatedly in all conditions.
    assert(equivalence1.getAllEquivalentExprs.count(_.size == 2) == 1)
    assert(equivalence1.getAllEquivalentExprs.filter(_.size == 2).head == Seq(add2, add2))

    // Negative case. `add1` and `add2` both are not used in all branches.
    val conditions2 = GreaterThan(add1, Literal(3)) ::
      GreaterThan(add2, Literal(4)) ::
      GreaterThan(add2, Literal(5)) :: Nil

    val coalesceExpr2 = Coalesce(conditions2)
    val equivalence2 = new EquivalentExpressions
    equivalence2.addExprTree(coalesceExpr2)

    assert(equivalence2.getAllEquivalentExprs.count(_.size == 2) == 0)
  }
}
/** Test-only expression that forces the interpreted (CodegenFallback) path. */
case class CodegenFallbackExpression(child: Expression)
  extends UnaryExpression with CodegenFallback {
  override def dataType: DataType = child.dataType
}
| witgo/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/SubexpressionEliminationSuite.scala | Scala | apache-2.0 | 10,614 |
package wookie.view
import java.lang.{Boolean => JBoolean}
import java.util.function.{Function => JFunction}
import scala.concurrent.Promise
import scala.util.Random
/** Callback invoked once a page-done event matching a [[WaitArg]] arrives. */
trait WhenPageLoaded {
  def apply()(implicit e: PageDoneEvent)
}
/**
* @author Andrey Chaschev chaschev@gmail.com
*/
/**
 * Mutable builder describing what navigation event to wait for and what to
 * do when it arrives: timeout, matching strategy, event filter and handler.
 * All `withX`/`matchX` methods mutate this instance and return `this` for
 * fluent chaining.
 */
class WaitArg(var name: String = "", val wookie: WookieView){
//  var predicate:Option[((String, String, NavArg) => Boolean)] = None
  // None = no explicit timeout; getTimeoutMs then falls back to wookie's default.
  var timeoutMs: Option[Int] = Some(30000)
  private[this] var handler: Option[WhenPageLoaded] = None
  var async: Boolean = true
  // By default only PageReadyEvents pass the filter.
  var eventFilter: Option[(WookiePageStateChangedEvent) => Boolean] = Some(e => e.isInstanceOf[PageReadyEvent])
  val eventId: Int = Random.nextInt() //currently not really used
  var startedAtMs: Long = -1
  // The name doubles as the initial location when non-empty.
  var location: Option[String] = if(name.equals("")) None else Some(name)
  private[this] var navigationMatcher: NavigationMatcher = NextPageReadyMatcher

  def timeoutNone(): WaitArg = {this.timeoutMs = None; this}
  def timeoutMs(i: Int): WaitArg = {this.timeoutMs = Some(i); this}
  def getTimeoutMs: Int = timeoutMs.getOrElse(wookie.options.defaultTimeoutMs)
  def timeoutSec(sec:Int): WaitArg = {this.timeoutMs = Some(sec * 1000);this}
  def whenLoaded(whenPageLoaded:WhenPageLoaded): WaitArg = {this.handler = Some(whenPageLoaded); this}
  def async(b: Boolean): WaitArg = {this.async = b; this}
  def withName(n: String): WaitArg = {this.name = n; this}

  def withMatcher(matcher: NavigationMatcher): WaitArg = {
    navigationMatcher = matcher; this
  }

  // Match by URL predicate (Scala function overload).
  def matchByAddress(p: (String) => Boolean): WaitArg =
    withMatcher(new LocationMatcher(p))

  // Match by URL predicate (Java Function overload for interop).
  def matchByAddress(p: JFunction[String, JBoolean]): WaitArg =
    withMatcher(new LocationMatcher(p.apply))

  def matchIfPageReady(): WaitArg = {
    this.navigationMatcher = NextPageReadyMatcher; this
  }

  def filterEvents(eventFilter: (WookiePageStateChangedEvent) => Boolean): WaitArg = {this.eventFilter = Some(eventFilter); this}

  // True when the event passes the configured filter (or no filter is set).
  private[view] def acceptsEvent(e: WookiePageStateChangedEvent): Boolean = {
    if(eventFilter.isDefined)
      eventFilter.get.apply(e)
    else
      true
  }

  def matchByPredicate(p:((WookieNavigationEvent, WaitArg) => Boolean)):WaitArg = {
    this.navigationMatcher = new PredicateMatcher(p); this
  }

  def location(_s: String): WaitArg = {this.location = Some(_s); this}

  def matcher = navigationMatcher

  protected[wookie] def handleIfDefined(e: PageDoneEvent) = if(this.handler.isDefined) this.handler.get.apply()(e)

  //todo make package local
  protected[wookie] def startedAtMs(t: Long): WaitArg = {this.startedAtMs = t; this}

  // True once the wait has outlived its timeout (relative to startedAtMs).
  def isDue =
    startedAtMs + getTimeoutMs < System.currentTimeMillis()

  // Pairs this arg with a fresh Promise that is completed on page-done.
  def toNavigationRecord:NavigationRecord = {
    new NavigationRecord(this, Promise[PageDoneEvent]())
  }

  override def toString: String = {
    if(!name.isEmpty) {
      s"NavArg{'$name'}"
    } else
    if(location.isDefined){
      s"NavArg{'${location.get}'}"
    }
    else {
      super.toString
    }
  }
}
| chaschev/wookie-view | src/main/scala/wookie/view/WaitArg.scala | Scala | apache-2.0 | 3,013 |
/*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.flaminem.flamy.model.partitions
import com.flaminem.flamy.model.PartitionColumn
import scala.collection.mutable.ListBuffer
import scala.collection.{SeqLike, mutable}
/**
* Created by fpin on 1/29/15.
*/
/**
 * A concrete Hive-style partition: an ordered sequence of partition columns.
 * Behaves as a Seq[PartitionColumn] via SeqLike.
 */
class Partition(override val columns: Seq[PartitionColumn])
  extends TPartition
  with SeqLike[PartitionColumn, Partition]
{

  override def newBuilderImpl: mutable.Builder[PartitionColumn, Partition] = {
    new ListBuffer[PartitionColumn] mapResult (x => new Partition(x))
  }

  override def newBuilder: mutable.Builder[PartitionColumn, Partition] = {
    newBuilderImpl
  }

  /**
   * Parse from a partition path of the form "col1=val1/col2=val2/...".
   * NOTE(review): a segment without '=' would throw ArrayIndexOutOfBounds
   * on a(1) — presumably inputs are always well-formed; confirm.
   */
  def this(partitionName: String) {
    this(
      partitionName.split("/").map {
        s =>
          val a = s.split("=")
          new PartitionColumn(a(0), Option(a(1)))
      }
    )
  }

}
| flaminem/flamy | src/main/scala/com/flaminem/flamy/model/partitions/Partition.scala | Scala | apache-2.0 | 1,363 |
package com.twitter.finagle.mux.transport
import com.twitter.finagle.mux.ContextCodec
import com.twitter.finagle.tracing.{Flags, SpanId, TraceId}
import com.twitter.finagle.{Dentry, Dtab, Failure, NameTree, Path}
import com.twitter.io.{Buf, BufByteWriter, ByteReader}
import com.twitter.util.{Duration, Future, Time}
import java.nio.charset.{StandardCharsets => Charsets}
import scala.collection.mutable.ArrayBuffer
/**
* Indicates that encoding or decoding of a Mux message failed.
* Reason for failure should be provided by the `why` string.
*/
case class BadMessageException(why: String) extends Exception(why)

/**
 * A single mux protocol message. Concrete subtypes live in the companion
 * object; documentation details are in the [[com.twitter.finagle.mux]]
 * package object.
 */
private[finagle] sealed trait Message {

  /**
   * Values should correspond to the constants defined in
   * [[com.twitter.finagle.mux.transport.Message.Types]]
   */
  def typ: Byte

  /** Only 3 of its bytes are used. */
  def tag: Int

  /**
   * The body of the message omitting size, typ, and tag.
   */
  def buf: Buf
}
private[finagle] object Message {
  /** Wire-level message type constants. T-types are positive, R-types negative. */
  object Types {

    /** True for any of the discard-related message types. */
    def isDiscard(typ: Byte): Boolean =
      typ == Message.Types.BAD_Tdiscarded ||
        typ == Message.Types.Rdiscarded ||
        typ == Message.Types.Tdiscarded

    // Application messages:
    val Treq = 1: Byte
    val Rreq = -1: Byte
    val Tdispatch = 2: Byte
    val Rdispatch = -2: Byte

    // Control messages:
    val Tdrain = 64: Byte
    val Rdrain = -64: Byte
    val Tping = 65: Byte
    val Rping = -65: Byte

    val Tdiscarded = 66: Byte
    val Rdiscarded = -66: Byte

    val Tlease = 67: Byte

    val Tinit = 68: Byte
    val Rinit = -68: Byte

    val Rerr = -128: Byte

    // Old implementation flukes.
    val BAD_Tdiscarded = -62: Byte
    val BAD_Rerr = 127: Byte
  }
  /** Tag-space constants and helpers. Tags are 24-bit; the MSB marks a fragment. */
  object Tags {
    val MarkerTag = 0
    // We reserve a tag for control messages. This allows us to cache
    // control messages and avoid encoding them every time.
    val ControlTag = 1
    val PingTag = ControlTag
    val MinTag = PingTag + 1
    val MaxTag = (1 << 23) - 1
    val TagMSB = (1 << 23)

    // Header layout: high byte is the type, low 24 bits are the tag.
    def extractType(header: Int): Byte = (header >> 24 & 0xff).toByte
    def extractTag(header: Int): Int = header & 0x00ffffff
    def isFragment(tag: Int): Boolean = (tag >> 23 & 1) == 1
    def setMsb(tag: Int): Int = tag | TagMSB
  }
  /** Status byte prefixed to Rreq/Rdispatch bodies. */
  private[mux] object ReplyStatus {
    val Ok: Byte = 0
    val Error: Byte = 1
    val Nack: Byte = 2
  }

  private def mkByte(b: Byte) = Buf.ByteArray.Owned(Array(b))

  // Cached single-byte Bufs for the three reply statuses (indexed by status value).
  private val bufOfChar = Array[Buf](mkByte(0), mkByte(1), mkByte(2))

  /** Base for messages with an empty body. */
  abstract class EmptyMessage extends Message {
    def buf: Buf = Buf.Empty
  }

  /** Base for session-level messages that always carry the marker tag (0). */
  abstract class MarkerMessage extends Message {
    def tag = 0
  }

  /** Marker trait for messages that can be fragmented */
  sealed trait Fragmentable extends Message
  /**
   * Codec for Tinit/Rinit bodies: a 2-byte version followed by
   * (4-byte key length, key, 4-byte value length, value) header pairs.
   */
  private object Init {
    def encode(version: Short, headers: Seq[(Buf, Buf)]): Buf = {
      // First pass: compute the exact body size so we can allocate once.
      var size = 2 // 2 bytes for version
      var iter = headers.iterator
      while (iter.hasNext) {
        val (k, v) = iter.next()
        // 8 bytes for length encoding of k, v
        size += 8 + k.length + v.length
      }
      val bw = BufByteWriter.fixed(size)
      bw.writeShortBE(version)
      // Second pass: write each length-prefixed key/value pair.
      iter = headers.iterator
      while (iter.hasNext) {
        iter.next() match {
          case (k, v) =>
            bw.writeIntBE(k.length)
            bw.writeBytes(k)
            bw.writeIntBE(v.length)
            bw.writeBytes(v)
        }
      }
      bw.owned()
    }

    def decode(br: ByteReader): (Short, Seq[(Buf, Buf)]) = {
      val version = br.readShortBE()
      val headers = new ArrayBuffer[(Buf, Buf)]
      // Consume length-prefixed key/value pairs until the reader is exhausted.
      while (br.remaining > 0) {
        val k = br.readBytes(br.readIntBE())
        val v = br.readBytes(br.readIntBE())
        headers += (k -> v)
      }
      (version.toShort, headers.toSeq)
    }
  }
  /** Session-initiation request: proposes a protocol version and headers. */
  case class Tinit(tag: Int, version: Short, headers: Seq[(Buf, Buf)]) extends Message {
    def typ: Byte = Types.Tinit
    lazy val buf: Buf = Init.encode(version, headers)
  }

  /** Session-initiation reply: the negotiated version and headers. */
  case class Rinit(tag: Int, version: Short, headers: Seq[(Buf, Buf)]) extends Message {
    def typ: Byte = Types.Rinit
    lazy val buf: Buf = Init.encode(version, headers)
  }
/**
* A transmit request message.
*
* Note, Treq messages are deprecated in favor of [[Tdispatch]] and will likely
* be removed in a future version of mux.
*
* Treq does not support 128bit TraceId.
*/
  case class Treq(tag: Int, traceId: Option[TraceId], req: Buf) extends Message {
    import Treq._
    def typ = Types.Treq
    // Header: one count byte, then (key byte, length byte, value bytes) entries.
    lazy val buf: Buf = {
      val header = traceId match {
        // Currently we require the 3-tuple, but this is not
        // necessarily required.
        case Some(traceId) =>
          val hd = BufByteWriter.fixed(1 + 1 + 1 + 24 + 1 + 1 + 1)
          hd.writeByte(2) // 2 entries
          hd.writeByte(Keys.TraceId) // key 0 (traceid)
          hd.writeByte(24) // key 0 size
          hd.writeLongBE(traceId.spanId.toLong)
          hd.writeLongBE(traceId.parentId.toLong)
          hd.writeLongBE(traceId.traceId.toLong)
          hd.writeByte(Keys.TraceFlag) // key 1 (traceflag)
          hd.writeByte(1) // key 1 size
          hd.writeByte(traceId.flags.toLong.toByte)
          hd.owned()

        case None =>
          bufOfChar(0) // 0 keys
      }

      header.concat(req)
    }
  }

  object Treq {
    /** Header-entry key bytes used in the Treq wire format. */
    object Keys {
      val TraceId = 1
      val TraceFlag = 2
    }
  }
/**
* A reply to a `Treq` message.
*
* Note, Rreq messages are deprecated in favor of [[Rdispatch]] and will likely
* be removed in a future version of mux.
*/
  abstract class Rreq(rreqType: Byte, body: Buf) extends Message {
    def typ = Types.Rreq
    // Body is the reply-status byte followed by the payload.
    lazy val buf: Buf = bufOfChar(rreqType).concat(body)
  }

  /** Successful Treq reply carrying the response payload. */
  case class RreqOk(tag: Int, reply: Buf) extends Rreq(ReplyStatus.Ok, reply)
  /** Failed Treq reply carrying an error description. */
  case class RreqError(tag: Int, error: String) extends Rreq(ReplyStatus.Error, encodeString(error))
  /** Negative-acknowledgement reply to a Treq. */
  case class RreqNack(tag: Int) extends Rreq(ReplyStatus.Nack, Buf.Empty)

  private[this] val noBytes = Array.empty[Byte]
  /**
   * Dispatch request carrying broadcast contexts, a destination path,
   * a delegation table (dtab) and the request payload.
   */
  case class Tdispatch(tag: Int, contexts: Seq[(Buf, Buf)], dst: Path, dtab: Dtab, req: Buf)
      extends Fragmentable {
    def typ = Types.Tdispatch
    lazy val buf: Buf = {
      // first, compute how large the message header is (in 'n')
      var n = 2
      n += ContextCodec.encodedLength(contexts.iterator)

      val dstbytes = if (dst.isEmpty) noBytes else dst.show.getBytes(Charsets.UTF_8)
      n += 2 + dstbytes.length
      n += 2

      // Pre-encode every dentry to UTF-8 while accumulating the total size.
      val dtabbytes = new Array[(Array[Byte], Array[Byte])](dtab.size)
      var dtabidx = 0
      var i = 0
      while (i < dtab.length) {
        val dentry = dtab(i)
        val srcbytes = dentry.prefix.show.getBytes(Charsets.UTF_8)
        val treebytes = dentry.dst.show.getBytes(Charsets.UTF_8)

        n += srcbytes.length + 2 + treebytes.length + 2

        dtabbytes(dtabidx) = (srcbytes, treebytes)
        dtabidx += 1
        i += 1
      }

      // then, allocate and populate the header
      val hd = BufByteWriter.fixed(n)
      hd.writeShortBE(contexts.length)
      ContextCodec.encode(hd, contexts.iterator)
      hd.writeShortBE(dstbytes.length)
      hd.writeBytes(dstbytes)

      hd.writeShortBE(dtab.size)
      dtabidx = 0
      while (dtabidx != dtabbytes.length) {
        dtabbytes(dtabidx) match {
          case (srcbytes, treebytes) =>
            hd.writeShortBE(srcbytes.length)
            hd.writeBytes(srcbytes)
            hd.writeShortBE(treebytes.length)
            hd.writeBytes(treebytes)
        }
        dtabidx += 1
      }

      hd.owned().concat(req)
    }
  }
/** A reply to a `Tdispatch` message */
  abstract class Rdispatch(status: Byte, contexts: Seq[(Buf, Buf)], body: Buf)
      extends Fragmentable {
    def typ = Types.Rdispatch
    // Body layout: status byte, context count, length-prefixed contexts, payload.
    lazy val buf: Buf = {
      // First pass: size the fixed-length header exactly.
      var n = 1 + 2
      var iter = contexts.iterator
      while (iter.hasNext) {
        iter.next() match {
          case (k, v) =>
            n += 2 + k.length + 2 + v.length
        }
      }

      val hd = BufByteWriter.fixed(n)
      hd.writeByte(status)
      hd.writeShortBE(contexts.length)
      // Second pass: write each context key/value with 2-byte length prefixes.
      iter = contexts.iterator
      while (iter.hasNext) {
        iter.next() match {
          case (k, v) =>
            hd.writeShortBE(k.length)
            hd.writeBytes(k)
            hd.writeShortBE(v.length)
            hd.writeBytes(v)
        }
      }

      hd.owned().concat(body)
    }
  }

  /** Successful dispatch reply with contexts and the response payload. */
  case class RdispatchOk(tag: Int, contexts: Seq[(Buf, Buf)], reply: Buf)
      extends Rdispatch(ReplyStatus.Ok, contexts, reply)

  /** Failed dispatch reply carrying an error description. */
  case class RdispatchError(tag: Int, contexts: Seq[(Buf, Buf)], error: String)
      extends Rdispatch(ReplyStatus.Error, contexts, encodeString(error))

  /** Negative-acknowledgement dispatch reply. */
  case class RdispatchNack(tag: Int, contexts: Seq[(Buf, Buf)])
      extends Rdispatch(ReplyStatus.Nack, contexts, Buf.Empty)
  /**
   * A fragment, as defined by the mux spec, is a message with its tag MSB
   * set to 1.
   */
  case class Fragment(typ: Byte, tag: Int, buf: Buf) extends Message

  /** Indicates to the client to stop sending new requests. */
  case class Tdrain(tag: Int) extends EmptyMessage { def typ = Types.Tdrain }

  /** Response from the client to a `Tdrain` message */
  case class Rdrain(tag: Int) extends EmptyMessage { def typ = Types.Rdrain }

  /** Used to check liveness */
  case class Tping(tag: Int) extends EmptyMessage { def typ = Types.Tping }
  /** Representation of messages that we pre-encode for performance reasons */
  final class PreEncoded private (val underlying: Message) extends Message {
    def typ: Byte = underlying.typ
    def tag: Int = underlying.tag

    // We coerce bufs to the `ByteArray` form since they are the fastest Buf
    // representation and we expect them to be reused heavily.
    val buf: Buf = Buf.ByteArray.coerce(underlying.buf)

    /**
     * Pre-encoded representation of the parent message including type and tag.
     * The resulting `Buf` is identical in content to `Message.encode(parentMessage)`.
     */
    val encodedBuf: Buf = Buf.ByteArray.coerce(encode(underlying))

    override def toString: String = underlying.toString
  }

  /**
   * We pre-encode a ping messages with the reserved ping tag
   * (PingTag) in order to avoid re-encoding these frequently sent
   * messages.
   */
  object PreEncoded {
    val Tping: PreEncoded = new PreEncoded(Message.Tping(Tags.PingTag))
    val Rping: PreEncoded = new PreEncoded(Message.Rping(Tags.PingTag))
    // Cached satisfied Future to avoid allocating one per ping reply.
    val FutureRping: Future[Message] = Future.value(Rping)
  }
/** Response to a `Tping` message */
case class Rping(tag: Int) extends EmptyMessage { def typ = Types.Rping }
/** Indicates that the corresponding T message produced an error. */
case class Rerr(tag: Int, error: String) extends Message {
  // Use the old Rerr type in a transition period so that we
  // can be reasonably sure we remain backwards compatible with
  // old servers.
  def typ = Types.BAD_Rerr
  // Body is just the UTF-8 encoded error string.
  lazy val buf: Buf = encodeString(error)
}
/**
 * Indicates that the `Treq` with the tag indicated by `which` has been discarded
 * by the client.
 */
case class Tdiscarded(which: Int, why: String)
// Use the old Tdiscarded type in a transition period so that we
// can be reasonably sure we remain backwards compatible with
// old servers.
    extends MarkerMessage {
  def typ = Types.BAD_Tdiscarded
  // Body: 24-bit big-endian tag of the discarded request, then the UTF-8 reason.
  lazy val buf: Buf = {
    val arr =
      Array[Byte]((which >> 16 & 0xff).toByte, (which >> 8 & 0xff).toByte, (which & 0xff).toByte)
    Buf.ByteArray.Owned(arr).concat(encodeString(why))
  }
}
/** Acknowledges a `Tdiscarded`; carries no body. */
case class Rdiscarded(tag: Int) extends Message {
  def typ = Types.Rdiscarded
  def buf: Buf = Buf.Empty
}
object Tlease {
  // Valid lease durations: [0, 2^32) milliseconds.
  val MinLease = Duration.Zero
  val MaxLease = Duration.fromMilliseconds((1L << 32) - 1) // Unsigned Int max value
  // Unit code 0: `howLong` is a duration in milliseconds.
  val MillisDuration: Byte = 0
  /** Lease expressed as a duration from now; must lie in [MinLease, MaxLease]. */
  def apply(howLong: Duration): Tlease = {
    require(howLong >= MinLease && howLong <= MaxLease, "lease out of range")
    Tlease(MillisDuration, howLong.inMilliseconds)
  }
  // Unit code 1: `howLong` is an absolute deadline in millis since the epoch.
  // NOTE(review): the literal 1 is not given a named constant like MillisDuration — confirm intended.
  def apply(end: Time): Tlease = Tlease(1, end.sinceEpoch.inMilliseconds)
}
/** Grants (or revokes, when 0) a lease to the receiver for `howLong` in `unit`. */
case class Tlease(unit: Byte, howLong: Long) extends MarkerMessage {
  def typ = Types.Tlease
  // Wire format: 1 unit byte + 8-byte big-endian quantity = 9 bytes.
  lazy val buf: Buf = {
    val bw = BufByteWriter.fixed(9)
    bw.writeByte(unit)
    bw.writeLongBE(howLong)
    bw.owned()
  }
}
/** Extractor matching transmit (T) messages, whose type byte is positive. */
object Tmessage {
  def unapply(m: Message): Option[Int] =
    if (m.typ > 0) Some(m.tag)
    else None
}
/** Extractor matching reply (R) messages, whose type byte is negative. */
object Rmessage {
  def unapply(m: Message): Option[Int] =
    if (isRmessage(m)) Some(m.tag)
    else None
  def isRmessage(m: Message): Boolean = m.typ < 0
}
/** Extractor matching session-control messages (drain/ping/lease/discard/err). */
object ControlMessage {
  // TODO: Update this extractor in the event that we "fix" the control
  // message flukes by removing backwards compatibility.
  def unapply(m: Message): Option[Int] =
    if (math.abs(m.typ) >= 64 || m.typ == Types.BAD_Tdiscarded)
      Some(m.tag)
    else None
}
// Decodes the entire buf as a UTF-8 string. The single-case match is total:
// the `Buf.Utf8` extractor matches any Buf.
def decodeUtf8(buf: Buf): String = buf match {
  case Buf.Utf8(str) => str
}
/** Encodes `str` as a UTF-8 Buf; inverse of [[decodeUtf8]]. */
def encodeString(str: String): Buf = Buf.Utf8(str)
// Decodes a (deprecated) Treq body: a key count byte followed by
// (key byte, unsigned length byte, value bytes) entries, then the payload.
// Only the trace-id and trace-flag keys are interpreted; others are skipped.
private def decodeTreq(tag: Int, br: ByteReader): Treq = {
  if (br.remaining < 1)
    throwBadMessageException("short Treq")
  var nkeys = br.readByte().toInt
  if (nkeys < 0)
    throwBadMessageException("Treq: too many keys")
  var trace3: Option[(SpanId, SpanId, SpanId)] = None
  var traceFlags = 0L
  while (nkeys > 0) {
    if (br.remaining < 2)
      throwBadMessageException("short Treq (header)")
    val key = br.readByte()
    // Re-interpret the signed byte as an unsigned length in [0, 255].
    val vsize = br.readByte().toInt match {
      case s if s < 0 => s + 256
      case s => s
    }
    if (br.remaining < vsize)
      throwBadMessageException("short Treq (vsize)")
    // TODO: technically we should probably check for duplicate
    // keys, but for now, just pick the latest one.
    key match {
      // NOTE: Treq is deprecated and therefore won't support 128bit TraceID. see Tdispatch/Rdispatch.
      case Treq.Keys.TraceId =>
        // Exactly three 8-byte span ids are expected.
        if (vsize != 24)
          throwBadMessageException(s"bad traceid size $vsize")
        trace3 = Some(
          (
            SpanId(br.readLongBE()), // spanId
            SpanId(br.readLongBE()), // parentId
            SpanId(br.readLongBE())
          ) // traceId
        )
      case Treq.Keys.TraceFlag =>
        // We only know about bit=0, so discard
        // everything but the last byte
        if (vsize > 1)
          br.readBytes(vsize - 1)
        if (vsize > 0)
          traceFlags = br.readByte().toLong
      case _ =>
        // discard:
        br.readBytes(vsize)
    }
    nkeys -= 1
  }
  // A TraceId is only materialized when the trace-id key was present.
  val id = trace3 match {
    case Some((spanId, parentId, traceId)) =>
      Some(TraceId(Some(traceId), Some(parentId), spanId, None, Flags(traceFlags)))
    case None => None
  }
  Treq(tag, id, br.readAll())
}
// Decodes a Tdispatch body: contexts, destination path, dtab, and payload,
// in that order, each prefixed by a 2-byte big-endian count/length.
private def decodeTdispatch(tag: Int, br: ByteReader) = {
  val contexts = {
    val n = br.readShortBE()
    if (n == 0) Nil
    else ContextCodec.decode(br, n)
  }
  val ndst = br.readShortBE()
  // Path.read("") fails, so special case empty-dst.
  val dst =
    if (ndst == 0) Path.empty
    else Path.read(decodeUtf8(br.readBytes(ndst)))
  val nd = br.readShortBE()
  val dtab =
    if (nd == 0) Dtab.empty
    else {
      var i = 0
      val delegations = new Array[Dentry](nd)
      // Each dentry is a (src, dst) pair of length-prefixed UTF-8 strings.
      while (i < nd) {
        val src = decodeUtf8(br.readBytes(br.readShortBE()))
        val dst = decodeUtf8(br.readBytes(br.readShortBE()))
        delegations(i) = Dentry(Path.read(src), NameTree.read(dst))
        i += 1
      }
      Dtab(delegations)
    }
  Tdispatch(tag, contexts, dst, dtab, br.readAll())
}
// Decodes an Rdispatch body: status byte, contexts, then the remainder is
// either the reply payload (Ok), an error string (Error), or empty (Nack).
private def decodeRdispatch(tag: Int, br: ByteReader) = {
  val status = br.readByte()
  val contexts = {
    val n = br.readShortBE()
    if (n == 0) Nil
    else ContextCodec.decode(br, n)
  }
  val rest = br.readAll()
  status match {
    case ReplyStatus.Ok => RdispatchOk(tag, contexts, rest)
    case ReplyStatus.Error => RdispatchError(tag, contexts, decodeUtf8(rest))
    case ReplyStatus.Nack => RdispatchNack(tag, contexts)
    case _ => throwBadMessageException("invalid Rdispatch status")
  }
}
// Decodes an Rreq body: a status byte followed by payload (Ok),
// an error string (Error), or nothing (Nack).
private def decodeRreq(tag: Int, br: ByteReader) = {
  if (br.remaining < 1)
    throwBadMessageException("short Rreq")
  val status = br.readByte()
  val rest = br.readAll()
  status match {
    case ReplyStatus.Ok => RreqOk(tag, rest)
    case ReplyStatus.Error => RreqError(tag, decodeUtf8(rest))
    case ReplyStatus.Nack => RreqNack(tag)
    case _ => throwBadMessageException("invalid Rreq status")
  }
}
// Decodes a Tdiscarded body: a 24-bit big-endian tag of the discarded
// request followed by a UTF-8 reason string.
private def decodeTdiscarded(br: ByteReader) = {
  if (br.remaining < 3)
    throwBadMessageException("short Tdiscarded message")
  val which = ((br.readByte() & 0xff) << 16) |
    ((br.readByte() & 0xff) << 8) |
    (br.readByte() & 0xff)
  Tdiscarded(which, decodeUtf8(br.readAll()))
}
// Decodes a Tlease body: unit byte + 8-byte big-endian quantity.
private def decodeTlease(br: ByteReader) = {
  if (br.remaining < 9)
    throwBadMessageException("short Tlease message")
  val unit: Byte = br.readByte()
  val howMuch: Long = br.readLongBE()
  Tlease(unit, howMuch)
}
/**
 * Try to decode a `buf` to [[Message]]. If [[Buf]] is backed
 * by a direct buffer then that buffer will be released after decode.
 *
 * @note may throw a [[Failure]] wrapped [[BadMessageException]]
 */
def decode(buf: Buf): Message = {
  val br = ByteReader(buf)
  // Always close the reader so direct buffers are released even on failure.
  try decode(br)
  finally br.close()
}
/**
 * Try to decode the contents of a `ByteReader` to [[Message]]. This function
 * assumes the content of the `ByteReader` represents exactly one message.
 *
 * @note This function _does not_ assume ownership of the passed `ByteReader`
 * and it is up to the caller to release the underlying resources.
 *
 * @note may throw a [[Failure]] wrapped [[BadMessageException]]
 */
def decode(byteReader: ByteReader): Message = {
  if (byteReader.remaining < 4)
    throwBadMessageException("short message: " + Buf.slowHexString(byteReader.readAll()))
  // The 4-byte frame header packs the type byte and the 24-bit tag.
  val head = byteReader.readIntBE()
  val typ = Tags.extractType(head)
  val tag = Tags.extractTag(head)
  decodeMessageBody(typ, tag, byteReader)
}
/**
 * Try to decode the contents of the `ByteReader` to a [[Message]]. The 4-byte
 * header that represents the tag and type must already be stripped and that information
 * is provided as function arguments.
 *
 * @note This function _does not_ assume ownership of the passed `ByteReader`
 * and it is up to the caller to release the underlying resources.
 *
 * @note may throw a [[Failure]] wrapped [[BadMessageException]]
 */
def decodeMessageBody(typ: Byte, tag: Int, byteReader: ByteReader): Message = {
  // A set tag MSB marks a fragment regardless of type, so check it first.
  if (Tags.isFragment(tag)) Fragment(typ, tag, byteReader.readAll())
  else
    typ match {
      case Types.Tinit =>
        val (version, ctx) = Init.decode(byteReader)
        Tinit(tag, version, ctx)
      case Types.Rinit =>
        val (version, ctx) = Init.decode(byteReader)
        Rinit(tag, version, ctx)
      case Types.Treq => decodeTreq(tag, byteReader)
      case Types.Rreq => decodeRreq(tag, byteReader)
      case Types.Tdispatch => decodeTdispatch(tag, byteReader)
      case Types.Rdispatch => decodeRdispatch(tag, byteReader)
      case Types.Tdrain => Tdrain(tag)
      case Types.Rdrain => Rdrain(tag)
      case Types.Tping => Tping(tag)
      case Types.Rping => Rping(tag)
      // Accept both the spec-compliant and legacy ("BAD_") type codes.
      case Types.Rerr | Types.BAD_Rerr => Rerr(tag, decodeUtf8(byteReader.readAll()))
      case Types.Rdiscarded => Rdiscarded(tag)
      case Types.Tdiscarded | Types.BAD_Tdiscarded => decodeTdiscarded(byteReader)
      case Types.Tlease => decodeTlease(byteReader)
      case unknown =>
        throwBadMessageException(unknownMessageDescription(unknown, tag, byteReader))
    }
}
// Renders the 4-byte mux frame header: the type byte followed by the
// 24-bit big-endian tag.
private def makeHeader(typ: Byte, tag: Int): Buf = {
  val arr = Array(
    typ,
    (tag >> 16 & 0xff).toByte,
    (tag >> 8 & 0xff).toByte,
    (tag & 0xff).toByte
  )
  new Buf.ByteArray(arr, 0, 4)
}
/**
 * Encode `msg` to its wire representation (header + body). Pre-encoded
 * messages short-circuit to their cached representation.
 *
 * @note may throw a [[Failure]] wrapped [[BadMessageException]] if the tag
 *       (ignoring the fragment MSB) is out of range.
 */
def encode(msg: Message): Buf = msg match {
  case msg: PreEncoded => msg.encodedBuf
  case m: Message =>
    if (m.tag < Tags.MarkerTag || (m.tag & ~Tags.TagMSB) > Tags.MaxTag)
      throwBadMessageException(s"invalid tag number ${m.tag}")
    makeHeader(m.typ, m.tag).concat(m.buf)
}
/**
 * Returns an iterator over the fragments of `msg`. Each fragment is
 * sized to be <= `maxSize`
 * @note the returned iterator is not thread-safe.
 */
private[transport] def encodeFragments(msg: Message, maxSize: Int): Iterator[Buf] = msg match {
  case m: Fragmentable => new FragmentIterator(m, maxSize)
  case _ => Iterator.single(Message.encode(msg))
}
// Lazily slices `msg.buf` into chunks of at most `maxSize` bytes. Every
// chunk but the last is prefixed with a header whose tag MSB is set
// (marking it a fragment); the final chunk carries the normal header.
private class FragmentIterator(msg: Fragmentable, maxSize: Int) extends Iterator[Buf] {
  private[this] var finished = false
  private[this] var readIndex = 0
  // The fragment header is invariant across chunks, so build it once.
  private[this] val fragmentHeader = makeHeader(msg.typ, Tags.setMsb(msg.tag))
  private[this] val body = msg.buf
  def hasNext: Boolean = !finished
  def next(): Buf = {
    if (!hasNext) Iterator.empty.next()
    else {
      val nextIndex = readIndex + maxSize
      val chunk = body.slice(readIndex, nextIndex)
      readIndex = nextIndex
      if (nextIndex < body.length) fragmentHeader.concat(chunk)
      else {
        finished = true
        // Prepend the non-fragmented header
        makeHeader(msg.typ, msg.tag).concat(chunk)
      }
    }
  }
  override def toString = s"FragmentingIterator($msg, $maxSize) { hasNext = $hasNext }"
}
// Helper method to ensure conformity of BadMessageExceptions
private def throwBadMessageException(why: String): Nothing =
  throw Failure.wrap(BadMessageException(why))
// Builds a diagnostic string for an unrecognized message type.
// NOTE(review): relies on `payload.readBytes(16)` returning only the
// remaining bytes when fewer than 16 remain — confirm against ByteReader.
private def unknownMessageDescription(tpe: Byte, tag: Int, payload: ByteReader): String = {
  val remaining = payload.remaining
  val toWrite = payload.readBytes(16) // Limit reporting to at most 16 bytes
  val bytesStr = Buf.slowHexString(toWrite)
  s"unknown message type: $tpe [tag=$tag]. Payload bytes: $remaining. " +
    s"First ${toWrite.length} bytes of the payload: '$bytesStr'"
}
}
| twitter/finagle | finagle-mux/src/main/scala/com/twitter/finagle/mux/transport/Message.scala | Scala | apache-2.0 | 22,500 |
package reactor.core.scala.publisher
import java.util
import java.util.{Collection => JCollection, Map => JMap}
import reactor.util.concurrent.Queues.SMALL_BUFFER_SIZE
import scala.collection.mutable
import scala.jdk.CollectionConverters._
// Scala-2.13-specific pieces of SFlux (this file lives under src/main/scala-2.13):
// collection-converter based multimap collection and LazyList conversions.
trait VersionedSFlux[+T] {self: SFlux[T] =>
  /** Collects elements into a multimap keyed by `keyExtractor`, keeping the elements themselves as values. */
  final def collectMultimap[K](keyExtractor: T => K): SMono[Map[K, Iterable[T]]] = collectMultimap(keyExtractor, (t: T) => t, ()=>mutable.HashMap.empty[K, util.Collection[T]])
  /**
   * Collects elements into a multimap keyed by `keyExtractor` with values derived by `valueExtractor`.
   * The supplied mutable map is bridged to `java.util.Map` for the underlying reactor call and the
   * result is converted back to an immutable Scala `Map` of `Iterable`s.
   */
  final def collectMultimap[K, V](keyExtractor: T => K,
                                  valueExtractor: T => V,
                                  mapSupplier: () => mutable.Map[K, util.Collection[V]] = () => mutable.HashMap.empty[K, util.Collection[V]]):
  SMono[Map[K, Iterable[V]]] =
    new ReactiveSMono[Map[K, Iterable[V]]](coreFlux.collectMultimap[K, V](keyExtractor,
      valueExtractor,
      () => mapSupplier().asJava)
      .map((m: JMap[K, JCollection[V]]) => m.asScala.view.mapValues((vs: JCollection[V]) => vs.asScala.toSeq).toMap))
  /** Deprecated alias kept for source compatibility; delegates to [[toLazyList]]. */
  @deprecated("Use toLazyList", since = "0.8.0, 2.13.0")
  final def toStream(batchSize: Int = SMALL_BUFFER_SIZE): LazyList[T] = toLazyList(batchSize)
  /** Blocking conversion of the flux into a LazyList, pulling `batchSize` elements at a time. */
  final def toLazyList(batchSize: Int = SMALL_BUFFER_SIZE): LazyList[T] = coreFlux.toStream(batchSize).iterator().asScala.to(LazyList)
}
| reactor/reactor-scala-extensions | src/main/scala-2.13/reactor/core/scala/publisher/VersionedSFlux.scala | Scala | apache-2.0 | 1,306 |
/*
* =========================================================================================
* Copyright © 2013-2017 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
* =========================================================================================
*/
package kamon.netty
import java.net.InetSocketAddress
import java.util.concurrent.{LinkedBlockingQueue, TimeUnit}
import io.netty.bootstrap.Bootstrap
import io.netty.buffer.Unpooled
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.socket.SocketChannel
import io.netty.channel.socket.nio.NioSocketChannel
import io.netty.channel.{Channel, ChannelHandlerContext, ChannelInboundHandlerAdapter, ChannelInitializer}
import io.netty.handler.codec.http._
import io.netty.util.CharsetUtil
// Minimal blocking HTTP test client built on a single-threaded Netty NIO
// event loop. Responses are handed from the pipeline to callers through a
// blocking queue.
class NioEventLoopBasedClient(port: Int) {
  // Responses surfaced by HttpClientHandler; poll() blocks until one arrives.
  private val clientMessagesReceived = new LinkedBlockingQueue[AnyRef]()
  private val group = new NioEventLoopGroup(1)
  private val b = new Bootstrap
  b.group(group)
    .channel(classOf[NioSocketChannel])
    .handler(new HttpClientInitializer(clientMessagesReceived))
  // Connecting is done eagerly and synchronously at construction time.
  val channel: Channel = b.connect(new InetSocketAddress(port)).sync.channel
  /** Closes the connection and shuts the event loop down. */
  def close(): Unit = {
    channel.close
    group.shutdownGracefully()
  }
  /** Writes a full request, waits up to `timeoutMillis` for the write, and returns the response. */
  def execute(request: DefaultFullHttpRequest, timeoutMillis: Long = 2000): FullHttpResponse = {
    val future = channel.write(request)
    channel.flush
    future.await(timeoutMillis)
    response()
  }
  /** Writes a request followed by its content chunks, then waits for and returns the response. */
  def executeWithContent(request: DefaultHttpRequest, content: Seq[HttpContent], timeoutMillis: Long = 2000): FullHttpResponse = {
    val allFutures = (request +: content).map(channel.write)
    channel.flush
    allFutures.foreach(_.await(timeoutMillis))
    response()
  }
  /** Builds a GET request for `path` with an explicit zero Content-Length. */
  def get(path: String): DefaultFullHttpRequest = {
    val request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, path)
    HttpHeaders.setContentLength(request, 0)
    request
  }
  /** Builds a chunked POST: the request headers plus one HttpContent per chunk and a terminating last-content. */
  def postWithChunks(path: String, chunks: String*): (DefaultHttpRequest, Seq[DefaultHttpContent]) = {
    val request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, path)
    HttpHeaders.setTransferEncodingChunked(request)
    val httpChunks = chunks.map(chunk => new DefaultHttpContent(Unpooled.copiedBuffer(chunk, CharsetUtil.UTF_8)))
    (request, httpChunks :+ new DefaultLastHttpContent(Unpooled.EMPTY_BUFFER))
  }
  // Blocks up to 2 seconds for the next response; returns null on timeout.
  private def response(): FullHttpResponse =
    clientMessagesReceived.poll(2, TimeUnit.SECONDS).asInstanceOf[FullHttpResponse]
}
object NioEventLoopBasedClient {
  /** Convenience factory; connects immediately to `bindAddress` on localhost. */
  def apply(bindAddress: Int): NioEventLoopBasedClient = new NioEventLoopBasedClient(bindAddress)
}
object Clients {
  /** Loan pattern: runs `thunk` with a connected client and always closes it afterwards. */
  def withNioClient[A](bindAddress:Int = 9001)(thunk: NioEventLoopBasedClient => A): A = {
    val client = new NioEventLoopBasedClient(bindAddress)
    try thunk(client) finally client.close()
  }
}
// Pipeline: HTTP codec -> aggregation of chunks into FullHttpResponse -> queue handler.
private class HttpClientInitializer(received:java.util.Queue[AnyRef]) extends ChannelInitializer[SocketChannel] {
  override def initChannel(ch: SocketChannel): Unit = {
    val p = ch.pipeline
    p.addLast(new HttpClientCodec)
    p.addLast(new HttpObjectAggregator(1024))
    p.addLast(new HttpClientHandler(received))
  }
}
// Terminal handler: enqueues every inbound message for the blocking client to consume.
private class HttpClientHandler(received:java.util.Queue[AnyRef]) extends ChannelInboundHandlerAdapter {
  override def channelRead(ctx: ChannelHandlerContext, msg: AnyRef): Unit = {
    received.add(msg)
  }
}
| kamon-io/kamon-netty | src/test/scala/kamon/netty/Clients.scala | Scala | apache-2.0 | 3,939 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.receiver
import com.google.common.util.concurrent.{RateLimiter => GuavaRateLimiter}
import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
/**
* Provides waitToPush() method to limit the rate at which receivers consume data.
*
* waitToPush method will block the thread if too many messages have been pushed too quickly,
* and only return when a new message has been pushed. It assumes that only one message is
* pushed at a time.
*
* The spark configuration spark.streaming.receiver.maxRate gives the maximum number of messages
* per second that each receiver will accept.
*
* @param conf spark configuration
*/
private[receiver] abstract class RateLimiter(conf: SparkConf) extends Logging {

  // Configured ceiling on the receiver's ingest rate (records/sec);
  // treated as an upper limit for any dynamically supplied rate.
  private val maxRateLimit = conf.getLong("spark.streaming.receiver.maxRate", Long.MaxValue)
  // Lazy so the initial rate is only computed if the limiter is actually used.
  private lazy val rateLimiter = GuavaRateLimiter.create(getInitialRateLimit().toDouble)

  /**
   * Blocks the calling thread until one more record may be pushed,
   * according to the current rate limit. Assumes one record per call.
   */
  def waitToPush(): Unit = {
    rateLimiter.acquire()
  }

  /**
   * Return the current rate limit. If no limit has been set so far, it returns {{{Long.MaxValue}}}.
   */
  def getCurrentLimit: Long = rateLimiter.getRate.toLong

  /**
   * Set the rate limit to `newRate`. The new rate will not exceed the maximum rate configured by
   * {{{spark.streaming.receiver.maxRate}}}, even if `newRate` is higher than that.
   *
   * @param newRate A new rate in records per second. It has no effect if it's 0 or negative.
   */
  private[receiver] def updateRate(newRate: Long): Unit =
    if (newRate > 0) {
      // A non-positive maxRateLimit means "no configured ceiling".
      if (maxRateLimit > 0) {
        rateLimiter.setRate(newRate.min(maxRateLimit))
      } else {
        rateLimiter.setRate(newRate)
      }
    }

  /**
   * Get the initial rateLimit to initial rateLimiter: the backpressure
   * initial rate when configured, capped by the configured max rate.
   */
  private def getInitialRateLimit(): Long = {
    math.min(conf.getLong("spark.streaming.backpressure.initialRate", maxRateLimit), maxRateLimit)
  }
}
| esi-mineset/spark | streaming/src/main/scala/org/apache/spark/streaming/receiver/RateLimiter.scala | Scala | apache-2.0 | 2,744 |
/**
* Original work: SecureSocial (https://github.com/jaliss/securesocial)
* Copyright 2013 Jorge Aliss (jaliss at gmail dot com) - twitter: @jaliss
*
* Derivative work: Silhouette (https://github.com/mohiva/play-silhouette)
* Modifications Copyright 2015 Mohiva Organisation (license at mohiva dot com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mohiva.play.silhouette.impl.providers.oauth1.services
import com.mohiva.play.silhouette.api.Logger
import com.mohiva.play.silhouette.impl.providers.oauth1.services.PlayOAuth1Service._
import com.mohiva.play.silhouette.impl.providers.{ OAuth1Info, OAuth1Service, OAuth1Settings }
import play.api.libs.oauth.{ ConsumerKey, OAuth, RequestToken, ServiceInfo, _ }
import play.api.libs.ws.WSSignatureCalculator
import scala.concurrent.{ ExecutionContext, Future }
/**
* The OAuth1 service implementation which wraps Play Framework's OAuth implementation.
*
* @param service The Play Framework OAuth implementation.
* @param settings The service settings.
*/
class PlayOAuth1Service(service: OAuth, settings: OAuth1Settings) extends OAuth1Service with Logger {
  /**
   * Constructs the default Play Framework OAuth implementation.
   *
   * @param settings The service settings.
   * @return The OAuth1 service.
   */
  def this(settings: OAuth1Settings) = this(OAuth(serviceInfo(settings), use10a = true), settings)
  /**
   * Indicates if the service uses the safer 1.0a specification which addresses the session fixation attack
   * identified in the OAuth Core 1.0 specification.
   *
   * @see http://oauth.net/core/1.0a/
   * @see http://oauth.net/advisories/2009-1/
   *
   * @return True if the services uses 1.0a specification, false otherwise.
   */
  override def use10a = service.use10a
  /**
   * Retrieves the request info and secret.
   *
   * @param callbackURL The URL where the provider should redirect to (usually a URL on the current app).
   * @param ec The execution context to handle the asynchronous operations.
   * @return A OAuth1Info in case of success, Exception otherwise.
   */
  override def retrieveRequestToken(callbackURL: String)(implicit ec: ExecutionContext): Future[OAuth1Info] = {
    // NOTE(review): the `callbackURL` parameter is ignored here in favor of
    // `settings.callbackURL` — confirm this is intentional.
    // The blocking Play call is wrapped in Future; a Left result is rethrown
    // so the returned future fails with the underlying exception.
    Future(service.retrieveRequestToken(settings.callbackURL)).map(_.fold(
      e => throw e,
      t => OAuth1Info(t.token, t.secret)))
  }
  /**
   * Exchange a request info for an access info.
   *
   * @param oAuthInfo The info/secret pair obtained from a previous call.
   * @param verifier A string you got through your user, with redirection.
   * @param ec The execution context to handle the asynchronous operations.
   * @return A OAuth1Info in case of success, Exception otherwise.
   */
  override def retrieveAccessToken(oAuthInfo: OAuth1Info, verifier: String)(implicit ec: ExecutionContext): Future[OAuth1Info] = {
    Future(service.retrieveAccessToken(RequestToken(oAuthInfo.token, oAuthInfo.secret), verifier)).map(_.fold(
      e => throw e,
      t => OAuth1Info(t.token, t.secret)))
  }
  /**
   * The URL to which the user needs to be redirected to grant authorization to your application.
   *
   * @param token The request info.
   * @return The redirect URL.
   */
  override def redirectUrl(token: String): String = service.redirectUrl(token)
  /**
   * Creates the signature calculator for the OAuth info.
   *
   * @param oAuthInfo The info/secret pair obtained from a previous call.
   * @return The signature calculator for the OAuth1 request.
   */
  override def sign(oAuthInfo: OAuth1Info): WSSignatureCalculator = {
    OAuthCalculator(service.info.key, RequestToken(oAuthInfo.token, oAuthInfo.secret))
  }
}
/**
 * The companion object.
 */
object PlayOAuth1Service {
  /**
   * Builds the Play `ServiceInfo` (endpoint URLs and consumer credentials)
   * from the Silhouette settings.
   *
   * @return The service info.
   */
  def serviceInfo(settings: OAuth1Settings) = ServiceInfo(
    settings.requestTokenURL,
    settings.accessTokenURL,
    settings.authorizationURL,
    ConsumerKey(settings.consumerKey, settings.consumerSecret))
}
| cemcatik/play-silhouette | silhouette/app/com/mohiva/play/silhouette/impl/providers/oauth1/services/PlayOAuth1Service.scala | Scala | apache-2.0 | 4,511 |
object main extends App {
  /*
   * 1) Scala has two kinds of variable: vals and vars.
   * A val can never be reassigned after initialization —
   * it is similar to a final variable in Java.
   * A var may be reassigned during its lifetime.
   */
  val msg = "Hello, World!"
  /*
   * 2) Scala performs type inference: the compiler deduces the
   * variable's type from the initializer, so no annotation is needed.
   */
  println(msg)
  println(msg.getClass)
  /*
   * 3) The type may also be stated explicitly after a colon.
   */
  val msg2: java.lang.String = "Hello again, world!"
  println(msg2)
  println(msg2.getClass)
  /*
   * 4) Types from java.lang are visible under their simple names,
   * so "String" works without the package prefix.
   */
  val msg3: String = "Hello yet again, world!"
  println(msg3)
  println(msg3.getClass)
  /*
   * 5) As mentioned above, a val cannot be reassigned.
   * When reassignment is needed, use a var.
   */
  var greeting = "Hello, World!"
  println(greeting)
  greeting = "Leave me alone, please. I am programming in Scala!"
  println(greeting)
  /*
   * 6) Defining a function:
   * - "def" starts the function definition
   * - "max" is the function name
   * - "x" and "y" are the parameters; each is declared inside the
   *   parentheses as name, colon, type
   * - the result type follows the closing parenthesis after a colon.
   *   A recursive function must declare its result type; otherwise
   *   it may be inferred. Below we are explicit.
   * - finally come the "=" sign and the function body in curly
   *   brackets "{}".
   */
  def max(x: Int, y: Int): Int = {
    if (x > y)
      x
    else
      y
  }
  println(max(10, 11))
  /*
   * 7) A one-line function body may omit the curly brackets.
   */
  def maxOneLine(x: Int, y: Int): Int = if(x > y) x else y
  println(maxOneLine(11, 12))
  /*
   * 8) Some functions do not need to return anything; their result
   * type is Unit, which is similar to Java's void. (Printing such a
   * result shows "()", the sole Unit value.)
   */
  def greet(): Unit = println("Hello!")
  println(greet())
  println(greet().getClass())
}
| arcyfelix/Courses | 18-10-18-Programming-in-Scala-by-Martin-Odersky-Lex-Spoon-and-Bill-Venners/00-DefineVariablesAndFunctions/src/main.scala | Scala | apache-2.0 | 2,223 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.service.assets2
import java.io.{InputStream, OutputStream}
import com.waz.model.Mime
import com.waz.service.assets2.Asset.General
import AssetTransformationsService._
import com.waz.log.BasicLogging.LogTag.DerivedLogTag
import com.waz.log.LogSE._
/** Computes the list of transformations to apply to an asset before upload. */
trait AssetTransformationsService {
  def getTransformations(mime: Mime, details: AssetDetails): List[Transformation]
}
object AssetTransformationsService {
  /** A single streaming transformation: consumes the initial content and writes the transformed content, returning the resulting mime type. */
  trait Transformation {
    def apply(initial: () => InputStream, transformed: () => OutputStream): Mime
  }
  object Transformation {
    /** Adapts a plain function into a [[Transformation]]. */
    def create(f: (() => InputStream, () => OutputStream) => Mime): Transformation = new Transformation {
      override def apply(initial: () => InputStream, transformed: () => OutputStream): Mime = f(initial, transformed)
    }
  }
  /** Decides whether a transformation applies to an asset of the given mime/details. */
  trait Handler {
    def createTransformation(mime: Mime, details: AssetDetails): Option[Transformation]
  }
}
/**
 * Default [[AssetTransformationsService]]: asks each registered [[Handler]]
 * for a transformation applicable to the given mime type and details, keeping
 * the ones that apply in handler order.
 */
class AssetTransformationsServiceImpl(handlers: List[Handler]) extends AssetTransformationsService {
  override def getTransformations(mime: Mime, details: AssetDetails): List[Transformation] =
    // flatMap over the Option result keeps only handlers that produced a
    // transformation — equivalent to map(...).collect { case Some(t) => t }.
    handlers.flatMap(_.createTransformation(mime, details))
}
// Handler that re-encodes and downscales large or poorly supported images.
// Images are recoded when their mime requires it, or when either dimension
// exceeds MaxImageDimension (GIFs are exempt from the size rule).
class ImageDownscalingCompressing(imageRecoder: ImageRecoder) extends Handler with DerivedLogTag {

  // Longest allowed edge, in pixels, before downscaling kicks in.
  private val MaxImageDimension = 1448

  // set of mime types that should be recoded to Jpeg before uploading
  val DefaultRecodeMimes = Set(
    Mime.Image.WebP,
    Mime.Image.Tiff,
    Mime.Image.Bmp,
    Mime.Unknown
  )

  // NOTE(review): the parameter type `General` (imported from Asset) is used
  // where the Handler trait declares `AssetDetails` — presumably an alias or
  // supertype relationship makes this a valid override; confirm.
  override def createTransformation(mime: Mime, details: General): Option[Transformation] = {
    Some(details)
      .collect { case ImageDetails(dim) => dim }
      .filter(dim => DefaultRecodeMimes.contains(mime) ||
        mime != Mime.Image.Gif && (dim.height max dim.width) > MaxImageDimension)
      .map { dim =>
        // PNG stays PNG (to preserve transparency); everything else becomes JPEG.
        val targetMime = mime match {
          case Mime.Image.Png => Mime.Image.Png
          case _ => Mime.Image.Jpg
        }
        verbose(l"Creating asset image downscaling and compression transformation. $mime -> $targetMime")
        Transformation.create { (in, out) =>
          imageRecoder.recode(dim, targetMime, MaxImageDimension, in, out)
          targetMime
        }
      }
  }
}
| wireapp/wire-android-sync-engine | zmessaging/src/main/scala/com/waz/service/assets2/AssetTransformationsService.scala | Scala | gpl-3.0 | 2,954 |
/*
* Copyright 2014 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
package blaze
package server
import cats.data.Kleisli
import cats.effect._
import fs2.Stream._
import org.http4s.Charset._
import org.http4s.dsl.Http4sDsl
import org.http4s.headers._
import org.http4s.implicits._
import org.typelevel.ci._
object ServerTestRoutes extends Http4sDsl[IO] {
// Pre-rendered raw headers used when asserting on expected responses.
private val textPlain = `Content-Type`(MediaType.text.plain, `UTF-8`).toRaw1
private val connClose = Connection(ci"close").toRaw1
private val connKeep = Connection(ci"keep-alive").toRaw1
private val chunked = `Transfer-Encoding`(TransferCoding.chunked).toRaw1
/** Raw `Content-Length` header for a body of `l` bytes. */
def length(l: Long): Header.Raw = `Content-Length`.unsafeFromLong(l).toRaw1
// Table of (raw HTTP request bytes, expected (status, headers, body)) pairs
// exercising keep-alive/close semantics, chunked encoding, and bodies across
// HTTP/1.0 and HTTP/1.1.
def testRequestResults: Seq[(String, (Status, Set[Header.Raw], String))] =
  Seq(
    ("GET /get HTTP/1.0\\r\\n\\r\\n", (Status.Ok, Set(length(3), textPlain), "get")),
    // ///////////////////////////////
    ("GET /get HTTP/1.1\\r\\n\\r\\n", (Status.Ok, Set(length(3), textPlain), "get")),
    // ///////////////////////////////
    (
      "GET /get HTTP/1.0\\r\\nConnection:keep-alive\\r\\n\\r\\n",
      (Status.Ok, Set(length(3), textPlain, connKeep), "get"),
    ),
    // ///////////////////////////////
    (
      "GET /get HTTP/1.1\\r\\nConnection:keep-alive\\r\\n\\r\\n",
      (Status.Ok, Set(length(3), textPlain), "get"),
    ),
    // ///////////////////////////////
    (
      "GET /get HTTP/1.1\\r\\nConnection:close\\r\\n\\r\\n",
      (Status.Ok, Set(length(3), textPlain, connClose), "get"),
    ),
    // ///////////////////////////////
    (
      "GET /get HTTP/1.0\\r\\nConnection:close\\r\\n\\r\\n",
      (Status.Ok, Set(length(3), textPlain, connClose), "get"),
    ),
    // ///////////////////////////////
    (
      "GET /get HTTP/1.1\\r\\nConnection:close\\r\\n\\r\\n",
      (Status.Ok, Set(length(3), textPlain, connClose), "get"),
    ),
    ("GET /chunked HTTP/1.1\\r\\n\\r\\n", (Status.Ok, Set(textPlain, chunked), "chunk")),
    // ///////////////////////////////
    (
      "GET /chunked HTTP/1.1\\r\\nConnection:close\\r\\n\\r\\n",
      (Status.Ok, Set(textPlain, chunked, connClose), "chunk"),
    ),
    // /////////////////////////////// Content-Length and Transfer-Encoding free responses for HTTP/1.0
    ("GET /chunked HTTP/1.0\\r\\n\\r\\n", (Status.Ok, Set(textPlain), "chunk")),
    // ///////////////////////////////
    (
      "GET /chunked HTTP/1.0\\r\\nConnection:Close\\r\\n\\r\\n",
      (Status.Ok, Set(textPlain, connClose), "chunk"),
    ),
    // ////////////////////////////// Requests with a body //////////////////////////////////////
    (
      "POST /post HTTP/1.1\\r\\nContent-Length:3\\r\\n\\r\\nfoo",
      (Status.Ok, Set(textPlain, length(4)), "post"),
    ),
    // ///////////////////////////////
    (
      "POST /post HTTP/1.1\\r\\nConnection:close\\r\\nContent-Length:3\\r\\n\\r\\nfoo",
      (Status.Ok, Set(textPlain, length(4), connClose), "post"),
    ),
    // ///////////////////////////////
    (
      "POST /post HTTP/1.0\\r\\nConnection:close\\r\\nContent-Length:3\\r\\n\\r\\nfoo",
      (Status.Ok, Set(textPlain, length(4), connClose), "post"),
    ),
    // ///////////////////////////////
    (
      "POST /post HTTP/1.0\\r\\nContent-Length:3\\r\\n\\r\\nfoo",
      (Status.Ok, Set(textPlain, length(4)), "post"),
    ),
    // ////////////////////////////////////////////////////////////////////
    (
      "POST /post HTTP/1.1\\r\\nTransfer-Encoding:chunked\\r\\n\\r\\n3\\r\\nfoo\\r\\n0\\r\\n\\r\\n",
      (Status.Ok, Set(textPlain, length(4)), "post"),
    ),
    // ///////////////////////////////
    (
      "POST /post HTTP/1.1\\r\\nConnection:close\\r\\nTransfer-Encoding:chunked\\r\\n\\r\\n3\\r\\nfoo\\r\\n0\\r\\n\\r\\n",
      (Status.Ok, Set(textPlain, length(4), connClose), "post"),
    ),
    (
      "POST /post HTTP/1.1\\r\\nTransfer-Encoding:chunked\\r\\n\\r\\n3\\r\\nfoo\\r\\n3\\r\\nbar\\r\\n0\\r\\n\\r\\n",
      (Status.Ok, Set(textPlain, length(4)), "post"),
    ),
    // ///////////////////////////////
    (
      "POST /post HTTP/1.1\\r\\nConnection:Close\\r\\nTransfer-Encoding:chunked\\r\\n\\r\\n3\\r\\nfoo\\r\\n0\\r\\n\\r\\n",
      (Status.Ok, Set(textPlain, length(4), connClose), "post"),
    ),
    // /////////////////////////////// Check corner cases //////////////////
    (
      "GET /twocodings HTTP/1.0\\r\\nConnection:Close\\r\\n\\r\\n",
      (Status.Ok, Set(textPlain, length(3), connClose), "Foo"),
    ),
    // /////////////// Work with examples that don't have a body //////////////////////
    ("GET /notmodified HTTP/1.1\\r\\n\\r\\n", (Status.NotModified, Set(), "")),
    (
      "GET /notmodified HTTP/1.0\\r\\nConnection: Keep-Alive\\r\\n\\r\\n",
      (Status.NotModified, Set(connKeep), ""),
    ),
  )
  /**
   * Routes backing the request/response table above: small endpoints covering
   * fixed-length bodies, chunked streaming, POST echo, a double-coded response
   * and a body-less 304. Unknown paths fall through to 404 via `orNotFound`.
   */
  def apply(): Kleisli[IO, Request[IO], Response[IO]] =
    HttpRoutes
      .of[IO] {
        case req if req.method == Method.GET && req.pathInfo == path"/get" =>
          Ok("get")
        // Body is emitted in two separately-evaluated pieces (with cedes in
        // between) so the server cannot know the length up front and must chunk.
        case req if req.method == Method.GET && req.pathInfo == path"/chunked" =>
          Ok(eval(IO.cede *> IO("chu")) ++ eval(IO.cede *> IO("nk")))
        case req if req.method == Method.POST && req.pathInfo == path"/post" =>
          Ok("post")
        // Explicit Transfer-Encoding header on a known-length body: exercises
        // the "two codings" corner case from the table above.
        case req if req.method == Method.GET && req.pathInfo == path"/twocodings" =>
          Ok("Foo", `Transfer-Encoding`(TransferCoding.chunked))
        case req if req.method == Method.POST && req.pathInfo == path"/echo" =>
          Ok(emit("post") ++ req.bodyText)
        // Kind of cheating, as the real NotModified response should have a Date header representing the current? time?
        case req if req.method == Method.GET && req.pathInfo == path"/notmodified" =>
          NotModified()
      }
      .orNotFound
}
| http4s/http4s | blaze-server/src/test/scala/org/http4s/blaze/server/ServerTestRoutes.scala | Scala | apache-2.0 | 6,301 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.cfclerk.xmlparsers
import com.normation.cfclerk.domain._
import scala.xml._
import net.liftweb.common._
import com.normation.utils.XmlUtils._
import com.normation.cfclerk.exceptions._
import CfclerkXmlConstants._
import com.normation.utils.Control.{sequence,bestEffort}
/**
 * Parser for the <SECTIONS> part of a technique descriptor: builds the tree of
 * SectionSpec (sections) and SectionVariableSpec (variables) and enforces the
 * structural invariants (unique names, valid component keys, no variable at
 * the root level). Every structural error is reported by throwing a
 * ParsingException.
 */
class SectionSpecParser(variableParser:VariableSpecParser) extends Loggable {

  /**
   * Parse the (at most one) <sections> element of the given policy XML and
   * return the root SectionSpec. When no <sections> element is present, an
   * empty root section is returned.
   *
   * Throws ParsingException on: duplicated <sections> markers, duplicated
   * (case-insensitive) variable or section names, a componentKey referencing
   * an unknown variable, or a variable declared directly under the root.
   */
  def parseSectionsInPolicy(policy: Node, id: TechniqueId, policyName: String):SectionSpec = {

    val sections = policy \\\\ SECTIONS_ROOT

    if (sections.size > 1) {
      val err = "In %s -> %s : Only one <sections> marker is allowed in the entire file".format(id, policyName)
      logger.error(err)
      throw new ParsingException(err)
    }

    if (sections.isEmpty)
      SectionSpec(SECTION_ROOT_NAME)
    else {
      val root = SectionSpec(SECTION_ROOT_NAME, children = parseChildren(sections.head, id, policyName))

      val variableNames = root.getAllVariables.map( _.name )

      // all variable names and all section names must be unique (case insensitive)
      checkUniqueness(variableNames) {
        "At least two variables have the same name (case unsensitive), what is forbiden: "
      }
      checkUniqueness(root.getAllSections.map(_.name)) {
        "At least two sections have the same name (case unsensitive), what is forbiden: "
      }

      /*
       * Check that all sections with a defined component key reference existing
       * variables
       */
      root.getAllSections.foreach { section => section.componentKey match {
        case None => //OK
        case Some(key) => if(!variableNames.contains(key)) {
          throw new ParsingException("Section '%s' reference as component key variable '%s' that was not found. Know variables are: %s".format(
              section.name, key, variableNames.mkString("[", "," , "]")
          ))
        }
      } }

      /*
       * check that the root section only holds sub-sections (and no variables)
       */
      bestEffort(root.children) { child => child match {
        case v : SectionVariableSpec => Failure("Variable declaration '%s' is not allowed here".format(v.name))
        case _ => Full("OK")
      } } match {
        case f:Failure => throw new ParsingException("<%s> must contain only <%s> children : %s".format(SECTIONS_ROOT, SECTION, f.messageChain))
        case _ => //OK
      }

      root
    }
  }

  // utility method that check duplicate elements in a string sequence case-unsensitive
  private[this] def checkUniqueness(seq:Seq[String])(errorMsg:String) : Unit = {
    val duplicates = seq.groupBy( _.toLowerCase ).collect {
      case(k, x) if x.size > 1 => x.mkString("(",",",")")
    }

    if(duplicates.nonEmpty) {
      throw new ParsingException(errorMsg + duplicates.mkString("[", "," , "]") )
    }
  }

  //method that actually parse a <SECTIONS> or <SECTION> tag
  private[this] def parseSection(root: Node, id: TechniqueId, policyName: String): Box[SectionSpec] = {

    // the root <sections> element must be anonymous; any other section must be named
    val name = {
      val n = getAttributeText(root, "name", "")
      if(root.label == SECTIONS_ROOT) {
        if(n.nonEmpty) throw new ParsingException("<%s> can not have a 'name' attribute.".format(SECTIONS_ROOT))
        else SECTION_ROOT_NAME
      } else {
        if(n.nonEmpty) n
        else throw new ParsingException("Section must have name. Missing name for: " + root)
      }
    }

    val isMultivalued = "true" == getAttributeText(root, SECTION_IS_MULTIVALUED, "false").toLowerCase
    val foldable = "true" == getAttributeText(root, SECTION_IS_FOLDABLE, "false").toLowerCase
    val description = getUniqueNodeText(root, SECTION_DESCRIPTION, "")
    val isComponent = "true" == getAttributeText(root, SECTION_IS_COMPONENT, "false").toLowerCase

    // Fixed: the previous `match { case null | Some("") => None; case x => x }`
    // contained a dead `null` pattern (headOption.map can never yield null).
    // An absent or empty componentKey attribute simply maps to None.
    val componentKey = (root \\ ("@" + SECTION_COMPONENT_KEY)).headOption.map( _.text).filter(_.nonEmpty)

    /**
     * A key must be define if and only if we are in a multivalued, component section.
     */
    if(isMultivalued && isComponent && componentKey.isEmpty) {
      throw new ParsingException("Section '%s' is multivalued and is component. A componentKey attribute must be specified".format(name))
    }

    val children = parseChildren(root, id, policyName)

    val sectionSpec = SectionSpec(name, isMultivalued, isComponent, componentKey, foldable, description, children)

    // multivalued sections get their variables cloned per instance
    if (isMultivalued)
      Full(sectionSpec.cloneVariablesInMultivalued)
    else
      Full(sectionSpec)
  }

  // Parse all direct children of a <SECTIONS> or <SECTION> node. Each child
  // must be either a variable declaration or a nested section; anything else
  // is a parsing error.
  private[this] def parseChildren(node: Node, id: TechniqueId, policyName: String): Seq[SectionChildSpec] = {
    assert(node.label == SECTIONS_ROOT || node.label == SECTION)

    def parseOneVariable(node: Node) = {
      variableParser.parseSectionVariableSpec(node) match {
        case Full(x) => x
        case Empty =>
          val err = "In %s -> %s, couldn't parse variable %s, no error message".format(id, policyName, node)
          logger.error(err)
          throw new ParsingException(err)
        case Failure(m, _, _) =>
          val err = "In %s -> %s, couldn't parse variable %s, error message: %s".format(id, policyName, node, m)
          logger.error(err)
          throw new ParsingException(err)
      }
    }

    def parseOneSection(node: Node, id: TechniqueId, policyName: String) : SectionSpec = {
      parseSection(node, id, policyName) match {
        case Full(section) => section
        case Failure(m, _, _) =>
          val errWithMessage = "Couldn't parse Section, error message:" + m
          logger.error(errWithMessage)
          throw new ParsingException(errWithMessage)
        case Empty =>
          logger.error("Couldn't parse Section")
          throw new ParsingException("Couldn't parse Section")
      }
    }

    // ignore empty nodes and raw text between elements
    for {
      child <- node.child
      if !child.isEmpty && child.label != "#PCDATA"
    } yield child.label match {
      case v if SectionVariableSpec.isVariable(v) => parseOneVariable(child)
      case s if SectionSpec.isSection(s) => parseOneSection(child,id,policyName)
      case x => throw new ParsingException("Unexpected <%s> child element in policy package %s: %s".format(SECTIONS_ROOT,id, x))
    }
  }
}
package pl.touk.nussknacker.engine.lite.api.utils.sources
import cats.Monad
import cats.data.{Validated, ValidatedNel}
import pl.touk.nussknacker.engine.api.component.ComponentType
import pl.touk.nussknacker.engine.api.{Context, Lifecycle}
import pl.touk.nussknacker.engine.api.component.NodeComponentInfo
import pl.touk.nussknacker.engine.api.exception.NuExceptionInfo
import pl.touk.nussknacker.engine.api.runtimecontext.{ContextIdGenerator, EngineRuntimeContext}
import pl.touk.nussknacker.engine.api.NodeId
import pl.touk.nussknacker.engine.lite.api.commonTypes.ErrorType
import pl.touk.nussknacker.engine.lite.api.customComponentTypes
import pl.touk.nussknacker.engine.lite.api.customComponentTypes.LiteSource
import scala.language.higherKinds
import scala.util.Try
/**
 * Convenience base for Lite engine sources.
 *
 * Wires the Lifecycle callback: open() captures the engine runtime context and
 * a per-node context-id generator, which are then used to stamp a fresh
 * Context id on every incoming record.
 */
trait BaseLiteSource[T] extends LiteSource[T] with Lifecycle {
  // Both fields are assigned exactly once in open(); the engine is expected to
  // call open() before createTransformation's function is ever invoked.
  protected var context: EngineRuntimeContext = _
  protected var contextIdGenerator: ContextIdGenerator = _
  // Identifier of the source node; used to scope the context-id generator.
  def nodeId: NodeId
  override def open(context: EngineRuntimeContext): Unit = {
    this.context = context
    this.contextIdGenerator = context.contextIdGenerator(nodeId.id)
  }
  // Wraps the user-level transform: any exception thrown while transforming a
  // record is captured as a NuExceptionInfo (component type Source) on the
  // invalid side instead of propagating and failing the whole flow.
  override def createTransformation[F[_] : Monad](componentContext: customComponentTypes.CustomComponentContext[F]): T => ValidatedNel[ErrorType, Context] =
    record => Validated.fromEither(Try(transform(record)).toEither)
      .leftMap(ex => NuExceptionInfo(Some(NodeComponentInfo(componentContext.nodeId, "unknown", ComponentType.Source)), ex, Context(contextIdGenerator.nextContextId()))).toValidatedNel
  // Converts one raw record into an engine Context; may throw (handled above).
  def transform(record: T): Context
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.scheduler
import org.apache.spark.scheduler.{LiveListenerBus, SparkListener, SparkListenerEvent}
import org.apache.spark.util.ListenerBus
/**
* A Streaming listener bus to forward events to StreamingListeners. This one will wrap received
* Streaming events as WrappedStreamingListenerEvent and send them to Spark listener bus. It also
* registers itself with Spark listener bus, so that it can receive WrappedStreamingListenerEvents,
* unwrap them as StreamingListenerEvent and dispatch them to StreamingListeners.
*/
private[streaming] class StreamingListenerBus(sparkListenerBus: LiveListenerBus)
  extends SparkListener with ListenerBus[StreamingListener, StreamingListenerEvent] {

  /**
   * Post a StreamingListenerEvent to the Spark listener bus asynchronously. This event will be
   * dispatched to all StreamingListeners in the thread of the Spark listener bus.
   */
  def post(event: StreamingListenerEvent) {
    // Wrap so the event can travel on the Spark bus; it is unwrapped again in
    // onOtherEvent below on the bus thread.
    sparkListenerBus.post(new WrappedStreamingListenerEvent(event))
  }

  // Receives events back from the Spark bus; only our own wrapped streaming
  // events are handled, every other SparkListenerEvent is ignored.
  override def onOtherEvent(event: SparkListenerEvent): Unit = {
    event match {
      case WrappedStreamingListenerEvent(e) =>
        postToAll(e)
      case _ =>
    }
  }

  // Dispatch a single streaming event to the matching StreamingListener
  // callback. Unknown event subtypes are silently dropped (final case _).
  protected override def doPostEvent(
      listener: StreamingListener,
      event: StreamingListenerEvent): Unit = {
    event match {
      case receiverStarted: StreamingListenerReceiverStarted =>
        listener.onReceiverStarted(receiverStarted)
      case receiverError: StreamingListenerReceiverError =>
        listener.onReceiverError(receiverError)
      case receiverStopped: StreamingListenerReceiverStopped =>
        listener.onReceiverStopped(receiverStopped)
      case eventSourceStarted: StreamingListenerEventSourceStarted =>
        listener.onEventSourceStarted(eventSourceStarted)
      case eventSourceStopped: StreamingListenerEventSourceStopped =>
        listener.onEventSourceStopped(eventSourceStopped)
      case batchSubmitted: StreamingListenerBatchSubmitted =>
        listener.onBatchSubmitted(batchSubmitted)
      case batchStarted: StreamingListenerBatchStarted =>
        listener.onBatchStarted(batchStarted)
      case batchCompleted: StreamingListenerBatchCompleted =>
        listener.onBatchCompleted(batchCompleted)
      case outputOperationStarted: StreamingListenerOutputOperationStarted =>
        listener.onOutputOperationStarted(outputOperationStarted)
      case outputOperationCompleted: StreamingListenerOutputOperationCompleted =>
        listener.onOutputOperationCompleted(outputOperationCompleted)
      case _ =>
    }
  }

  /**
   * Register this one with the Spark listener bus so that it can receive Streaming events and
   * forward them to StreamingListeners.
   */
  def start(): Unit = {
    sparkListenerBus.addListener(this) // for getting callbacks on spark events
  }

  /**
   * Unregister this one with the Spark listener bus and all StreamingListeners won't receive any
   * events after that.
   */
  def stop(): Unit = {
    sparkListenerBus.removeListener(this)
  }

  /**
   * Wrapper for StreamingListenerEvent as SparkListenerEvent so that it can be posted to Spark
   * listener bus.
   */
  private case class WrappedStreamingListenerEvent(streamingListenerEvent: StreamingListenerEvent)
    extends SparkListenerEvent {

    // Do not log streaming events in event log as history server does not support streaming
    // events (SPARK-12140). TODO Once SPARK-12140 is resolved we should set it to true.
    protected[spark] override def logEvent: Boolean = false
  }
}
| mashin-io/rich-spark | streaming/src/main/scala/org/apache/spark/streaming/scheduler/StreamingListenerBus.scala | Scala | apache-2.0 | 4,370 |
package bugs.bug1
// Minimal fixture for the bug1 reproduction: forces resolution of a member in
// a sibling package (presumably to exercise cross-package reference handling —
// see bugs.bug1.otherPackage.Foo for the referenced side).
object Bar{
  val bar = bugs.bug1.otherPackage.Foo.foo
}
package grasshopper.geocoder.search.census
import com.typesafe.scalalogging.Logger
import feature.Feature
import geometry.Point
import grasshopper.geocoder.search.census.SearchUtils._
import grasshopper.model.SearchableAddress
import io.geojson.FeatureJsonProtocol._
import org.elasticsearch.action.search.SearchType
import org.elasticsearch.client.Client
import org.elasticsearch.index.query.{ FilterBuilders, QueryBuilders }
import org.slf4j.LoggerFactory
import spray.json._
import scala.util.Try
/**
 * Census TIGER geocoder: searches Elasticsearch for street segments whose
 * house-number range and zip code cover the requested address, then
 * interpolates a point along the matching segment.
 */
trait CensusGeocode {

  lazy val censusLogger = Logger(LoggerFactory.getLogger("grasshopper-census"))

  /**
   * Geocode an address against TIGER line data.
   *
   * @param client       Elasticsearch client
   * @param index        index to search
   * @param indexType    NOTE(review): currently unused by the query — confirm intent
   * @param addressInput parsed address to geocode
   * @param count        maximum number of candidate features to return
   * @return interpolated features tagged with source=census-tiger and a
   *         reconstructed "address" property; a single Point(0, 0) feature
   *         when nothing matched (sentinel kept for API compatibility)
   */
  def geocodeLine(client: Client, index: String, indexType: String, addressInput: SearchableAddress, count: Int): Array[Feature] = {
    censusLogger.debug(s"Search Address: ${addressInput.toString()}")
    val hits = searchAddress(client, index, indexType, addressInput)
    val addressNumber = toInt(addressInput.addressNumber).getOrElse(0)
    if (hits.nonEmpty) {
      hits
        .take(count) // limit before deserializing: avoids parsing hits that would be discarded
        .map(hit => hit.getSourceAsString)
        .map { s =>
          val line = s.parseJson.convertTo[Feature]
          censusLogger.debug(line.toJson.toString)
          val addressRange = AddressInterpolator.calculateAddressRange(line, addressNumber)
          AddressInterpolator.interpolate(line, addressRange, addressNumber)
        }
        .map(f => f.addOrUpdate("source", "census-tiger"))
        .map { f =>
          // rebuild a display address from the segment attributes
          val streetName = f.get("FULLNAME").getOrElse("")
          val city = addressInput.city
          val state = f.get("STATE").getOrElse("")
          val zipCodeR = f.get("ZIPR").getOrElse("")
          f.addOrUpdate("address", s"${addressNumber} ${streetName} ${city} ${state} ${zipCodeR}")
        }
    } else {
      Array(Feature(Point(0, 0)))
    }
  }

  // Build and run the segment query: state and street must match, and the house
  // number must fall in either the left or right range of a segment whose left
  // or right zip equals the requested one.
  private def searchAddress(client: Client, index: String, indexType: String, addressInput: SearchableAddress) = {
    censusLogger.debug(s"Searching on ${addressInput}")
    val number = addressInput.addressNumber.toLowerCase
    val street = addressInput.streetName
    val zipCode = addressInput.zipCode
    val state = addressInput.state
    val stateQuery = QueryBuilders.matchQuery("STATE", state)
    val streetQuery = QueryBuilders.matchPhraseQuery("FULLNAME", street)
    val zipLeftFilter = FilterBuilders.termFilter("ZIPL", zipCode)
    val zipRightFilter = FilterBuilders.termFilter("ZIPR", zipCode)
    val zipFilter = FilterBuilders.orFilter(zipLeftFilter, zipRightFilter)
    val rightHouseFilter = FilterBuilders.andFilter(
      FilterBuilders.rangeFilter("RFROMHN").lte(number),
      FilterBuilders.rangeFilter("RTOHN").gte(number)
    )
    val leftHouseFilter = FilterBuilders.andFilter(
      FilterBuilders.rangeFilter("LFROMHN").lte(number),
      FilterBuilders.rangeFilter("LTOHN").gte(number)
    )
    val houseFilter = FilterBuilders.orFilter(rightHouseFilter, leftHouseFilter)
    val filter = FilterBuilders.andFilter(houseFilter, zipFilter)
    val boolQuery = QueryBuilders
      .boolQuery()
      .must(stateQuery)
      .must(streetQuery)
    val query = QueryBuilders.filteredQuery(boolQuery, filter)
    censusLogger.debug(query.toString)
    val response = client.prepareSearch(index)
      .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
      .setQuery(query)
      .execute
      .actionGet()
    response.getHits.getHits
  }
}
| awolfe76/grasshopper | geocoder/src/main/scala/grasshopper/geocoder/search/census/CensusGeocode.scala | Scala | cc0-1.0 | 3,388 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc
package transform
import symtab._
import Flags._
import scala.collection.mutable.ListBuffer
/**
 * Compiler phase that lifts nested classes to the top level of their enclosing
 * package, rewriting both symbol infos (via `transformInfo`/`flattened`) and
 * trees (via `Flattener`).
 */
abstract class Flatten extends InfoTransform {
  import global._
  import treeInfo.isQualifierSafeToElide

  /** the following two members override abstract members in Transform */
  val phaseName: String = "flatten"

  /** Updates the owning scope with the given symbol, unlinking any others.
   */
  private def replaceSymbolInCurrentScope(sym: Symbol): Unit = exitingFlatten {
    removeSymbolInCurrentScope(sym)
    sym.owner.info.decls enter sym
  }

  // Unlink every unshadowed entry with the symbol's name from its owner's scope.
  private def removeSymbolInCurrentScope(sym: Symbol): Unit = exitingFlatten {
    val scope = sym.owner.info.decls
    val old = (scope lookupUnshadowedEntries sym.name).toList
    old foreach (scope unlink _)
    def old_s = old map (_.sym) mkString ", "
    if (old.nonEmpty) debuglog(s"In scope of ${sym.owner}, unlinked $old_s")
  }

  // Mark a nested class as lifted (idempotent) and re-enter it in its new owner.
  private def liftClass(sym: Symbol): Unit = {
    if (!sym.isLifted) {
      sym setFlag LIFTED
      debuglog("re-enter " + sym.fullLocationString)
      replaceSymbolInCurrentScope(sym)
    }
  }
  private def liftSymbol(sym: Symbol): Unit = {
    liftClass(sym)
  }
  // This is a short-term measure partially working around objects being
  // lifted out of parameterized classes, leaving them referencing
  // invisible type parameters.
  private def isFlattenablePrefix(pre: Type) = {
    val clazz = pre.typeSymbol
    clazz.isClass && !clazz.isPackageClass && {
      // Cannot flatten here: class A[T] { object B }
      // was "at erasurePhase.prev"
      enteringErasure(clazz.typeParams.isEmpty)
    }
  }

  // Type map performing the info-side flattening: rebases TypeRefs with a
  // flattenable prefix onto the top-level owner, and moves nested class
  // declarations out of non-package ClassInfoTypes.
  private val flattened = new TypeMap {
    def apply(tp: Type): Type = tp match {
      case TypeRef(pre, sym, args) if isFlattenablePrefix(pre) =>
        val top = sym.enclosingTopLevelClass
        assert(args.isEmpty && top != NoSymbol, sym.ownerChain)
        typeRef(top.owner.thisType, sym, Nil)
      case ClassInfoType(parents, decls, clazz) =>
        var parents1 = parents
        val decls1 = scopeTransform(clazz) {
          val decls1 = newScope
          if (clazz.isPackageClass) {
            exitingFlatten { decls foreach (decls1 enter _) }
          }
          else {
            val oldowner = clazz.owner
            exitingFlatten { oldowner.info }
            parents1 = parents mapConserve (this)
            for (sym <- decls) {
              if (sym.isTerm && !sym.isStaticModule) {
                decls1 enter sym
                if (sym.isModule) {
                  // In theory, we could assert(sym.isMethod), because nested, non-static modules are
                  // transformed to methods (METHOD flag added in UnCurry). But this requires
                  // forcing sym.info (see comment on isModuleNotMethod), which forces stub symbols
                  // too eagerly (scala/bug#8907).
                  // Note that module classes are not entered into the 'decls' of the ClassInfoType
                  // of the outer class, only the module symbols are. So the current loop does
                  // not visit module classes. Therefore we set the LIFTED flag here for module
                  // classes.
                  // TODO: should we also set the LIFTED flag for static, nested module classes?
                  // currently they don't get the flag, even though they are lifted to the package
                  sym.moduleClass setFlag LIFTED
                }
              } else if (sym.isClass)
                liftSymbol(sym)
            }
          }
          decls1
        }
        ClassInfoType(parents1, decls1, clazz)
      case MethodType(params, restp) =>
        val restp1 = apply(restp)
        if (restp1 eq restp) tp else copyMethodType(tp, params, restp1)
      case PolyType(tparams, restp) =>
        val restp1 = apply(restp)
        if (restp1 eq restp) tp else PolyType(tparams, restp1)
      case _ =>
        mapOver(tp)
    }
  }

  def transformInfo(sym: Symbol, tp: Type): Type = flattened(tp)

  protected def newTransformer(unit: CompilationUnit): Transformer = new Flattener

  // Tree-side companion of `flattened`: collects nested class definitions and
  // re-emits them as top-level statements of the enclosing package.
  class Flattener extends Transformer {
    /** Buffers for lifted out classes, keyed by the package class that will receive them */
    private val liftedDefs = perRunCaches.newMap[Symbol, ListBuffer[Tree]]()

    override def transform(tree: Tree): Tree = postTransform {
      tree match {
        case PackageDef(_, _) =>
          liftedDefs(tree.symbol.moduleClass) = new ListBuffer
          tree.transform(this)
        case Template(_, _, _) if tree.symbol.isDefinedInPackage =>
          liftedDefs(tree.symbol.owner) = new ListBuffer
          tree.transform(this)
        case ClassDef(_, _, _, _) if tree.symbol.isNestedClass =>
          // scala/bug#5508 Ordering important. In `object O { trait A { trait B } }`, we want `B` to appear after `A` in
          // the sequence of lifted trees in the enclosing package. Why does this matter? Currently, mixin
          // needs to transform `A` first to a chance to create accessors for private[this] trait fields
          // *before* it transforms inner classes that refer to them. This also fixes scala/bug#6231.
          //
          // Alternative solutions
          // - create the private[this] accessors eagerly in Namer (but would this cover private[this] fields
          //   added later phases in compilation?)
          // - move the accessor creation to the Mixin info transformer
          val liftedBuffer = liftedDefs(tree.symbol.enclosingTopLevelClass.owner)
          val index = liftedBuffer.length
          liftedBuffer.insert(index, tree.transform(this))
          if (tree.symbol.sourceModule.isStaticModule)
            removeSymbolInCurrentScope(tree.symbol.sourceModule)
          // the definition is re-emitted later by transformStats
          EmptyTree
        case _ =>
          tree.transform(this)
      }
    }

    // Rewrites selections of (now lifted) static modules into direct references
    // and re-flattens the tree's type.
    private def postTransform(tree: Tree): Tree = {
      val sym = tree.symbol
      val tree1 = tree match {
        case Select(qual, name) if sym.isStaticModule && !sym.isTopLevel =>
          exitingFlatten {
            atPos(tree.pos) {
              val ref = gen.mkAttributedRef(sym)
              if (isQualifierSafeToElide(qual)) ref
              else Block(List(qual), ref).setType(tree.tpe) // need to execute the qualifier but refer directly to the lifted module.
            }
          }
        case _ =>
          tree
      }
      tree1 setType flattened(tree1.tpe)
    }

    /** Transform statements and add lifted definitions to them. */
    override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
      val stats1 = super.transformStats(stats, exprOwner)
      if (currentOwner.isPackageClass) {
        val lifted = liftedDefs.remove(currentOwner).toList.flatten
        stats1 ::: lifted
      }
      else stats1
    }
  }
}
| martijnhoekstra/scala | src/compiler/scala/tools/nsc/transform/Flatten.scala | Scala | apache-2.0 | 7,128 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.gihyo.spark.ch06
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{StreamingContext, Seconds}
import org.apache.spark.streaming.dstream.InputDStream
/**
 * Spark Streaming example for DStream.transform: every incoming line is tagged
 * as a login action and checked against a static black list; only users that
 * are NOT black-listed are kept.
 */
object gihyo_6_3_Transform {

  def main(args: Array[String]) {
    if (args.length != 2) {
      // Bug fix: the exception used to be instantiated but never thrown
      // (`new IllegalArgumentException(...)` as a dead statement), so the
      // usage error was only signalled by the following System.exit(1).
      // Throwing makes the failure explicit; the JVM still exits non-zero.
      throw new IllegalArgumentException("Invalid arguments")
    }
    val targetHost = args(0)
    val targetHostPort = args(1).toInt

    val conf = new SparkConf().setAppName("NetworkWordCount")
    val sc = new SparkContext(conf)
    val ssc = new StreamingContext(sc, Seconds(5))
    val lines = ssc.socketTextStream(targetHost, targetHostPort)
    // Static black list of (userId, reason) pairs.
    val blackList = sc.parallelize(Array(("user002", "rockLogin"), ("user003", "rockPayment")))
    run(lines, blackList)

    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * Tags each input line as a login action, left-outer-joins it against the
   * black list and keeps only entries with no match on the right side
   * (i.e. users absent from the black list), printing the result.
   */
  def run(stream: InputDStream[String], blackList: RDD[(String, String)]) {
    val userList = stream.map(x => (x, "action:Login")).transform(rdd => {
      val tmpUserList = rdd.leftOuterJoin(blackList)
      tmpUserList.filter(user => user._2._2.isEmpty)
    })
    userList.print()
  }
}
| yu-iskw/gihyo-spark-book-example | src/main/scala/jp/gihyo/spark/ch06/gihyo_6_3_Transform.scala | Scala | apache-2.0 | 1,945 |
package scroll.tests.parameterized
import scroll.tests.mocks.CompartmentUnderTest
import java.io.IOException
/**
 * Verifies that Throwables raised inside role methods propagate unchanged
 * through the dynamic role dispatch (`(+core).method()`): an Error, an
 * unchecked exception and a checked exception are each exercised, for every
 * compartment-flag combination supplied by PARAMS.
 */
class ThrowableInRoleMethodsTest extends AbstractParameterizedSCROLLTest {

  // Plain core object to which the Exceptional role is bound.
  class CoreType

  class ExceptionShowcase(c: Boolean, cc: Boolean) extends CompartmentUnderTest(c, cc) {
    // Role whose methods each fail with a different Throwable category.
    class Exceptional {
      def roleMethodWithError(): Unit = throw new Error()

      def roleMethodWithUncheckedException(): Unit = throw new RuntimeException()

      def roleMethodWithCheckedException(): Unit = throw new IOException()
    }
  }

  test("Handling thrown Error") {
    forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
      new ExceptionShowcase(c, cc) {
        val core = new CoreType()
        core play new Exceptional()
        an[Error] should be thrownBy (+core).roleMethodWithError()
      }
    }
  }

  test("Handling thrown unchecked Exception") {
    forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
      new ExceptionShowcase(c, cc) {
        val core = new CoreType()
        core play new Exceptional()
        an[RuntimeException] should be thrownBy (+core).roleMethodWithUncheckedException()
      }
    }
  }

  test("Handling thrown checked Exception") {
    forAll(PARAMS) { (c: Boolean, cc: Boolean) =>
      new ExceptionShowcase(c, cc) {
        val core = new CoreType()
        core play new Exceptional()
        an[IOException] should be thrownBy (+core).roleMethodWithCheckedException()
      }
    }
  }
}
| max-leuthaeuser/SCROLL | tests/src/test/scala/scroll/tests/parameterized/ThrowableInRoleMethodsTest.scala | Scala | lgpl-3.0 | 1,449 |
package eu.reactivesystems.league.impl
import akka.actor.PoisonPill
import akka.cluster.singleton.{
ClusterSingletonManager,
ClusterSingletonManagerSettings
}
import com.lightbend.lagom.scaladsl.api.ServiceLocator
import com.lightbend.lagom.scaladsl.api.ServiceLocator.NoServiceLocator
import com.lightbend.lagom.scaladsl.devmode.LagomDevModeComponents
import com.lightbend.lagom.scaladsl.persistence.cassandra.WriteSideCassandraPersistenceComponents
import com.lightbend.lagom.scaladsl.persistence.jdbc.ReadSideJdbcPersistenceComponents
import com.lightbend.lagom.scaladsl.playjson.{
JsonSerializer,
JsonSerializerRegistry
}
import com.lightbend.lagom.scaladsl.server._
import com.softwaremill.macwire._
import com.softwaremill.macwire.akkasupport._
import eu.reactivesystems.league.api.LeagueService
import play.api.db.HikariCPComponents
import play.api.libs.ws.ahc.AhcWSComponents
import scala.collection.immutable.Seq
/**
 * Lagom application loader for the league service: selects the service
 * locator according to run mode (none here in production, dev-mode locator
 * during development).
 */
class LeagueLoader extends LagomApplicationLoader {

  override def load(context: LagomApplicationContext): LagomApplication =
    new LeagueApplication(context) {
      // NOTE(review): NoServiceLocator disables inter-service lookup in
      // production — confirm this is intended before deploying alongside
      // other services.
      override def serviceLocator: ServiceLocator = NoServiceLocator
    }

  override def loadDevMode(
      context: LagomApplicationContext): LagomApplication =
    new LeagueApplication(context) with LagomDevModeComponents

  override def describeServices = List(
    readDescriptor[LeagueService]
  )
}
/**
 * Wiring for the league service: Cassandra on the write side, JDBC (Hikari)
 * on the read side, plus a cluster-singleton actor driving the read-side
 * projection.
 */
abstract class LeagueApplication(context: LagomApplicationContext)
    extends LagomApplication(context)
    with WriteSideCassandraPersistenceComponents
    with ReadSideJdbcPersistenceComponents
    with HikariCPComponents
    with AhcWSComponents {

  // Bind the service that this server provides
  override lazy val lagomServer =
    serverFor[LeagueService](wire[LeagueServiceImpl])

  // Register the JSON serializer registry
  override lazy val jsonSerializerRegistry = LeagueSerializerRegistry

  // Register the league persistent entity
  persistentEntityRegistry.register(wire[LeagueEntity])

  // Register read side processor: the projection must run exactly once across
  // the cluster, hence the cluster-singleton wrapper (terminated with
  // PoisonPill on singleton hand-over).
  val leagueProjectionProps = wireProps[LeagueProjection]

  actorSystem.actorOf(
    ClusterSingletonManager.props(
      singletonProps = leagueProjectionProps,
      terminationMessage = PoisonPill,
      settings = ClusterSingletonManagerSettings(actorSystem)),
    name = "leagueProjection"
  )
}
/**
* Akka serialization, used by both persistence and remoting, needs to have
* serializers registered for every type serialized or deserialized. While it's
* possible to use any serializer you want for Akka messages, out of the box
* Lagom provides support for JSON, via this registry abstraction.
*
* The serializers are registered here, and then provided to Lagom in the
* application loader.
*/
object LeagueSerializerRegistry extends JsonSerializerRegistry {
  // Every message type that crosses a serialization boundary (persistence or
  // remoting) must be listed here, or Akka serialization will fail at runtime.
  override def serializers: Seq[JsonSerializer[_]] = Seq(
    JsonSerializer[AddClub],
    JsonSerializer[AddGame],
    JsonSerializer[ChangeGame],
    JsonSerializer[ClubRegistered],
    JsonSerializer[GamePlayed],
    JsonSerializer[ResultRevoked],
    JsonSerializer[LeagueState]
  )
}
| reactivesystems-eu/eventsourcing-intro | league-lagom/league-impl/src/main/scala/eu/reactivesystems/league/impl/LeagueLoader.scala | Scala | apache-2.0 | 3,119 |
// Appears to be a compiler regression test (pos/t1957): the point is simply
// that this compiles. It exercises self-type-refined abstract type members
// (`moduleType`, `settingsType`, `gristType`) threaded through Grist/Tool/Module.
object Test {
  abstract class Settings {}

  abstract class Grist
  { self =>
    type settingsType <: Settings
    type moduleType <: Module {type settingsType = self.settingsType}
    val module: moduleType
  }

  abstract class Tool
  { self =>
    type settingsType <: Settings
    type moduleType = Module { type settingsType = self.settingsType }
    type gristType = Grist { type moduleType <: self.moduleType; type settingsType <: self.settingsType }

    def inputGrist: List[gristType]
  }

  abstract class Module
  { self =>
    type settingsType <: Settings
    final type commonModuleType = Module {type settingsType = self.settingsType}
    type selfType >: self.type <: commonModuleType

    // BTW: if we use the commented out type decls, the code compiles successfully
    // type gristType = Grist {type settingsType <: self.settingsType; type moduleType <: commonModuleType }

    val tools: List[Tool {type settingsType = self.settingsType}]

    protected def f: List[commonModuleType] =
    {
      val inputGrists = tools.flatMap(_.inputGrist) // val inputGrists: List[gristType] =
      inputGrists.map(_.module)
    }
  }
}
| AlexSikia/dotty | tests/pos/t1957.scala | Scala | bsd-3-clause | 1,248 |
package dhg.nlp.tag.support
import scala.annotation.tailrec
import dhg.nlp.tag.OptionalTagDict
import dhg.nlp.tag.SimpleTagDict
import dhg.nlp.tag.TagDict.OptionalTagDict
import dhg.nlp.tag.Tagger
import dhg.util.CollectionUtil._
import dhg.util.Pattern._
/**
* A generic implementation of the Viterbi algorithm for finding the most
* likely tagging for the sequence.
*
* @param edgeScorer class for calculating the probability of a symbol/tag transition
* @param tagDict tag dictionary indicating which words can be used with which tags
* @param tagTransitions valid tag-tag transitions
*/
class Viterbi[Sym, Tag](
  edgeScorer: TagEdgeScorer[Sym, Tag],
  tagDict: OptionalTagDict[Sym, Tag],
  tagTransitions: Map[Option[Tag], Set[Option[Tag]]])
  extends Tagger[Sym, Tag] {
  /**
   * Find the most likely tagging for the sequence given no constraints on
   * tag-tag transitions
   *
   * @param sequence sequence to be tagged
   * @param tagDict tag dictionary indicating which words can be used with which tags
   */
  def this(edgeScorer: TagEdgeScorer[Sym, Tag], tagDict: OptionalTagDict[Sym, Tag]) =
    // All transitions are allowed: every tag (plus the None boundary marker)
    // may be followed by every other tag.
    this(edgeScorer, tagDict, { val allTags = tagDict.allTags + None; allTags.mapToVal(allTags).toMap })
  /**
   * Find the most likely tagging for the sequence given no constraints on
   * which tags can be associated with which symbols
   *
   * @param sequence sequence to be tagged
   * @param tagSet set of all tags
   * @param tagTransitions valid tag-tag transitions
   */
  def this(edgeScorer: TagEdgeScorer[Sym, Tag], tagSet: Set[Tag], tagTransitions: Map[Option[Tag], Set[Option[Tag]]]) =
    this(edgeScorer, OptionalTagDict(SimpleTagDict(Map[Sym, Set[Tag]](), tagSet)), tagTransitions)
  /**
   * Find the most likely tagging for the sequence given no constraints on
   * which tags can be associated with which symbols or tag-tag transitions
   *
   * @param sequence sequence to be tagged
   * @param tagSet set of all tags
   */
  def this(edgeScorer: TagEdgeScorer[Sym, Tag], tagSet: Set[Tag]) =
    this(edgeScorer, OptionalTagDict(SimpleTagDict(Map[Sym, Set[Tag]](), tagSet)))
  /**
   * Tag each sequence using this model.
   *
   * Sequences are tagged independently, in parallel.
   *
   * @param rawSequences unlabeled data to be tagged
   * @return sequences tagged by the model
   */
  override def tagOptions(rawSequences: Vector[Vector[Sym]]): Vector[Option[Vector[(Sym, Tag)]]] =
    rawSequences.par.map(tagSequence).seq
  /**
   * Find the most likely tagging for the sequence.
   *
   * Returns None when no valid tagging exists (i.e. at some position no
   * legal tag can be reached via a valid transition).
   *
   * @param sequence sequence to be tagged
   */
  override def tagSequence(sequence: Vector[Sym]): Option[Vector[(Sym, Tag)]] = {
    // viterbi(t)(j) = the probability of the most likely subsequence of states
    // that accounts for the first t observations and ends in state j.
    // Set the initial values for the fold based on the initial observation
    val startViterbi = Map[Option[Tag], Double](None -> 1.0)
    val startBackpointers = List[Map[Option[Tag], Option[Tag]]]()
    val startSymbol: Option[Sym] = None
    // Build up backpointers list by calculating viterbi scores for each subsequent observation.
    // The sequence is bracketed with None on both ends as boundary markers.
    // The fold accumulator is an Option: it becomes (and stays) None as soon
    // as some position admits no valid (tag, transition) pair.
    val backpointers =
      (sequence.map(Some(_)) :+ None).foldLeft(Option(startViterbi, startBackpointers, startSymbol)) {
        case (Some((viterbi, backpointers, prevSym)), currSym) =>
          // for each possible tag, get the highest probability previous tag and its score
          val transitionScores =
            tagDict.set(currSym).mapTo(currTag => // each legal tag for the current symbol
              viterbi.collect {
                case (prevTag, viterbtiScore) if tagTransitions.getOrElse(prevTag, Set())(currTag) => // if the transition is valid
                  (prevTag, viterbtiScore * edgeScorer(prevSym, prevTag, currSym, currTag))
              })
              .toMap
              .filter(_._2.nonEmpty) // remove tags that don't transition anywhere
          val bestTransitions = transitionScores.mapVals(_.maxBy(_._2)) // get the previous tag with the highest probability (and its score)
          if (bestTransitions.nonEmpty)
            Some(
              bestTransitions.mapVals(_._2), // update viterbi for the next row
              bestTransitions.mapVals(_._1) :: backpointers, // append new backpointers
              currSym)
          else
            None
        case (None, _) => None // already failed; propagate
      }.map { case (_, backpointers, _) => backpointers }
    // Get the optimal tag sequence and map the tag indices back to their string values
    backpointers.map(bp => sequence zipSafe backtrack(bp).flatten)
  }
  /**
   * Backtrack through the backpointer maps to recover the optimal tag sequence.
   */
  private def backtrack(backpointers: List[Map[Option[Tag], Option[Tag]]]): Vector[Option[Tag]] = {
    @tailrec def inner(backpointers: List[Map[Option[Tag], Option[Tag]]], curTag: Option[Tag], tags: List[Option[Tag]]): List[Option[Tag]] =
      backpointers match {
        case Nil => assert(curTag == None); tags // reached the start boundary marker
        case currPointers :: previousPointers => inner(previousPointers, currPointers(curTag), curTag :: tags)
      }
    // The head backpointer map is keyed by the end marker None; UMap extracts
    // the tag it points back to (the last real tag of the sequence).
    val UMap(None -> lastTag) :: previousPointers = backpointers
    inner(previousPointers, lastTag, Nil).toVector
  }
}
////////////////////////////////
// TagEdgeScorer
////////////////////////////////
trait TagEdgeScorer[Sym, Tag] {
  /**
   * Calculate the value of a transition from the previous word/tag pair to the current word/tag pair.
   *
   * A `None` symbol/tag denotes the sequence boundary (see `Viterbi.tagSequence`,
   * which brackets every sequence with `None` on both ends).
   */
  def apply(prevSym: Option[Sym], prevTag: Option[Tag], currSym: Option[Sym], currTag: Option[Tag]): Double
}
| dhgarrette/low-resource-pos-tagging-2013 | src/main/scala/dhg/nlp/tag/support/Viterbi.scala | Scala | apache-2.0 | 5,567 |
/*
* Copyright (c) 2015, streamdrill UG (haftungsbeschränkt)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package streamdrill.json
import java.text.SimpleDateFormat
import java.util.{Locale, TimeZone}
import scala.collection.JavaConverters._
/** Implemented by types that know how to render themselves as a JSON string;
  * `JSONWriter.toJSON` delegates to this directly. */
trait HasToJSON {
  def toJSON: String
}
/**
* JSON object serializer. Writes an object as a JSON represention.
* Handles java and scala types equally.
*/
object JSONWriter {
  //val dateFormat = "EEE, dd MMM yyyy HH:mm:ss Z"
  // Per-thread formatter: SimpleDateFormat is not thread-safe, so each thread
  // gets its own UTC-pinned instance.
  val dateFormat = new ThreadLocal[SimpleDateFormat]() {
    override def initialValue() = {
      val df = new SimpleDateFormat("EEE MMM dd HH:mm:ss ZZZZZ yyyy", Locale.ENGLISH)
      df.setTimeZone(TimeZone.getTimeZone("UTC"))
      df
    }
  }
  // Compiled once. The previous implementation rebuilt this regex inside
  // isPrint for every single character passed through escapeString.
  private val PrintRegex = """(\\p{Print})""".r
  /**
   * Write an arbitrary data object as json. Handles Java and Scala types.
   *
   * @param v the value that should be represented as JSON
   * @return a string representation of the value
   */
  def toJSON[T](v: T)(implicit manifest: Manifest[T]): String = {
    (v: @unchecked) match {
      case b: Boolean => if (b) "true" else "false"
      case i: Int => i.toString
      case l: Long => l.toString
      // JSON has no literal for NaN/Infinity; they are emitted as 0.0
      case x: Double if x.isInfinite || x.isNaN => "0.0"
      case x: Double => java.lang.Double.toString(x)
      case n: Number => n.toString
      case s: String => formatString(s)
      case d: java.util.Date => formatDate(d)
      case m: Map[String, _] @unchecked => formatMap(m)
      case m: java.util.Map[String, _] @unchecked => formatMap(m.asScala.toMap)
      case l: List[_] @unchecked => formatList(l)
      case l: java.util.List[_] => formatSeq(l.asScala.toSeq)
      case s: Seq[_] => formatSeq(s)
      case st: Set[_] => formatSeq(st.toSeq)
      case a: Array[Byte] => formatByteArray(a)
      case ia: Array[Int] => formatIntArray(ia)
      case la: Array[Long] => formatLongArray(la)
      case da: Array[Double] => formatDoubleArray(da)
      case sa: Array[String] => formatStringArray(sa)
      case h: HasToJSON => h.toJSON
      // tuples and other Products are rendered as JSON arrays
      case t: Product => formatSeq(t.productIterator.toSeq)
      case j: JSONObject => j match {
        case _ if j.isMap => toJSON(j.toMap)
        case _ if j.isArray => toJSON(j.toArray)
        case _ if j.isBoolean => toJSON(j.toBoolean)
        case _ if j.isNumber => toJSON(j.toNumber)
        case _ => toJSON(j.toString)
      }
      case _ => if (v == null)
        "null"
      else
        throw new IllegalArgumentException("Don't know how to format '%s' in JSON".format(v.toString))
    }
  }
  /**
   * Quote and escape a string. The literal two-character string backslash-N
   * (presumably the SQL NULL marker -- confirm) maps to JSON null.
   */
  def formatString(s: String): String = {
    if (s == "\\\\N") {
      "null"
    } else {
      "\\"" + escapeString(s) + "\\""
    }
  }
  /** True iff the single-character string matches the `\p{Print}` character class. */
  def isPrint(c: String): Boolean = PrintRegex.findFirstIn(c).isDefined
  /**
   * Escape quotes, backslashes and control characters; any non-printable or
   * non-ASCII character is emitted as a \\uXXXX escape.
   */
  def escapeString(s: String): String = {
    //JSONValue.escape(s)
    s.map {
      case '\\"' => "\\\\\\""
      case '\\\\' => "\\\\\\\\"
      case c: Char if isPrint(c.toString) && c < 128 => c
      case '\\b' => "\\\\b"
      case '\\f' => "\\\\f"
      case '\\n' => "\\\\n"
      case '\\t' => "\\\\t"
      case '\\r' => "\\\\r"
      case c: Char => "\\\\u%04x".format(c.toInt)
    }.mkString
  }
  /** Dates render as quoted strings in the UTC format of [[dateFormat]]. */
  def formatDate(d: java.util.Date): String = {
    "\\"" + dateFormat.get.format(d) + "\\""
  }
  /** Maps render as JSON objects; keys go through formatString (incl. escaping). */
  def formatMap(m: Map[String, Any]): String = {
    "{" + m.map {kv => formatString(kv._1) + ":" + toJSON(kv._2)}.mkString(",") + "}"
  }
  // Previously a verbatim copy of formatSeq; kept as a public entry point for
  // API compatibility but now delegates.
  def formatList(l: List[Any]): String = formatSeq(l)
  def formatSeq(s: Seq[Any]): String = {
    "[" + s.map {e => toJSON(e)}.mkString(",") + "]"
  }
  /** Byte arrays are hex-encoded into a single quoted JSON string. */
  def formatByteArray(array: Array[Byte]): String = {
    val out: StringBuilder = new StringBuilder()
    out.append("\\"")
    for (b <- array) {
      out.append("%02x" format b)
    }
    out.append("\\"")
    out.toString()
  }
  def formatIntArray(array: Array[Int]): String = "[" + array.map(i => toJSON(i)).mkString(",") + "]"
  def formatLongArray(array: Array[Long]): String = "[" + array.map(i => toJSON(i)).mkString(",") + "]"
  def formatDoubleArray(array: Array[Double]): String = "[" + array.map(i => toJSON(i)).mkString(",") + "]"
  def formatStringArray(array: Array[String]): String = "[" + array.map(i => toJSON(i)).mkString(",") + "]"
}
| streamdrill/streamdrill-json | src/main/scala/streamdrill/json/JSONWriter.scala | Scala | bsd-2-clause | 5,488 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
package com.intel.analytics.zoo.serving
import java.nio.file.{Files, Paths}
import java.util.{Base64, UUID}
import com.intel.analytics.zoo.serving.http._
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import scala.collection.mutable
import scala.util.Random
class FrontendDomainsSpec extends FlatSpec with Matchers with BeforeAndAfter with Supportive {
val random = new Random()
"ServingError" should "serialized as json" in {
val message = "contentType not supported"
val error = ServingError(message)
error.toString should include(s""""error" : "$message"""")
}
"Feature" should "serialized and deserialized as json" in {
val image1 = new ImageFeature("aW1hZ2UgYnl0ZXM=")
val image2 = new ImageFeature("YXdlc29tZSBpbWFnZSBieXRlcw==")
val image3Path = getClass().getClassLoader()
.getResource("imagenet/n02110063/n02110063_15462.JPEG").getFile()
val byteArray = Files.readAllBytes(Paths.get(image3Path))
val image3 = new ImageFeature(Base64.getEncoder().encodeToString(byteArray))
val instance3 = mutable.LinkedHashMap[String, Any]("image" -> image3, "caption" -> "dog")
val inputs = Instances(List.range(0, 2).map(i => instance3))
val json = timing("serialize")() {
JsonUtil.toJson(inputs)
}
// println(json)
val obj = timing("deserialize")() {
JsonUtil.fromJson(classOf[Instances], json)
}
obj.instances.size should be(2)
}
"BytesPredictionInput" should "works well" in {
val bytesStr = "aW1hZ2UgYnl0ZXM="
val input = BytesPredictionInput(bytesStr)
input.toHash().get("data") should equal(bytesStr)
}
"PredictionOutput" should "works well" in {
val uuid = UUID.randomUUID().toString
val result = "mock-result"
val out = PredictionOutput(uuid, result)
out.uuid should be(uuid)
out.result should be(result)
}
val instancesJson =
"""{
|"instances": [
| {
| "tag": "foo",
| "signal": [1, 2, 3, 4, 5],
| "sensor": [[1, 2], [3, 4]]
| },
| {
| "tag": "bar",
| "signal": [3, 4, 1, 2, 5],
| "sensor": [[4, 5], [6, 8]]
| }
|]
|}
|""".stripMargin
"Instances" should "works well" in {
val instances = JsonUtil.fromJson(classOf[Instances], instancesJson)
instances.instances.size should be(2)
val intScalar = 12345
val floatScalar = 3.14159
val stringScalar = "hello, world. hello, arrow."
val intTensor = List.range(0, 1000).map(i => random.nextInt(10000))
val floatTensor = List.range(0, 1000).map(i => random.nextFloat())
val stringTensor = List("come", "on", "united")
val intTensor2 = List(List(1, 2), List(3, 4), List(5, 6))
val floatTensor2 =
List(
List(
List(.2f, .3f),
List(.5f, .6f)),
List(
List(.2f, .3f),
List(.5f, .6f)))
val stringTensor2 =
List(
List(
List(
List("come", "on", "united"),
List("come", "on", "united"),
List("come", "on", "united"),
List("come", "on", "united")),
List(
List("come", "on", "united"),
List("come", "on", "united"),
List("come", "on", "united"),
List("come", "on", "united"))
),
List(
List(
List("come", "on", "united"),
List("come", "on", "united"),
List("come", "on", "united"),
List("come", "on", "united")),
List(
List("come", "on", "united"),
List("come", "on", "united"),
List("come", "on", "united"),
List("come", "on", "united"))
)
)
val instance = mutable.LinkedHashMap(
"intScalar" -> intScalar,
"floatScalar" -> floatScalar,
"stringScalar" -> stringScalar,
"intTensor" -> intTensor,
"floatTensor" -> floatTensor,
"stringTensor" -> stringTensor,
"intTensor2" -> intTensor2,
"floatTensor2" -> floatTensor2,
"stringTensor2" -> stringTensor2
)
val instances2 = Instances(instance, instance)
val json2 = timing("json serialization")() {
JsonUtil.toJson(instances2)
}
val instances3 = timing("json deserialization")() {
JsonUtil.fromJson(classOf[Instances], json2)
}
// println("json: " + json2)
// println("json serialized size: " + json2.getBytes.length)
val tensors = instances3.constructTensors()
val schemas = instances3.makeSchema(tensors)
val (shape1, data1) = Instances.transferListToTensor(intTensor)
shape1.reduce(_ * _) should be(data1.size)
val (shape2, data2) = Instances.transferListToTensor(intTensor2)
shape2.reduce(_ * _) should be(data2.size)
val (shape3, data3) = Instances.transferListToTensor(floatTensor2)
shape3.reduce(_ * _) should be(data3.size)
val (shape4, data4) = Instances.transferListToTensor(stringTensor2)
shape4.reduce(_ * _) should be(data4.size)
val arrowBytes = timing("arrow serialization")() {
instances3.toArrow()
}
// println("arrow:" + new String(arrowBytes))
// println("arrow serialized size: ", arrowBytes.length)
val instances4 = timing("arrow deserialization")() {
Instances.fromArrow(arrowBytes)
}
instances4.instances(0).get("intScalar") should be(Some(12345))
instances4.instances(0).get("floatScalar") should be(Some(3.14159f))
instances4.instances(0).get("stringScalar") should be(Some("hello, world. hello, arrow."))
println(instances4.instances(0).get("intTensor"))
println(instances4.instances(0).get("floatTensor"))
println(instances4.instances(0).get("stringTensor"))
println(instances4.instances(0).get("intTensor2"))
println(instances4.instances(0).get("floatTensor2"))
println(instances4.instances(0).get("stringTensor2"))
}
"Instances" should "works well too" in {
List.range(0, 10).foreach(i => {
val image3Path = getClass().getClassLoader()
.getResource("imagenet/n02110063/n02110063_15462.JPEG").getFile()
val byteArray = Files.readAllBytes(Paths.get(image3Path))
val b64 = Base64.getEncoder().encodeToString(byteArray)
val instance = mutable.LinkedHashMap("image" -> b64)
.asInstanceOf[mutable.LinkedHashMap[String, Any]]
val instances = Instances(List.range(0, 1).map(i => instance))
val json = timing("json serialization")() {
JsonUtil.toJson(instances)
}
val instances2 = timing("json deserialization")() {
JsonUtil.fromJson(classOf[Instances], json)
json
}
// println("json: " + json)
println("json serialized size: " + json.getBytes.length)
val arrowBytes = timing("arrow serialization")() {
instances.toArrow()
}
val instances3 = timing("arrow deserialization")() {
Instances.fromArrow(arrowBytes)
}
// println("arrow: " + new String(arrowBytes))
println("arrow serialized size: " + arrowBytes.length)
val data = List.range(0, 224).map(i => random.nextFloat())
val data2 = List.range(0, 224).map(i => data)
val data3 = List.range(0, 3).map(data2)
val instance2 = mutable.LinkedHashMap(
"feature" -> data3
).asInstanceOf[mutable.LinkedHashMap[String, Any]]
val instances4 = Instances(List.range(0, 1).map(i => instance2))
val json2 = timing("json serialization")() {
JsonUtil.toJson(instances4)
}
val instances5 = timing("json deserialization")() {
JsonUtil.fromJson(classOf[Instances], json2)
}
// println("json: " + json2)
println("json serialized size: " + json2.getBytes.length)
val arrowBytes2 = timing("arrow serialization")() {
instances4.toArrow()
}
val instances6 = timing("arrow deserialization")() {
Instances.fromArrow(arrowBytes2)
}
// println("arrow: " + new String(arrowBytes2))
// println("arrow serialized size: " + arrowBytes2.length)
val tensorFloat = List(
List(1, 2),
List(3, 4)
)
val instanceExample = mutable.LinkedHashMap("tensor" -> tensorFloat)
.asInstanceOf[mutable.LinkedHashMap[String, Any]]
val instancesExample = Instances(instanceExample)
val arrowBytesExample = timing("arrow serialization")() {
instancesExample.toArrow()
}
val b64Example = Base64.getEncoder().encodeToString(arrowBytesExample)
// println("XXXXXXXXXXXXXXXXX:\\n" + new String(arrowBytesExample))
// println("arrow:\\n " + b64Example)
val arrowBytesPath = getClass().getClassLoader()
.getResource("serving/arrowBytes").getFile()
val b64f = scala.io.Source.fromFile(arrowBytesPath).mkString
val bytes = java.util.Base64.getDecoder.decode(b64f)
// println(new String(bytes))
val instancesEx = timing("arrow deserialization")() {
Instances.fromArrow(bytes)
}
instancesEx.instances.size should be (1)
instancesEx.instances(0).get("my-img").size should be (1)
// println(instancesEx)
})
}
"sparse tensor" should "work" in {
val shape = List(100, 10000, 10)
val values = List(0.2f, 0.5f, 3.45f, 6.78f)
val indices = List(List(1, 1, 1), List(2, 2, 2), List(3, 3, 3), List(4, 4, 4))
val sparseTensor = SparseTensor(shape, values, indices)
val intTensor2 = List(List(1, 2), List(3, 4), List(5, 6))
val instance = mutable.LinkedHashMap(
"sparseTensor" -> sparseTensor,
"intTensor2" -> intTensor2
).asInstanceOf[mutable.LinkedHashMap[String, Any]]
val instances = Instances(instance, instance)
val json = timing("json serialization")() {
JsonUtil.toJson(instances)
}
// println(json)
val instances2 = timing("json deserialization")() {
JsonUtil.fromJson(classOf[Instances], json)
}
// println(instances2)
// println("json serialized size: " + json.getBytes.length)
val arrowBytes = timing("arrow serialization")() {
instances.toArrow()
}
val instances3 = timing("arrow deserialization")() {
Instances.fromArrow(arrowBytes)
}
// println(instances3)
// println("arrow serialized size: " + arrowBytes.length)
}
}
*/
| intel-analytics/analytics-zoo | zoo/src/test/scala/com/intel/analytics/zoo/serving/FrontendDomainsSpec.scala | Scala | apache-2.0 | 10,907 |
package gapt.formats.tptp
import gapt.expr._
import gapt.expr.formula.Atom
import gapt.expr.ty.Ti
import gapt.expr.ty.To
import org.specs2.mutable._
class TptpHOLExporterTest extends Specification {
  "Export to TPTP thf" should {
    "handle atoms correctly" in {
      // Build a higher-order atom (a variable applied to a constant) and a
      // propositional variable; the exporter calls are currently commented
      // out, so this spec only checks that construction does not throw.
      val x = Var( "x", Ti ->: To )
      val y = Var( "y", To )
      val c = Const( "c", Ti )
      val ax = Atom( x, List( c ) )
      val ay = Atom( y )
      //println( TPTPHOLExporter( List( HOLSequent( Nil, List( ax, ay ) ) ), true ) )
      /*
      println( TPTPHOLExporter( List(
        HOLSequent( List( ax ), Nil ),
        HOLSequent( Nil, List( ay ) )
      ), true ) )
      */
      ok
    }
  }
}
| gapt/gapt | tests/src/test/scala/gapt/formats/tptp/TptpHOLExporterTest.scala | Scala | gpl-3.0 | 686 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.csv
import java.util.Locale
import scala.util.control.Exception.allCatch
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.analysis.TypeCoercion
import org.apache.spark.sql.catalyst.expressions.ExprUtils
import org.apache.spark.sql.catalyst.util.LegacyDateFormats.FAST_DATE_FORMAT
import org.apache.spark.sql.catalyst.util.TimestampFormatter
import org.apache.spark.sql.types._
/**
 * Infers a Spark SQL schema from tokenized CSV rows by probing each field
 * against progressively wider types and merging the per-row results.
 */
class CSVInferSchema(val options: CSVOptions) extends Serializable {
  // Formatter used to probe whether a field parses as a timestamp, built from
  // the user-supplied format, time zone and locale.
  private val timestampParser = TimestampFormatter(
    options.timestampFormat,
    options.zoneId,
    options.locale,
    legacyFormat = FAST_DATE_FORMAT,
    needVarLengthSecondFraction = true)
  private val decimalParser = if (options.locale == Locale.US) {
    // Special handling the default locale for backward compatibility
    s: String => new java.math.BigDecimal(s)
  } else {
    ExprUtils.getDecimalParser(options.locale)
  }
  /**
   * Similar to the JSON schema inference
   * 1. Infer type of each row
   * 2. Merge row types to find common type
   * 3. Replace any null types with string type
   */
  def infer(
      tokenRDD: RDD[Array[String]],
      header: Array[String]): StructType = {
    val fields = if (options.inferSchemaFlag) {
      // Start every column at NullType and widen it as rows are scanned.
      val startType: Array[DataType] = Array.fill[DataType](header.length)(NullType)
      val rootTypes: Array[DataType] =
        tokenRDD.aggregate(startType)(inferRowType, mergeRowTypes)
      toStructFields(rootTypes, header)
    } else {
      // By default fields are assumed to be StringType
      header.map(fieldName => StructField(fieldName, StringType, nullable = true))
    }
    StructType(fields)
  }
  // Pairs each header name with its inferred type; columns that stayed
  // NullType (all nulls/empty) fall back to StringType.
  def toStructFields(
      fieldTypes: Array[DataType],
      header: Array[String]): Array[StructField] = {
    header.zip(fieldTypes).map { case (thisHeader, rootType) =>
      val dType = rootType match {
        case _: NullType => StringType
        case other => other
      }
      StructField(thisHeader, dType, nullable = true)
    }
  }
  // Sequential step of the aggregate: widen each column's running type with
  // the corresponding field of one row (in place, for efficiency).
  def inferRowType(rowSoFar: Array[DataType], next: Array[String]): Array[DataType] = {
    var i = 0
    while (i < math.min(rowSoFar.length, next.length)) { // May have columns on right missing.
      rowSoFar(i) = inferField(rowSoFar(i), next(i))
      i+=1
    }
    rowSoFar
  }
  // Combine step of the aggregate: merge per-partition column types;
  // incompatible pairs collapse to NullType (later replaced by StringType).
  def mergeRowTypes(first: Array[DataType], second: Array[DataType]): Array[DataType] = {
    first.zipAll(second, NullType, NullType).map { case (a, b) =>
      compatibleType(a, b).getOrElse(NullType)
    }
  }
  /**
   * Infer type of string field. Given known type Double, and a string "1", there is no
   * point checking if it is an Int, as the final type must be Double or higher.
   */
  def inferField(typeSoFar: DataType, field: String): DataType = {
    if (field == null || field.isEmpty || field == options.nullValue) {
      // Null-like fields carry no information; keep the type seen so far.
      typeSoFar
    } else {
      typeSoFar match {
        case NullType => tryParseInteger(field)
        case IntegerType => tryParseInteger(field)
        case LongType => tryParseLong(field)
        case _: DecimalType =>
          // DecimalTypes have different precisions and scales, so we try to find the common type.
          compatibleType(typeSoFar, tryParseDecimal(field)).getOrElse(StringType)
        case DoubleType => tryParseDouble(field)
        case TimestampType => tryParseTimestamp(field)
        case BooleanType => tryParseBoolean(field)
        case StringType => StringType
        case other: DataType =>
          throw new UnsupportedOperationException(s"Unexpected data type $other")
      }
    }
  }
  // True for the user-configured NaN / -Inf / +Inf spellings.
  private def isInfOrNan(field: String): Boolean = {
    field == options.nanValue || field == options.negativeInf || field == options.positiveInf
  }
  // The tryParse* methods below form a widening chain:
  // Integer -> Long -> Decimal -> Double -> Timestamp -> Boolean -> String.
  // Each one falls through to the next on parse failure.
  private def tryParseInteger(field: String): DataType = {
    if ((allCatch opt field.toInt).isDefined) {
      IntegerType
    } else {
      tryParseLong(field)
    }
  }
  private def tryParseLong(field: String): DataType = {
    if ((allCatch opt field.toLong).isDefined) {
      LongType
    } else {
      tryParseDecimal(field)
    }
  }
  private def tryParseDecimal(field: String): DataType = {
    val decimalTry = allCatch opt {
      // The conversion can fail when the `field` is not a form of number.
      val bigDecimal = decimalParser(field)
      // Because many other formats do not support decimal, it reduces the cases for
      // decimals by disallowing values having scale (eg. `1.1`).
      if (bigDecimal.scale <= 0) {
        // `DecimalType` conversion can fail when
        //   1. The precision is bigger than 38.
        //   2. scale is bigger than precision.
        DecimalType(bigDecimal.precision, bigDecimal.scale)
      } else {
        tryParseDouble(field)
      }
    }
    decimalTry.getOrElse(tryParseDouble(field))
  }
  private def tryParseDouble(field: String): DataType = {
    if ((allCatch opt field.toDouble).isDefined || isInfOrNan(field)) {
      DoubleType
    } else {
      tryParseTimestamp(field)
    }
  }
  private def tryParseTimestamp(field: String): DataType = {
    // This case infers a custom `dataFormat` is set.
    if ((allCatch opt timestampParser.parse(field)).isDefined) {
      TimestampType
    } else {
      tryParseBoolean(field)
    }
  }
  private def tryParseBoolean(field: String): DataType = {
    if ((allCatch opt field.toBoolean).isDefined) {
      BooleanType
    } else {
      stringType()
    }
  }
  // Defining a function to return the StringType constant is necessary in order to work around
  // a Scala compiler issue which leads to runtime incompatibilities with certain Spark versions;
  // see issue #128 for more details.
  private def stringType(): DataType = {
    StringType
  }
  /**
   * Returns the common data type given two input data types so that the return type
   * is compatible with both input data types.
   */
  private def compatibleType(t1: DataType, t2: DataType): Option[DataType] = {
    TypeCoercion.findTightestCommonType(t1, t2).orElse(findCompatibleTypeForCSV(t1, t2))
  }
  /**
   * The following pattern matching represents additional type promotion rules that
   * are CSV specific.
   */
  private val findCompatibleTypeForCSV: (DataType, DataType) => Option[DataType] = {
    case (StringType, t2) => Some(StringType)
    case (t1, StringType) => Some(StringType)
    // These two cases below deal with when `IntegralType` is larger than `DecimalType`.
    case (t1: IntegralType, t2: DecimalType) =>
      compatibleType(DecimalType.forType(t1), t2)
    case (t1: DecimalType, t2: IntegralType) =>
      compatibleType(t1, DecimalType.forType(t2))
    // Double support larger range than fixed decimal, DecimalType.Maximum should be enough
    // in most case, also have better precision.
    case (DoubleType, _: DecimalType) | (_: DecimalType, DoubleType) =>
      Some(DoubleType)
    case (t1: DecimalType, t2: DecimalType) =>
      val scale = math.max(t1.scale, t2.scale)
      val range = math.max(t1.precision - t1.scale, t2.precision - t2.scale)
      if (range + scale > 38) {
        // DecimalType can't support precision > 38
        Some(DoubleType)
      } else {
        Some(DecimalType(range + scale, scale))
      }
    case _ => None
  }
}
| matthewfranglen/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/CSVInferSchema.scala | Scala | mit | 8,059 |
/**
* Copyright (C) 2017 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.builder
import org.orbeon.datatypes.Orientation
import org.orbeon.facades.ResizeObserver
import org.orbeon.jquery.Offset
import org.orbeon.oxf.util.CoreUtils.asUnit
import org.orbeon.oxf.util.StringUtils._
import org.orbeon.xforms.AjaxClient.AjaxResponseDetails
import org.orbeon.xforms._
import org.orbeon.xforms.facade.Events
import org.scalajs.dom.{document, window}
import org.scalajs.jquery.{JQuery, JQueryEventObject}
import scala.scalajs.js
import scala.util.Try
object Position {
  // Keeps track of pointer position
  var pointerPos: Offset = Offset(0, 0)
  // Updated on every mouse move over the document (namespaced jQuery handler)
  $(document).on("mousemove.orbeon.builder", (event: JQueryEventObject) => asUnit {
    pointerPos =
      Offset(
        left = event.pageX,
        top = event.pageY
      )
  })
  // How much we need to add to offset to account for the form having been scrolled
  // (`.fb-main` is the scrolling container queried here)
  def scrollTop() : Double = $(".fb-main").scrollTop ()
  def scrollLeft(): Double = $(".fb-main").scrollLeft()
  // Gets an element offset, normalizing for scrolling, so the offset can be stored in a cache
  def adjustedOffset(el: JQuery): Offset = {
    val rawOffset = Offset(el)
    Offset(
      left = rawOffset.left + scrollLeft(),
      top = rawOffset.top + scrollTop()
    )
  }
  // Calls listener when what is under the pointer has potentially changed
  // (`fn` is by-name; `fn _` eta-expands it into a handler function)
  def onUnderPointerChange(fn: => Unit): Unit = {
    $(document).on("mousemove.orbeon.builder", fn _)
    // Resizing the window might change what is under the pointer the last time we saw it in the window
    $(window).on("resize.orbeon.builder", fn _)
    AjaxClient.ajaxResponseProcessed.add(_ => fn)
  }
  // Call listener when anything on the page that could change element positions happened
  def onOffsetMayHaveChanged(fn: () => Unit): Unit = {
    Events.orbeonLoadedEvent.subscribe(fn)
    AjaxClient.ajaxResponseProcessed.add(_ => fn())
    Events.componentChangedLayoutEvent.subscribe(fn)
    // Can be removed once we only support Safari 14, which implements the `ResizeObserver`
    $(window).on("resize.orbeon.builder", fn)
    // `ResizeObserver` catches window resizes, but also Form Builder being moved or resized by the embedding app
    if (ResizeObserver.isDefined) {
      Events.orbeonLoadedEvent.subscribe(() => {
        val resizeObserver = new ResizeObserver(fn)
        // Observe the main Form Builder container, if present in the DOM
        val fbMainOpt = Option(document.querySelector(".fb-main"))
        fbMainOpt.foreach(resizeObserver.observe)
      })
    }
  }
  // Finds the container, if any, based on a vertical position
  // (returns the first cached block whose rectangle contains the point)
  def findInCache(
    containerCache : BlockCache,
    top            : Double,
    left           : Double
  ): Option[Block] =
    containerCache.elems find { container =>
      // Rounding when comparing as the offset of an element isn't always exactly the same as the offset it was set to
      val horizontalPosInside = Math.round(container.left) <= Math.round(left) &&
                                Math.round(left)           <= Math.round(container.left + container.width)
      val verticalPosInside   = Math.round(container.top ) <= Math.round(top) &&
                                Math.round(top)            <= Math.round(container.top + container.height)
      horizontalPosInside && verticalPosInside
    }
// Container is either a section or grid; calls listeners passing old/new container
  def currentContainerChanged(
    containerCache : BlockCache,
    wasCurrent     : Block => Unit,
    becomesCurrent : Block => Unit
  ): Unit = {
    val notifyChange = notifyOnChange(wasCurrent, becomesCurrent)
    onUnderPointerChange {
      // Pointer position in document coordinates (pointer position + scroll)
      val top  = pointerPos.top  + Position.scrollTop()
      val left = pointerPos.left + Position.scrollLeft()
      // Whether any of the registered YUI dialogs is currently shown
      val dialogVisible =
        Globals.dialogs.exists {
          case (_: String, yuiDialog: js.Dynamic) =>
            yuiDialog.cfg.config.visible.value.asInstanceOf[Boolean]
        }
      val newContainer =
        if (dialogVisible)
          // Ignore container under the pointer if a dialog is visible
          None
        else
          findInCache(containerCache, top, left)
      notifyChange(newContainer)
    }
  }
// Returns a function, which is expected to be called every time the value changes passing the new value, and which
// will when appropriate notify the listeners `was` and `becomes` of the old and new value
// TODO: replace `Any` by `Unit` once callers are all in Scala
def notifyOnChange[T](
was : Block => Unit,
becomes : Block => Unit
): Option[Block] => Unit = {
var currentBlockOpt: Option[Block] = None
(newBlockOpt: Option[Block]) => {
newBlockOpt match {
case Some(newBlock) =>
val doNotify =
currentBlockOpt match {
case None => true
case Some(currentBlock) =>
// Typically after an Ajax request, maybe a column/row was added/removed, so we might consequently
// need to update the icon position
! newBlock.el.is(currentBlock.el) ||
// The elements could be the same, but their position could have changed, in which case want to
// reposition relative icons, so we don't consider the value to be the "same"
newBlock.left != currentBlock.left ||
newBlock.top != currentBlock.top
}
if (doNotify) {
currentBlockOpt.foreach(was)
currentBlockOpt = newBlockOpt
becomes(newBlock)
}
case None =>
currentBlockOpt.foreach(was)
currentBlockOpt = None
}
}
}
  // Gets the size in pixels of each track: row heights (horizontal) or column widths (vertical)
def tracksWidth(
gridBody : JQuery,
orientation : Orientation
): List[Double] = {
val cssProperty = orientation match {
case Orientation.Horizontal => "grid-template-rows"
case Orientation.Vertical => "grid-template-columns"
}
val cssValue = gridBody.css(cssProperty)
// In the value of the CSS property returned by the browser, replace `repeat(X Ypx)` by `X` times `Ypx`
// Unlike other browsers, Edge 17 returns values that contains `repeat()`
val repeatRegex = "repeat\\\\(([0-9]+), ([0-9\\\\.]+px)\\\\)".r
val cssValueExpanded = repeatRegex.replaceAllIn(cssValue, m => {
val count = m.group(1).toInt
val value = m.group(2)
(1 to count).map(_ => value).mkString(" ")
})
cssValueExpanded
.splitTo[List]()
.map(w => w.substring(0, w.indexOf("px")))
.flatMap(v => Try(v.toDouble).toOption) // https://github.com/orbeon/orbeon-forms/issues/3700
}
}
| orbeon/orbeon-forms | form-builder/js/src/main/scala/org/orbeon/builder/Position.scala | Scala | lgpl-2.1 | 7,238 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.misc
import minitest.SimpleTestSuite
import monix.execution.{Cancelable, CancelableFuture, Scheduler}
import monix.execution.exceptions.DummyException
import monix.execution.schedulers.TracingScheduler
import scala.concurrent.Future
/** JVM tests verifying that `Local` values are correctly propagated, isolated and
  * restored across `Future` async boundaries.
  *
  * Each test wraps a single-threaded scheduler in a `TracingScheduler` so the `Local`
  * context is captured and restored around every async boundary.
  *
  * NOTE(review): the single-threaded schedulers created per test are never shut down;
  * presumably acceptable for a short-lived test JVM — confirm.
  */
object LocalJVMSuite extends SimpleTestSuite {
  testAsync("Local.isolate should properly isolate during async boundaries") {
    implicit val s = TracingScheduler(Scheduler.singleThread("local-test"))
    val local = Local(0)
    val f = for {
      _ <- Future { local := 50 }
      // The write inside `isolate` must not leak into the outer context
      _ <- Local.isolate {
        Future {
          local := 100
        }
      }
      v <- Future { local() }
    } yield v
    for (v <- f) yield assertEquals(v, 50)
  }

  testAsync("Local.isolate(CancelableFuture) should properly isolate during async boundaries") {
    implicit val s = TracingScheduler(Scheduler.singleThread("local-test"))
    val local = Local(0)
    val f = for {
      _ <- CancelableFuture(Future { local := 50 }, Cancelable())
      _ <- Local.isolate {
        CancelableFuture(Future {
          local := 100
        }, Cancelable())
      }
      v <- CancelableFuture(Future { local() }, Cancelable())
    } yield v
    for (v <- f) yield assertEquals(v, 50)
  }

  testAsync("Local.isolate should properly isolate during async boundaries on error") {
    implicit val s = TracingScheduler(Scheduler.singleThread("local-test"))
    val local = Local(0)
    val f = for {
      _ <- Future { local := 50 }
      // Isolation must hold even when the isolated future fails
      _ <- Local.isolate {
        Future {
          local := 100
        }.flatMap(_ => Future.failed(DummyException("boom")))
      }.recoverWith { case _ => Future.successful(()) }
      v <- Future { local() }
    } yield v
    for (v <- f) yield assertEquals(v, 50)
  }

  testAsync("Local.bindCurrentIf(CancelableFuture) should properly restore context during async boundaries") {
    implicit val s = TracingScheduler(Scheduler.singleThread("local-test"))
    val local = Local(0)
    val f = for {
      _ <- Future { local := 50 }
      _ <- Local.bindCurrentIf(true)(CancelableFuture(Future {
        local := 100
      }, Cancelable.empty))
      v <- Future { local() }
    } yield v
    for (v <- f) yield assertEquals(v, 50)
  }

  // Test name fixed to match the code: it calls `Local.newContext()`, not `Local.defaultContext()`
  testAsync("Local.bind(Local.newContext()) should restore context during async boundaries") {
    implicit val s = TracingScheduler(Scheduler.singleThread("local-test"))
    val local = Local(0)
    val f = for {
      _ <- Future { local := 50 }
      _ <- Local.bind(Local.newContext()) { Future { local := 100 } }
      v <- Future { local() }
    } yield v
    for (v <- f) yield assertEquals(v, 50)
  }

  testAsync("Local.bindClear should restore context during async boundaries") {
    implicit val s = TracingScheduler(Scheduler.singleThread("local-test"))
    val local = Local(0)
    val f = for {
      _ <- Future { local := 50 }
      _ <- Local.bindClear { Future { local := 100 } }
      v <- Future { local() }
    } yield v
    for (v <- f) yield assertEquals(v, 50)
  }

  testAsync("local.bind should properly restore context during async boundaries") {
    implicit val s = TracingScheduler(Scheduler.singleThread("local-test"))
    val local = Local(0)
    val f = for {
      _ <- Future { local := 50 }
      _ <- local.bind(100)(Future { () })
      v <- Future { local() }
    } yield v
    for (v <- f) yield assertEquals(v, 50)
  }

  testAsync("local.bindClear should properly restore context during async boundaries") {
    implicit val s = TracingScheduler(Scheduler.singleThread("local-test"))
    val local = Local(0)
    val f = for {
      _ <- Future { local := 50 }
      _ <- local.bindClear(Future { () })
      v <- Future { local() }
    } yield v
    for (v <- f) yield assertEquals(v, 50)
  }
}
| monix/monix | monix-execution/jvm/src/test/scala/monix/execution/misc/LocalJVMSuite.scala | Scala | apache-2.0 | 4,465 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.metrics
import java.io.{FileInputStream, InputStream}
import java.util.Properties
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.util.matching.Regex
import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.METRICS_CONF
import org.apache.spark.util.Utils
/** Parses and holds the configuration of Spark's metrics system.
 *
 * Properties are loaded with increasing precedence: built-in defaults, then the
 * metrics properties file (or `metrics.properties` on the classpath), then any
 * `spark.metrics.conf.*` entries from the given `SparkConf`. After loading, the
 * properties are grouped per instance (`master`, `applications`, ..., or `*` for
 * defaults) by `initialize()`.
 */
private[spark] class MetricsConfig(conf: SparkConf) extends Logging {

  private val DEFAULT_PREFIX = "*"
  // Matches "<instance>.<rest>" where <instance> is "*" or an alphabetic name
  private val INSTANCE_REGEX = "^(\\\\*|[a-zA-Z]+)\\\\.(.+)".r
  private val DEFAULT_METRICS_CONF_FILENAME = "metrics.properties"

  private[metrics] val properties = new Properties()
  // Populated by initialize(): instance name -> its resolved sub-properties
  private[metrics] var perInstanceSubProperties: mutable.HashMap[String, Properties] = null

  // Built-in defaults: expose metrics as JSON via the servlet sink
  private def setDefaultProperties(prop: Properties): Unit = {
    prop.setProperty("*.sink.servlet.class", "org.apache.spark.metrics.sink.MetricsServlet")
    prop.setProperty("*.sink.servlet.path", "/metrics/json")
    prop.setProperty("master.sink.servlet.path", "/metrics/master/json")
    prop.setProperty("applications.sink.servlet.path", "/metrics/applications/json")
  }

  /**
   * Load properties from various places, based on precedence
   * If the same property is set again latter on in the method, it overwrites the previous value
   */
  def initialize(): Unit = {
    // Add default properties in case there's no properties file
    setDefaultProperties(properties)

    loadPropertiesFromFile(conf.get(METRICS_CONF))

    // Also look for the properties in provided Spark configuration
    val prefix = "spark.metrics.conf."
    conf.getAll.foreach {
      case (k, v) if k.startsWith(prefix) =>
        properties.setProperty(k.substring(prefix.length()), v)
      case _ =>
    }

    // Now, let's populate a list of sub-properties per instance, instance being the prefix that
    // appears before the first dot in the property name.
    // Add to the sub-properties per instance, the default properties (those with prefix "*"), if
    // they don't have that exact same sub-property already defined.
    //
    // For example, if properties has ("*.class"->"default_class", "*.path"->"default_path,
    // "driver.path"->"driver_path"), for driver specific sub-properties, we'd like the output to be
    // ("driver"->Map("path"->"driver_path", "class"->"default_class")
    // Note how class got added to based on the default property, but path remained the same
    // since "driver.path" already existed and took precedence over "*.path"
    //
    perInstanceSubProperties = subProperties(properties, INSTANCE_REGEX)
    if (perInstanceSubProperties.contains(DEFAULT_PREFIX)) {
      val defaultSubProperties = perInstanceSubProperties(DEFAULT_PREFIX).asScala
      for ((instance, prop) <- perInstanceSubProperties if (instance != DEFAULT_PREFIX);
           (k, v) <- defaultSubProperties if (prop.get(k) == null)) {
        prop.put(k, v)
      }
    }
  }

  /**
   * Take a simple set of properties and a regex that the instance names (part before the first dot)
   * have to conform to. And, return a map of the first order prefix (before the first dot) to the
   * sub-properties under that prefix.
   *
   * For example, if the properties sent were Properties("*.sink.servlet.class"->"class1",
   * "*.sink.servlet.path"->"path1"), the returned map would be
   * Map("*" -> Properties("sink.servlet.class" -> "class1", "sink.servlet.path" -> "path1"))
   * Note in the subProperties (value of the returned Map), only the suffixes are used as property
   * keys.
   * If, in the passed properties, there is only one property with a given prefix, it is still
   * "unflattened". For example, if the input was Properties("*.sink.servlet.class" -> "class1"
   * the returned Map would contain one key-value pair
   * Map("*" -> Properties("sink.servlet.class" -> "class1"))
   * Any passed in properties, not complying with the regex are ignored.
   *
   * @param prop the flat list of properties to "unflatten" based on prefixes
   * @param regex the regex that the prefix has to comply with
   * @return an unflattened map, mapping prefix with sub-properties under that prefix
   */
  def subProperties(prop: Properties, regex: Regex): mutable.HashMap[String, Properties] = {
    val subProperties = new mutable.HashMap[String, Properties]
    prop.asScala.foreach { kv =>
      if (regex.findPrefixOf(kv._1).isDefined) {
        val regex(prefix, suffix) = kv._1
        subProperties.getOrElseUpdate(prefix, new Properties).setProperty(suffix, kv._2)
      }
    }
    subProperties
  }

  // Sub-properties for the given instance, falling back to the "*" defaults when
  // the instance has no specific configuration
  def getInstance(inst: String): Properties = {
    perInstanceSubProperties.get(inst) match {
      case Some(s) => s
      case None => perInstanceSubProperties.getOrElse(DEFAULT_PREFIX, new Properties)
    }
  }

  /**
   * Loads configuration from a config file. If no config file is provided, try to get file
   * in class path.
   */
  private[this] def loadPropertiesFromFile(path: Option[String]): Unit = {
    var is: InputStream = null
    try {
      is = path match {
        case Some(f) => new FileInputStream(f)
        case None => Utils.getSparkClassLoader.getResourceAsStream(DEFAULT_METRICS_CONF_FILENAME)
      }
      if (is != null) {
        properties.load(is)
      }
    } catch {
      case e: Exception =>
        // Deliberately best-effort: a bad metrics config logs an error but does not fail startup
        val file = path.getOrElse(DEFAULT_METRICS_CONF_FILENAME)
        logError(s"Error loading configuration file $file", e)
    } finally {
      if (is != null) {
        is.close()
      }
    }
  }
}
| ueshin/apache-spark | core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala | Scala | apache-2.0 | 6,326 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.db.evolutions
import javax.inject._
import play.api.db.DBApi
import play.api.inject._
import play.api.Configuration
import play.api.Environment
import play.core.WebCommands
/**
* Default module for evolutions API.
*/
class EvolutionsModule
    extends SimpleModule(
      bind[EvolutionsConfig].toProvider[DefaultEvolutionsConfigParser],
      bind[EvolutionsReader].to[EnvironmentEvolutionsReader],
      bind[EvolutionsApi].to[DefaultEvolutionsApi],
      // Eager binding: the provider is instantiated at application startup rather than on first use
      bind[ApplicationEvolutions].toProvider[ApplicationEvolutionsProvider].eagerly
    )
/**
* Components for default implementation of the evolutions API.
*/
trait EvolutionsComponents {
  // Dependencies expected from the application components trait mixing this in
  def environment: Environment
  def configuration: Configuration
  def dbApi: DBApi
  def webCommands: WebCommands

  lazy val dynamicEvolutions: DynamicEvolutions = new DynamicEvolutions
  lazy val evolutionsConfig: EvolutionsConfig = new DefaultEvolutionsConfigParser(configuration).parse
  lazy val evolutionsReader: EvolutionsReader = new EnvironmentEvolutionsReader(environment)
  lazy val evolutionsApi: EvolutionsApi = new DefaultEvolutionsApi(dbApi)
  // NOTE(review): constructing this value appears to activate evolutions handling —
  // applications must reference it for evolutions to run; confirm against Play docs
  lazy val applicationEvolutions: ApplicationEvolutions = new ApplicationEvolutions(
    evolutionsConfig,
    evolutionsReader,
    evolutionsApi,
    dynamicEvolutions,
    dbApi,
    environment,
    webCommands
  )
}
/** DI provider for `ApplicationEvolutions`, bound eagerly by `EvolutionsModule`.
 *
 * `DynamicEvolutions` is resolved through the injector rather than injected directly —
 * presumably so that a user-supplied binding for it takes effect; confirm.
 */
@Singleton
class ApplicationEvolutionsProvider @Inject() (
    config: EvolutionsConfig,
    reader: EvolutionsReader,
    evolutions: EvolutionsApi,
    dbApi: DBApi,
    environment: Environment,
    webCommands: WebCommands,
    injector: Injector
) extends Provider[ApplicationEvolutions] {
  // Lazy so construction happens on first `get`, after the injector is fully available
  lazy val get = new ApplicationEvolutions(
    config,
    reader,
    evolutions,
    injector.instanceOf[DynamicEvolutions],
    dbApi,
    environment,
    webCommands
  )
}
| benmccann/playframework | persistence/play-jdbc-evolutions/src/main/scala/play/api/db/evolutions/EvolutionsModule.scala | Scala | apache-2.0 | 1,900 |
package progscala2.fp.categories
import scala.language.higherKinds
/**
* Created by younggi on 1/19/17.
*/
// Abstraction over a parameterized type F[_]
// F[_] is a higher-kinded type parameter (the container shape is abstract)
// `map` takes an F[A] and a function A => B and produces an F[B]
/** Minimal Functor type class: lifts a function `A => B` into the container `F`. */
trait Functor[F[_]] {
  def map[A, B](fa: F[A])(f: A => B): F[B]
}
/** Functor instance for `Seq`, delegating to the collection's own `map`. */
object SeqF extends Functor[Seq] {
  def map[A, B](seq: Seq[A])(f: A => B): Seq[B] = seq.map(f)
}
/** Functor instance for `Option`, delegating to `Option.map`. */
object OptionF extends Functor[Option] {
  def map[A, B](opt: Option[A])(f: A => B): Option[B] = opt.map(f)
}
// `map` takes two functions, A => A2 and A2 => B, and combines them into a single A => B
// by feeding the A2 output of `func` into the second function `f`
/** Functor over functions with a fixed input type: mapping is function composition. */
object FunctionF {
  def map[A, A2, B](func: A => A2)(f: A2 => B): A => B = {
    // Functor instance for `A => _`, using a type lambda to fix the input type to `A`
    val functionFunctor = new Functor[({type λ[β] = A => β})#λ] {
      def map[A3, B2](g: A => A3)(h: A3 => B2): A => B2 = g andThen h
    }
    functionFunctor.map(func)(f)
  }
}
| younggi/books | programming_scala/progscala2/src/main/scala/progscala2/fp/categories/Functor.scala | Scala | mit | 972 |
package com.seanshubin.learn.datomic.experiment
import java.util.{Map => JavaMap}
import datomic.{Connection, Peer, Util}
/** Demonstrates a parent entity holding a many-cardinality set of references to
  * child entities in Datomic: elections referencing their candidates.
  */
object ParentChildSetOfReferences extends App {
  // Installs the schema: an election has a name and a many-cardinality set of
  // candidate references; a candidate has a name
  def transactElectionSchema(connection: Connection) {
    // Builds one attribute-installation map for ":<namespace>/<name>"
    def createColumn(namespace: String, name: String, dataType: String, cardinality: String) = {
      Util.map(
        ":db/id", Peer.tempid(":db.part/db"),
        ":db/ident", s":$namespace/$name",
        ":db/valueType", s":db.type/$dataType",
        ":db/cardinality", ":db.cardinality/" + cardinality,
        ":db.install/_attribute", ":db.part/db")
    }
    val electionName = createColumn("election", "name", "string", "one")
    val electionCandidate = createColumn("election", "candidate", "ref", "many")
    val candidateName = createColumn("candidate", "name", "string", "one")
    // get() blocks until the schema transaction completes
    connection.transact(Util.list(
      electionName,
      electionCandidate,
      candidateName)).get()
  }

  def transactElectionSampleData(connection: Connection) {
    // Transacts one election plus all its candidates in a single transaction, wiring
    // each candidate to the election through the ":election/candidate" reference
    def transactElection(connection: Connection, electionName: String, candidateNames: Seq[String]) {
      val electionId = Peer.tempid(":db.part/user")
      val election = Util.map(":db/id", electionId, ":election/name", electionName)
      // Two datom maps per candidate: its name, and the election -> candidate reference
      def createCandidateDatom(candidateName: String): Seq[JavaMap[_, _]] = {
        val candidateId = Peer.tempid(":db.part/user")
        val candidateNameData = Util.map(
          ":db/id", candidateId,
          ":candidate/name", candidateName)
        val electionCandidate = Util.map(
          ":db/id", electionId,
          ":election/candidate", candidateId)
        Seq(candidateNameData, electionCandidate)
      }
      val candidates = candidateNames.flatMap(createCandidateDatom)
      val datoms = election +: candidates
      connection.transact(Util.list(datoms: _*)).get()
    }
    transactElection(connection, "Favorite Programming Language", Seq("Scala", "Clojure", "Haskell"))
    transactElection(connection, "Least Evil Political Party", Seq("Nefarious", "Debauched", "Spoiler"))
    transactElection(connection, "Ice Cream", Seq("Chocolate", "Vanilla", "Strawberry"))
  }

  // Script body: create an in-memory database, install schema and sample data,
  // then report what was added relative to the pre-schema baseline
  val uri = "datomic:mem://sample"
  Peer.createDatabase(uri)
  val connection = Peer.connect(uri)
  val baseline = connection.db();
  transactElectionSchema(connection)
  transactElectionSampleData(connection)
  val db = connection.db()
  DatomicReporter.report(baseline, db).foreach(println)
  sys.exit(0)
}
| SeanShubin/learn-datomic | experiment/src/main/scala/com/seanshubin/learn/datomic/experiment/ParentChildSetOfReferences.scala | Scala | unlicense | 2,447 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.compiler.codegenerator.opencl.hyperkernels
import cogx.compiler.codegenerator.opencl.fragments.{SmallTensorAddressing, TensorElementAddressing, AddressingMode, HyperKernel}
import cogx.platform.types._
import cogx.platform.types.ElementTypes.Float32
import cogx.compiler.codegenerator.common.FieldPolicies._
import cogx.cogmath.geometry.Shape
import cogx.compiler.codegenerator.opencl.fragments.HyperKernel._
import cogx.compiler.parser.op.SlicePointOp
/** A SlicePointKernel takes an N-dimensional field (first child) and
* a 0-dimensional scalar field (second child) and uses the single value in
* the second field as a slicing index into the first field. If necessary, the
* slicing index value is converted to an integer and clipped by the range
* of the first dimension of the first field. Works on any type of input
* field (scalar, vector, matrix, etc.) of dimensions >= 1.</p>
*
* @author Greg Snider and Dick Carter
*
* @param in The inputs virtual field register driving this kernel.
* @param operation The SlicePointOp opcode
* @param resultType The type of the resulting vector field.
* @param addressMode The addressing mode of this kernel.
*/
private[cogx]
class SlicePointHyperKernel private (in: Array[VirtualFieldRegister],
                                     operation: Opcode,
                                     resultType: FieldType,
                                     addressMode: AddressingMode)
        extends HyperKernel(operation, in, resultType, addressMode) {

  // Accumulates the OpenCL source fragment for this kernel
  val code = new StringBuilder
  val inType = in(0).fieldType
  val inDim = inType.dimensions
  // Size of the first (sliced) dimension; the slice index is clipped to [0, size - 1]
  val firstDimensionSize = inType.fieldShape(0)

  if (addressMode == TensorElementAddressing)
    code append " tensorElement = _tensorElement;\\n"

  // Get index and clip it to the bounds of the first dimension.
  // The index is read from the 0D scalar field (second input) and truncated toward zero.
  code append " float floatIndex = readScalar(@in1);\\n"
  code append " int index = convert_int_rtz(floatIndex);\\n"
  code append " index = max(index, 0);\\n"
  code append " index = min(index, " + (firstDimensionSize - 1) + ");\\n"

  // The clipped index selects along the first dimension of the input; the
  // remaining dimensions pass through from the output coordinates.
  inDim match {
    case 1 =>
      code append setLayerRowColumn(inType, "0", "0", "index")
    case 2 =>
      code append setLayerRowColumn(inType, "0", "index", "_column")
    case 3 =>
      code append setLayerRowColumn(inType, "index", "_row", "_column")
    case _ =>
      throw new RuntimeException("Unsupported input field dimension: " + inDim)
  }
  code append " @out0 = readNonlocal(@in0);\\n"
  addCode(code.toString())
  //      debugCompile()
}
/** Factory object for creating kernels of this type.
*/
private[cogx]
object SlicePointHyperKernel {

  /**
   * Create a hyperkernel that slices an N-dimensional field to form an N-1
   * dimensional field, based on an index provided by a 0D scalar field.
   *
   * @param in The input virtual field register driving this kernel.
   * @param operation The SlicePointOp opcode
   * @param resultType The type of the resulting vector field.
   * @return The synthesized hyperkernel.
   */
  def apply(in: Array[VirtualFieldRegister], operation: Opcode, resultType: FieldType): HyperKernel = {
    val inType = in(0).fieldType
    // Result drops the first field dimension; tensor shape and element type are unchanged
    val expectedResultType = new FieldType(inType.fieldShape.drop(1), inType.tensorShape, inType.elementType)
    require(resultType == expectedResultType)
    // The index input must be a 0D scalar field of Float32
    val indexType = in(1).fieldType
    require(indexType == new FieldType(Shape(), Shape(), Float32))
    require(operation == SlicePointOp)
    // Small tensor fields are addressed as whole tensors; larger ones per tensor element
    val addressing =
      if (isSmallTensorField(resultType))
        SmallTensorAddressing
      else
        TensorElementAddressing
    new SlicePointHyperKernel(in, operation, resultType, addressing)
  }
}
| hpe-cct/cct-core | src/main/scala/cogx/compiler/codegenerator/opencl/hyperkernels/SlicePointHyperKernel.scala | Scala | apache-2.0 | 4,329 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package expr
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement
import com.intellij.psi.PsiElement
import types.result.TypeResult
import types.ScType
/**
* @author Alexander Podkhalyuzin
* Date: 22.02.2008
*/
trait ScSelfInvocation extends ScalaPsiElement {
  /** First argument clause of this `this(...)` invocation, if present. */
  def args: Option[ScArgumentExprList] = findChild(classOf[ScArgumentExprList])

  /** All argument clauses of this invocation, in source order. */
  def arguments: Seq[ScArgumentExprList] =
    collection.immutable.Seq(findChildrenByClassScala(classOf[ScArgumentExprList]).toSeq: _*)

  // Resolution target of this invocation — presumably the constructor being delegated
  // to, when resolution succeeds; confirm against the implementation
  def bind: Option[PsiElement]

  def shapeType(i: Int): TypeResult[ScType]

  def shapeMultiType(i: Int): Seq[TypeResult[ScType]]

  def multiType(i: Int): Seq[TypeResult[ScType]]

  // First child of the PSI element — presumably the `this` keyword; confirm
  def thisElement: PsiElement = getFirstChild
} | consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScSelfInvocation.scala | Scala | apache-2.0 | 792 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.integration
import org.apache.kafka.common.config.ConfigException
import org.junit.{After, Before, Ignore, Test}
import scala.util.Random
import org.apache.log4j.{Level, Logger}
import java.util.Properties
import java.util.concurrent.ExecutionException
import kafka.consumer.{Consumer, ConsumerConfig}
import kafka.serializer.StringDecoder
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.utils.CoreUtils
import kafka.utils.TestUtils._
import kafka.zk.ZooKeeperTestHarness
import org.apache.kafka.common.errors.TimeoutException
import org.junit.Assert._
class UncleanLeaderElectionTest extends ZooKeeperTestHarness {
val brokerId1 = 0
val brokerId2 = 1
// controlled shutdown is needed for these tests, but we can trim the retry count and backoff interval to
// reduce test execution time
val enableControlledShutdown = true
var configProps1: Properties = null
var configProps2: Properties = null
var configs: Seq[KafkaConfig] = Seq.empty[KafkaConfig]
var servers: Seq[KafkaServer] = Seq.empty[KafkaServer]
val random = new Random()
val topic = "topic" + random.nextLong
val partitionId = 0
val kafkaApisLogger = Logger.getLogger(classOf[kafka.server.KafkaApis])
val networkProcessorLogger = Logger.getLogger(classOf[kafka.network.Processor])
val syncProducerLogger = Logger.getLogger(classOf[kafka.producer.SyncProducer])
val eventHandlerLogger = Logger.getLogger(classOf[kafka.producer.async.DefaultEventHandler[Object, Object]])
@Before
override def setUp() {
super.setUp()
configProps1 = createBrokerConfig(brokerId1, zkConnect)
configProps2 = createBrokerConfig(brokerId2, zkConnect)
for (configProps <- List(configProps1, configProps2)) {
configProps.put("controlled.shutdown.enable", enableControlledShutdown.toString)
configProps.put("controlled.shutdown.max.retries", "1")
configProps.put("controlled.shutdown.retry.backoff.ms", "1000")
}
// temporarily set loggers to a higher level so that tests run quietly
kafkaApisLogger.setLevel(Level.FATAL)
networkProcessorLogger.setLevel(Level.FATAL)
syncProducerLogger.setLevel(Level.FATAL)
eventHandlerLogger.setLevel(Level.FATAL)
}
@After
override def tearDown() {
servers.foreach(server => shutdownServer(server))
servers.foreach(server => CoreUtils.delete(server.config.logDirs))
// restore log levels
kafkaApisLogger.setLevel(Level.ERROR)
networkProcessorLogger.setLevel(Level.ERROR)
syncProducerLogger.setLevel(Level.ERROR)
eventHandlerLogger.setLevel(Level.ERROR)
super.tearDown()
}
private def startBrokers(cluster: Seq[Properties]) {
for (props <- cluster) {
val config = KafkaConfig.fromProps(props)
val server = createServer(config)
configs ++= List(config)
servers ++= List(server)
}
}
@Test
def testUncleanLeaderElectionEnabled(): Unit = {
// enable unclean leader election
configProps1.put("unclean.leader.election.enable", "true")
configProps2.put("unclean.leader.election.enable", "true")
startBrokers(Seq(configProps1, configProps2))
// create topic with 1 partition, 2 replicas, one on each broker
adminZkClient.createOrUpdateTopicPartitionAssignmentPathInZK(topic, Map(partitionId -> Seq(brokerId1, brokerId2)))
verifyUncleanLeaderElectionEnabled
}
@Test
@Ignore // Should be re-enabled after KAFKA-3096 is fixed
def testUncleanLeaderElectionDisabled(): Unit = {
// unclean leader election is disabled by default
startBrokers(Seq(configProps1, configProps2))
// create topic with 1 partition, 2 replicas, one on each broker
adminZkClient.createOrUpdateTopicPartitionAssignmentPathInZK(topic, Map(partitionId -> Seq(brokerId1, brokerId2)))
verifyUncleanLeaderElectionDisabled
}
@Test
def testUncleanLeaderElectionEnabledByTopicOverride(): Unit = {
// disable unclean leader election globally, but enable for our specific test topic
configProps1.put("unclean.leader.election.enable", "false")
configProps2.put("unclean.leader.election.enable", "false")
startBrokers(Seq(configProps1, configProps2))
// create topic with 1 partition, 2 replicas, one on each broker, and unclean leader election enabled
val topicProps = new Properties()
topicProps.put("unclean.leader.election.enable", "true")
adminZkClient.createOrUpdateTopicPartitionAssignmentPathInZK(topic, Map(partitionId -> Seq(brokerId1, brokerId2)),
topicProps)
verifyUncleanLeaderElectionEnabled
}
@Test
@Ignore // Should be re-enabled after KAFKA-3096 is fixed
def testCleanLeaderElectionDisabledByTopicOverride(): Unit = {
// enable unclean leader election globally, but disable for our specific test topic
configProps1.put("unclean.leader.election.enable", "true")
configProps2.put("unclean.leader.election.enable", "true")
startBrokers(Seq(configProps1, configProps2))
// create topic with 1 partition, 2 replicas, one on each broker, and unclean leader election disabled
val topicProps = new Properties()
topicProps.put("unclean.leader.election.enable", "false")
adminZkClient.createOrUpdateTopicPartitionAssignmentPathInZK(topic, Map(partitionId -> Seq(brokerId1, brokerId2)),
topicProps)
verifyUncleanLeaderElectionDisabled
}
@Test
def testUncleanLeaderElectionInvalidTopicOverride(): Unit = {
startBrokers(Seq(configProps1))
// create topic with an invalid value for unclean leader election
val topicProps = new Properties()
topicProps.put("unclean.leader.election.enable", "invalid")
intercept[ConfigException] {
adminZkClient.createOrUpdateTopicPartitionAssignmentPathInZK(topic, Map(partitionId -> Seq(brokerId1)), topicProps)
}
}
def verifyUncleanLeaderElectionEnabled(): Unit = {
// wait until leader is elected
val leaderId = waitUntilLeaderIsElectedOrChanged(zkUtils, topic, partitionId)
debug("Leader for " + topic + " is elected to be: %s".format(leaderId))
assertTrue("Leader id is set to expected value for topic: " + topic, leaderId == brokerId1 || leaderId == brokerId2)
// the non-leader broker is the follower
val followerId = if (leaderId == brokerId1) brokerId2 else brokerId1
debug("Follower for " + topic + " is: %s".format(followerId))
produceMessage(servers, topic, "first")
waitUntilMetadataIsPropagated(servers, topic, partitionId)
assertEquals(List("first"), consumeAllMessages(topic))
// shutdown follower server
servers.filter(server => server.config.brokerId == followerId).map(server => shutdownServer(server))
produceMessage(servers, topic, "second")
assertEquals(List("first", "second"), consumeAllMessages(topic))
// shutdown leader and then restart follower
servers.filter(server => server.config.brokerId == leaderId).map(server => shutdownServer(server))
servers.filter(server => server.config.brokerId == followerId).map(server => server.startup())
// wait until new leader is (uncleanly) elected
waitUntilLeaderIsElectedOrChanged(zkUtils, topic, partitionId, newLeaderOpt = Some(followerId))
produceMessage(servers, topic, "third")
// second message was lost due to unclean election
assertEquals(List("first", "third"), consumeAllMessages(topic))
}
def verifyUncleanLeaderElectionDisabled(): Unit = {
// wait until leader is elected
val leaderId = waitUntilLeaderIsElectedOrChanged(zkUtils, topic, partitionId)
debug("Leader for " + topic + " is elected to be: %s".format(leaderId))
assertTrue("Leader id is set to expected value for topic: " + topic, leaderId == brokerId1 || leaderId == brokerId2)
// the non-leader broker is the follower
val followerId = if (leaderId == brokerId1) brokerId2 else brokerId1
debug("Follower for " + topic + " is: %s".format(followerId))
produceMessage(servers, topic, "first")
waitUntilMetadataIsPropagated(servers, topic, partitionId)
assertEquals(List("first"), consumeAllMessages(topic))
// shutdown follower server
servers.filter(server => server.config.brokerId == followerId).map(server => shutdownServer(server))
produceMessage(servers, topic, "second")
assertEquals(List("first", "second"), consumeAllMessages(topic))
// shutdown leader and then restart follower
servers.filter(server => server.config.brokerId == leaderId).map(server => shutdownServer(server))
servers.filter(server => server.config.brokerId == followerId).map(server => server.startup())
// verify that unclean election to non-ISR follower does not occur
waitUntilLeaderIsElectedOrChanged(zkUtils, topic, partitionId, newLeaderOpt = Some(-1))
// message production and consumption should both fail while leader is down
try {
produceMessage(servers, topic, "third")
fail("Message produced while leader is down should fail, but it succeeded")
} catch {
case e: ExecutionException if e.getCause.isInstanceOf[TimeoutException] => // expected
}
assertEquals(List.empty[String], consumeAllMessages(topic))
// restart leader temporarily to send a successfully replicated message
servers.filter(server => server.config.brokerId == leaderId).map(server => server.startup())
waitUntilLeaderIsElectedOrChanged(zkUtils, topic, partitionId, newLeaderOpt = Some(leaderId))
produceMessage(servers, topic, "third")
waitUntilMetadataIsPropagated(servers, topic, partitionId)
servers.filter(server => server.config.brokerId == leaderId).map(server => shutdownServer(server))
// verify clean leader transition to ISR follower
waitUntilLeaderIsElectedOrChanged(zkUtils, topic, partitionId, newLeaderOpt = Some(followerId))
// verify messages can be consumed from ISR follower that was just promoted to leader
assertEquals(List("first", "second", "third"), consumeAllMessages(topic))
}
  // Stops the broker and blocks until shutdown has fully completed, so that the
  // leader-election assertions that follow do not race with a half-stopped server.
  private def shutdownServer(server: KafkaServer) = {
    server.shutdown()
    server.awaitShutdown()
  }
private def consumeAllMessages(topic: String) : List[String] = {
// use a fresh consumer group every time so that we don't need to mess with disabling auto-commit or
// resetting the ZK offset
val consumerProps = createConsumerProperties(zkConnect, "group" + random.nextLong, "id", 1000)
val consumerConnector = Consumer.create(new ConsumerConfig(consumerProps))
val messageStream = consumerConnector.createMessageStreams(Map(topic -> 1), new StringDecoder(), new StringDecoder())
val messages = getMessages(messageStream)
consumerConnector.shutdown
messages
}
}
| themarkypantz/kafka | core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala | Scala | apache-2.0 | 11,460 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.sdk.test
import java.io.{Serializable => JSerializable}
import com.stratio.sparta.sdk.WriteOp._
import com.stratio.sparta.sdk.{OperatorEntityCount, WriteOp}
import org.apache.spark.sql.types.StructType
/** Test double for `OperatorEntityCount`: the reduce step simply propagates the
 *  first value it is given, and the write operation is always an increment.
 */
class OperatorEntityCountMock(name: String, schema: StructType, properties: Map[String, JSerializable])
  extends OperatorEntityCount(name, schema, properties) {

  // headOption.flatten instead of head, so an empty reduce window yields None
  // rather than throwing NoSuchElementException; non-empty input behaves as before
  override def processReduce(values: Iterable[Option[Any]]): Option[Any] = values.headOption.flatten

  override def writeOperation: WriteOp = WriteOp.Inc
}
| danielcsant/sparta | sdk/src/test/scala/com/stratio/sparta/sdk/test/OperatorEntityCountMock.scala | Scala | apache-2.0 | 1,158 |
package views.html.clas
import controllers.routes
import lila.api.Context
import lila.app.templating.Environment._
import lila.app.ui.ScalatagsTemplate._
import lila.clas.{ Clas, Student }
object bits {

  /** Shared page layout for the classes (clas) section.
   *
   *  @param title   page title passed through to the base layout
   *  @param active  Left: a class whose sidebar (class link + student links) is shown expanded;
   *                 Right: the key of the top-level menu entry to highlight ("classes" / "newClass")
   *  @param student the student page currently shown, if any, used to highlight its sidebar entry
   *  @param body    page content
   */
  def layout(
      title: String,
      active: Either[Clas.WithStudents, String],
      student: Option[Student] = none
  )(body: Modifier*)(implicit ctx: Context) =
    views.html.base.layout(
      title = title,
      moreCss = cssTag("clas"),
      moreJs = jsModule("clas")
    )(
      // teachers get the full side menu; everyone else gets a plain box
      if (isGranted(_.Teacher))
        main(cls := "page-menu")(
          st.nav(cls := "page-menu__menu subnav")(
            a(cls := active.toOption.map(_.active("classes")), href := routes.Clas.index)(
              trans.clas.lichessClasses()
            ),
            // when a class is active, list it with its students;
            // otherwise (the `|` fallback) show the "new class" link
            active.left.toOption.map { clas =>
              frag(
                a(cls := "active", href := routes.Clas.show(clas.clas.id.value))(clas.clas.name),
                clas.students.map { s =>
                  a(
                    cls := List("student" -> true, "active" -> student.exists(s.is)),
                    href := routes.Clas.studentShow(clas.clas.id.value, s.userId)
                  )(
                    titleNameOrId(s.userId),
                    em(s.realName)
                  )
                }
              )
            } | {
              a(cls := active.toOption.map(_.active("newClass")), href := routes.Clas.form)(
                trans.clas.newClass()
              )
            }
          ),
          div(cls := "page-menu__content box")(body)
        )
      else main(cls := "page-small box")(body)
    )

  /** Renders "closed by <user> <relative time>" for an archived class. */
  def showArchived(archived: Clas.Recorded)(implicit ctx: Context) =
    div(
      trans.clas.closedByX(userIdLink(archived.by.some)),
      " ",
      momentFromNowOnce(archived.at)
    )
}
| luanlv/lila | app/views/clas/bits.scala | Scala | mit | 1,810 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.consumers
import minitest.TestSuite
import monix.execution.Callback
import monix.execution.Ack.Continue
import monix.execution.Cancelable
import monix.execution.atomic.Atomic
import monix.execution.exceptions.DummyException
import monix.execution.schedulers.TestScheduler
import monix.reactive.{Consumer, Observable}
import scala.concurrent.Promise
import scala.util.{Failure, Success}
/** Tests for `Consumer.foreachParallel`: completion, upstream-error propagation,
 *  and protection against exceptions thrown by the user callback.
 */
object ForeachParallelConsumerSuite extends TestSuite[TestScheduler] {
  def setup(): TestScheduler = TestScheduler()

  def tearDown(s: TestScheduler): Unit = {
    assert(s.state.tasks.isEmpty, "TestScheduler should have no pending tasks")
  }

  test("should sum a long stream") { implicit s =>
    val count = 10000L
    val obs = Observable.range(0, count)
    val sum = Atomic(0L)

    val f = obs.consumeWith(Consumer.foreachParallel(10)(sum.add)).runToFuture
    s.tick()

    // the consumer completes and every element was added exactly once
    assertEquals(f.value, Some(Success(())))
    assertEquals(sum.get(), count * (count - 1) / 2)
  }

  test("should interrupt with error") { implicit s =>
    val ex = DummyException("dummy")
    val obs = Observable.range(0, 10000).endWithError(ex)
    val sum = Atomic(0L)

    val f = obs.consumeWith(Consumer.foreachParallel(10)(sum.add)).runToFuture
    s.tick()

    // upstream failure is surfaced through the resulting future
    assertEquals(f.value, Some(Failure(ex)))
  }

  test("should protect against user error") { implicit s =>
    val ex = DummyException("dummy")
    var mainWasCanceled = false

    // the callback ignores its argument, so use `_` instead of naming it
    val consumer = Consumer.foreachParallel[Int](10)(_ => throw ex)
    val onFinish = Promise[Unit]()

    val (out, c) = consumer.createSubscriber(Callback.fromPromise(onFinish), s)
    c := Cancelable { () =>
      mainWasCanceled = true
    }

    s.tick()
    assertEquals(out.onNext(1), Continue)

    s.tick()
    // a throwing callback must cancel the subscription and fail the callback
    assert(mainWasCanceled, "mainWasCanceled")
    assertEquals(onFinish.future.value, Some(Failure(ex)))
  }
}
| alexandru/monifu | monix-reactive/shared/src/test/scala/monix/reactive/consumers/ForeachParallelConsumerSuite.scala | Scala | apache-2.0 | 2,545 |
package process
import java.util
import java.util.Collections
import kpi.twitter.analysis.utils.{PredictedStatus, SentimentLabel, TweetSerDe}
import org.apache.kafka.clients.consumer.{ConsumerRecord, ConsumerRecords, KafkaConsumer}
import org.apache.kafka.common.TopicPartition
import org.scalatest.FunSuite
import org.scalatest.mockito.MockitoSugar
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import twitter4j.Status
/** Unit tests for KafkaEventSource using a mocked KafkaConsumer and a MockTime
 *  clock: subscription wiring plus the two termination conditions of poll
 *  (timeout reached vs. max records reached).
 */
class KafkaEventSourceTest extends FunSuite with MockitoSugar {

  test("subscribe should be invoked once for correct topic") {
    val topicName = "fake"
    val mockConsumer = mock[KafkaConsumer[SentimentLabel, Status]]
    val mockTime = new MockTime

    // constructing the source is expected to subscribe to the topic
    val kafkaEventSource = new KafkaEventSource(mockConsumer, topicName, mockTime)

    verify(mockConsumer, times(1)).subscribe(Collections.singletonList(topicName))
  }

  /**
   * Test bound by time
   */
  test("poll should return on timeout") {
    val topicName = "fake"
    val mockConsumer = mock[KafkaConsumer[SentimentLabel, Status]]
    val mockTime = new MockTime

    // the mocked consumer burns the whole poll budget on the fake clock and
    // returns no records, so poll must give up once the timeout elapses
    when(mockConsumer.poll(1000)).thenAnswer(new Answer[ConsumerRecords[SentimentLabel, Status]]() {
      override def answer(invocation: InvocationOnMock): ConsumerRecords[SentimentLabel, Status] = {
        val args = invocation.getArguments
        mockTime.sleep(args(0).asInstanceOf[Long])
        ConsumerRecords.empty[SentimentLabel, Status]()
      }
    })

    val kafkaEventSource = new KafkaEventSource(mockConsumer, topicName, mockTime)

    val records = kafkaEventSource.poll(1000, 1)
    assert(0 === records.size)
    // exactly the timeout should have elapsed on the mock clock
    assert(1000 === mockTime.currentMillis)
  }

  /**
   * Test bound by records size
   */
  test("poll should return on max records") {
    val topicName = "fake"
    val mockConsumer = mock[KafkaConsumer[SentimentLabel, Status]]
    val mockTime = new MockTime

    // each mocked poll costs 1ms and yields one record, so maxRecords = 1
    // should stop the loop after a single iteration
    when(mockConsumer.poll(1000)).thenAnswer(new Answer[ConsumerRecords[SentimentLabel, Status]]() {
      override def answer(invocation: InvocationOnMock): ConsumerRecords[SentimentLabel, Status] = {
        mockTime.sleep(1)
        val tp = new TopicPartition(topicName, 1)
        val record = new ConsumerRecord[SentimentLabel, Status](topicName, 0, 0, mock[SentimentLabel], mock[Status])
        val recordsMap = new util.HashMap[TopicPartition, util.List[ConsumerRecord[SentimentLabel, Status]]]()
        val recordsList = new util.ArrayList[ConsumerRecord[SentimentLabel, Status]]()
        recordsList.add(record)
        recordsMap.put(tp, recordsList)
        new ConsumerRecords[SentimentLabel, Status](recordsMap)
      }
    })

    val kafkaEventSource = new KafkaEventSource(mockConsumer, topicName, mockTime)

    val records = kafkaEventSource.poll(1000, 1)
    assert(1 === records.size)
    assert(1 === mockTime.currentMillis)
  }
}
| GRpro/TwitterAnalytics | webapp/test/process/KafkaEventSourceTest.scala | Scala | apache-2.0 | 2,884 |
/**
* This file is part of SensApp [ http://sensapp.modelbased.net ]
*
* Copyright (C) 2011- SINTEF ICT
* Contact: SINTEF ICT <nicolas.ferry@sintef.no>
*
* Module: net.modelbased.sensapp
*
* SensApp is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* SensApp is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with SensApp. If not, see
* <http://www.gnu.org/licenses/>.
*/
package net.modelbased.sensapp.backyard.apm2import;
import datasets.{EBikePwr, WingFlight1, EBike1, Bike1}
import junit.framework._;
import Assert._;
/** JUnit 3 entry points for this module's smoke tests. */
object AppTest {

  /** The suite containing every test method of [[AppTest]]. */
  def suite: Test = new TestSuite(classOf[AppTest])

  /** Command-line runner for the suite. */
  def main(args: Array[String]): Unit = {
    junit.textui.TestRunner.run(suite)
  }
}
/**
 * Smoke test: replays each bundled APM log dataset importer end to end and
 * fails if any of them throws.
 */
class AppTest extends TestCase("app") {

  /**
   * Runs every dataset importer; `null` is passed because these mains do not
   * read their command-line arguments here.
   */
  def testProcessLogs(): Unit = {
    Bike1.main(null)
    EBike1.main(null)
    WingFlight1.main(null)
    EBikePwr.main(null)
  }

  //def testKO() = assertTrue(false);
}
| SINTEF-9012/sensapp | net.modelbased.sensapp.backyard.apm2import/src/test/scala/net/modelbased/sensapp/backyard/apm2import/AppTest.scala | Scala | lgpl-3.0 | 1,519 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn.tf
import com.intel.analytics.bigdl.dllib.nn.abstractnn.DataFormat
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.T
import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest
import scala.util.Random
class Conv2DBackFilterSerialTest extends ModuleSerializationTest {

  /** Runs the project's serialization round-trip check on a Conv2DBackFilter
   *  module fed with a random NHWC input, the kernel dimensions, and a gradient.
   */
  override def test(): Unit = {
    val module =
      Conv2DBackFilter[Float](2, 2, -1, -1, DataFormat.NHWC).setName("conv2dBackFilter")

    // random feature map, kernel-size descriptor, and output gradient
    val featureMap = Tensor[Float](1, 4, 3, 3).apply1(_ => Random.nextFloat())
    val kernelDims = Tensor[Int](T(2, 2, 3, 3))
    val gradient = Tensor[Float](1, 2, 2, 3).apply1(_ => Random.nextFloat())

    runSerializationTest(module, T(featureMap, kernelDims, gradient))
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/tf/Conv2DBackFilterSpec.scala | Scala | apache-2.0 | 1,443 |
package com.github.scalaconsolefx.interpreter
import scala.tools.nsc.Settings
import java.io._
object Main {

  /** Demo driver: feeds a few snippets to [[SFXInterpreter]] and echoes each
   *  script together with the interpreter's result.
   */
  def main(args: Array[String]): Unit = {
    val names: Array[String] = Array("1")
    // .iterator replaces the long-deprecated .elements alias
    println(names.iterator)

    val interpreter = new SFXInterpreter()
    //var result = interpreter.interpret("println(\"Hello World\")\nval x= 10\nprintln(x)");
    //println("*\n" + result)

    // same three scripts as before, interpreted in the same order;
    // a val + foreach replaces the reassigned vars
    val scripts = Seq(
      "trait Fruit{\ndef eat():Unit = {\nprintln(\"X\")\n}\n}\n",
      "settings",
      "scalaVersion"
    )
    scripts.foreach { script =>
      println(script)
      val result = interpreter.interpret(script)
      println("->\n" + result)
    }

    // Tuple2 construction via the arrow method; prints "(1,2)"
    println(1.->(2))
  }
}
| cretzel/ScalaConsoleFX | scala-console-fx/src/main/scala/com/github/scalaconsolefx/interpreter/Main.scala | Scala | apache-2.0 | 883 |
// Compiler "pos" regression test (t6925, presumably ticket SI-6925 — confirm against
// the tracker): the file only needs to type check, so the code must stay as-is.
class Test {
  def f[T](xs: Set[T]) /* no expected type to trigger inference */ =
    xs collect { case x => x }

  def g[T](xs: Set[T]): Set[T] = f[T](xs) // check that f's inferred type is Set[T]

  // check that this type checks:
  List(1).flatMap(n => Set(1).collect { case w => w })
}
| som-snytt/dotty | tests/pos/t6925.scala | Scala | apache-2.0 | 290 |
package com.ee.assets.transformers
import java.io._
import java.util.zip.GZIPInputStream
import org.specs2.mutable.Specification
/** Integration test for the Read -> Gzip -> Write transformer pipeline:
 *  gzips a source js file and checks the decompressed output matches.
 */
class ReadGzipWriteTest extends Specification with BaseIntegration {

  "Read,Gzip,Write" should {

    val outDir = makePath("target", "test-files", "gzip-files")

    // maps a relative output path to a File under outDir, creating parent dirs
    def fileFn(path: String) = {
      val f = new File(makePath(outDir, path))
      f.getParentFile.mkdirs()
      f
    }

    "work" in new cleanGenerated(outDir) {
      val read = ElementReader(readFn("it"))
      val gzip = Gzip()
      val write = ByteArrayWriter(fileFn)

      val elements = Seq(
        PathElement(makePath(pkg, "js-files", "one.js"))
      )

      val combi = read andThen gzip andThen write
      val processed = combi(elements)

      readGzip( s"$outDir${File.separator}${processed(0).path}").trim ===
        """var x = function(){
          |}
          | """.stripMargin.trim
    }
  }

  /** Decompresses the gzip file at `path` and returns its text, each line
   *  followed by the platform line separator.
   *
   *  The previous implementation called readLine both in the do-block and in
   *  the while condition, which silently dropped every other line and appended
   *  the literal string "null" at end of stream; it also never closed the stream.
   */
  private def readGzip(path: String): String = {
    val buffered = new BufferedReader(
      new InputStreamReader(new GZIPInputStream(new FileInputStream(path)), "UTF-8"))
    try {
      Iterator
        .continually(buffered.readLine())
        .takeWhile(_ != null) // readLine returns null at end of stream
        .map(_ + System.getProperty("line.separator"))
        .mkString
    } finally {
      buffered.close()
    }
  }
}
| edeustace/assets-loader | plugin/it/com/ee/assets/transformers/ReadGzipWriteTest.scala | Scala | mit | 1,394 |
package org.allenai.common.webapp
import spray.json.DefaultJsonProtocol._
case class Ping(message: String)
object Ping {
implicit val pingJsonFormat = jsonFormat1(Ping.apply)
}
case class Pong(message: String)
object Pong {
implicit val pongJsonFormat = jsonFormat1(Pong.apply)
}
| jkinkead/common | webapp/src/it/scala/org/allenai/common/webapp/Protocol.scala | Scala | apache-2.0 | 287 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package reflect
package runtime
import internal.{SomePhase, NoPhase, Phase}
/** A helper trait to initialize things that need to be set before JavaMirrors and other
 *  reflect specific traits are initialized */
private[runtime] trait ReflectSetup extends internal.SymbolTable {
  // give the symbol table a minimal two-phase schedule: NoPhase followed by SomePhase
  override val phaseWithId: Array[Phase] = Array(NoPhase, SomePhase)
  override val currentRunId = 1 // fake a run id so that it is different from NoRunId
  phase = SomePhase // set to a phase different from NoPhase
}
| martijnhoekstra/scala | src/reflect/scala/reflect/runtime/ReflectSetup.scala | Scala | apache-2.0 | 803 |
package com.softwaremill.streams.complete
import java.io.File
import akka.actor.ActorSystem
import akka.stream.scaladsl.{FileIO, Framing, Keep}
import akka.stream.{ActorMaterializer, IOResult}
import akka.util.ByteString
import com.softwaremill.streams.complete.util.TestFiles
import com.softwaremill.streams.complete.util.Timed._
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scalaz.stream.{io, text}
/** A line-oriented file copy with filtering: implementations read `from`,
 *  drop lines containing "#!@", replace '*' with '0', and write the result to `to`.
 */
trait TransferTransformFile {
  /**
   * @return Number of bytes written
   */
  def run(from: File, to: File): Long
}
/** Akka Streams implementation: frame the file into lines, filter/transform,
 *  and sink back into a file, blocking until the IO result arrives.
 */
object AkkaStreamsTransferTransformFile extends TransferTransformFile {
  private lazy implicit val system = ActorSystem()

  override def run(from: File, to: File) = {
    implicit val mat = ActorMaterializer()

    // decode the input into UTF-8 lines
    val lines = FileIO
      .fromPath(from.toPath)
      .via(Framing.delimiter(ByteString("\n"), 1048576))
      .map(_.utf8String)

    // filter/transform and re-encode with newlines between lines
    val transformed = lines
      .filter(!_.contains("#!@"))
      .map(_.replace("*", "0"))
      .intersperse("\n")
      .map(ByteString(_))

    val ioResult: Future[IOResult] = transformed.async
      .toMat(FileIO.toPath(to.toPath))(Keep.right)
      .run()

    Await.result(ioResult, 1.hour).count
  }

  def shutdown() = {
    system.terminate()
  }
}
/** scalaz-stream implementation of the same line-oriented pipeline, run synchronously. */
object ScalazStreamsTransferTransformFile extends TransferTransformFile {
  override def run(from: File, to: File) = {
    val pipeline = io
      .linesR(from.getAbsolutePath)
      .filter(line => !line.contains("#!@"))
      .map(line => line.replace("*", "0"))
      .intersperse("\n")
      .pipe(text.utf8Encode)
      .to(io.fileChunkW(to.getAbsolutePath))

    // first .run builds the Task, second .run executes it
    pipeline.run.run

    to.length()
  }
}
object TransferTransformFileRunner extends App {

  /** Runs one transfer into a temp file and reports the byte count as a string;
   *  the temp file is always deleted afterwards.
   */
  def runTransfer(ttf: TransferTransformFile, sizeMB: Int): String = {
    val output = File.createTempFile("fft", "txt")
    try ttf.run(TestFiles.testFile(sizeMB), output).toString
    finally output.delete()
  }

  // human-readable label, e.g. "scalaz, 10 MB"
  private def label(ttf: TransferTransformFile, sizeMB: Int): String = {
    val impl = if (ttf == ScalazStreamsTransferTransformFile) "scalaz" else "akka"
    s"$impl, $sizeMB MB"
  }

  val tests = List(
    (ScalazStreamsTransferTransformFile, 10),
    (ScalazStreamsTransferTransformFile, 100),
    //(ScalazStreamsTransferTransformFile, 500),
    (AkkaStreamsTransferTransformFile, 10),
    (AkkaStreamsTransferTransformFile, 100)
    //(AkkaStreamsTransferTransformFile, 500)
  )

  runTests(
    tests.map { case (ttf, sizeMB) => (label(ttf, sizeMB), () => runTransfer(ttf, sizeMB)) },
    3
  )

  AkkaStreamsTransferTransformFile.shutdown()
}
| adamw/streams-pres | src/main/scala/com/softwaremill/streams/complete/TransferTransformFile.scala | Scala | apache-2.0 | 2,378 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package collection
package mutable
import generic._
import immutable.{List, Nil, ::}
import java.io.{ObjectOutputStream, ObjectInputStream}
/** A `Buffer` implementation backed by a list. It provides constant time
* prepend and append. Most other operations are linear.
*
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
* @since 1
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#list_buffers "Scala's Collection Library overview"]]
* section on `List Buffers` for more information.
*
* @tparam A the type of this list buffer's elements.
*
* @define Coll `ListBuffer`
* @define coll list buffer
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ListBuffer[B]` because an implicit of type `CanBuildFrom[ListBuffer, B, ListBuffer[B]]`
* is defined in object `ListBuffer`.
* @define bfinfo an implicit value of class `CanBuildFrom` which determines the
* result class `That` from the current representation type `Repr`
* and the new element type `B`. This is usually the `canBuildFrom` value
* defined in object `ListBuffer`.
* @define orderDependent
* @define orderDependentFold
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
@SerialVersionUID(3419063961353022662L)
final class ListBuffer[A]
      extends AbstractBuffer[A]
         with Buffer[A]
         with GenericTraversableTemplate[A, ListBuffer]
         with BufferLike[A, ListBuffer[A]]
         with ReusableBuilder[A, List[A]]
         with SeqForwarder[A]
         with Serializable
{
  override def companion: GenericCompanion[ListBuffer] = ListBuffer

  import scala.collection.Traversable
  import scala.collection.immutable.ListSerializeEnd

  /** Expected invariants:
   *  If start.isEmpty, last0 == null
   *  If start.nonEmpty, last0 != null
   *  If len == 0, start.isEmpty
   *  If len > 0, start.nonEmpty
   */
  private var start: List[A] = Nil      // the accumulated list
  private var last0: ::[A] = _          // last cons cell, enabling O(1) append
  private var exported: Boolean = false // true once toList handed out `start`; triggers copy-on-write
  private var len = 0                   // cached length, so length/size are O(1)

  protected def underlying: List[A] = start

  // Custom serialization: writes the elements followed by a sentinel, then the
  // exported flag and the length; last0 is reconstructed on read.
  private def writeObject(out: ObjectOutputStream) {
    // write start
    var xs: List[A] = start
    while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
    out.writeObject(ListSerializeEnd)

    // no need to write last0

    // write if exported
    out.writeBoolean(exported)

    // write the length
    out.writeInt(len)
  }

  private def readObject(in: ObjectInputStream) {
    // read start, set last0 appropriately
    var elem: A = in.readObject.asInstanceOf[A]
    if (elem == ListSerializeEnd) {
      start = Nil
      last0 = null
    } else {
      var current = new ::(elem, Nil)
      start = current
      elem = in.readObject.asInstanceOf[A]
      while (elem != ListSerializeEnd) {
        val list = new ::(elem, Nil)
        current.tl = list
        current = list
        elem = in.readObject.asInstanceOf[A]
      }
      last0 = current
      start
    }

    // read if exported
    exported = in.readBoolean()

    // read the length
    len = in.readInt()
  }

  /** The current length of the buffer.
   *
   *  This operation takes constant time.
   */
  override def length = len

  // Don't use the inherited size, which forwards to a List and is O(n).
  override def size = length

  // Implementations of abstract methods in Buffer

  override def apply(n: Int): A =
    if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString())
    else super.apply(n)

  /** Replaces element at index `n` with the new element
   *  `newelem`. Takes time linear in the buffer size. (except the
   *  first element, which is updated in constant time).
   *
   *  @param n  the index of the element to replace.
   *  @param x  the new element.
   *  @throws IndexOutOfBoundsException if `n` is out of bounds.
   */
  def update(n: Int, x: A) {
    // We check the bounds early, so that we don't trigger copying.
    if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString)
    if (exported) copy()
    if (n == 0) {
      val newElem = new :: (x, start.tail)
      if (last0 eq start) {
        last0 = newElem
      }
      start = newElem
    } else {
      // walk to the cell before index n, then splice in a fresh cons cell
      var cursor = start
      var i = 1
      while (i < n) {
        cursor = cursor.tail
        i += 1
      }
      val newElem = new :: (x, cursor.tail.tail)
      if (last0 eq cursor.tail) {
        last0 = newElem
      }
      cursor.asInstanceOf[::[A]].tl = newElem
    }
  }

  /** Appends a single element to this buffer. This operation takes constant time.
   *
   *  @param x  the element to append.
   *  @return   this $coll.
   */
  def += (x: A): this.type = {
    if (exported) copy()
    if (isEmpty) {
      last0 = new :: (x, Nil)
      start = last0
    } else {
      val last1 = last0
      last0 = new :: (x, Nil)
      last1.tl = last0
    }
    len += 1
    this
  }

  // guard against appending the buffer to itself while iterating over it
  override def ++=(xs: TraversableOnce[A]): this.type = xs match {
    case x: AnyRef if x eq this => this ++= (this take size)
    case _ => super.++=(xs)
  }

  override def ++=:(xs: TraversableOnce[A]): this.type =
    if (xs.asInstanceOf[AnyRef] eq this) ++=: (this take size) else super.++=:(xs)

  /** Clears the buffer contents.
   */
  def clear() {
    start = Nil
    last0 = null
    exported = false
    len = 0
  }

  /** Prepends a single element to this buffer. This operation takes constant
   *  time.
   *
   *  @param x  the element to prepend.
   *  @return   this $coll.
   */
  def +=: (x: A): this.type = {
    if (exported) copy()
    val newElem = new :: (x, start)
    if (isEmpty) last0 = newElem
    start = newElem
    len += 1
    this
  }

  /** Inserts new elements at the index `n`. Opposed to method
   *  `update`, this method will not replace an element with a new
   *  one. Instead, it will insert a new element at index `n`.
   *
   *  @param  n     the index where a new element will be inserted.
   *  @param  seq   the iterable object providing all elements to insert.
   *  @throws IndexOutOfBoundsException if `n` is out of bounds.
   */
  def insertAll(n: Int, seq: Traversable[A]) {
    // We check the bounds early, so that we don't trigger copying.
    if (n < 0 || n > len) throw new IndexOutOfBoundsException(n.toString)
    if (exported) copy()
    var elems = seq.toList.reverse
    len += elems.length
    if (n == 0) {
      // prepend the reversed elements one by one
      while (!elems.isEmpty) {
        val newElem = new :: (elems.head, start)
        if (start.isEmpty) last0 = newElem
        start = newElem
        elems = elems.tail
      }
    } else {
      // walk to the insertion point, then splice each element in
      var cursor = start
      var i = 1
      while (i < n) {
        cursor = cursor.tail
        i += 1
      }
      while (!elems.isEmpty) {
        val newElem = new :: (elems.head, cursor.tail)
        if (cursor.tail.isEmpty) last0 = newElem
        cursor.asInstanceOf[::[A]].tl = newElem
        elems = elems.tail
      }
    }
  }

  /** Reduce the length of the buffer, and null out last0
   *  if this reduces the length to 0.
   */
  private def reduceLengthBy(num: Int) {
    len -= num
    if (len <= 0)   // obviously shouldn't be < 0, but still better not to leak
      last0 = null
  }

  /** Removes a given number of elements on a given index position. May take
   *  time linear in the buffer size.
   *
   *  @param n         the index which refers to the first element to remove.
   *  @param count     the number of elements to remove.
   *  @throws IndexOutOfBoundsException if the index `n` is not in the valid range
   *          `0 <= n <= length - count` (with `count > 0`).
   *  @throws IllegalArgumentException if `count < 0`.
   */
  override def remove(n: Int, count: Int) {
    if (count < 0) throw new IllegalArgumentException("removing negative number of elements: " + count.toString)
    else if (count == 0) return  // Nothing to do
    if (n < 0 || n > len - count) throw new IndexOutOfBoundsException("at " + n.toString + " deleting " + count.toString)
    if (exported) copy()
    val n1 = n max 0
    val count1 = count min (len - n1)
    if (n1 == 0) {
      var c = count1
      while (c > 0) {
        start = start.tail
        c -= 1
      }
    } else {
      // walk to the cell before n1, then unlink count1 cells
      var cursor = start
      var i = 1
      while (i < n1) {
        cursor = cursor.tail
        i += 1
      }
      var c = count1
      while (c > 0) {
        if (last0 eq cursor.tail) last0 = cursor.asInstanceOf[::[A]]
        cursor.asInstanceOf[::[A]].tl = cursor.tail.tail
        c -= 1
      }
    }
    reduceLengthBy(count1)
  }

// Implementation of abstract method in Builder

  /** Returns the accumulated `List`.
   *
   *  This method may be called multiple times to obtain snapshots of the list in different stages of construction.
   */
  def result: List[A] = toList

  /** Converts this buffer to a list. Takes constant time. The buffer is
   *  copied lazily, the first time it is mutated.
   */
  override def toList: List[A] = {
    exported = !isEmpty
    start
  }

// New methods in ListBuffer

  /** Prepends the elements of this buffer to a given list
   *
   *  @param xs   the list to which elements are prepended
   */
  def prependToList(xs: List[A]): List[A] = {
    if (isEmpty) xs
    else {
      if (exported) copy()
      last0.tl = xs
      toList
    }
  }

// Overrides of methods in Buffer

  /** Removes the element on a given index position. May take time linear in
   *  the buffer size.
   *
   *  @param  n  the index which refers to the element to delete.
   *  @return n  the element that was formerly at position `n`.
   *  @note    an element must exists at position `n`.
   *  @throws IndexOutOfBoundsException if `n` is out of bounds.
   */
  def remove(n: Int): A = {
    if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString())
    if (exported) copy()
    var old = start.head
    if (n == 0) {
      start = start.tail
    } else {
      // walk to the cell before n, then unlink the nth cell
      var cursor = start
      var i = 1
      while (i < n) {
        cursor = cursor.tail
        i += 1
      }
      old = cursor.tail.head
      if (last0 eq cursor.tail) last0 = cursor.asInstanceOf[::[A]]
      cursor.asInstanceOf[::[A]].tl = cursor.tail.tail
    }
    reduceLengthBy(1)
    old
  }

  /** Remove a single element from this buffer. May take time linear in the
   *  buffer size.
   *
   *  @param elem  the element to remove.
   *  @return      this $coll.
   */
  override def -= (elem: A): this.type = {
    if (exported) copy()
    if (isEmpty) {}
    else if (start.head == elem) {
      start = start.tail
      reduceLengthBy(1)
    }
    else {
      // find the cell before the first occurrence, if any, and unlink it
      var cursor = start
      while (!cursor.tail.isEmpty && cursor.tail.head != elem) {
        cursor = cursor.tail
      }
      if (!cursor.tail.isEmpty) {
        val z = cursor.asInstanceOf[::[A]]
        if (z.tl == last0)
          last0 = z
        z.tl = cursor.tail.tail
        reduceLengthBy(1)
      }
    }
    this
  }

  /** Selects the last element.
   *
   *  Runs in constant time.
   *
   *  @return the last element of this buffer.
   *  @throws NoSuchElementException if this buffer is empty.
   */
  override def last: A =
    if (last0 eq null) throw new NoSuchElementException("last of empty ListBuffer")
    else last0.head

  /** Optionally selects the last element.
   *
   *  Runs in constant time.
   *
   *  @return  `Some` of the last element of this buffer if the buffer is nonempty, `None` if it is empty.
   */
  override def lastOption: Option[A] = if (last0 eq null) None else Some(last0.head)

  /** Returns an iterator over this `ListBuffer`.  The iterator will reflect
   *  changes made to the underlying `ListBuffer` beyond the next element;
   *  the next element's value is cached so that `hasNext` and `next` are
   *  guaranteed to be consistent.  In particular, an empty `ListBuffer`
   *  will give an empty iterator even if the `ListBuffer` is later filled.
   */
  override def iterator: Iterator[A] = new AbstractIterator[A] {
    // Have to be careful iterating over mutable structures.
    // This used to have "(cursor ne last0)" as part of its hasNext
    // condition, which means it can return true even when the iterator
    // is exhausted.  Inconsistent results are acceptable when one mutates
    // a structure while iterating, but we should never return hasNext == true
    // on exhausted iterators (thus creating exceptions) merely because
    // values were changed in-place.
    var cursor: List[A] = if (ListBuffer.this.isEmpty) Nil else start

    def hasNext: Boolean = cursor ne Nil
    def next(): A =
      if (!hasNext) throw new NoSuchElementException("next on empty Iterator")
      else {
        val ans = cursor.head
        cursor = cursor.tail
        ans
      }
  }

  // Private methods

  /** Copy contents of this buffer */
  private def copy() {
    if (isEmpty) return
    var cursor = start
    val limit = last0.tail
    clear()
    while (cursor ne limit) {
      this += cursor.head
      cursor = cursor.tail
    }
  }

  override def equals(that: Any): Boolean = that match {
    case that: ListBuffer[_] => this.start equals that.start
    case _                   => super.equals(that)
  }

  /** Returns a clone of this buffer.
   *
   *  @return a `ListBuffer` with the same elements.
   */
  override def clone(): ListBuffer[A] = (new ListBuffer[A]) ++= this

  /** Defines the prefix of the string representation.
   *
   *  @return the string representation of this buffer.
   */
  override def stringPrefix: String = "ListBuffer"
}
/** $factoryInfo
 *  @define Coll `ListBuffer`
 *  @define coll list buffer
 */
object ListBuffer extends SeqFactory[ListBuffer] {
  // the shared CBF instance is cast because it never depends on the element type
  implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListBuffer[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
  def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowingBuilder(new ListBuffer[A])
}
| felixmulder/scala | src/library/scala/collection/mutable/ListBuffer.scala | Scala | bsd-3-clause | 14,494 |
package org.jetbrains.plugins.scala.lang.refactoring.introduceParameter
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.TextRange
import com.intellij.psi._
import com.intellij.refactoring.introduceParameter.{IntroduceParameterData, JavaExpressionWrapper}
import gnu.trove.TIntArrayList
import org.jetbrains.plugins.scala.lang.psi.api.base.ScMethodLike
import org.jetbrains.plugins.scala.lang.psi.types.{ScType, ScTypeExt}
import org.jetbrains.plugins.scala.lang.refactoring.changeSignature.changeInfo.ScalaChangeInfo
/**
 * Data collected for the Scala "introduce parameter" refactoring.
 *
 * Implements IntelliJ's [[IntroduceParameterData]] so the Scala refactoring
 * can plug into the platform's introduce-parameter machinery.
 *
 * @param methodLike          method (or constructor) the new parameter is added to
 * @param methodToSearchFor   method whose usages are searched and updated
 * @param elems               expression elements extracted into the parameter
 * @param paramName           name of the new parameter
 * @param possibleTypes       candidate types offered for the parameter
 * @param tp                  type chosen for the new parameter
 * @param occurrences         text ranges of all occurrences of the expression
 * @param mainOcc             text range of the occurrence the action was invoked on
 * @param replaceAll          whether every occurrence should be replaced
 * @param defaultArg          text of the default argument inserted at call sites
 * @param functionalArgParams parameter list text when the argument is a function literal
 *
 * @author Nikolay.Tropin
 */
case class ScalaIntroduceParameterData(methodLike: ScMethodLike,
                                       methodToSearchFor: ScMethodLike,
                                       elems: Seq[PsiElement],
                                       paramName: String,
                                       possibleTypes: Array[ScType],
                                       tp: ScType,
                                       occurrences: Array[TextRange],
                                       mainOcc: TextRange,
                                       replaceAll: Boolean,
                                       defaultArg: String,
                                       functionalArgParams: Option[String] = None) extends IntroduceParameterData {

  // The Scala refactoring never removes existing parameters, hence always empty.
  def getParametersToRemove: TIntArrayList = new TIntArrayList()

  def getForcedType: PsiType = tp.toPsiType

  def getScalaForcedType: ScType = tp

  def isGenerateDelegate: Boolean = false

  def isDeclareFinal: Boolean = false

  def getReplaceFieldsWithGetters: Int = 0

  def getParameterName: String = paramName

  // Fix: explicit return type added — public members should not rely on
  // type inference for their API signature.
  def getParameterInitializer: JavaExpressionWrapper =
    new JavaExpressionWrapper(
      JavaPsiFacade.getElementFactory(methodLike.getProject).createExpressionFromText(getParameterName, elems.head.getContext)
    )

  def getMethodToSearchFor: PsiMethod = methodToSearchFor

  def getMethodToReplaceIn: PsiMethod = methodLike

  def getProject: Project = methodLike.getProject
}
/** Extractor exposing the introduce-parameter payload of a change info, if any. */
object isIntroduceParameter {
  def unapply(scInfo: ScalaChangeInfo): Option[ScalaIntroduceParameterData] =
    scInfo.introducedParameterData
}
package parquet.filter2.dsl
import java.lang.{Double => JDouble, Integer => JInt}
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
import parquet.filter2.predicate.Operators.{Or, UserDefined, DoubleColumn => JDoubleColumn, IntColumn => JIntColumn}
import parquet.filter2.predicate.{FilterApi, Statistics, UserDefinedPredicate}
// Minimal UserDefinedPredicate used only to verify that `filterBy` threads the
// class through to FilterApi.userDefined; its answers are constant stubs.
class DummyFilter extends UserDefinedPredicate[JInt] {
  override def keep(value: JInt): Boolean = false

  override def canDrop(statistics: Statistics[JInt]): Boolean = false

  override def inverseCanDrop(statistics: Statistics[JInt]): Boolean = false
}
@RunWith(classOf[JUnitRunner])
class DslTest extends FlatSpec {

  import parquet.filter2.dsl.Dsl._

  "predicates" should "be correctly constructed using the dsl" in {
    val intCol = IntColumn("a.b.c")
    val dblCol = DoubleColumn("x.y.z")

    // Predicate built through the DSL operators.
    val dslPredicate = !(intCol > 10 && (dblCol === 17 || ((dblCol !== 13) && (dblCol <= 20))))

    // The same tree, hand-built with the raw Java FilterApi.
    val gtTen = FilterApi.gt[JInt, JIntColumn](intCol.javaColumn, 10)
    val notThirteen = FilterApi.notEq[JDouble, JDoubleColumn](dblCol.javaColumn, 13.0)
    val atMostTwenty = FilterApi.ltEq[JDouble, JDoubleColumn](dblCol.javaColumn, 20.0)
    val isSeventeen = FilterApi.eq[JDouble, JDoubleColumn](dblCol.javaColumn, 17.0)
    val expected =
      FilterApi.not(
        FilterApi.and(gtTen, FilterApi.or(isSeventeen, FilterApi.and(notThirteen, atMostTwenty))))

    assert(dslPredicate === expected)
  }

  "user defined predicates" should "be correctly constructed" in {
    val intCol = IntColumn("a.b.c")

    val dslPredicate = (intCol > 10) || intCol.filterBy(classOf[DummyFilter])
    val expected = FilterApi.or(
      FilterApi.gt[JInt, JIntColumn](intCol.javaColumn, 10),
      FilterApi.userDefined(intCol.javaColumn, classOf[DummyFilter]))

    assert(dslPredicate === expected)

    // Also check the user-defined leaf carries the right predicate class/instance.
    val udp = dslPredicate.asInstanceOf[Or].getRight.asInstanceOf[UserDefined[JInt, DummyFilter]]
    assert(udp.getUserDefinedPredicateClass === classOf[DummyFilter])
    assert(udp.getUserDefinedPredicate.isInstanceOf[DummyFilter])
  }

  "Column == and != " should "throw a helpful warning" in {
    val intCol = IntColumn("a.b.c")

    intercept[UnsupportedOperationException] {
      intCol == 10
    }

    intercept[UnsupportedOperationException] {
      intCol != 10
    }
  }
}
| nevillelyh/parquet-mr | parquet-scala/src/test/scala/parquet/filter2/dsl/DslTest.scala | Scala | apache-2.0 | 2,270 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters the dataset for Scala code snippets that meet specific criteria and returns a sample of them, giving a basic overview of the dataset's content without providing deeper analysis.