code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package org.refptr.iscala
import java.io.File
import sbt.{
ShowLines,
Logger, ConsoleLogger,
ModuleID,ModuleInfo,CrossVersion,Resolver,
IvyPaths,InlineIvyConfiguration,IvySbt,IvyScala,IvyActions,
InlineConfiguration,UpdateConfiguration,
UpdateOptions,UpdateLogging,UnresolvedWarningConfiguration}
import Util.scalaVersion
/** Module coordinates used by [[Sbt.resolve]]. */
object Modules {
  // The Scala compiler matching the running Scala version (see Util.scalaVersion).
  val Compiler = ModuleID("org.scala-lang", "scala-compiler", scalaVersion)
  // IScala itself; crossVersion=binary appends the binary Scala version suffix (e.g. _2.11).
  val IScala = ModuleID("org.refptr.iscala", "IScala", "0.3-SNAPSHOT", crossVersion=CrossVersion.binary)
}
/** A collection of jar files renderable as a single platform classpath string. */
case class ClassPath(jars: Seq[File]) {
  // Absolute paths of all jars joined with the platform path separator.
  def classpath: String = ClassPath.joinFiles(jars: _*)
}
object ClassPath {
  /** Joins the files' absolute paths into one platform-separated classpath string. */
  def joinFiles(paths: File*): String = join(paths.map(_.getAbsolutePath): _*)

  /** Joins path strings with the platform path separator, dropping empty entries. */
  def join(paths: String*): String = paths.filter(_.nonEmpty).mkString(File.pathSeparator)
}
/** Runtime dependency resolution via sbt's embedded Ivy machinery. */
object Sbt {
  /** Resolves `modules` against `resolvers`, logging to a console logger. */
  def resolve(modules: Seq[ModuleID], resolvers: Seq[Resolver]): Option[ClassPath] =
    resolve(modules, resolvers, ConsoleLogger())

  /**
   * Resolves `modules` against `resolvers` (plus sbt's default resolvers) and
   * returns the resolved jar files, or None when resolution fails (warnings
   * are written to `logger`).
   */
  def resolve(modules: Seq[ModuleID], resolvers: Seq[Resolver], logger: Logger): Option[ClassPath] = {
    // Ivy base directory rooted at the current working directory; no global lock file.
    val paths = new IvyPaths(new File("."), None)
    val allResolvers = Resolver.withDefaultResolvers(resolvers)
    val ivyConf = new InlineIvyConfiguration(paths, allResolvers, Nil, Nil, false, None, Seq("sha1", "md5"), None, UpdateOptions(), logger)
    val ivySbt = new IvySbt(ivyConf)
    val binaryScalaVersion = CrossVersion.binaryScalaVersion(scalaVersion)
    // Ensure resolved artifacts match the running Scala binary version.
    val ivyScala = new IvyScala(scalaVersion, binaryScalaVersion, Nil, checkExplicit=true, filterImplicit=false, overrideScalaVersion=false)
    // Resolve as if IScala itself declared `modules` as dependencies.
    val settings = new InlineConfiguration(Modules.IScala, ModuleInfo("IScala"), modules, ivyScala=Some(ivyScala))
    val module = new ivySbt.Module(settings)
    val updateConf = new UpdateConfiguration(None, false, UpdateLogging.DownloadOnly)
    val updateReport = IvyActions.updateEither(module, updateConf, UnresolvedWarningConfiguration(), logger)
    updateReport match {
      case Right(report) =>
        // Each report row is a (configuration, module, artifact, file) tuple; keep distinct jars.
        Some(ClassPath(report.toSeq.map { case (_, _, _, jar) => jar }.distinct))
      case Left(warning) =>
        // Emit each unresolved-dependency warning line via the logger, then give up.
        import ShowLines._
        warning.lines.foreach(logger.error(_))
        None
    }
  }
}
| nkhuyu/IScala | src/main/scala/Sbt.scala | Scala | mit | 2,330 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet
import org.scalatest.{BeforeAndAfterAll, FunSuite}
// scalastyle:off finalize
/**
 * Test double for WarnIfNotDisposed: fixes its disposal state at construction
 * and records (instead of logging) whether a leak warning would have been emitted.
 */
class Leakable(enableTracing: Boolean = false, markDisposed: Boolean = false)
  extends WarnIfNotDisposed {
  // Disposal state is constant, controlled by the markDisposed constructor flag.
  def isDisposed: Boolean = markDisposed
  override protected def tracingEnabled = enableTracing
  // Set to true when the (overridden) warning hook fires.
  var warningWasLogged: Boolean = false
  // Exposes the protected creationTrace member for assertions.
  def getCreationTrace: Option[Array[StackTraceElement]] = creationTrace
  // Public wrapper so tests can trigger finalization deterministically.
  override def finalize(): Unit = super.finalize()
  // Record the warning instead of emitting it.
  override protected def logDisposeWarning() = {
    warningWasLogged = true
  }
}
// scalastyle:on finalize
/** Behavioural checks for the WarnIfNotDisposed leak-detection trait, via Leakable. */
class WarnIfNotDisposedSuite extends FunSuite with BeforeAndAfterAll {
  test("trace collected if tracing enabled") {
    val traced = new Leakable(enableTracing = true)
    val maybeTrace = traced.getCreationTrace
    assert(maybeTrace.isDefined)
    // The creation trace must include a frame from this test class.
    assert(maybeTrace.get.exists(frame => frame.getClassName == getClass.getName))
  }

  test("trace not collected if tracing disabled") {
    val untraced = new Leakable(enableTracing = false)
    assert(untraced.getCreationTrace.isEmpty)
  }

  test("no warning logged if object disposed") {
    val disposed = new Leakable(markDisposed = true)
    disposed.finalize()
    assert(!disposed.warningWasLogged)
  }

  test("warning logged if object not disposed") {
    val undisposed = new Leakable(markDisposed = false)
    undisposed.finalize()
    assert(undisposed.warningWasLogged)
  }
}
| indhub/mxnet | scala-package/core/src/test/scala/org/apache/mxnet/util/WarnIfNotDiposedSuite.scala | Scala | apache-2.0 | 2,216 |
package mesosphere.marathon.upgrade
import akka.actor.{ ActorRef, ActorSystem }
import akka.testkit.{ TestActorRef, TestProbe }
import akka.util.Timeout
import mesosphere.marathon.core.launchqueue.LaunchQueue
import mesosphere.marathon.core.readiness.ReadinessCheckExecutor
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.core.task.tracker.TaskTracker
import mesosphere.marathon.event.MesosStatusUpdateEvent
import mesosphere.marathon.health.HealthCheckManager
import mesosphere.marathon.io.storage.StorageProvider
import mesosphere.marathon.state._
import mesosphere.marathon.test.Mockito
import mesosphere.marathon.upgrade.DeploymentManager.{ DeploymentFinished, DeploymentStepInfo }
import mesosphere.marathon.{ MarathonSpec, MarathonTestHelper, SchedulerActions }
import org.apache.mesos.Protos.Status
import org.apache.mesos.SchedulerDriver
import org.mockito.Mockito.{ verifyNoMoreInteractions, when }
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.{ BeforeAndAfterAll, Matchers }
import scala.concurrent.Future
import scala.concurrent.duration._
/**
 * Tests for DeploymentActor: each test builds original/target groups, wires
 * mocked TaskTracker / LaunchQueue / SchedulerDriver / SchedulerActions through
 * a Fixture, runs the deployment plan, and verifies the resulting kill/start/
 * scale interactions plus the progress messages sent to the manager probe.
 *
 * NOTE(review): the mock answers publish MesosStatusUpdateEvent on the actor
 * system's event stream to simulate Mesos task status updates, which is what
 * drives the deployment steps forward.
 */
class DeploymentActorTest
  extends MarathonSpec
  with Matchers
  with BeforeAndAfterAll
  with Mockito {

  implicit val defaultTimeout: Timeout = 5.seconds

  test("Deploy") {
    val f = new Fixture
    implicit val system = f.system
    val managerProbe = TestProbe()
    val receiverProbe = TestProbe()
    // Original group: app1 x2, app2 x1, app4 (default instances).
    val app1 = AppDefinition(id = PathId("/app1"), cmd = Some("cmd"), instances = 2)
    val app2 = AppDefinition(id = PathId("/app2"), cmd = Some("cmd"), instances = 1)
    val app3 = AppDefinition(id = PathId("/app3"), cmd = Some("cmd"), instances = 1)
    val app4 = AppDefinition(id = PathId("/app4"), cmd = Some("cmd"))
    val origGroup = Group(PathId("/foo/bar"), Set(app1, app2, app4))
    val version2 = AppDefinition.VersionInfo.forNewConfig(Timestamp(1000))
    // Target: scale app1 down, change+scale app2, add app3, remove app4.
    val app1New = app1.copy(instances = 1, versionInfo = version2)
    val app2New = app2.copy(instances = 2, cmd = Some("otherCmd"), versionInfo = version2)
    val targetGroup = Group(PathId("/foo/bar"), Set(app1New, app2New, app3))
    // setting started at to 0 to make sure this survives
    val task1_1 = MarathonTestHelper.runningTask("task1_1", appVersion = app1.version, startedAt = 0)
    val task1_2 = MarathonTestHelper.runningTask("task1_2", appVersion = app1.version, startedAt = 1000)
    val task2_1 = MarathonTestHelper.runningTask("task2_1", appVersion = app2.version)
    val task3_1 = MarathonTestHelper.runningTask("task3_1", appVersion = app3.version)
    val task4_1 = MarathonTestHelper.runningTask("task4_1", appVersion = app4.version)
    val plan = DeploymentPlan(origGroup, targetGroup)
    // Tracker reports the currently running tasks per app.
    when(f.tracker.appTasksLaunchedSync(app1.id)).thenReturn(Set(task1_1, task1_2))
    when(f.tracker.appTasksLaunchedSync(app2.id)).thenReturn(Set(task2_1))
    when(f.tracker.appTasksLaunchedSync(app3.id)).thenReturn(Set(task3_1))
    when(f.tracker.appTasksLaunchedSync(app4.id)).thenReturn(Set(task4_1))
    // Killing these tasks publishes a TASK_KILLED status update (see Fixture).
    f.driverKillSendsStatusKilledFor(app1, task1_2)
    f.driverKillSendsStatusKilledFor(app2, task2_1)
    f.driverKillSendsStatusKilledFor(app4, task4_1)
    // Queueing new app2 instances immediately reports them as TASK_RUNNING.
    when(f.queue.add(same(app2New), any[Int])).thenAnswer(new Answer[Boolean] {
      def answer(invocation: InvocationOnMock): Boolean = {
        println(invocation.getArguments.toSeq)
        for (i <- 0 until invocation.getArguments()(1).asInstanceOf[Int])
          system.eventStream.publish(MesosStatusUpdateEvent(
            slaveId = "", taskId = Task.Id.forApp(app2New.id), taskStatus = "TASK_RUNNING", message = "",
            appId = app2.id, host = "", ipAddresses = None, ports = Nil, version = app2New.version.toString)
          )
        true
      }
    })
    when(f.scheduler.startApp(f.driver, app3)).thenAnswer(new Answer[Future[Unit]] {
      def answer(invocation: InvocationOnMock): Future[Unit] = {
        // system.eventStream.publish(MesosStatusUpdateEvent("", "task3_1", "TASK_RUNNING", "", app3.id, "", "", Nil, app3.version.toString))
        Future.successful(())
      }
    })
    // Scaling app3 up publishes the running status for its single task.
    when(f.scheduler.scale(f.driver, app3)).thenAnswer(new Answer[Future[Unit]] {
      def answer(invocation: InvocationOnMock): Future[Unit] = {
        system.eventStream.publish(MesosStatusUpdateEvent(
          slaveId = "", taskId = Task.Id("task3_1"), taskStatus = "TASK_RUNNING", message = "", appId = app3.id, host = "",
          ipAddresses = None, ports = Nil, version = app3.version.toString))
        Future.successful(())
      }
    })
    try {
      f.deploymentActor(managerProbe.ref, receiverProbe.ref, plan)
      // The manager must observe every plan step in order, then completion.
      plan.steps.zipWithIndex.foreach {
        case (step, num) => managerProbe.expectMsg(5.seconds, DeploymentStepInfo(plan, step, num + 1))
      }
      managerProbe.expectMsg(5.seconds, DeploymentFinished(plan))
      verify(f.scheduler).startApp(f.driver, app3.copy(instances = 0))
      verify(f.driver, times(1)).killTask(task1_2.taskId.mesosTaskId)
      verify(f.scheduler).stopApp(f.driver, app4.copy(instances = 0))
    }
    finally {
      system.shutdown()
    }
  }

  test("Restart app") {
    val f = new Fixture
    implicit val system = f.system
    val managerProbe = TestProbe()
    val receiverProbe = TestProbe()
    val app = AppDefinition(id = PathId("/app1"), cmd = Some("cmd"), instances = 2)
    val origGroup = Group(PathId("/foo/bar"), Set(app))
    val version2 = AppDefinition.VersionInfo.forNewConfig(Timestamp(1000))
    val appNew = app.copy(cmd = Some("cmd new"), versionInfo = version2)
    val targetGroup = Group(PathId("/foo/bar"), Set(appNew))
    val task1_1 = MarathonTestHelper.runningTask("task1_1", appVersion = app.version, startedAt = 0)
    val task1_2 = MarathonTestHelper.runningTask("task1_2", appVersion = app.version, startedAt = 1000)
    when(f.tracker.appTasksLaunchedSync(app.id)).thenReturn(Set(task1_1, task1_2))
    // Explicit single-step plan: restart the app in place.
    val plan = DeploymentPlan("foo", origGroup, targetGroup, List(DeploymentStep(List(RestartApplication(appNew)))), Timestamp.now())
    f.driverKillSendsStatusKilledFor(app, task1_1)
    f.driverKillSendsStatusKilledFor(app, task1_2)
    // Fresh task ids for the replacement tasks (task1_3, task1_4, ...).
    val taskIDs = Iterator.from(3)
    when(f.queue.count(appNew.id)).thenAnswer(new Answer[Int] {
      override def answer(p1: InvocationOnMock): Int = appNew.instances
    })
    // Adding replacement tasks immediately reports them as running.
    when(f.queue.add(same(appNew), any[Int])).thenAnswer(new Answer[Boolean] {
      def answer(invocation: InvocationOnMock): Boolean = {
        for (i <- 0 until invocation.getArguments()(1).asInstanceOf[Int])
          f.system.eventStream.publish(MesosStatusUpdateEvent("", Task.Id(s"task1_${taskIDs.next()}"),
            "TASK_RUNNING", "", app.id, "", None, Nil, appNew.version.toString))
        true
      }
    })
    try {
      f.deploymentActor(managerProbe.ref, receiverProbe.ref, plan)
      receiverProbe.expectMsg(DeploymentFinished(plan))
      // Both old tasks killed, both replacement tasks queued at once.
      verify(f.driver).killTask(task1_1.taskId.mesosTaskId)
      verify(f.driver).killTask(task1_2.taskId.mesosTaskId)
      verify(f.queue).add(appNew, 2)
    }
    finally {
      f.system.shutdown()
    }
  }

  test("Restart suspended app") {
    val f = new Fixture
    implicit val system = f.system
    val managerProbe = TestProbe()
    val receiverProbe = TestProbe()
    // Zero instances: a restart has nothing to kill or launch.
    val app = AppDefinition(id = PathId("/app1"), cmd = Some("cmd"), instances = 0)
    val origGroup = Group(PathId("/foo/bar"), Set(app))
    val version2 = AppDefinition.VersionInfo.forNewConfig(Timestamp(1000))
    val appNew = app.copy(cmd = Some("cmd new"), versionInfo = version2)
    val targetGroup = Group(PathId("/foo/bar"), Set(appNew))
    val plan = DeploymentPlan("foo", origGroup, targetGroup, List(DeploymentStep(List(RestartApplication(appNew)))), Timestamp.now())
    when(f.tracker.appTasksLaunchedSync(app.id)).thenReturn(Iterable.empty[Task])
    try {
      f.deploymentActor(managerProbe.ref, receiverProbe.ref, plan)
      receiverProbe.expectMsg(DeploymentFinished(plan))
    }
    finally {
      f.system.shutdown()
    }
  }

  test("Scale with tasksToKill") {
    val f = new Fixture
    implicit val system = f.system
    val managerProbe = TestProbe()
    val receiverProbe = TestProbe()
    val app1 = AppDefinition(id = PathId("/app1"), cmd = Some("cmd"), instances = 3)
    val origGroup = Group(PathId("/foo/bar"), Set(app1))
    val version2 = AppDefinition.VersionInfo.forNewConfig(Timestamp(1000))
    val app1New = app1.copy(instances = 2, versionInfo = version2)
    val targetGroup = Group(PathId("/foo/bar"), Set(app1New))
    val task1_1 = MarathonTestHelper.runningTask("task1_1", appVersion = app1.version, startedAt = 0)
    val task1_2 = MarathonTestHelper.runningTask("task1_2", appVersion = app1.version, startedAt = 500)
    val task1_3 = MarathonTestHelper.runningTask("task1_3", appVersion = app1.version, startedAt = 1000)
    // Explicitly request that task1_2 (not the newest/oldest) is the one killed.
    val plan = DeploymentPlan(original = origGroup, target = targetGroup, toKill = Map(app1.id -> Set(task1_2)))
    when(f.tracker.appTasksLaunchedSync(app1.id)).thenReturn(Set(task1_1, task1_2, task1_3))
    f.driverKillSendsStatusKilledFor(app1, task1_2)
    try {
      f.deploymentActor(managerProbe.ref, receiverProbe.ref, plan)
      plan.steps.zipWithIndex.foreach {
        case (step, num) => managerProbe.expectMsg(5.seconds, DeploymentStepInfo(plan, step, num + 1))
      }
      managerProbe.expectMsg(5.seconds, DeploymentFinished(plan))
      // Only the requested task is killed; no other driver interaction occurs.
      verify(f.driver, times(1)).killTask(task1_2.taskId.mesosTaskId)
      verifyNoMoreInteractions(f.driver)
    }
    finally {
      f.system.shutdown()
    }
  }

  /** Shared mocks plus helpers for building the actor under test. */
  class Fixture {
    val tracker: TaskTracker = mock[TaskTracker]
    val queue: LaunchQueue = mock[LaunchQueue]
    val driver: SchedulerDriver = mock[SchedulerDriver]
    val scheduler: SchedulerActions = mock[SchedulerActions]
    val storage: StorageProvider = mock[StorageProvider]
    val hcManager: HealthCheckManager = mock[HealthCheckManager]
    val config: UpgradeConfig = mock[UpgradeConfig]
    val readinessCheckExecutor: ReadinessCheckExecutor = mock[ReadinessCheckExecutor]
    implicit val system = ActorSystem("TestSystem")
    config.killBatchSize returns 100
    config.killBatchCycle returns 10.seconds

    /** Stub: killing `task` publishes a matching TASK_KILLED status update. */
    def driverKillSendsStatusKilledFor(app: AppDefinition, task: Task): Unit = {
      driver.killTask(task.taskId.mesosTaskId) answers { args =>
        system.eventStream.publish(MesosStatusUpdateEvent(
          slaveId = "", taskId = task.taskId, taskStatus = "TASK_KILLED", message = "", appId = app.id, host = "",
          ipAddresses = None, ports = Nil, version = app.version.toString))
        Status.DRIVER_RUNNING
      }
    }

    /** Starts a DeploymentActor for `plan` wired to all fixture mocks. */
    def deploymentActor(manager: ActorRef, receiver: ActorRef, plan: DeploymentPlan) = TestActorRef(
      DeploymentActor.props(
        manager,
        receiver,
        driver,
        scheduler,
        plan,
        tracker,
        queue,
        storage,
        hcManager,
        system.eventStream,
        readinessCheckExecutor,
        config
      )
    )
  }
}
| ss75710541/marathon | src/test/scala/mesosphere/marathon/upgrade/DeploymentActorTest.scala | Scala | apache-2.0 | 11,047 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Iat Chong Chan
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package th4j.func
import com.sun.jna._
import th4j.generate._
import th4j.generate.IfRealMatch
import th4j.util._
/**
* Created by et on 11/10/15.
*/
/**
 * JNA binding trait for the TH (Torch) tensor math C API; concrete per-type
 * implementations are produced by the `GenerateAllTypes` macro annotation.
 *
 * @tparam T the tensor element ("real") type
 * @tparam U the accumulator ("accreal") type returned by reductions (dot, sumall, ...)
 * @tparam Z the device marker type
 *
 * All `Pointer` arguments are raw native handles (tensors, index/mask tensors,
 * size storages, or random generators). `@IfRealMatch(...)` restricts a binding
 * to the listed element types, mirroring the C API's per-type availability.
 *
 * NOTE(review): some declarations (e.g. Tensor_baddbmm, Tensor_median and the
 * @IfRealMatch group) omit the explicit `:Unit` result type; Scala infers Unit
 * for them, so behaviour is unchanged, but the style is inconsistent. Left
 * untouched here because the macro annotation processes this AST.
 */
@GenerateAllTypes("Native", "TH", "TH")
trait TensorMathFunc [T<:AnyVal, U<:AnyVal, Z<:Device]{
  // --- fill / masked ops ---
  def Tensor_fill(r:Pointer, value:T):Unit
  def Tensor_zero(r:Pointer):Unit
  def Tensor_maskedFill(tensor:Pointer, mask: Pointer, value:T):Unit
  def Tensor_maskedCopy(tensor:Pointer, mask:Pointer, src:Pointer):Unit
  def Tensor_maskedSelect(tensor:Pointer, src:Pointer, mask:Pointer):Unit
  def Tensor_nonzero(subscript:Pointer, tensor:Pointer):Unit
  // --- index / gather / scatter ---
  def Tensor_indexSelect(tensor:Pointer,src:Pointer, dim:Int, index:Pointer):Unit
  def Tensor_indexCopy(tensor:Pointer, dim:Int, index:Pointer, src:Pointer):Unit
  def Tensor_indexFill(tensor:Pointer, dim:Int, index:Pointer, value:T):Unit
  def Tensor_gather(tensor:Pointer, src:Pointer, dim:Int, index:Pointer):Unit
  def Tensor_scatter(tensor:Pointer, dim:Int, index:Pointer, src:Pointer):Unit
  def Tensor_scatterFill(tensor:Pointer, dim:Int, index:Pointer, value:T):Unit
  // --- whole-tensor reductions (return accumulator type U or element type T) ---
  def Tensor_dot(t:Pointer, src:Pointer):U
  def Tensor_minall(t:Pointer):T
  def Tensor_maxall(t:Pointer):T
  def Tensor_sumall(t:Pointer):U
  def Tensor_prodall(t:Pointer):U
  // --- scalar arithmetic ---
  def Tensor_add(r: Pointer, t:Pointer, value: T):Unit
  def Tensor_mul(r: Pointer, t:Pointer, value: T):Unit
  def Tensor_div(r: Pointer, t:Pointer, value: T):Unit
  def Tensor_clamp(r: Pointer, t:Pointer, min_value:T, max_value:T):Unit
  // --- element-wise tensor arithmetic ---
  def Tensor_cadd(r: Pointer, t:Pointer, value:T, src:Pointer):Unit
  def Tensor_cmul(r:Pointer, t:Pointer, src:Pointer):Unit
  def Tensor_cpow(r:Pointer, t:Pointer, src:Pointer):Unit
  def Tensor_cdiv(r:Pointer, t:Pointer, src:Pointer):Unit
  def Tensor_addcmul(r:Pointer, t:Pointer, value:T, src1: Pointer, src2:Pointer):Unit
  def Tensor_addcdiv(r:Pointer, t:Pointer, value:T, src1:Pointer, src2:Pointer):Unit
  // --- BLAS-style ops: r = beta*t + alpha*(op) ---
  def Tensor_addmv(r:Pointer, beta:T, t:Pointer, alpha:T, mat:Pointer, vec:Pointer):Unit
  def Tensor_addmm(r:Pointer, beta:T, t:Pointer, alpha:T, mat1:Pointer, mat2:Pointer):Unit
  def Tensor_addr(r:Pointer, beta:T, t:Pointer, alpha:T, vec1:Pointer, vec2:Pointer):Unit
  def Tensor_addbmm(r:Pointer, beta: T, t:Pointer, alpha:T, batch1:Pointer, batch2:Pointer):Unit
  def Tensor_baddbmm(r:Pointer, beta: T, t:Pointer, alpha:T, batch1:Pointer, batch2:Pointer)
  def Tensor_match(r:Pointer, m1:Pointer, m2:Pointer, gain:T):Unit
  def Tensor_numel(t:Pointer):Long
  // --- dimension-wise reductions (values + indices where applicable) ---
  def Tensor_max(values:Pointer, indicies:Pointer, t:Pointer, dimension:Int):Unit
  def Tensor_min(values:Pointer, indicies:Pointer, t:Pointer, dimension:Int):Unit
  def Tensor_kthvalue(values:Pointer, indicies:Pointer, t:Pointer, k:Long, dimension:Int):Unit
  def Tensor_median(values:Pointer, indicies:Pointer, t:Pointer, dimension:Int)
  def Tensor_sum(r:Pointer, t:Pointer, dimension:Int):Unit
  def Tensor_prod(r:Pointer, t:Pointer, dimension:Int):Unit
  def Tensor_cumsum(r:Pointer, t:Pointer, dimension:Int):Unit
  def Tensor_cumprod(r:Pointer, t:Pointer, dimension:Int):Unit
  def Tensor_sign(r:Pointer, t:Pointer):Unit
  def Tensor_trace(t:Pointer):U
  def Tensor_cross(r:Pointer, a:Pointer, b:Pointer, dimension:Int):Unit
  // --- element-wise min/max against tensor or scalar ---
  def Tensor_cmax(r:Pointer, t:Pointer, src:Pointer):Unit
  def Tensor_cmin(r:Pointer, t:Pointer, src:Pointer):Unit
  def Tensor_cmaxValue(r:Pointer, t:Pointer, value:T):Unit
  def Tensor_cminValue(r:Pointer, t:Pointer, value:T):Unit
  // --- constructors / shape ops ---
  def Tensor_zeros(r:Pointer, size:Pointer):Unit
  def Tensor_ones(r:Pointer, size:Pointer):Unit
  def Tensor_diag(r:Pointer, t:Pointer, k:Int):Unit
  def Tensor_eye(r:Pointer, n:Long, m:Long):Unit
  def Tensor_range(r:Pointer, xmin:U, xmax:U, step:U):Unit
  def Tensor_randperm(r:Pointer, generator:Pointer, n:Long):Unit
  def Tensor_reshape(r:Pointer, t:Pointer, size:Pointer):Unit
  def Tensor_sort(rt:Pointer, ri:Pointer, t:Pointer, dimension:Int, descendingOrder:Int):Unit
  def Tensor_tril(r:Pointer, t:Pointer, k:Long):Unit
  def Tensor_triu(r:Pointer, t:Pointer, k:Long):Unit
  def Tensor_cat(r:Pointer, ta:Pointer, tb:Pointer, dimension:Int):Unit
  // --- comparisons against a scalar (result is a mask tensor) ---
  def Tensor_ltValue(r:Pointer, t:Pointer, value:T):Unit
  def Tensor_leValue(r:Pointer, t:Pointer, value:T):Unit
  def Tensor_gtValue(r:Pointer, t:Pointer, value:T):Unit
  def Tensor_geValue(r:Pointer, t:Pointer, value:T):Unit
  def Tensor_neValue(r:Pointer, t:Pointer, value:T):Unit
  def Tensor_eqValue(r:Pointer, t:Pointer, value:T):Unit
  // --- element-wise tensor comparisons ---
  def Tensor_ltTensor(r:Pointer, ta:Pointer, tb:Pointer):Unit
  def Tensor_leTensor(r:Pointer, ta:Pointer, tb:Pointer):Unit
  def Tensor_gtTensor(r:Pointer, ta:Pointer, tb:Pointer):Unit
  def Tensor_geTensor(r:Pointer, ta:Pointer, tb:Pointer):Unit
  def Tensor_neTensor(r:Pointer, ta:Pointer, tb:Pointer):Unit
  def Tensor_eqTensor(r:Pointer, ta:Pointer, tb:Pointer):Unit
  def Tensor_ltTensorT(r:Pointer, ta:Pointer, tb:Pointer):Unit
  def Tensor_leTensorT(r:Pointer, ta:Pointer, tb:Pointer):Unit
  def Tensor_gtTensorT(r:Pointer, ta:Pointer, tb:Pointer):Unit
  def Tensor_geTensorT(r:Pointer, ta:Pointer, tb:Pointer):Unit
  def Tensor_neTensorT(r:Pointer, ta:Pointer, tb:Pointer):Unit
  def Tensor_eqTensorT(r:Pointer, ta:Pointer, tb:Pointer):Unit
  // --- bindings only generated for the listed element types ---
  @IfRealMatch("Int","Long", "Double", "Float") def Tensor_abs(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_log(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_log1p(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_exp(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_cos(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_acos(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_cosh(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_sin(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_asin(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_sinh(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_tan(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_atan(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_atan2(r:Pointer, tx:Pointer, ty:Pointer)
  @IfRealMatch("Double","Float") def Tensor_tanh(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_pow(r:Pointer, t:Pointer, value:T)
  @IfRealMatch("Double","Float") def Tensor_tpow(r:Pointer, value:T, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_sqrt(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_ceil(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_floor(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_round(r:Pointer, t:Pointer)
  @IfRealMatch("Double","Float") def Tensor_mean(r:Pointer, t:Pointer, dimension:Int)
  @IfRealMatch("Double","Float") def Tensor_std(r:Pointer, t:Pointer, dimension:Int, flag:Int)
  @IfRealMatch("Double","Float") def Tensor_var(r:Pointer, t:Pointer, dimension:Int, flag:Int)
  @IfRealMatch("Double","Float") def Tensor_norm(r:Pointer, t:Pointer, value:T, dimension:Int)
  @IfRealMatch("Double","Float") def Tensor_dist(a:Pointer, b:Pointer, value:T):U
  @IfRealMatch("Double","Float") def Tensor_histc(hist:Pointer, tensor:Pointer, nbins:Long, minvalue:T, maxvalue:T)
  @IfRealMatch("Double","Float") def Tensor_meanall(self:Pointer):U
  @IfRealMatch("Double","Float") def Tensor_varall(self:Pointer):U
  @IfRealMatch("Double","Float") def Tensor_stdall(self:Pointer):U
  @IfRealMatch("Double","Float") def Tensor_normall(t:Pointer, value:T):U
  @IfRealMatch("Double","Float") def Tensor_linspace(r:Pointer, a:T, b:T, n:Long)
  @IfRealMatch("Double","Float") def Tensor_logspace(r:Pointer, a:T, b:T, n:Long)
  @IfRealMatch("Double","Float") def Tensor_rand(r:Pointer, generator:Pointer, size:Pointer)
  @IfRealMatch("Double","Float") def Tensor_randn(r:Pointer, generator:Pointer, size:Pointer)
  @IfRealMatch("Byte") def Tensor_logicalall(self:Pointer):Int
  @IfRealMatch("Byte") def Tensor_logicalany(self:Pointer):Int
}
| ET-Chan/th4j | src/main/scala/th4j/func/TensorMathFunc.scala | Scala | mit | 9,148 |
package utils.postgres
//: ----------------------------------------------------------------------------
//:
//: Dependencies:
//: ?
//: ----------------------------------------------------------------------------
import java.time.{LocalDate, LocalDateTime}
import java.util.UUID
import com.github.tminglei.slickpg.PgRangeSupportUtils
import play.api.data.format.Formats
import play.api.data.format.Formatter
import play.api.data.FormError
import com.vividsolutions.jts.io.{WKTReader, WKTWriter}
import com.vividsolutions.jts.geom.Geometry
import play.api.libs.json._
/**
* my play form data formatters
*/
object MyFormats {

  // Raw JSON values: bound by parsing the submitted string, unbound via stringify.
  def jsonFormat: Formatter[JsValue] = new Formatter[JsValue] {
    override val format = Some(("format.json", Nil))
    def bind(key: String, data: Map[String, String]) =
      parsing(Json.parse(_), "error.json", Nil)(key, data)
    def unbind(key: String, value: JsValue) = Map(key -> Json.stringify(value))
  }

  ///
  // java.time dates/date-times via their default (ISO-8601) parse/toString.
  def j8DateFormat: Formatter[LocalDate] = new Formatter[LocalDate] {
    override val format = Some(("format.datetime", Nil))
    def bind(key: String, data: Map[String, String]) =
      parsing(LocalDate.parse, "error.datetime", Nil)(key, data)
    def unbind(key: String, value: LocalDate) = Map(key -> value.toString)
  }

  def j8DateTimeFormat: Formatter[LocalDateTime] = new Formatter[LocalDateTime] {
    override val format = Some(("format.datetime", Nil))
    def bind(key: String, data: Map[String, String]) =
      parsing(LocalDateTime.parse, "error.datetime", Nil)(key, data)
    def unbind(key: String, value: LocalDateTime) = Map(key -> value.toString)
  }

  ///
  def uuidFormat: Formatter[UUID] = new Formatter[UUID] {
    override val format = Some(("format.uuid", Nil))
    def bind(key: String, data: Map[String, String]) =
      parsing(UUID.fromString, "error.uuid", Nil)(key, data)
    def unbind(key: String, value: UUID) = Map(key -> value.toString)
  }

  // Postgres range values; parseFn converts each bound's string to T.
  def rangeFormat[T](parseFn: (String => T)): Formatter[Range[T]] = new Formatter[Range[T]] {
    override val format = Some(("format.range", Nil))
    def bind(key: String, data: Map[String, String]) =
      parsing(PgRangeSupportUtils.mkRangeFn(parseFn), "error.range", Nil)(key, data)
    def unbind(key: String, value: Range[T]) = Map(key -> value.toString)
  }

  ///
  // String-to-string maps serialized as JSON objects.
  def strMapFormat = new Formatter[Map[String, String]] {
    override val format = Some(("format.jsonmap", Seq("{key1:value1, key2:value2, ...}")))
    def bind(key: String, data: Map[String, String]) =
      parsing(fromJsonStr(_).getOrElse(Map.empty[String,String]), "error.jsonmap", Nil)(key, data)
    def unbind(key: String, value: Map[String,String]) = Map(key -> toJsonStr(value))
  }

  implicit private val mapReads = Reads.mapReads[String]
  implicit private val mapWrites = Writes.mapWrites[String]

  def toJsonStr(v: Map[String,String]): String = Json.stringify(Json.toJson(v))
  // NOTE(review): Json.parse / .get throw on malformed input, so the Option(...)
  // wrapper here never yields None for bad JSON — the exception is presumably
  // meant to be absorbed by parsing()'s allCatch when used via strMapFormat.
  def fromJsonStr(s: String): Option[Map[String,String]] = Option(Json.fromJson(Json.parse(s)).get)

  ///
  // JTS geometries in WKT (well-known text) form.
  def geometryFormat[T <: Geometry]: Formatter[T] = new Formatter[T] {
    override val format = Some(("format.geometry", Nil))
    def bind(key: String, data: Map[String, String]) =
      parsing(fromWKT[T], "error.geometry", Nil)(key, data)
    def unbind(key: String, value: T) = Map(key -> toWKT(value))
  }

  //////////////////////////////////////////////////////////////////////////

  // Per-thread WKT reader/writer instances, lazily created on first use
  // (presumably because JTS WKTReader/WKTWriter are not thread-safe — verify).
  private val wktWriterHolder = new ThreadLocal[WKTWriter]
  private val wktReaderHolder = new ThreadLocal[WKTReader]

  private def toWKT(geom: Geometry): String = {
    if (wktWriterHolder.get == null) wktWriterHolder.set(new WKTWriter())
    wktWriterHolder.get.write(geom)
  }

  private def fromWKT[T](wkt: String): T = {
    if (wktReaderHolder.get == null) wktReaderHolder.set(new WKTReader())
    wktReaderHolder.get.read(wkt).asInstanceOf[T]
  }

  /**
   * (copy from [[play.api.data.format.Formats#parsing]])
   * Helper for formatters binders
   * @param parse Function parsing a String value into a T value, throwing an exception in case of failure
   * @param errMsg Error to set in case of parsing failure
   * @param errArgs Arguments for error message
   * @param key Key name of the field to parse
   * @param data Field data
   */
  private def parsing[T](parse: String => T, errMsg: String, errArgs: Seq[Any])(
    key: String, data: Map[String, String]): Either[Seq[FormError], T] = {
    Formats.stringFormat.bind(key, data).right.flatMap { s =>
      scala.util.control.Exception.allCatch[T]
        .either(parse(s))
        .left.map(e => Seq(FormError(key, errMsg, errArgs)))
    }
  }
} | PhilAndrew/play-silhouette-slick3-postgres-seed | app/utils/postgres/MyFormats.scala | Scala | apache-2.0 | 4,614 |
package com.github.al.roulette.bet.impl
import java.util.UUID
import akka.http.javadsl.model.headers
import akka.stream.scaladsl.Sink
import com.github.al.authentication.JwtTokenUtil
import com.github.al.persistence.UUIDConversions
import com.github.al.roulette.bet.BetComponents
import com.github.al.roulette.bet.api.{AllGameBetsProclaimed, Bet, BetService, PlayerBets}
import com.github.al.roulette.game.api.{GameEvent, GameService}
import com.github.al.roulette.{bet, game}
import com.lightbend.lagom.scaladsl.server.{LagomApplication, LocalServiceLocator}
import com.lightbend.lagom.scaladsl.testkit.{ProducerStub, ProducerStubFactory, ServiceTest, TestTopicComponents}
import org.mockito.Mockito.when
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{AsyncWordSpec, BeforeAndAfterAll, Matchers, Succeeded}
import play.api.libs.ws.ahc.AhcWSComponents
import scala.concurrent.Future
class BetServiceImplIntegrationTest
extends AsyncWordSpec
with Matchers with BeforeAndAfterAll with MockitoSugar with UUIDConversions {
private final val GameId = "f1f2581e-880e-4a67-ba1d-1d8835243fdd"
private final val GameId2 = "67ecf189-6caa-4963-b87f-31e162ea22da"
private final val GameId3 = "f53359db-c8ae-4af1-a738-954aaa58d027"
private final val GameIdUUID: UUID = GameId
private final val GameId2UUID: UUID = GameId2
private final val PlayerId = "7f06847c-5ae1-470a-8068-7c24fb16be7e"
private final val PlayerId2 = "18f69c13-675f-4202-8d33-ea62120fafd0"
private final val SampleBet = Bet(Some(2), bet.api.Number, 34.32)
private final val ExpectedPlayerBets = PlayerBets(PlayerId, List(SampleBet, SampleBet))
private final val ExpectedPlayerBets2 = PlayerBets(PlayerId2, List(SampleBet))
private val mockGameService = mock[GameService]
private var gameEventsProducerStub: ProducerStub[GameEvent] = _
private val server = ServiceTest.startServer(ServiceTest.defaultSetup.withCassandra(true)) { ctx =>
new LagomApplication(ctx) with BetComponents with LocalServiceLocator with AhcWSComponents with TestTopicComponents {
val stubFactory = new ProducerStubFactory(actorSystem, materializer)
gameEventsProducerStub = stubFactory.producer[GameEvent](GameService.GameEventTopicName)
when(mockGameService.gameEvents).thenReturn(gameEventsProducerStub.topic)
override lazy val gameService: GameService = mockGameService
}
}
private val betService = server.serviceClient.implement[BetService]
"The BetService" should {
"allow placing bets" in {
server.application.gameEventsSubscriber
gameEventsProducerStub.send(game.api.GameStarted(GameId3))
for {
_ <- placeBet(GameId3, PlayerId)
} yield {
Succeeded
}
}
"emit AllGameBetsProclaimed event with no bets" in {
server.application.gameEventsSubscriber
import server.materializer
gameEventsProducerStub.send(game.api.GameStarted(GameId))
gameEventsProducerStub.send(game.api.GameFinished(GameId))
for {
rouletteBetsEvents <- betService.rouletteBetsEvents.subscribe.atMostOnceSource
.filter(_.gameId == GameIdUUID)
.take(1)
.runWith(Sink.seq)
} yield {
rouletteBetsEvents.head shouldBe AllGameBetsProclaimed(GameId, Nil)
}
}
"emit AllGameBetsProclaimed event with bets" in {
server.application.gameEventsSubscriber
import server.materializer
gameEventsProducerStub.send(game.api.GameStarted(GameId2))
for {
_ <- placeBet(GameId2, PlayerId)
_ <- placeBet(GameId2, PlayerId2)
_ <- placeBet(GameId2, PlayerId)
_ = gameEventsProducerStub.send(game.api.GameFinished(GameId2))
rouletteBetsEvents <- betService.rouletteBetsEvents.subscribe.atMostOnceSource
.filter(_.gameId == GameId2UUID)
.take(1)
.runWith(Sink.seq)
} yield {
rouletteBetsEvents.head should matchPattern {
case AllGameBetsProclaimed(GameId2UUID, playerBets)
if playerBets.size == 2 && playerBets.toSet.subsetOf(Set(ExpectedPlayerBets, ExpectedPlayerBets2)) =>
}
}
}
}
/** Places the sample bet on the given game on behalf of the given player,
  * attaching a JWT Authorization header so the call is authenticated. */
private def placeBet(gameId: String, playerId: String): Future[_] = {
  val authHeader = buildJwtAuthorizationHeader(playerId)
  betService
    .placeBet(gameId)
    .handleRequestHeader(_.addHeader(authHeader.name(), authHeader.value()))
    .invoke(SampleBet)
}
// Builds an OAuth2 bearer Authorization header carrying a JWT whose "playerId"
// claim is set to the given player id.
private def buildJwtAuthorizationHeader(playerId: String) =
  headers.Authorization.oauth2(JwtTokenUtil.createJwtToken("playerId", playerId))
override protected def afterAll(): Unit = server.stop()
}
| andrei-l/reactive-roulette | roulette-bet-impl/src/test/scala/com/github/al/roulette/bet/impl/BetServiceImplIntegrationTest.scala | Scala | mit | 4,701 |
package lib
import org.scalatest.{FunSpec, Matchers}
/** Verifies the namespace normalization performed by ServiceConfiguration.applicationNamespace:
  * casing, trimming, separator handling and version suffixing. */
class ServiceConfigurationSpec extends FunSpec with Matchers {

  /** Builds a fixture configuration with a fixed org key and version. */
  def createServiceConfiguration(orgNamespace: String) = {
    ServiceConfiguration(
      orgKey = "apidoc",
      orgNamespace = orgNamespace,
      version = "1.0"
    )
  }

  it("applicationNamespace") {
    val configuration = createServiceConfiguration("me.apidoc")
    // application key -> expected fully qualified namespace
    val expectations = Seq(
      "api" -> "me.apidoc.api.v1",
      "spec" -> "me.apidoc.spec.v1",
      "fooBar" -> "me.apidoc.foo.bar.v1",
      "foo-bar" -> "me.apidoc.foo.bar.v1",
      "foo_bar" -> "me.apidoc.foo.bar.v1",
      "Foo.bar" -> "me.apidoc.foo.bar.v1",
      "fooBarBaz" -> "me.apidoc.foo.bar.baz.v1"
    )
    expectations.foreach { case (key, expected) =>
      configuration.applicationNamespace(key) should be(expected)
    }
  }

  it("applicationNamespace is in lower case") {
    // The org namespace keeps its original casing; only the application key is lowered.
    createServiceConfiguration("ME.APIDOC").applicationNamespace("API") should be("ME.APIDOC.api.v1")
  }

  it("applicationNamespace is trimmed") {
    // Surrounding whitespace on the application key is ignored.
    createServiceConfiguration("me.apidoc").applicationNamespace(" api ") should be("me.apidoc.api.v1")
  }

  it("applicationNamespace with numbers") {
    createServiceConfiguration("io.apibuilder").applicationNamespace("mercury-3pl") should be("io.apibuilder.mercury3pl.v1")
  }
}
| gheine/apidoc | lib/src/test/scala/ServiceConfigurationSpec.scala | Scala | mit | 1,476 |
package se.blea.flexiconf
/** Options for the ConfigVisitor.
  *
  * The `allow*` flags relax validation performed during the visit (flag names
  * describe the condition they tolerate; enforcement lives in the visitor itself).
  *
  * @param sourceFile path of the configuration source being visited
  * @param allowUnknownDirectives tolerate directives with no matching definition
  * @param allowDuplicateDirectives tolerate repeated occurrences of a directive
  * @param allowMissingGroups tolerate references to groups that are not defined
  * @param allowMissingIncludes tolerate include targets that cannot be resolved
  * @param allowIncludeCycles tolerate cyclic include chains
  * @param directives set of directive definitions to validate against
  */
private[flexiconf] case class ConfigVisitorOptions(sourceFile: String,
                                                   allowUnknownDirectives: Boolean = false,
                                                   allowDuplicateDirectives: Boolean = false,
                                                   allowMissingGroups: Boolean = false,
                                                   allowMissingIncludes: Boolean = false,
                                                   allowIncludeCycles: Boolean = false,
                                                   directives: Set[DirectiveDefinition] = Set.empty)
| thetristan/flexiconf | flexiconf-core/src/main/scala/se/blea/flexiconf/ConfigVisitorOptions.scala | Scala | mit | 689 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.core.database
import scala.concurrent.Await
import scala.concurrent.Future
import scala.concurrent.duration._
import akka.actor.ActorSystem
import akka.event.Logging.ErrorLevel
import akka.http.scaladsl.model._
import akka.stream.scaladsl._
import akka.util.ByteString
import spray.json._
import whisk.common.Logging
import whisk.common.LoggingMarkers
import whisk.common.TransactionId
import whisk.core.entity.DocInfo
import whisk.core.entity.DocRevision
import whisk.core.entity.WhiskDocument
import whisk.http.Messages
/**
 * Basic client to put and delete artifacts in a data store.
 *
 * @param dbProtocol the protocol to access the database with (http/https)
 * @param dbHost the host to access database from
 * @param dbPort the port on the host
 * @param dbUsername the user name to access database as
 * @param dbPassword the secret for the user name required to access the database
 * @param dbName the name of the database to operate on
 * @param jsonFormat (implicit) json (de)serializer for the document abstraction
 */
class CouchDbRestStore[DocumentAbstraction <: DocumentSerializer](
    dbProtocol: String,
    dbHost: String,
    dbPort: Int,
    dbUsername: String,
    dbPassword: String,
    dbName: String)(implicit system: ActorSystem, val logging: Logging, jsonFormat: RootJsonFormat[DocumentAbstraction])
    extends ArtifactStore[DocumentAbstraction]
    with DefaultJsonProtocol {

  protected[core] implicit val executionContext = system.dispatcher

  // NOTE(review): dbPort is already an Int, so the .toInt call below is redundant.
  private val client: CouchDbRestClient = new CouchDbRestClient(dbProtocol, dbHost, dbPort.toInt, dbUsername, dbPassword, dbName)

  /**
   * Stores the document, creating it when it carries no revision and updating it
   * otherwise. Fails with DocumentConflictException on a CouchDB 409 response.
   */
  override protected[database] def put(d: DocumentAbstraction)(implicit transid: TransactionId): Future[DocInfo] = {
    val asJson = d.toDocumentRecord
    val id: String = asJson.fields("_id").convertTo[String].trim
    val rev: Option[String] = asJson.fields.get("_rev").map(_.convertTo[String])
    require(!id.isEmpty, "document id must be defined")
    val docinfoStr = s"id: $id, rev: ${rev.getOrElse("null")}"
    val start = transid.started(this, LoggingMarkers.DATABASE_SAVE, s"[PUT] '$dbName' saving document: '${docinfoStr}'")
    // Choose create vs. update depending on whether a revision is present.
    val request: CouchDbRestClient => Future[Either[StatusCode, JsObject]] = rev match {
      case Some(r) => client => client.putDoc(id, r, asJson)
      case None => client => client.putDoc(id, asJson)
    }
    val f = request(client).map { e =>
      e match {
        case Right(response) =>
          transid.finished(this, start, s"[PUT] '$dbName' completed document: '${docinfoStr}', response: '$response'")
          val id = response.fields("id").convertTo[String]
          val rev = response.fields("rev").convertTo[String]
          DocInfo ! (id, rev)
        case Left(StatusCodes.Conflict) =>
          transid.finished(this, start, s"[PUT] '$dbName', document: '${docinfoStr}'; conflict.")
          // For compatibility.
          throw DocumentConflictException("conflict on 'put'")
        case Left(code) =>
          transid.failed(this, start, s"[PUT] '$dbName' failed to put document: '${docinfoStr}'; http status: '${code}'", ErrorLevel)
          throw new Exception("Unexpected http response code: " + code)
      }
    }
    reportFailure(f, failure => transid.failed(this, start, s"[PUT] '$dbName' internal error, failure: '${failure.getMessage}'", ErrorLevel))
  }

  /**
   * Deletes the document identified by id and revision. Fails with
   * NoDocumentException on 404 and DocumentConflictException on 409.
   */
  override protected[database] def del(doc: DocInfo)(implicit transid: TransactionId): Future[Boolean] = {
    require(doc != null && doc.rev.asString != null, "doc revision required for delete")
    val start = transid.started(this, LoggingMarkers.DATABASE_DELETE, s"[DEL] '$dbName' deleting document: '$doc'")
    val f = client.deleteDoc(doc.id.id, doc.rev.rev).map { e =>
      e match {
        case Right(response) =>
          transid.finished(this, start, s"[DEL] '$dbName' completed document: '$doc', response: $response")
          response.fields("ok").convertTo[Boolean]
        case Left(StatusCodes.NotFound) =>
          transid.finished(this, start, s"[DEL] '$dbName', document: '${doc}'; not found.")
          // for compatibility
          throw NoDocumentException("not found on 'delete'")
        case Left(StatusCodes.Conflict) =>
          transid.finished(this, start, s"[DEL] '$dbName', document: '${doc}'; conflict.")
          throw DocumentConflictException("conflict on 'delete'")
        case Left(code) =>
          transid.failed(this, start, s"[DEL] '$dbName' failed to delete document: '${doc}'; http status: '${code}'", ErrorLevel)
          throw new Exception("Unexpected http response code: " + code)
      }
    }
    reportFailure(f, failure => transid.failed(this, start, s"[DEL] '$dbName' internal error, doc: '$doc', failure: '${failure.getMessage}'", ErrorLevel))
  }

  /**
   * Fetches the document (optionally at a specific revision) and deserializes it
   * to the expected subtype A, checking the runtime class against the manifest.
   * Fails with NoDocumentException on 404 and DocumentUnreadable when the stored
   * json cannot be deserialized.
   */
  override protected[database] def get[A <: DocumentAbstraction](doc: DocInfo)(
    implicit transid: TransactionId,
    ma: Manifest[A]): Future[A] = {
    val start = transid.started(this, LoggingMarkers.DATABASE_GET, s"[GET] '$dbName' finding document: '$doc'")
    require(doc != null, "doc undefined")
    // Fetch a specific revision when one is supplied, otherwise the latest.
    val request: CouchDbRestClient => Future[Either[StatusCode, JsObject]] = if (doc.rev.rev != null) {
      client => client.getDoc(doc.id.id, doc.rev.rev)
    } else {
      client => client.getDoc(doc.id.id)
    }
    val f = request(client).map { e =>
      e match {
        case Right(response) =>
          transid.finished(this, start, s"[GET] '$dbName' completed: found document '$doc'")
          val asFormat = jsonFormat.read(response)
          if (asFormat.getClass != ma.runtimeClass) {
            throw DocumentTypeMismatchException(s"document type ${asFormat.getClass} did not match expected type ${ma.runtimeClass}.")
          }
          val deserialized = asFormat.asInstanceOf[A]
          val responseRev = response.fields("_rev").convertTo[String]
          assert(doc.rev.rev == null || doc.rev.rev == responseRev, "Returned revision should match original argument")
          // FIXME remove mutability from appropriate classes now that it is no longer required by GSON.
          deserialized.asInstanceOf[WhiskDocument].revision(DocRevision(responseRev))
          deserialized
        case Left(StatusCodes.NotFound) =>
          transid.finished(this, start, s"[GET] '$dbName', document: '${doc}'; not found.")
          // for compatibility
          throw NoDocumentException("not found on 'get'")
        case Left(code) =>
          transid.finished(this, start, s"[GET] '$dbName' failed to get document: '${doc}'; http status: '${code}'")
          throw new Exception("Unexpected http response code: " + code)
      }
    } recoverWith {
      case e: DeserializationException => throw DocumentUnreadable(Messages.corruptedEntity)
    }
    reportFailure(f, failure => transid.failed(this, start, s"[GET] '$dbName' internal error, doc: '$doc', failure: '${failure.getMessage}'", ErrorLevel))
  }

  /**
   * Executes a CouchDB view query. `table` has the form "designDoc/viewName".
   * When descending, CouchDB expects start and end keys swapped, hence the
   * key swap below. `reduce` and `includeDocs` are mutually exclusive.
   */
  override protected[core] def query(table: String, startKey: List[Any], endKey: List[Any], skip: Int, limit: Int, includeDocs: Boolean, descending: Boolean, reduce: Boolean)(
    implicit transid: TransactionId): Future[List[JsObject]] = {
    require(!(reduce && includeDocs), "reduce and includeDocs cannot both be true")
    // Apparently you have to do that in addition to setting "descending"
    val (realStartKey, realEndKey) = if (descending) {
      (endKey, startKey)
    } else {
      (startKey, endKey)
    }
    val parts = table.split("/")
    val start = transid.started(this, LoggingMarkers.DATABASE_QUERY, s"[QUERY] '$dbName' searching '$table ${startKey}:${endKey}'")
    val f = for (
      eitherResponse <- client.executeView(parts(0), parts(1))(
        startKey = realStartKey,
        endKey = realEndKey,
        skip = Some(skip),
        limit = Some(limit),
        includeDocs = includeDocs,
        descending = descending,
        reduce = reduce)
    ) yield eitherResponse match {
      case Right(response) =>
        val rows = response.fields("rows").convertTo[List[JsObject]]
        // Shape of the result depends on the query mode: raw docs, a single
        // reduced value, an empty reduction, or the plain row objects.
        val out = if (includeDocs) {
          rows.map(_.fields("doc").asJsObject)
        } else if (reduce && !rows.isEmpty) {
          assert(rows.length == 1, s"result of reduced view contains more than one value: '$rows'")
          rows.head.fields("value").convertTo[List[JsObject]]
        } else if (reduce) {
          List(JsObject())
        } else {
          rows
        }
        transid.finished(this, start, s"[QUERY] '$dbName' completed: matched ${out.size}")
        out
      case Left(code) =>
        transid.failed(this, start, s"Unexpected http response code: $code", ErrorLevel)
        throw new Exception("Unexpected http response code: " + code)
    }
    reportFailure(f, failure => transid.failed(this, start, s"[QUERY] '$dbName' internal error, failure: '${failure.getMessage}'", ErrorLevel))
  }

  /**
   * Uploads an attachment stream to an existing document revision.
   * Fails with NoDocumentException on 404.
   */
  override protected[core] def attach(doc: DocInfo, name: String, contentType: ContentType, docStream: Source[ByteString, _])(
    implicit transid: TransactionId): Future[DocInfo] = {
    val start = transid.started(this, LoggingMarkers.DATABASE_ATT_SAVE, s"[ATT_PUT] '$dbName' uploading attachment '$name' of document '$doc'")
    require(doc != null, "doc undefined")
    require(doc.rev.rev != null, "doc revision must be specified")
    val f = client.putAttachment(doc.id.id, doc.rev.rev, name, contentType, docStream).map { e =>
      e match {
        case Right(response) =>
          transid.finished(this, start, s"[ATT_PUT] '$dbName' completed uploading attachment '$name' of document '$doc'")
          val id = response.fields("id").convertTo[String]
          val rev = response.fields("rev").convertTo[String]
          DocInfo ! (id, rev)
        case Left(StatusCodes.NotFound) =>
          transid.finished(this, start, s"[ATT_PUT] '$dbName' uploading attachment '$name' of document '$doc'; not found")
          throw NoDocumentException("Not found on 'readAttachment'.")
        case Left(code) =>
          transid.failed(this, start, s"[ATT_PUT] '$dbName' failed to upload attachment '$name' of document '$doc'; http status '$code'")
          throw new Exception("Unexpected http response code: " + code)
      }
    }
    reportFailure(f, failure => transid.failed(this, start, s"[ATT_PUT] '$dbName' internal error, name: '$name', doc: '$doc', failure: '${failure.getMessage}'", ErrorLevel))
  }

  /**
   * Streams an attachment of a document revision into the given sink, returning
   * the attachment's content type together with the sink's materialized value.
   * Fails with NoDocumentException on 404.
   */
  override protected[core] def readAttachment[T](doc: DocInfo, name: String, sink: Sink[ByteString, Future[T]])(
    implicit transid: TransactionId): Future[(ContentType, T)] = {
    val start = transid.started(this, LoggingMarkers.DATABASE_ATT_GET, s"[ATT_GET] '$dbName' finding attachment '$name' of document '$doc'")
    require(doc != null, "doc undefined")
    require(doc.rev.rev != null, "doc revision must be specified")
    val f = client.getAttachment[T](doc.id.id, doc.rev.rev, name, sink)
    val g = f.map { e =>
      e match {
        case Right((contentType, result)) =>
          transid.finished(this, start, s"[ATT_GET] '$dbName' completed: found attachment '$name' of document '$doc'")
          (contentType, result)
        case Left(StatusCodes.NotFound) =>
          transid.finished(this, start, s"[ATT_GET] '$dbName', retrieving attachment '$name' of document '$doc'; not found.")
          throw NoDocumentException("Not found on 'readAttachment'.")
        case Left(code) =>
          transid.failed(this, start, s"[ATT_GET] '$dbName' failed to get attachment '$name' of document '$doc'; http status: '${code}'")
          throw new Exception("Unexpected http response code: " + code)
      }
    }
    reportFailure(g, failure => transid.failed(this, start, s"[ATT_GET] '$dbName' internal error, name: '$name', doc: '$doc', failure: '${failure.getMessage}'", ErrorLevel))
  }

  /** Shuts down the underlying HTTP client, blocking up to one minute. */
  override def shutdown(): Unit = {
    Await.ready(client.shutdown(), 1.minute)
  }

  /**
   * Logs unexpected failures of `f` via `onFailure` and returns `f` unchanged.
   * Intentional ArtifactStoreException failures are not reported.
   */
  private def reportFailure[T, U](f: Future[T], onFailure: Throwable => U): Future[T] = {
    f.onFailure({
      case _: ArtifactStoreException => // These failures are intentional and shouldn't trigger the catcher.
      case x => onFailure(x)
    })
    f
  }
}
| lzbj/openwhisk | common/scala/src/main/scala/whisk/core/database/CouchDbRestStore.scala | Scala | apache-2.0 | 14,235 |
package org.jetbrains.plugins.scala
package lang.refactoring.changeSignature
import com.intellij.psi.PsiElement
import com.intellij.refactoring.changeSignature.{ChangeInfo, JavaChangeInfo, OverriderUsageInfo}
import com.intellij.util.containers.MultiMap
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScPrimaryConstructor
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScBindingPattern
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScPatternDefinition, ScVariableDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScMember
import org.jetbrains.plugins.scala.lang.psi.types.api.ValType
import org.jetbrains.plugins.scala.lang.refactoring.changeSignature.changeInfo.ScalaChangeInfo
/**
* Nikolay.Tropin
* 2014-08-13
*/
/**
 * Collects conflicts that would prevent a change-signature refactoring from
 * being applied safely, accumulating human-readable messages per PSI element.
 *
 * Nikolay.Tropin
 * 2014-08-13
 */
private[changeSignature] object ConflictsUtil {

  type ConflictsMap = MultiMap[PsiElement, String]

  /** Flags by-name parameters of value types on methods that have Java overriders. */
  def addJavaOverriderConflicts(info: OverriderUsageInfo, change: ChangeInfo, map: ConflictsMap): Unit = {
    change match {
      case scalaChange: ScalaChangeInfo =>
        val hasByNameValueTypeParam =
          scalaChange.newParameters.exists(param => param.isByName && param.scType.isInstanceOf[ValType])
        if (hasByNameValueTypeParam) {
          map.putValue(
            info.getOverridingMethod,
            "This method has java overriders, by-name parameters of value classes cannot be used.")
        }
      case _ =>
    }
  }

  /** Flags overriding binding patterns inside composite pattern/variable definitions,
    * which cannot be converted to function definitions when parameters are added. */
  def addBindingPatternConflicts(bp: ScBindingPattern,
                                 change: ChangeInfo,
                                 result: ConflictsMap): Unit = {
    if (change.getNewParameters.nonEmpty) {
      val definitionInfo: Option[(ScMember, String, Boolean)] = bp match {
        case ScalaPsiUtil.inNameContext(pd: ScPatternDefinition) => Some((pd, "pattern definition", pd.isSimple))
        case ScalaPsiUtil.inNameContext(vd: ScVariableDefinition) => Some((vd, "variable definition", vd.isSimple))
        case _ => None
      }
      definitionInfo.foreach {
        case (member, kind, isSimple) if !isSimple =>
          val className = member.containingClass.qualifiedName
          result.putValue(
            bp,
            s"Method is overridden in a composite $kind in $className. Converting it to function definition is not supported.")
        case _ =>
      }
    }
  }

  /** Flags methods overridden by class parameters, which cannot become function definitions. */
  def addClassParameterConflicts(cp: ScClassParameter, change: ChangeInfo, result: ConflictsMap): Unit = {
    if (change.getNewParameters.nonEmpty) {
      val className = cp.containingClass.qualifiedName
      result.putValue(
        cp,
        s"Method is overridden by class parameter of $className. Converting it to a function definition is not supported.")
    }
  }

  /** Flags usages of synthetic `unapply` methods of case classes when parameters change. */
  def addUnapplyUsagesConflicts(p: PatternUsageInfo, change: ChangeInfo, result: ConflictsMap): Unit = {
    change match {
      case javaChange: JavaChangeInfo if javaChange.isParameterSetOrOrderChanged || javaChange.isParameterTypesChanged =>
        javaChange.getMethod match {
          case ScPrimaryConstructor.ofClass(clazz) if clazz.isCase =>
            result.putValue(p.pattern, "Updating of usages of generated `unapply` methods is not supported")
          case _ =>
        }
      case _ =>
    }
  }
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ConflictsUtil.scala | Scala | apache-2.0 | 3,226 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.geotools
import java.io.{Closeable, Flushable}
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicLong
import com.github.benmanes.caffeine.cache.Caffeine
import com.typesafe.scalalogging.LazyLogging
import org.geotools.data.simple.SimpleFeatureWriter
import org.geotools.data.{Query, Transaction}
import org.geotools.factory.Hints
import org.geotools.filter.identity.FeatureIdImpl
import org.locationtech.geomesa.features.{ScalaSimpleFeature, ScalaSimpleFeatureFactory}
import org.locationtech.geomesa.index.api.{GeoMesaFeatureIndex, WrappedFeature}
import org.locationtech.geomesa.index.geotools.GeoMesaFeatureWriter.FlushableFeatureWriter
import org.locationtech.geomesa.utils.cache.CacheKeyGenerator
import org.locationtech.geomesa.utils.index.IndexMode
import org.locationtech.geomesa.utils.io.{CloseQuietly, FlushQuietly}
import org.locationtech.geomesa.utils.uuid.{FeatureIdGenerator, Z3FeatureIdGenerator}
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal
object GeoMesaFeatureWriter extends LazyLogging {

  // Monotonic counter used to hand out temporary feature ids before write.
  private val tempFeatureIds = new AtomicLong(0)

  // Caches (table names, write converters, remove converters) per catalog/sft/index
  // combination to avoid recomputing them for every writer instance.
  private val converterCache =
    Caffeine.newBuilder()
      .expireAfterWrite(60, TimeUnit.MINUTES) // re-load periodically
      .build[String, (IndexedSeq[String], IndexedSeq[(Any) => Seq[Any]], IndexedSeq[(Any) => Seq[Any]])]()

  // Feature id generator loaded from configuration, falling back to Z3 ids when
  // the configured class cannot be instantiated.
  private val idGenerator: FeatureIdGenerator = {
    import org.locationtech.geomesa.index.conf.FeatureProperties.FEATURE_ID_GENERATOR
    try {
      logger.debug(s"Using feature id generator '${FEATURE_ID_GENERATOR.get}'")
      Class.forName(FEATURE_ID_GENERATOR.get).newInstance().asInstanceOf[FeatureIdGenerator]
    } catch {
      case e: Throwable =>
        logger.error(s"Could not load feature id generator class '${FEATURE_ID_GENERATOR.get}'", e)
        new Z3FeatureIdGenerator
    }
  }

  // A simple feature writer whose buffered mutations can be flushed on demand.
  trait FlushableFeatureWriter extends SimpleFeatureWriter with Flushable

  /**
   * Sets the feature ID on the feature. If the user has requested a specific ID, that will be used,
   * otherwise one will be generated. If possible, the original feature will be modified and returned.
   */
  def featureWithFid(sft: SimpleFeatureType, feature: SimpleFeature): SimpleFeature = {
    if (feature.getUserData.containsKey(Hints.PROVIDED_FID)) {
      withFid(sft, feature, feature.getUserData.get(Hints.PROVIDED_FID).toString)
    } else if (feature.getUserData.containsKey(Hints.USE_PROVIDED_FID) &&
        feature.getUserData.get(Hints.USE_PROVIDED_FID).asInstanceOf[Boolean]) {
      feature
    } else {
      withFid(sft, feature, idGenerator.createId(sft, feature))
    }
  }

  // Mutates the feature's id in place when the implementation allows it,
  // otherwise copies the feature with the new id.
  private def withFid(sft: SimpleFeatureType, feature: SimpleFeature, fid: String): SimpleFeature =
    feature.getIdentifier match {
      case f: FeatureIdImpl =>
        f.setID(fid)
        feature
      case f =>
        logger.warn(s"Unknown feature ID implementation found, rebuilding feature: ${f.getClass} $f")
        ScalaSimpleFeatureFactory.copyFeature(sft, feature, fid)
    }

  /**
   * Gets table names and converters for each table (e.g. index) that supports the sft
   *
   * @param sft simple feature type
   * @param ds data store
   * @param indices indices to write/delete
   * @return (table names, write converters, remove converters)
   */
  def getTablesAndConverters[DS <: GeoMesaDataStore[DS, F, W], F <: WrappedFeature, W](
      sft: SimpleFeatureType,
      ds: DS,
      indices: Option[Seq[GeoMesaFeatureIndex[DS, F, W]]] = None): (IndexedSeq[String], IndexedSeq[(F) => Seq[W]], IndexedSeq[(F) => Seq[W]]) = {
    val toWrite = indices.getOrElse(ds.manager.indices(sft, IndexMode.Write)).toIndexedSeq
    // Cache key captures catalog, feature type and the exact set of indices.
    val key = s"${ds.config.catalog};${CacheKeyGenerator.cacheKey(sft)};${toWrite.map(_.identifier).mkString(",")}"
    val load = new java.util.function.Function[String, (IndexedSeq[String], IndexedSeq[(Any) => Seq[Any]], IndexedSeq[(Any) => Seq[Any]])] {
      override def apply(ignored: String): (IndexedSeq[String], IndexedSeq[(Any) => Seq[Any]], IndexedSeq[(Any) => Seq[Any]]) = {
        val tables = toWrite.map(_.getTableName(sft.getTypeName, ds))
        val writers = toWrite.map(_.writer(sft, ds))
        val removers = toWrite.map(_.remover(sft, ds))
        (tables, writers.asInstanceOf[IndexedSeq[(Any) => Seq[Any]]], removers.asInstanceOf[IndexedSeq[(Any) => Seq[Any]]])
      }
    }
    converterCache.get(key, load).asInstanceOf[(IndexedSeq[String], IndexedSeq[(F) => Seq[W]], IndexedSeq[(F) => Seq[W]])]
  }

  // Test hook: clears the converter cache.
  private [geomesa] def expireConverterCache(): Unit = converterCache.invalidateAll()
}
/**
 * Base feature writer: converts features to per-index mutations, applies them
 * through index-specific mutators, and keeps feature statistics up to date.
 * Subclasses supply the concrete mutator type T and its execution semantics.
 */
abstract class GeoMesaFeatureWriter[DS <: GeoMesaDataStore[DS, F, W], F <: WrappedFeature, W, T]
    (val sft: SimpleFeatureType, val ds: DS, val indices: Option[Seq[GeoMesaFeatureIndex[DS, F, W]]])
    extends FlushableFeatureWriter with LazyLogging {

  private val statUpdater = ds.stats.statUpdater(sft)

  private val (tables, writeConverters, removeConverters) =
    GeoMesaFeatureWriter.getTablesAndConverters[DS, F, W](sft, ds, indices)

  protected val mutators = createMutators(tables)
  // Pair each index's mutator with its write/remove converter.
  private val writers = mutators.zip(writeConverters)
  private val removers = mutators.zip(removeConverters)

  // Exceptions collected from flush/close of individual mutators; rethrown in propagateExceptions.
  protected val exceptions = ArrayBuffer.empty[Throwable]

  // returns a temporary id - we will replace it just before write
  protected def nextFeatureId = GeoMesaFeatureWriter.tempFeatureIds.getAndIncrement().toString

  // Assigns the final feature id, converts the feature for every index, writes
  // all mutations and updates statistics. Conversion happens before any write so
  // an invalid feature produces no partial entries.
  protected def writeFeature(feature: SimpleFeature): Unit = {
    // see if there's a suggested ID to use for this feature, else create one based on the feature
    val featureWithFid = GeoMesaFeatureWriter.featureWithFid(sft, feature)
    val wrapped = wrapFeature(featureWithFid)
    // calculate all mutations up front in case the feature is not valid, so we don't write partial entries
    val converted = try { writers.map { case (mutator, convert) => (mutator, convert(wrapped)) } } catch {
      case NonFatal(e) =>
        import scala.collection.JavaConversions._
        val attributes = s"${featureWithFid.getID}:${featureWithFid.getAttributes.mkString("|")}"
        throw new IllegalArgumentException(s"Error indexing feature '$attributes'", e)
    }
    converted.foreach { case (mutator, writes) => executeWrite(mutator, writes) }
    statUpdater.add(featureWithFid)
  }

  // Removes the feature from every index and updates statistics.
  protected def removeFeature(feature: SimpleFeature): Unit = {
    val wrapped = wrapFeature(feature)
    removers.foreach { case (mutator, convert) => executeRemove(mutator, convert(wrapped)) }
    statUpdater.remove(feature)
  }

  // Subclass hooks: create one mutator per table, and execute write/remove batches.
  protected def createMutators(tables: IndexedSeq[String]): IndexedSeq[T]
  protected def executeWrite(mutator: T, writes: Seq[W]): Unit
  protected def executeRemove(mutator: T, removes: Seq[W]): Unit
  protected def wrapFeature(feature: SimpleFeature): F

  override def getFeatureType: SimpleFeatureType = sft

  override def hasNext: Boolean = false

  // Flushes all flushable mutators and the stat updater, collecting (not
  // throwing) individual failures, then rethrows them aggregated.
  override def flush(): Unit = {
    mutators.foreach {
      case m: Flushable => FlushQuietly(m).foreach(exceptions.+=)
      case _ => // no-op
    }
    FlushQuietly(statUpdater).foreach(exceptions.+=)
    propagateExceptions()
  }

  // Closes all closeable mutators and the stat updater, same failure handling as flush().
  override def close(): Unit = {
    mutators.foreach {
      case m: Closeable => CloseQuietly(m).foreach(exceptions.+=)
      case _ => // no-op
    }
    CloseQuietly(statUpdater).foreach(exceptions.+=)
    propagateExceptions()
  }

  // Rethrows collected failures as a single RuntimeException and clears the buffer.
  private def propagateExceptions(): Unit = {
    if (exceptions.nonEmpty) {
      val msg = s"Error writing features: ${exceptions.map(_.getMessage).distinct.mkString("; ")}"
      val cause = exceptions.head
      exceptions.clear()
      throw new RuntimeException(msg, cause)
    }
  }
}
/**
 * Appends new features - can't modify or delete existing features.
 *
 * Usage protocol (per the GeoTools FeatureWriter contract): `next()` creates a
 * fresh feature with a temporary id, the caller populates it, and `write()`
 * persists it and resets the current feature.
 */
trait GeoMesaAppendFeatureWriter[DS <: GeoMesaDataStore[DS, F, W], F <: WrappedFeature, W, T]
    extends GeoMesaFeatureWriter[DS, F, W, T] {

  // Feature handed out by next() and pending persistence via write().
  var currentFeature: SimpleFeature = _

  override def write(): Unit =
    if (currentFeature != null) {
      writeFeature(currentFeature)
      currentFeature = null
    }

  override def remove(): Unit =
    throw new UnsupportedOperationException("Use getFeatureWriter instead of getFeatureWriterAppend")

  override def next(): SimpleFeature = {
    currentFeature = new ScalaSimpleFeature(sft, nextFeatureId)
    currentFeature
  }

  // Removal is never invoked for an append-only writer.
  override protected def executeRemove(mutator: T, removes: Seq[W]): Unit = throw new NotImplementedError()
}
/**
 * Modifies or deletes existing features. Per the data store api, does not allow appending new features.
 *
 * Iterates over the features matching `filter`; for each, `next()` returns a
 * mutable copy (`live`) of the stored feature (`original`), and `write()`
 * persists the copy only if the caller actually changed it.
 */
trait GeoMesaModifyFeatureWriter[DS <: GeoMesaDataStore[DS, F, W], F <: WrappedFeature, W, T]
    extends GeoMesaFeatureWriter[DS, F, W, T] {

  def filter: Filter

  private val reader = ds.getFeatureReader(new Query(sft.getTypeName, filter), Transaction.AUTO_COMMIT)

  // feature that caller will modify
  private var live: SimpleFeature = _
  // feature returned from reader
  private var original: SimpleFeature = _

  override def remove(): Unit = if (original != null) {
    removeFeature(original)
  }

  override def hasNext: Boolean = reader.hasNext

  /* only write if non null and it hasn't changed...*/
  /* original should be null only when reader runs out */
  override def write(): Unit =
    // comparison of feature ID and attributes - doesn't consider concrete class used
    if (!ScalaSimpleFeature.equalIdAndAttributes(live, original)) {
      // an update is implemented as remove-then-write
      remove()
      writeFeature(live)
    }

  override def next: SimpleFeature = {
    original = reader.next()
    // set the use provided FID hint - allows user to update fid if desired,
    // but if not we'll use the existing one
    original.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
    live = ScalaSimpleFeatureFactory.copyFeature(sft, original, original.getID) // this copies user data as well
    live
  }

  abstract override def close(): Unit = {
    super.close() // closes writer
    reader.close()
  }
}
| ronq/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/geotools/GeoMesaFeatureWriter.scala | Scala | apache-2.0 | 10,574 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util.Optional
import kafka.utils.TestUtils
import org.apache.kafka.common.message.ListOffsetsRequestData.{ListOffsetsPartition, ListOffsetsTopic}
import org.apache.kafka.common.protocol.{ApiKeys, Errors}
import org.apache.kafka.common.requests.{ListOffsetsRequest, ListOffsetsResponse}
import org.apache.kafka.common.{IsolationLevel, TopicPartition}
import org.junit.jupiter.api.Assertions._
import org.junit.jupiter.api.Test
import scala.jdk.CollectionConverters._
class ListOffsetsRequestTest extends BaseRequestTest {
val topic = "topic"
val partition = new TopicPartition(topic, 0)
// Verifies partition-level error codes of ListOffsets for the three client
// flavors (consumer, replica fetcher, debugging replica) against an unknown
// topic, a follower broker, and a broker hosting no replica at all.
@Test
def testListOffsetsErrorCodes(): Unit = {
  val targetTimes = List(new ListOffsetsTopic()
    .setName(topic)
    .setPartitions(List(new ListOffsetsPartition()
      .setPartitionIndex(partition.partition)
      .setTimestamp(ListOffsetsRequest.EARLIEST_TIMESTAMP)
      .setCurrentLeaderEpoch(0)).asJava)).asJava
  val consumerRequest = ListOffsetsRequest.Builder
    .forConsumer(false, IsolationLevel.READ_UNCOMMITTED)
    .setTargetTimes(targetTimes)
    .build()
  val replicaRequest = ListOffsetsRequest.Builder
    .forReplica(ApiKeys.LIST_OFFSETS.latestVersion, servers.head.config.brokerId)
    .setTargetTimes(targetTimes)
    .build()
  val debugReplicaRequest = ListOffsetsRequest.Builder
    .forReplica(ApiKeys.LIST_OFFSETS.latestVersion, ListOffsetsRequest.DEBUGGING_REPLICA_ID)
    .setTargetTimes(targetTimes)
    .build()
  // Unknown topic: the topic has not been created yet, so every flavor fails.
  val randomBrokerId = servers.head.config.brokerId
  assertResponseError(Errors.UNKNOWN_TOPIC_OR_PARTITION, randomBrokerId, consumerRequest)
  assertResponseError(Errors.UNKNOWN_TOPIC_OR_PARTITION, randomBrokerId, replicaRequest)
  assertResponseError(Errors.UNKNOWN_TOPIC_OR_PARTITION, randomBrokerId, debugReplicaRequest)
  val partitionToLeader = TestUtils.createTopic(zkClient, topic, numPartitions = 1, replicationFactor = 2, servers)
  val replicas = zkClient.getReplicasForPartition(partition).toSet
  val leader = partitionToLeader(partition.partition)
  val follower = replicas.find(_ != leader).get
  val nonReplica = servers.map(_.config.brokerId).find(!replicas.contains(_)).get
  // Follower: only the debugging replica request is allowed to read from a follower.
  assertResponseError(Errors.NOT_LEADER_OR_FOLLOWER, follower, consumerRequest)
  assertResponseError(Errors.NOT_LEADER_OR_FOLLOWER, follower, replicaRequest)
  assertResponseError(Errors.NONE, follower, debugReplicaRequest)
  // Non-replica: no flavor may read from a broker that hosts no replica.
  assertResponseError(Errors.NOT_LEADER_OR_FOLLOWER, nonReplica, consumerRequest)
  assertResponseError(Errors.NOT_LEADER_OR_FOLLOWER, nonReplica, replicaRequest)
  assertResponseError(Errors.NOT_LEADER_OR_FOLLOWER, nonReplica, debugReplicaRequest)
}
/** Sends a consumer ListOffsets request for the earliest offset to the given
  * broker, optionally carrying a current leader epoch, and asserts the
  * expected partition-level error code. */
def assertResponseErrorForEpoch(error: Errors, brokerId: Int, currentLeaderEpoch: Optional[Integer]): Unit = {
  val partitionQuery = new ListOffsetsPartition()
    .setPartitionIndex(partition.partition)
    .setTimestamp(ListOffsetsRequest.EARLIEST_TIMESTAMP)
  // Only attach the epoch when one was supplied; an absent epoch means "no validation".
  if (currentLeaderEpoch.isPresent) {
    partitionQuery.setCurrentLeaderEpoch(currentLeaderEpoch.get)
  }
  val topics = List(new ListOffsetsTopic()
    .setName(topic)
    .setPartitions(List(partitionQuery).asJava)).asJava
  val request = ListOffsetsRequest.Builder
    .forConsumer(false, IsolationLevel.READ_UNCOMMITTED)
    .setTargetTimes(topics)
    .build()
  assertResponseError(error, brokerId, request)
}
// Verifies leader-epoch fencing for ListOffsets: a stale epoch yields
// FENCED_LEADER_EPOCH, a future epoch yields UNKNOWN_LEADER_EPOCH, and the
// current epoch (or no epoch) succeeds on the leader but not on a follower.
@Test
def testCurrentEpochValidation(): Unit = {
  val topic = "topic"
  val topicPartition = new TopicPartition(topic, 0)
  val partitionToLeader = TestUtils.createTopic(zkClient, topic, numPartitions = 1, replicationFactor = 3, servers)
  val firstLeaderId = partitionToLeader(topicPartition.partition)
  // We need a leader change in order to check epoch fencing since the first epoch is 0 and
  // -1 is treated as having no epoch at all
  killBroker(firstLeaderId)
  // Check leader error codes
  val secondLeaderId = TestUtils.awaitLeaderChange(servers, topicPartition, firstLeaderId)
  val secondLeaderEpoch = TestUtils.findLeaderEpoch(secondLeaderId, topicPartition, servers)
  assertResponseErrorForEpoch(Errors.NONE, secondLeaderId, Optional.empty())
  assertResponseErrorForEpoch(Errors.NONE, secondLeaderId, Optional.of(secondLeaderEpoch))
  assertResponseErrorForEpoch(Errors.FENCED_LEADER_EPOCH, secondLeaderId, Optional.of(secondLeaderEpoch - 1))
  assertResponseErrorForEpoch(Errors.UNKNOWN_LEADER_EPOCH, secondLeaderId, Optional.of(secondLeaderEpoch + 1))
  // Check follower error codes
  val followerId = TestUtils.findFollowerId(topicPartition, servers)
  assertResponseErrorForEpoch(Errors.NOT_LEADER_OR_FOLLOWER, followerId, Optional.empty())
  assertResponseErrorForEpoch(Errors.NOT_LEADER_OR_FOLLOWER, followerId, Optional.of(secondLeaderEpoch))
  assertResponseErrorForEpoch(Errors.UNKNOWN_LEADER_EPOCH, followerId, Optional.of(secondLeaderEpoch + 1))
  assertResponseErrorForEpoch(Errors.FENCED_LEADER_EPOCH, followerId, Optional.of(secondLeaderEpoch - 1))
}
/**
 * Fetches (offset, leaderEpoch) for the test partition from broker `serverId`
 * at the given `timestamp`.
 *
 * `version == -1` means "build the request at the latest supported version";
 * any other value pins the request to that protocol version. For v0 responses
 * the offset comes from the old-style offset list ((-1) when the list is
 * empty); newer versions carry a single offset field.
 */
def fetchOffsetAndEpoch(serverId: Int,
                        timestamp: Long,
                        version: Short): (Long, Int) = {
  val targetTimes = List(new ListOffsetsTopic()
    .setName(topic)
    .setPartitions(List(new ListOffsetsPartition()
      .setPartitionIndex(partition.partition)
      .setTimestamp(timestamp)).asJava)).asJava
  val builder = ListOffsetsRequest.Builder
    .forConsumer(false, IsolationLevel.READ_UNCOMMITTED)
    .setTargetTimes(targetTimes)
  val request = if (version == -1) builder.build() else builder.build(version)
  val response = sendRequest(serverId, request)
  // .get is safe here: the response must echo the requested topic/partition,
  // otherwise the test should fail loudly anyway.
  val partitionData = response.topics.asScala.find(_.name == topic).get
    .partitions.asScala.find(_.partitionIndex == partition.partition).get
  if (version == 0) {
    if (partitionData.oldStyleOffsets().isEmpty)
      (-1, partitionData.leaderEpoch)
    else
      (partitionData.oldStyleOffsets().asScala.head, partitionData.leaderEpoch)
  } else
    (partitionData.offset, partitionData.leaderEpoch)
}
/**
 * Verifies that ListOffsets responses report the partition's leader epoch:
 * epoch 0 while the first leader is alive, and the bumped epoch (for the
 * latest offset) after a leader change.
 */
@Test
def testResponseIncludesLeaderEpoch(): Unit = {
  val partitionToLeader = TestUtils.createTopic(zkClient, topic, numPartitions = 1, replicationFactor = 3, servers)
  val firstLeaderId = partitionToLeader(partition.partition)

  TestUtils.generateAndProduceMessages(servers, topic, 10)

  // Initial leader: epoch 0 for a timestamp lookup, earliest, and latest.
  assertEquals((0L, 0), fetchOffsetAndEpoch(firstLeaderId, 0L, -1))
  assertEquals((0L, 0), fetchOffsetAndEpoch(firstLeaderId, ListOffsetsRequest.EARLIEST_TIMESTAMP, -1))
  assertEquals((10L, 0), fetchOffsetAndEpoch(firstLeaderId, ListOffsetsRequest.LATEST_TIMESTAMP, -1))

  // Kill the first leader so that we can verify the epoch change when fetching the latest offset
  killBroker(firstLeaderId)
  val secondLeaderId = TestUtils.awaitLeaderChange(servers, partition, firstLeaderId)
  val secondLeaderEpoch = TestUtils.findLeaderEpoch(secondLeaderId, partition, servers)

  // No changes to written data: earliest offsets still report epoch 0.
  // (The original test asserted this pair twice verbatim; the duplicate
  // assertions were removed.)
  assertEquals((0L, 0), fetchOffsetAndEpoch(secondLeaderId, 0L, -1))
  assertEquals((0L, 0), fetchOffsetAndEpoch(secondLeaderId, ListOffsetsRequest.EARLIEST_TIMESTAMP, -1))

  // The latest offset reflects the updated epoch
  assertEquals((10L, secondLeaderEpoch), fetchOffsetAndEpoch(secondLeaderId, ListOffsetsRequest.LATEST_TIMESTAMP, -1))
}
/**
 * Exercises every supported ListOffsets protocol version and checks the
 * default offset/epoch encoding per version band.
 */
@Test
def testResponseDefaultOffsetAndLeaderEpochForAllVersions(): Unit = {
  val partitionToLeader = TestUtils.createTopic(zkClient, topic, numPartitions = 1, replicationFactor = 3, servers)
  val firstLeaderId = partitionToLeader(partition.partition)

  TestUtils.generateAndProduceMessages(servers, topic, 10)

  for (version <- ApiKeys.LIST_OFFSETS.oldestVersion to ApiKeys.LIST_OFFSETS.latestVersion) {
    if (version == 0) {
      // v0: old-style offset list, no entry for a plain timestamp lookup
      // (offset -1) and no leader-epoch field (epoch -1).
      assertEquals((-1L, -1), fetchOffsetAndEpoch(firstLeaderId, 0L, version.toShort))
      assertEquals((0L, -1), fetchOffsetAndEpoch(firstLeaderId, ListOffsetsRequest.EARLIEST_TIMESTAMP, version.toShort))
      assertEquals((10L, -1), fetchOffsetAndEpoch(firstLeaderId, ListOffsetsRequest.LATEST_TIMESTAMP, version.toShort))
    } else if (version >= 1 && version <= 3) {
      // v1-v3: single offset field, still no leader-epoch field (-1).
      assertEquals((0L, -1), fetchOffsetAndEpoch(firstLeaderId, 0L, version.toShort))
      assertEquals((0L, -1), fetchOffsetAndEpoch(firstLeaderId, ListOffsetsRequest.EARLIEST_TIMESTAMP, version.toShort))
      assertEquals((10L, -1), fetchOffsetAndEpoch(firstLeaderId, ListOffsetsRequest.LATEST_TIMESTAMP, version.toShort))
    } else if (version >= 4) {
      // v4+: leader epoch is included (0 for the initial leader).
      assertEquals((0L, 0), fetchOffsetAndEpoch(firstLeaderId, 0L, version.toShort))
      assertEquals((0L, 0), fetchOffsetAndEpoch(firstLeaderId, ListOffsetsRequest.EARLIEST_TIMESTAMP, version.toShort))
      assertEquals((10L, 0), fetchOffsetAndEpoch(firstLeaderId, ListOffsetsRequest.LATEST_TIMESTAMP, version.toShort))
    }
  }
}
/**
 * Sends `request` to `brokerId` and asserts that the response echoes the same
 * number of topics and that every partition of every topic reports `error`.
 */
private def assertResponseError(error: Errors, brokerId: Int, request: ListOffsetsRequest): Unit = {
  val response = sendRequest(brokerId, request)
  assertEquals(request.topics.size, response.topics.size)
  for {
    topicResponse <- response.topics.asScala
    partitionResponse <- topicResponse.partitions.asScala
  } assertEquals(error.code, partitionResponse.errorCode)
}
// Sends the request over the socket server of broker `leaderId` and blocks
// for the ListOffsets response.
private def sendRequest(leaderId: Int, request: ListOffsetsRequest): ListOffsetsResponse = {
  connectAndReceive[ListOffsetsResponse](request, destination = brokerSocketServer(leaderId))
}
}
| Chasego/kafka | core/src/test/scala/unit/kafka/server/ListOffsetsRequestTest.scala | Scala | apache-2.0 | 10,626 |
package com.sksamuel.elastic4s.searches.aggs
import com.sksamuel.elastic4s.ElasticDate
import com.sksamuel.elastic4s.script.Script
import com.sksamuel.elastic4s.searches.DateHistogramInterval
import com.sksamuel.elastic4s.searches.aggs.pipeline.PipelineAgg
import com.sksamuel.exts.OptionImplicits._
import org.joda.time.DateTimeZone
import scala.concurrent.duration.{FiniteDuration, _}
/**
 * Sort order for histogram buckets: `name` is the Elasticsearch order key
 * ("_key" or "_count") and `asc` selects ascending vs descending.
 */
case class HistogramOrder(name: String, asc: Boolean)

// Predefined orders matching Elasticsearch's built-in bucket sort keys.
object HistogramOrder {
  val KEY_ASC = HistogramOrder("_key", true)
  val KEY_DESC = HistogramOrder("_key", false)
  val COUNT_ASC = HistogramOrder("_count", true)
  val COUNT_DESC = HistogramOrder("_count", false)
}
/**
 * Immutable builder for an Elasticsearch date_histogram aggregation.
 * Each fluent method returns a copy with the corresponding option set;
 * unset options are omitted from the generated request.
 */
case class DateHistogramAggregation(name: String,
                                    interval: Option[DateHistogramInterval] = None,
                                    minDocCount: Option[Long] = None,
                                    timeZone: Option[DateTimeZone] = None,
                                    order: Option[HistogramOrder] = None,
                                    offset: Option[String] = None,
                                    format: Option[String] = None,
                                    field: Option[String] = None,
                                    script: Option[Script] = None,
                                    missing: Option[Any] = None,
                                    extendedBounds: Option[ExtendedBounds] = None,
                                    pipelines: Seq[PipelineAgg] = Nil,
                                    subaggs: Seq[AbstractAggregation] = Nil,
                                    metadata: Map[String, AnyRef] = Map.empty)
  extends Aggregation {

  type T = DateHistogramAggregation

  def extendedBounds(bounds: ExtendedBounds): DateHistogramAggregation = copy(extendedBounds = bounds.some)
  def extendedBounds(min: ElasticDate, max: ElasticDate): DateHistogramAggregation =
    copy(extendedBounds = ExtendedBounds(min, max).some)

  // Numeric overloads interpret the value as a duration in seconds.
  def interval(seconds: Long): DateHistogramAggregation = interval(seconds.seconds)
  def interval(dur: FiniteDuration): DateHistogramAggregation = interval(DateHistogramInterval.seconds(dur.toSeconds))
  def interval(interval: DateHistogramInterval): DateHistogramAggregation = copy(interval = interval.some)

  def minDocCount(min: Long): DateHistogramAggregation = copy(minDocCount = min.some)
  def timeZone(timeZone: DateTimeZone): DateHistogramAggregation = copy(timeZone = timeZone.some)
  def offset(offset: String): DateHistogramAggregation = copy(offset = offset.some)
  def order(order: HistogramOrder): DateHistogramAggregation = copy(order = order.some)
  def format(format: String): DateHistogramAggregation = copy(format = format.some)
  def field(field: String): DateHistogramAggregation = copy(field = field.some)
  def script(script: Script): DateHistogramAggregation = copy(script = script.some)
  def missing(missing: Any): DateHistogramAggregation = copy(missing = missing.some)

  override def subAggregations(aggs: Iterable[AbstractAggregation]): T = copy(subaggs = aggs.toSeq)
  override def metadata(map: Map[String, AnyRef]): T = copy(metadata = map)
}
| Tecsisa/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/aggs/DateHistogramAggregation.scala | Scala | apache-2.0 | 3,222 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.cluster.sdv.register
import java.io.IOException
import org.scalatest.BeforeAndAfterAll
import org.apache.hadoop.fs.{FileUtil, Path}
import org.apache.spark.sql.test.TestQueryExecutor
import org.apache.spark.sql.test.util.QueryTest
import org.apache.spark.sql.{AnalysisException, CarbonEnv, Row}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.spark.exception.ProcessMetaDataException
/**
 * Tests for registering a carbon table with `refresh table` after its
 * on-disk files have been backed up and restored.
 */
/**
 * Verifies that carbon tables can be re-registered via `refresh table` after
 * their on-disk files are backed up and restored, and that subsequent reads,
 * update/delete operations, pre-aggregate datamaps and alter-table commands
 * still work on the refreshed table.
 */
class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {

  // Custom database location so table files can be copied out and back on disk.
  var dbLocationCustom = TestQueryExecutor.warehouse +
    CarbonCommonConstants.FILE_SEPARATOR + "dbName"

  override def beforeAll {
    sql("drop database if exists carbon cascade")
  }

  /**
   * Copies the backup of `tableName` (created by [[backUpData]]) back into
   * `dbLocationCustom`. The backup copy is removed (deleteSource = true).
   *
   * @throws IOException if the copy fails; the original exception is chained
   *                     as the cause so the real failure is not lost.
   */
  def restoreData(dbLocationCustom: String, tableName: String) = {
    val destination = dbLocationCustom + CarbonCommonConstants.FILE_SEPARATOR + tableName
    val source = dbLocationCustom + "_back" + CarbonCommonConstants.FILE_SEPARATOR + tableName
    try {
      val fs = new Path(source).getFileSystem(FileFactory.getConfiguration)
      val sourceFileStatus = fs.getFileStatus(new Path(source))
      FileUtil.copy(fs,
        sourceFileStatus,
        fs,
        new Path(destination),
        true,
        true,
        FileFactory.getConfiguration)
    } catch {
      case e: Exception =>
        // Chain the cause instead of discarding it (the original code threw a
        // bare IOException, hiding the underlying filesystem error).
        throw new IOException("carbon table data restore failed.", e)
    }
  }

  /**
   * Copies the files of `tableName` into a sibling "<dbLocation>_back"
   * directory, keeping the originals in place (deleteSource = false).
   *
   * @throws IOException if the copy fails; the original exception is chained.
   */
  def backUpData(dbLocationCustom: String, tableName: String) = {
    val source = dbLocationCustom + CarbonCommonConstants.FILE_SEPARATOR + tableName
    val destination = dbLocationCustom + "_back" + CarbonCommonConstants.FILE_SEPARATOR + tableName
    try {
      val fs = new Path(source).getFileSystem(FileFactory.getConfiguration)
      val sourceFileStatus = fs.getFileStatus(new Path(source))
      FileUtil.copy(fs,
        sourceFileStatus,
        fs,
        new Path(destination),
        false,
        true,
        FileFactory.getConfiguration)
    } catch {
      case e: Exception =>
        throw new IOException("carbon table data backup failed.", e)
    }
  }

  // NOTE(review): this test and "register table test" below are exact
  // duplicates; one of them could be removed.
  test("register tables test") {
    sql("drop database if exists carbon cascade")
    sql(s"create database carbon location '$dbLocationCustom'")
    sql("use carbon")
    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
    sql("insert into carbontable select 'a',1,'aa','aaa'")
    // The backup/restore round-trip only makes sense when metadata is kept on
    // disk rather than in the Hive metastore.
    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
      backUpData(dbLocationCustom, "carbontable")
      sql("drop table carbontable")
      restoreData(dbLocationCustom, "carbontable")
      sql("refresh table carbontable")
    }
    checkAnswer(sql("select count(*) from carbontable"), Row(1))
    checkAnswer(sql("select c1 from carbontable"), Seq(Row("a")))
  }

  test("register table test") {
    sql("drop database if exists carbon cascade")
    sql(s"create database carbon location '$dbLocationCustom'")
    sql("use carbon")
    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
    sql("insert into carbontable select 'a',1,'aa','aaa'")
    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
      backUpData(dbLocationCustom, "carbontable")
      sql("drop table carbontable")
      restoreData(dbLocationCustom, "carbontable")
      sql("refresh table carbontable")
    }
    checkAnswer(sql("select count(*) from carbontable"), Row(1))
    checkAnswer(sql("select c1 from carbontable"), Seq(Row("a")))
  }

  // NOTE(review): duplicate of "register pre aggregate table test" below.
  test("register pre aggregate tables test") {
    sql("drop database if exists carbon cascade")
    sql(s"create database carbon location '$dbLocationCustom'")
    sql("use carbon")
    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
    sql("insert into carbontable select 'a',1,'aa','aaa'")
    sql("insert into carbontable select 'b',1,'aa','aaa'")
    sql("insert into carbontable select 'a',10,'aa','aaa'")
    sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
      // Both the main table and its pre-aggregate child must be round-tripped.
      backUpData(dbLocationCustom, "carbontable")
      backUpData(dbLocationCustom, "carbontable_preagg1")
      sql("drop table carbontable")
      restoreData(dbLocationCustom, "carbontable")
      restoreData(dbLocationCustom, "carbontable_preagg1")
      sql("refresh table carbontable")
    }
    checkAnswer(sql("select count(*) from carbontable"), Row(3))
    checkAnswer(sql("select c1 from carbontable"), Seq(Row("a"), Row("b"), Row("a")))
    checkAnswer(sql("select count(*) from carbontable_preagg1"), Row(2))
    checkAnswer(sql("select carbontable_c1 from carbontable_preagg1"), Seq(Row("a"), Row("b")))
  }

  test("register pre aggregate table test") {
    sql("drop database if exists carbon cascade")
    sql(s"create database carbon location '$dbLocationCustom'")
    sql("use carbon")
    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
    sql("insert into carbontable select 'a',1,'aa','aaa'")
    sql("insert into carbontable select 'b',1,'aa','aaa'")
    sql("insert into carbontable select 'a',10,'aa','aaa'")
    sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
      backUpData(dbLocationCustom, "carbontable")
      backUpData(dbLocationCustom, "carbontable_preagg1")
      sql("drop table carbontable")
      restoreData(dbLocationCustom, "carbontable")
      restoreData(dbLocationCustom, "carbontable_preagg1")
      sql("refresh table carbontable")
    }
    checkAnswer(sql("select count(*) from carbontable"), Row(3))
    checkAnswer(sql("select c1 from carbontable"), Seq(Row("a"), Row("b"), Row("a")))
    checkAnswer(sql("select count(*) from carbontable_preagg1"), Row(2))
    checkAnswer(sql("select carbontable_c1 from carbontable_preagg1"), Seq(Row("a"), Row("b")))
  }

  test("register pre aggregate table should fail if the aggregate table not copied") {
    sql("drop database if exists carbon cascade")
    sql(s"create database carbon location '$dbLocationCustom'")
    sql("use carbon")
    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
    sql("insert into carbontable select 'a',1,'aa','aaa'")
    sql("insert into carbontable select 'b',1,'aa','aaa'")
    sql("insert into carbontable select 'a',10,'aa','aaa'")
    sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
      backUpData(dbLocationCustom, "carbontable")
      backUpData(dbLocationCustom, "carbontable_preagg1")
      sql("drop table carbontable")
      // Restore only the parent table: refresh must fail because the
      // pre-aggregate child is missing on disk.
      restoreData(dbLocationCustom, "carbontable")
      intercept[ProcessMetaDataException] {
        sql("refresh table carbontable")
      }
      // Restore the child afterwards so later tests see a clean state.
      restoreData(dbLocationCustom, "carbontable_preagg1")
    }
  }

  test("Update operation on carbon table should pass after registration or refresh") {
    sql("drop database if exists carbon cascade")
    sql(s"create database carbon location '$dbLocationCustom'")
    sql("use carbon")
    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
    sql("insert into carbontable select 'a',1,'aa','aaa'")
    sql("insert into carbontable select 'b',1,'bb','bbb'")
    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
      backUpData(dbLocationCustom, "carbontable")
      sql("drop table carbontable")
      restoreData(dbLocationCustom, "carbontable")
      sql("refresh table carbontable")
    }
    // update operation
    sql("""update carbon.carbontable d set (d.c2) = (d.c2 + 1) where d.c1 = 'a'""").show()
    sql("""update carbon.carbontable d set (d.c2) = (d.c2 + 1) where d.c1 = 'b'""").show()
    checkAnswer(
      sql("""select c1,c2,c3,c5 from carbon.carbontable"""),
      Seq(Row("a", 2, "aa", "aaa"), Row("b", 2, "bb", "bbb"))
    )
  }

  test("Delete operation on carbon table") {
    sql("drop database if exists carbon cascade")
    sql(s"create database carbon location '$dbLocationCustom'")
    sql("use carbon")
    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
    sql("insert into carbontable select 'a',1,'aa','aaa'")
    sql("insert into carbontable select 'b',1,'bb','bbb'")
    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
      backUpData(dbLocationCustom, "carbontable")
      sql("drop table carbontable")
      restoreData(dbLocationCustom, "carbontable")
      sql("refresh table carbontable")
    }
    // delete operation
    sql("""delete from carbontable where c3 = 'aa'""").show
    checkAnswer(
      sql("""select c1,c2,c3,c5 from carbon.carbontable"""),
      Seq(Row("b", 1, "bb", "bbb"))
    )
    sql("drop table carbontable")
  }

  test("Alter table add column test") {
    sql("drop database if exists carbon cascade")
    sql(s"create database carbon location '$dbLocationCustom'")
    sql("use carbon")
    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
    sql("insert into carbontable select 'a',1,'aa','aaa'")
    sql("insert into carbontable select 'b',1,'bb','bbb'")
    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
      backUpData(dbLocationCustom, "carbontable")
      sql("drop table carbontable")
      restoreData(dbLocationCustom, "carbontable")
      sql("refresh table carbontable")
    }
    // Added column gets a default value backfilled into existing rows.
    sql("Alter table carbontable add columns(c4 string) " +
        "TBLPROPERTIES('DICTIONARY_EXCLUDE'='c4', 'DEFAULT.VALUE.c4'='def')")
    checkAnswer(
      sql("""select c1,c2,c3,c5,c4 from carbon.carbontable"""),
      Seq(Row("a", 1, "aa", "aaa", "def"), Row("b", 1, "bb", "bbb", "def"))
    )
    sql("drop table carbontable")
  }

  test("Alter table change column datatype test") {
    sql("drop database if exists carbon cascade")
    sql(s"create database carbon location '$dbLocationCustom'")
    sql("use carbon")
    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
    sql("insert into carbontable select 'a',1,'aa','aaa'")
    sql("insert into carbontable select 'b',1,'bb','bbb'")
    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
      backUpData(dbLocationCustom, "carbontable")
      sql("drop table carbontable")
      restoreData(dbLocationCustom, "carbontable")
      sql("refresh table carbontable")
    }
    sql("Alter table carbontable change c2 c2 long")
    checkAnswer(
      sql("""select c1,c2,c3,c5 from carbon.carbontable"""),
      Seq(Row("a", 1, "aa", "aaa"), Row("b", 1, "bb", "bbb"))
    )
    sql("drop table carbontable")
  }

  test("Alter table drop column test") {
    sql("drop database if exists carbon cascade")
    sql(s"create database carbon location '$dbLocationCustom'")
    sql("use carbon")
    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
    sql("insert into carbontable select 'a',1,'aa','aaa'")
    sql("insert into carbontable select 'b',1,'bb','bbb'")
    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
      backUpData(dbLocationCustom, "carbontable")
      sql("drop table carbontable")
      restoreData(dbLocationCustom, "carbontable")
      sql("refresh table carbontable")
    }
    sql("Alter table carbontable drop columns(c2)")
    checkAnswer(
      sql("""select * from carbon.carbontable"""),
      Seq(Row("a", "aa", "aaa"), Row("b", "bb", "bbb"))
    )
    sql("drop table carbontable")
  }

  override def afterAll {
    sql("use default")
    sql("drop database if exists carbon cascade")
  }
}
| manishgupta88/carbondata | integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/register/TestRegisterCarbonTable.scala | Scala | apache-2.0 | 13,410 |
import scala.quoted.*
object Macros {

  // Entry point: splices the argument's tree into a reflective dump at
  // compile time.
  inline def inspect[T](x: T): Unit = ${ impl('x) }

  /**
   * Builds an expression that, when run, prints the structural form of the
   * argument's term tree and of the tree with definitions dereferenced
   * (`underlying`).
   */
  def impl[T](x: Expr[T])(using q: Quotes) : Expr[Unit] = {
    import q.reflect.*
    val tree = x.asTerm
    // Use the structure printer so the output shows the raw tree shape.
    given Printer[Tree] = Printer.TreeStructure
    '{
      println()
      println("tree: " + ${Expr(tree.show)})
      println("tree deref. vals: " + ${Expr(tree.underlying.show)})
    }
  }
}
| lampepfl/dotty | tests/run-macros/tasty-argument-tree-1/quoted_1.scala | Scala | apache-2.0 | 396 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package statements
import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElementVisitor
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base._
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.stubs.ScVariableStub
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.result.{Failure, TypeResult, TypingContext}
import com.intellij.util.IncorrectOperationException
import org.jetbrains.plugins.scala.extensions.ifReadAllowed
/**
* @author Alexander Podkhalyuzin
*/
/**
 * PSI implementation of a `var` definition. Backed either by a stub
 * (index-time) or by an AST node (parse-time); exactly one of the two
 * constructor arguments is non-null.
 */
class ScVariableDefinitionImpl private (stub: ScVariableStub, node: ASTNode)
  extends ScalaStubBasedElementImpl(stub, VARIABLE_DEFINITION, node) with ScVariableDefinition {

  // AST-backed construction (no stub available).
  def this(node: ASTNode) = this(null, node)

  // Stub-backed construction (no AST built yet).
  def this(stub: ScVariableStub) = this(stub, null)

  override def accept(visitor: PsiElementVisitor) {
    visitor match {
      // NOTE(review): both branches look identical, but the static type of
      // `visitor` differs per branch, which can select a different `accept`
      // overload in the superclass — confirm before simplifying this match.
      case visitor: ScalaElementVisitor => super.accept(visitor)
      case _ => super.accept(visitor)
    }
  }

  // Initializer expression; read from the stub when available to avoid AST loading.
  def expr: Option[ScExpression] = byPsiOrStub(findChild(classOf[ScExpression]))(_.bodyExpression)

  // ifReadAllowed: names are only rendered when a read action is permitted.
  override def toString: String = "ScVariableDefinition: " + ifReadAllowed(declaredNames.mkString(", "))("")

  // All binding patterns declared by this definition, e.g. both `a` and `b`
  // in `var (a, b) = ...`.
  def bindings: Seq[ScBindingPattern] = pList match {
    case null => Seq.empty
    case ScPatternList(Seq(pattern)) => pattern.bindings
    case ScPatternList(patterns) => patterns.flatMap(_.bindings)
  }

  // Declared type annotation wins; otherwise fall back to the initializer's
  // inferred type, failing when neither is present.
  def getType(ctx: TypingContext): TypeResult[ScType] = typeElement match {
    case Some(te) => te.getType(ctx)
    case None => expr.map(_.getType(TypingContext.empty))
      .getOrElse(Failure("Cannot infer type without an expression", Some(this)))
  }

  def typeElement: Option[ScTypeElement] = byPsiOrStub(findChild(classOf[ScTypeElement]))(_.typeElement)

  def pList: ScPatternList = getStubOrPsiChild(PATTERN_LIST)
}
package scala.lms
package epfl
package test10
import common._
import internal.{NestedBlockTraversal}
import test1._
import test7.{Print,PrintExp,ScalaGenPrint}
import test7.{ArrayLoops,ArrayLoopsExp,ScalaGenArrayLoops}
import test8._
import util.OverloadHack
import java.io.{PrintWriter,StringWriter,FileOutputStream}
import scala.reflect.SourceContext
// investigate modified mutation tracking similar to SSA form.
//
// val x = vzeros(100)
// val as = vliteral(List(x))
// vupdate(x,5,7)
// val y = as(0)
// println(y)
//
// becomes:
//
// val x0 = vzeros(100)
// val as0 = vliteral(List(x))
// val m0 = vupdate(x0,5,7)
// val x1 = x0 // mutate m0
// val as1 = as0 // mutate m0
//
// current questions:
// what do we gain?
// improved DCE: no Mutate node -> vupdate stm can go, too
// can we do without .mutable annotations?
// probably not. would need to be pessimistic about everything, i.e. no CSE at all
// can we do cse for mutable data?
// one idea was to introduce copy on write, i.e. undo cse when mutation is detected.
// this seems hard: original data may be used as part of complex structures that
// would need to be recreated and the previous version is not guaranteed to be DCEd.
// another idea was to recognize common subexpressions but insert copy nodes instead
// of eliminating them (similar to mzeros(100) = zeros(100).mutable)
//
// implementation (DONE):
// ensure anti-deps: mutated symbols must be dead after the VectorUpdate stm
// add fattening: fuse VectorUpdate and those Mutate nodes that aren't DCEd
// is this really necessary? it seems like
// User-facing vector DSL surface; LibExp below supplies the IR implementation.
trait Lib extends VectorOps
/**
 * IR implementation of the vector DSL with SSA-style mutation tracking
 * (see the file-header comment): a `vupdate` "kills" every alias of the
 * mutated symbol by rewriting later uses through a substitution map, and
 * CSE on mutable data is replaced by explicit `Copy` nodes.
 */
trait LibExp extends Lib with VectorExp with BaseFatExp with EffectExp {

  // Marker: `a` was invalidated by mutation statement `b`; later uses of the
  // original symbol are redirected to this node.
  case class Mutate[T](a: Rep[T], b: Rep[Any]) extends Def[T]
  // Explicit clone inserted where CSE would otherwise share mutable data.
  case class Copy[T](a: Rep[T]) extends Def[T]
  // Wraps a def with soft (ordering-only) dependencies on `es`.
  case class ReflectSoft[T](a: Def[T], es: List[Exp[Any]]) extends Def[T]
  // Fat node grouping a mutation with its surviving Mutate helpers.
  case class Multi(as: List[Def[Any]]) extends FatDef

  override def mirror[A:Typ](e: Def[A], f: Transformer)(implicit pos: SourceContext): Exp[A] = (e match {
    case Mutate(a,b) => toAtom(Mutate(f(a),f(b)))
    case Copy(a) => toAtom(Copy(f(a)))
    case _ => super.mirror(e,f)
  }).asInstanceOf[Exp[A]] // why??

  // Symbols written (mutated) by a def — currently only VectorUpdate's target.
  def writeSyms(d: Any): List[Sym[Any]] = d match {
    case VectorUpdate(a,x,y) => syms(a)
    case _ => List()
  }

  // Hard deps of ReflectSoft are only those of the wrapped def.
  override def syms(d: Any): List[Sym[Any]] = d match {
    case d@ReflectSoft(a,es) => syms(a)
    case _ => super.syms(d)
  }
  override def symsFreq(d: Any) = d match {
    case d@ReflectSoft(a,es) => symsFreq(a)
    case _ => super.symsFreq(d)
  }
  // Soft deps enforce scheduling order without keeping `es` alive for DCE.
  override def softSyms(d: Any): List[Sym[Any]] = d match {
    case d@ReflectSoft(a,es) => syms(es)
    case _ => super.softSyms(d)
  }

  var subst: Map[Sym[_],Exp[_]] = Map() // need to hook this up with reifyEffects? does it ever need to be reset?

  // speculative nature: we must be able to back off if speculation fails.
  // ergo, need to save original Defs (or have a way to reconstruct them).
  override implicit def toAtom[A:Typ](d: Def[A])(implicit pos: SourceContext): Exp[A] = {
    val in = syms(d)
    val actual = in map (s => subst.getOrElse(s,s))
    if (in != actual) {
      // Some inputs were invalidated by a mutation: re-mirror the def through
      // the substitution so it reads the post-mutation versions.
      val t = new SubstTransformer
      t.subst ++= subst
      mirror(d,t)
    } else {
      val kill = writeSyms(d)
      if (kill.nonEmpty) {
        // This def mutates `kill`: invalidate every def that may alias it.
        val transitive = globalDefs collect { case e@TP(s,rhs) if (s::allAliases(rhs) intersect kill).nonEmpty => e }
        println("killing: " + kill + "/" + transitive.map(_.sym) + " by " + d)
        val sym = fresh[A]
        for (TP(s,rhs) <- transitive) {
          subst += (s -> toAtom(Mutate(s, sym))(s.tp,mpos(s.pos)))
        }
        // add soft dependencies on transitive to ensure ordering!
        // at first sight it should be enough that for each s in transitive
        // there is a Mutate node with a dependency.
        // however the Mutate node might be DCE'd, but not the original
        // mutable sym.
        /* scenario:
          val v = vrand(100)
          val w = v
          println(w)
          v(5) = 100    <--- must still come after w
          val v1 = v // mutated
          [ val w1 = w // mutated ]  <--- dce
          println(v)
        */
        createDefinition(sym, ReflectSoft(d, transitive.flatMap(_.lhs)))
        sym
      } else {
        // right now we add copy statements whenever we'd do CSE.
        // TODO: re-introduce a split between mutable and immutable data.
        val o = findDefinition(d)
        val sym = fresh[A]
        if (o.nonEmpty) { //d available as s1
          val s1 = o.get.lhs.head
          println("cse: " + sym + " -> " + s1)
          //subst += (sym -> s1)
          createDefinition(sym, Copy(s1)) // no CS
        } else {
          createDefinition(sym, d) // no CSE!
        }
        // if (d available as s1) subst += (sym -> s1)
        sym
      }
    }
  }

  // Kept as an override hook (currently delegates unchanged to super).
  override def findDefinition[A](d: Sym[A]): Option[Stm] = {
    super.findDefinition(d)
  }
}
/**
 * File-diff tests for the SSA-style mutation tracking in LibExp: each test
 * stages a small vector program and checks the generated code (ordering,
 * speculative CSE/copy insertion, and DCE of mutable aliases).
 */
class TestEffects extends FileDiffSuite {

  val prefix = home + "test-out/epfl/test10-"

  // Staged-program interface shared by all test programs.
  trait DSL extends Lib with ArrayMutation with OrderingOps with PrimitiveOps with LiftPrimitives with LiftVariables with IfThenElse with While with RangeOps with Print {
    def infix_toDouble(x: Rep[Int]): Rep[Double] = x.asInstanceOf[Rep[Double]]
    def test(x: Rep[Int]): Rep[Unit]
  }

  // IR + code generator wiring; instantiating it immediately emits the code
  // for `test` (see emitSource at the bottom of the trait body).
  trait Impl extends DSL with ArrayMutationExp with PrimitiveOpsExp with OrderingOpsExpOpt
      with EqualExpOpt //with VariablesExpOpt
      with IfThenElseExpOpt with WhileExpOptSpeculative with RangeOpsExp with PrintExp
      with StringOpsExp with SeqOpsExp
      with Lib with LibExp { self =>
    override val verbosity = 2
    val codegen = new ScalaGenFat with ScalaGenArrayMutation with ScalaGenPrimitiveOps with ScalaGenOrderingOps
        with ScalaGenVariables with ScalaGenIfThenElse with ScalaGenWhileOptSpeculative with ScalaGenRangeOps
        with ScalaGenPrint /*with LivenessOpt*/ { val IR: self.type = self

      override def fattenAll(e: List[Stm]): List[Stm] = {
        println("**fatten "+e)
        // group all Mutate helper nodes together with the mutation
        // TBD: is this necessary (if not, desirable) ?
        val m = e collect { case e@TP(s, Mutate(a,b)) => e }
        val mg = m groupBy { case e@TP(s, Mutate(a,b)) => b }
        val e2 = e map {
          case e@TP(s, rhs) if mg.contains(s) =>
            // Fuse the mutation statement with its surviving Mutate helpers.
            val vs = mg(s)
            val llhs = vs map (_.sym)
            val rrhs = vs map (_.rhs)
            TTP(s::llhs, rhs::rrhs, Multi(rhs::rrhs))
          case e => e
        }
        val e3 = e2 diff m
        super.fattenAll(getSchedule(e3)(e3.flatMap(_.lhs)))
      }

      override def emitFatNode(symList: List[Sym[Any]], rhs: FatDef) = rhs match {
        case Multi(as) =>
          stream.println("// begin multi")
          (symList zip as) foreach { case (s,e) => emitNode(s,e) }
          stream.println("// end multi")
        case _ => super.emitFatNode(symList, rhs)
      }

      override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
        // Vector ops have no real codegen here; print their IR form.
        case _ if rhs.toString.startsWith("Vector") =>
          emitValDef(sym, rhs.toString)
        case ReflectSoft(x,es) =>
          stream.println("// soft deps: "+es.map(quote).mkString(","))
          emitNode(sym,x)
        case Mutate(a,b) =>
          emitValDef(sym, quote(a) + " // mutated by "+ quote(b))
        case Copy(a) =>
          emitValDef(sym, quote(a) + ".clone")
        case _ =>
          super.emitNode(sym,rhs)
      }
    }
    codegen.emitSource(test, "Test", new PrintWriter(System.out))
  }

  def testEffects1 = withOutFileChecked(prefix+"effects1") { // test ordering
    trait Prog extends DSL {
      def test(x: Rep[Int]) = {
        val x = vzeros(100)
        val as = vliteral(List(x))
        vupdate(x,5,7.0) // later reads of as must come after this
        val y = vapply(as,0)
        print(y)
      }
    }
    new Prog with Impl
  }

  def testEffects2 = withOutFileChecked(prefix+"effects2") { // test cse
    trait Prog extends DSL {
      def test(x: Rep[Int]) = {
        val x = vzeros(100)
        val y = vzeros(100) // this will do speculative cse (or call copy)
        val as = vliteral(List(x))
        val bs = vliteral(List(y))
        vupdate(x,5,7.0) // must undo cse: now x and y are different. also, as and bs are different now
        val u = vapply(as,0)
        val v = vapply(bs,0)
        print(u)
        print(v)
      }
    }
    new Prog with Impl
  }

  def testEffects3 = withOutFileChecked(prefix+"effects3") { // test cse
    trait Prog extends DSL {
      def test(x: Rep[Int]) = {
        val x = vzeros(100)
        val y = vzeros(100) // this will do speculative cse (or call copy)
        val e = vliteral(List(y)) // assume that this operation is expensive (don't want to do it twice)
        print(e)
        vupdate(x,5,7.0) // must undo cse: now x and y are different. also, as and bs are different now
        print(e)
      }
    }
    new Prog with Impl
  }

  def testEffects4 = withOutFileChecked(prefix+"effects4") { // test mutable dce
    trait Prog extends DSL {
      def test(x: Rep[Int]) = {
        val x = vzeros(100)
        val y = x
        val as = vliteral(List(x))
        val bs = vliteral(List(y)) // this one should be dce'd because it is never used
        vupdate(x,5,7.0) // this will invalidate bs (anti-dep) but should not give rise to a hard dependency
        val u = vapply(as,0)
        val v = vapply(bs,0)
        print(u)
      }
    }
    new Prog with Impl
  }
}
| astojanov/virtualization-lms-core | test-src/epfl/test10-transform/TestEffect.scala | Scala | bsd-3-clause | 9,891 |
package dotty.tools
package dotc
package reporting
import core._
import Contexts._
import Decorators._, Symbols._, Names._, NameOps._, Types._, Flags._, Phases._
import Denotations.SingleDenotation
import SymDenotations.SymDenotation
import NameKinds.WildcardParamName
import parsing.Scanners.Token
import parsing.Tokens
import printing.Highlighting._
import printing.Formatting
import ErrorMessageID._
import ast.Trees
import config.{Feature, ScalaVersion}
import typer.ErrorReporting.{err, matchReductionAddendum}
import typer.ProtoTypes.ViewProto
import typer.Implicits.Candidate
import scala.util.control.NonFatal
import StdNames.nme
import printing.Formatting.hl
import ast.Trees._
import ast.untpd
import ast.tpd
import transform.SymUtils._
/** Messages
* ========
* The role of messages is to provide the necessary details for a simple to
* understand diagnostic event. Each message can be turned into a message
* container (one of the above) by calling the appropriate method on them.
* For instance:
*
* ```scala
* EmptyCatchBlock(tree).error(pos) // res: Error
* EmptyCatchBlock(tree).warning(pos) // res: Warning
* ```
*/
/** Base classes that tag each diagnostic with its `kind` category string,
 *  used to group messages when they are rendered.
 */
abstract class SyntaxMsg(errorId: ErrorMessageID) extends Message(errorId):
  def kind = "Syntax"
abstract class TypeMsg(errorId: ErrorMessageID) extends Message(errorId):
  def kind = "Type"
/** Mixin that appends a match-type reduction trace for `tps` to the message. */
trait ShowMatchTrace(tps: Type*)(using Context) extends Message:
  override def msgSuffix: String = matchReductionAddendum(tps*)
/** Base class for `found` vs `expected` type errors; always explainable. */
abstract class TypeMismatchMsg(found: Type, expected: Type)(errorId: ErrorMessageID)(using Context)
extends Message(errorId), ShowMatchTrace(found, expected):
  def kind = "Type Mismatch"
  def explain = err.whyNoMatchStr(found, expected)
  override def canExplain = true
abstract class NamingMsg(errorId: ErrorMessageID) extends Message(errorId):
  def kind = "Naming"
abstract class DeclarationMsg(errorId: ErrorMessageID) extends Message(errorId):
  def kind = "Declaration"
/** A simple not found message (either for idents, or member selection.
 *  Messages of this class are sometimes dropped in favor of other, more
 *  specific messages.
 */
abstract class NotFoundMsg(errorId: ErrorMessageID) extends Message(errorId):
  def kind = "Not Found"
  // The name that could not be resolved.
  def name: Name
abstract class PatternMatchMsg(errorId: ErrorMessageID) extends Message(errorId):
  def kind = "Pattern Match"
abstract class CyclicMsg(errorId: ErrorMessageID) extends Message(errorId):
  def kind = "Cyclic"
abstract class ReferenceMsg(errorId: ErrorMessageID) extends Message(errorId):
  def kind = "Reference"
/** Shared explanation for a `try` with an empty/missing `catch` or `finally`.
 *  Shows the offending `try` body inside corrected example code.
 */
abstract class EmptyCatchOrFinallyBlock(tryBody: untpd.Tree, errNo: ErrorMessageID)(using Context)
extends SyntaxMsg(EmptyCatchOrFinallyBlockID) {
  def explain = {
    // Render the try body; an empty block is shown as the literal "{}".
    val tryString = tryBody match {
      case Block(Nil, untpd.EmptyTree) => "{}"
      case _ => tryBody.show
    }
    val code1 =
      s"""|import scala.util.control.NonFatal
          |
          |try $tryString catch {
          |  case NonFatal(e) => ???
          |}""".stripMargin
    val code2 =
      s"""|try $tryString finally {
          |  // perform your cleanup here!
          |}""".stripMargin
    em"""|A ${hl("try")} expression should be followed by some mechanism to handle any exceptions
         |thrown. Typically a ${hl("catch")} expression follows the ${hl("try")} and pattern matches
         |on any expected exceptions. For example:
         |
         |$code1
         |
         |It is also possible to follow a ${hl("try")} immediately by a ${hl("finally")} - letting the
         |exception propagate - but still allowing for some clean up in ${hl("finally")}:
         |
         |$code2
         |
         |It is recommended to use the ${hl("NonFatal")} extractor to catch all exceptions as it
         |correctly handles transfer functions like ${hl("return")}."""
  }
}
/** Error: `try ... catch` where the catch block contains no cases. */
class EmptyCatchBlock(tryBody: untpd.Tree)(using Context)
extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchBlockID) {
  def msg =
    em"""|The ${hl("catch")} block does not contain a valid expression, try
         |adding a case like - ${hl("case e: Exception =>")} to the block"""
}
/** Warning: a bare `try` with neither `catch` nor `finally`. */
class EmptyCatchAndFinallyBlock(tryBody: untpd.Tree)(using Context)
extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchAndFinallyBlockID) {
  def msg =
    em"""|A ${hl("try")} without ${hl("catch")} or ${hl("finally")} is equivalent to putting
         |its body in a block; no exceptions are handled."""
}
/** Deprecation: `A with B` as a type should be written `A & B`. */
class DeprecatedWithOperator()(using Context)
extends SyntaxMsg(DeprecatedWithOperatorID) {
  def msg =
    em"""${hl("with")} as a type operator has been deprecated; use ${hl("&")} instead"""
  def explain =
    em"""|Dotty introduces intersection types - ${hl("&")} types. These replace the
         |use of the ${hl("with")} keyword. There are a few differences in
         |semantics between intersection types and using ${hl("with")}."""
}
/** Error: a `case class` declared without any parameter list. */
class CaseClassMissingParamList(cdef: untpd.TypeDef)(using Context)
extends SyntaxMsg(CaseClassMissingParamListID) {
  def msg =
    em"""|A ${hl("case class")} must have at least one parameter list"""
  def explain =
    em"""|${cdef.name} must have at least one parameter list, if you would rather
         |have a singleton representation of ${cdef.name}, use a "${hl("case object")}".
         |Or, add an explicit ${hl("()")} as a parameter list to ${cdef.name}."""
}
/** Error: a lambda parameter whose type could not be inferred from the
 *  expected type `pt`.
 */
class AnonymousFunctionMissingParamType(param: untpd.ValDef,
                                        tree: untpd.Function,
                                        pt: Type)
                                       (using Context)
extends TypeMsg(AnonymousFunctionMissingParamTypeID) {
  def msg = {
    // Show the expanded function only for synthetic/wildcard parameter names,
    // where the user never wrote the parameter explicitly.
    val ofFun =
      if param.name.is(WildcardParamName)
         || (MethodType.syntheticParamNames(tree.args.length + 1) contains param.name)
      then i" of expanded function:\n$tree"
      else ""
    val inferred =
      if (pt == WildcardType) ""
      else i"\nWhat I could infer was: $pt"
    i"""Missing parameter type
       |
       |I could not infer the type of the parameter ${param.name}$ofFun.$inferred"""
  }
  def explain = ""
}
/** Error: `new C[?]` — wildcard type arguments are not allowed in instantiations. */
class WildcardOnTypeArgumentNotAllowedOnNew()(using Context)
extends SyntaxMsg(WildcardOnTypeArgumentNotAllowedOnNewID) {
  def msg = "Type argument must be fully defined"
  def explain =
    val code1: String =
      """
        |object TyperDemo {
        |  class Team[A]
        |  val team = new Team[?]
        |}
      """.stripMargin
    val code2: String =
      """
        |object TyperDemo {
        |  class Team[A]
        |  val team = new Team[Int]
        |}
      """.stripMargin
    em"""|Wildcard on arguments is not allowed when declaring a new type.
         |
         |Given the following example:
         |
         |$code1
         |
         |You must complete all the type parameters, for instance:
         |
         |$code2 """
}
// Type Errors ------------------------------------------------------------ //
/** Error: the same variable name is bound twice within one pattern. */
class DuplicateBind(bind: untpd.Bind, tree: untpd.CaseDef)(using Context)
extends NamingMsg(DuplicateBindID) {
  def msg = em"duplicate pattern variable: ${bind.name}"
  def explain = {
    // Reconstruct the offending `case` clause for display.
    val pat = tree.pat.show
    val guard = tree.guard match {
      case untpd.EmptyTree => ""
      case guard => s"if ${guard.show}"
    }
    val body = tree.body match {
      case Block(Nil, untpd.EmptyTree) => ""
      case body => s" ${body.show}"
    }
    val caseDef = s"case $pat$guard => $body"
    em"""|For each ${hl("case")} bound variable names have to be unique. In:
         |
         |$caseDef
         |
         |${bind.name} is not unique. Rename one of the bound variables!"""
  }
}
/** Error: an identifier could not be resolved to any declaration in scope. */
class MissingIdent(tree: untpd.Ident, treeKind: String, val name: Name)(using Context)
extends NotFoundMsg(MissingIdentID) {
  def msg = em"Not found: $treeKind$name"
  def explain = {
    em"""|The identifier for `$treeKind$name` is not bound, that is,
         |no declaration for this identifier can be found.
         |That can happen, for example, if `$name` or its declaration has either been
         |misspelt or if an import is missing."""
  }
}
/** The standard "Found: ... / Required: ..." type error.
 *  `addenda` are lazily-evaluated extra hints; the first non-empty one is shown.
 */
class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context)
  extends TypeMismatchMsg(found, expected)(TypeMismatchID):
  // replace constrained TypeParamRefs and their typevars by their bounds where possible
  // the idea is that if the bounds are also not-subtypes of each other to report
  // the type mismatch on the bounds instead of the original TypeParamRefs, since
  // these are usually easier to analyze.
  object reported extends TypeMap:
    def setVariance(v: Int) = variance = v
    val constraint = mapCtx.typerState.constraint
    def apply(tp: Type): Type = tp match
      case tp: TypeParamRef =>
        constraint.entry(tp) match
          case bounds: TypeBounds =>
            // Pick the bound matching the current variance direction.
            if variance < 0 then apply(TypeComparer.fullUpperBound(tp))
            else if variance > 0 then apply(TypeComparer.fullLowerBound(tp))
            else tp
          case NoType => tp
          case instType => apply(instType)
      case tp: TypeVar => apply(tp.stripTypeVar)
      case _ => mapOver(tp)
  def msg =
    val found1 = reported(found)
    reported.setVariance(-1)
    val expected1 = reported(expected)
    // If mapping to bounds made the types compatible, the mapped forms would be
    // misleading — fall back to reporting the original pair.
    val (found2, expected2) =
      if (found1 frozen_<:< expected1) (found, expected) else (found1, expected1)
    val postScript = addenda.find(!_.isEmpty) match
      case Some(p) => p
      case None =>
        if expected.isTopType || found.isBottomType
        then ""
        else ctx.typer.importSuggestionAddendum(ViewProto(found.widen, expected))
    val (where, printCtx) = Formatting.disambiguateTypes(found2, expected2)
    val whereSuffix = if (where.isEmpty) where else s"\n\n$where"
    val (foundStr, expectedStr) = Formatting.typeDiff(found2, expected2)(using printCtx)
    s"""|Found:    $foundStr
        |Required: $expectedStr""".stripMargin
      + whereSuffix + postScript
  override def explain =
    val treeStr = inTree.map(x => s"\nTree: ${x.show}").getOrElse("")
    treeStr + "\n" + super.explain
end TypeMismatch
/** Error: selection `site.name` does not resolve. Suggests close member names
 *  (by Levenshtein distance) and explains missing enum `values`/`valueOf`.
 */
class NotAMember(site: Type, val name: Name, selected: String, addendum: => String = "")(using Context)
extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) {
  //println(i"site = $site, decls = ${site.decls}, source = ${site.typeSymbol.sourceFile}") //DEBUG
  def msg = {
    import core.Flags._
    val maxDist = 3 // maximal number of differences to be considered for a hint
    val missing = name.show
    // The symbols of all non-synthetic, non-private members of `site`
    // that are of the same type/term kind as the missing member.
    def candidates: Set[Symbol] =
      for
        bc <- site.widen.baseClasses.toSet
        sym <- bc.info.decls.filter(sym =>
          sym.isType == name.isTypeName
          && !sym.isConstructor
          && !sym.flagsUNSAFE.isOneOf(Synthetic | Private))
      yield sym
    // Calculate Levenshtein distance
    def distance(s1: String, s2: String): Int =
      val dist = Array.ofDim[Int](s2.length + 1, s1.length + 1)
      for
        j <- 0 to s2.length
        i <- 0 to s1.length
      do
        dist(j)(i) =
          if j == 0 then i
          else if i == 0 then j
          else if s2(j - 1) == s1(i - 1) then dist(j - 1)(i - 1)
          else (dist(j - 1)(i) min dist(j)(i - 1) min dist(j - 1)(i - 1)) + 1
      dist(s2.length)(s1.length)
    // A list of possible candidate symbols with their Levenstein distances
    // to the name of the missing member
    def closest: List[(Int, Symbol)] = candidates
      .toList
      .map(sym => (distance(sym.name.show, missing), sym))
      .filter((d, sym) => d <= maxDist && d < missing.length && d < sym.name.show.length)
      .sortBy((d, sym) => (d, sym.name.show)) // sort by distance first, alphabetically second
    val enumClause =
      if ((name eq nme.values) || (name eq nme.valueOf)) && site.classSymbol.companionClass.isEnumClass then
        val kind = if name eq nme.values then i"${nme.values} array" else i"${nme.valueOf} lookup method"
        // an assumption is made here that the values and valueOf methods were not generated
        // because the enum defines non-singleton cases
        i"""
           |Although ${site.classSymbol.companionClass} is an enum, it has non-singleton cases,
           |meaning a $kind is not defined"""
      else
        ""
    def prefixEnumClause(addendum: String) =
      if enumClause.nonEmpty then s".$enumClause$addendum" else addendum
    val finalAddendum =
      if addendum.nonEmpty then prefixEnumClause(addendum)
      else closest match
        case (d, sym) :: _ =>
          val siteName = site match
            case site: NamedType => site.name.show
            case site => i"$site"
          val showName =
            // Add .type to the name if it is a module
            if sym.is(ModuleClass) then s"${sym.name.show}.type"
            else sym.name.show
          s" - did you mean $siteName.$showName?$enumClause"
        case Nil => prefixEnumClause("")
    ex"$selected $name is not a member of ${site.widen}$finalAddendum"
  }
  def explain = ""
}
/** Error: Scala-2-style early initializers (`new { ... } with T`) were removed;
 *  trait parameters replace them.
 */
class EarlyDefinitionsNotSupported()(using Context)
extends SyntaxMsg(EarlyDefinitionsNotSupportedID) {
  def msg = "Early definitions are not supported; use trait parameters instead"
  def explain = {
    val code1 =
      """|trait Logging {
         |  val f: File
         |  f.open()
         |  onExit(f.close())
         |  def log(msg: String) = f.write(msg)
         |}
         |
         |class B extends Logging {
         |  val f = new File("log.data") // triggers a NullPointerException
         |}
         |
         |// early definition gets around the NullPointerException
         |class C extends {
         |  val f = new File("log.data")
         |} with Logging""".stripMargin
    val code2 =
      """|trait Logging(f: File) {
         |  f.open()
         |  onExit(f.close())
         |  def log(msg: String) = f.write(msg)
         |}
         |
         |class C extends Logging(new File("log.data"))""".stripMargin
    em"""|Earlier versions of Scala did not support trait parameters and "early
         |definitions" (also known as "early initializers") were used as an alternative.
         |
         |Example of old syntax:
         |
         |$code1
         |
         |The above code can now be written as:
         |
         |$code2
         |"""
  }
}
/** Error: an `implicit class` must be nested inside another object/class.
 *
 *  Fix: the explain text contained a garbled line `| |` where the sentence
 *  "...naming conflict if it were allowed." was truncated; the missing tail
 *  and the blank separator line are restored.
 */
class TopLevelImplicitClass(cdef: untpd.TypeDef)(using Context)
extends SyntaxMsg(TopLevelImplicitClassID) {
  def msg = em"""An ${hl("implicit class")} may not be top-level"""
  def explain = {
    val TypeDef(name, impl @ Template(constr0, parents, self, _)) = cdef
    // Render the primary-constructor parameters for the example snippet.
    val exampleArgs =
      if(constr0.termParamss.isEmpty) "..."
      else constr0.termParamss(0).map(_.withMods(untpd.Modifiers()).show).mkString(", ")
    def defHasBody[T] = impl.body.exists(!_.isEmpty)
    val exampleBody = if (defHasBody) "{\n ...\n }" else ""
    em"""|There may not be any method, member or object in scope with the same name as
         |the implicit class and a case class automatically gets a companion object with
         |the same name created by the compiler which would cause a naming conflict if it
         |were allowed.
         |
         |To resolve the conflict declare ${cdef.name} inside of an ${hl("object")} then import the class
         |from the object at the use site if needed, for example:
         |
         |object Implicits {
         |  implicit class ${cdef.name}($exampleArgs)$exampleBody
         |}
         |
         |// At the use site:
         |import Implicits.${cdef.name}"""
  }
}
/** Error: a class may not be both `implicit` and `case`. */
class ImplicitCaseClass(cdef: untpd.TypeDef)(using Context)
extends SyntaxMsg(ImplicitCaseClassID) {
  def msg = em"""A ${hl("case class")} may not be defined as ${hl("implicit")}"""
  def explain =
    em"""|Implicit classes may not be case classes. Instead use a plain class:
         |
         |implicit class ${cdef.name}...
         |
         |"""
}
/** Error: an implicit class needs exactly one non-implicit constructor parameter. */
class ImplicitClassPrimaryConstructorArity()(using Context)
extends SyntaxMsg(ImplicitClassPrimaryConstructorArityID){
  def msg = "Implicit classes must accept exactly one primary constructor parameter"
  def explain = {
    val example = "implicit class RichDate(date: java.util.Date)"
    em"""Implicit classes may only take one non-implicit argument in their constructor. For example:
        |
        | $example
        |
        |While it’s possible to create an implicit class with more than one non-implicit argument,
        |such classes aren’t used during implicit lookup.
        |"""
  }
}
/** Error: an `object` declaration may not carry a self-type annotation. */
class ObjectMayNotHaveSelfType(mdef: untpd.ModuleDef)(using Context)
extends SyntaxMsg(ObjectMayNotHaveSelfTypeID) {
  def msg = em"""${hl("object")}s must not have a self ${hl("type")}"""
  def explain = {
    val untpd.ModuleDef(name, tmpl) = mdef
    val ValDef(_, selfTpt, _) = tmpl.self
    em"""|${hl("object")}s must not have a self ${hl("type")}:
         |
         |Consider these alternative solutions:
         |  - Create a trait or a class instead of an object
         |  - Let the object extend a trait containing the self type:
         |
         |    object $name extends ${selfTpt.show}"""
  }
}
/** Error: the same modifier appears twice on one definition.
 *
 *  Fix: converted the old-style `(implicit ctx: Context)` clause to
 *  `(using Context)` for consistency with the rest of this file; the
 *  context was never referenced by name.
 */
class RepeatedModifier(modifier: String)(using Context)
extends SyntaxMsg(RepeatedModifierID) {
  def msg = em"""Repeated modifier $modifier"""
  def explain = {
    val code1 = em"""private private val Origin = Point(0, 0)"""
    val code2 = em"""private final val Origin = Point(0, 0)"""
    em"""This happens when you accidentally specify the same modifier twice.
        |
        |Example:
        |
        |$code1
        |
        |instead of
        |
        |$code2
        |
        |"""
  }
}
/** Error: `$` in an interpolated string is not followed by an identifier or block.
 *
 *  Fix: converted `(implicit ctx: Context)` to `(using Context)` for
 *  consistency with the rest of this file; the context was never named.
 */
class InterpolatedStringError()(using Context)
extends SyntaxMsg(InterpolatedStringErrorID) {
  def msg = "Error in interpolated string: identifier or block expected"
  def explain = {
    val code1 = "s\"$new Point(0, 0)\""
    val code2 = "s\"${new Point(0, 0)}\""
    em"""|This usually happens when you forget to place your expressions inside curly braces.
         |
         |$code1
         |
         |should be written as
         |
         |$code2
         |"""
  }
}
/** Error: `_` placeholder syntax used in a position where it cannot be bound.
 *
 *  Fix: converted `(implicit ctx: Context)` to `(using Context)` for
 *  consistency with the rest of this file; the context was never named.
 */
class UnboundPlaceholderParameter()(using Context)
extends SyntaxMsg(UnboundPlaceholderParameterID) {
  def msg = em"""Unbound placeholder parameter; incorrect use of ${hl("_")}"""
  def explain =
    em"""|The ${hl("_")} placeholder syntax was used where it could not be bound.
         |Consider explicitly writing the variable binding.
         |
         |This can be done by replacing ${hl("_")} with a variable (eg. ${hl("x")})
         |and adding ${hl("x =>")} where applicable.
         |
         |Example before:
         |
         |${hl("{ _ }")}
         |
         |Example after:
         |
         |${hl("x => { x }")}
         |
         |Another common occurrence for this error is defining a val with ${hl("_")}:
         |
         |${hl("val a = _")}
         |
         |But this val definition isn't very useful, it can never be assigned
         |another value. And thus will always remain uninitialized.
         |Consider replacing the ${hl("val")} with ${hl("var")}:
         |
         |${hl("var a = _")}
         |
         |Note that this use of ${hl("_")} is not placeholder syntax,
         |but an uninitialized var definition.
         |Only fields can be left uninitialized in this manner; local variables
         |must be initialized.
         |
         |Another occurrence for this error is self type definition.
         |The ${hl("_")} can be replaced with ${hl("this")}.
         |
         |Example before:
         |
         |${hl("trait A { _: B => ... ")}
         |
         |Example after:
         |
         |${hl("trait A { this: B => ... ")}
         |"""
}
/** Error: a token that cannot begin a simple expression was found. */
class IllegalStartSimpleExpr(illegalToken: String)(using Context)
extends SyntaxMsg(IllegalStartSimpleExprID) {
  def msg = em"expression expected but ${Red(illegalToken)} found"
  def explain = {
    em"""|An expression cannot start with ${Red(illegalToken)}."""
  }
}
/** Error: an abstract declaration lacks an explicit result type.
 *
 *  Fix: converted `(implicit ctx: Context)` to `(using Context)` for
 *  consistency with the rest of this file; the context was never named.
 *  NOTE(review): the `{hl(`/`)}` fragments in the explain text render
 *  literally and look like a garbled template — confirm intended output.
 */
class MissingReturnType()(using Context)
extends SyntaxMsg(MissingReturnTypeID) {
  def msg = "Missing return type"
  def explain =
    em"""|An abstract declaration must have a return type. For example:
         |
         |trait Shape {hl(
         |  def area: Double // abstract declaration returning a ${"Double"}
         |)}"""
}
/** Error: a method using `return` must declare its result type explicitly. */
class MissingReturnTypeWithReturnStatement(method: Symbol)(using Context)
extends SyntaxMsg(MissingReturnTypeWithReturnStatementID) {
  def msg = em"$method has a return statement; it needs a result type"
  def explain =
    em"""|If a method contains a ${hl("return")} statement, it must have an
         |explicit return type. For example:
         |
         |${hl("def good: Int /* explicit return type */ = return 1")}"""
}
/** Error: brace-less `for` comprehension missing its `yield` or `do` keyword. */
class YieldOrDoExpectedInForComprehension()(using Context)
extends SyntaxMsg(YieldOrDoExpectedInForComprehensionID) {
  def msg = em"${hl("yield")} or ${hl("do")} expected"
  def explain =
    em"""|When the enumerators in a for comprehension are not placed in parentheses or
         |braces, a ${hl("do")} or ${hl("yield")} statement is required after the enumerators
         |section of the comprehension.
         |
         |You can save some keystrokes by omitting the parentheses and writing
         |
         |${hl("val numbers = for i <- 1 to 3 yield i")}
         |
         |  instead of
         |
         |${hl("val numbers = for (i <- 1 to 3) yield i")}
         |
         |but the ${hl("yield")} keyword is still required.
         |
         |For comprehensions that simply perform a side effect without yielding anything
         |can also be written without parentheses but a ${hl("do")} keyword has to be
         |included. For example,
         |
         |${hl("for (i <- 1 to 3) println(i)")}
         |
         |can be written as
         |
         |${hl("for i <- 1 to 3 do println(i) // notice the 'do' keyword")}
         |
         |"""
}
/** Doc-comment error: `@usecase` in a Scaladoc comment must contain a `def`. */
class ProperDefinitionNotFound()(using Context)
extends Message(ProperDefinitionNotFoundID) {
  def kind: String = "Doc Comment"
  def msg = em"""Proper definition was not found in ${hl("@usecase")}"""
  def explain = {
    val noUsecase =
      "def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That"
    val usecase =
      """|/** Map from List[A] => List[B]
         |  *
         |  * @usecase def map[B](f: A => B): List[B]
         |  */
         |def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That
         |""".stripMargin
    em"""|Usecases are only supported for ${hl("def")}s. They exist because with Scala's
         |advanced type-system, we sometimes end up with seemingly scary signatures.
         |The usage of these methods, however, needs not be - for instance the ${hl("map")}
         |function
         |
         |${hl("List(1, 2, 3).map(2 * _) // res: List(2, 4, 6)")}
         |
         |is easy to understand and use - but has a rather bulky signature:
         |
         |$noUsecase
         |
         |to mitigate this and ease the usage of such functions we have the ${hl("@usecase")}
         |annotation for docstrings. Which can be used like this:
         |
         |$usecase
         |
         |When creating the docs, the signature of the method is substituted by the
         |usecase and the compiler makes sure that it is valid. Because of this, you're
         |only allowed to use ${hl("def")}s when defining usecases."""
  }
}
/** Error: a by-name type (`=> T`) appears in a position where it is not allowed. */
class ByNameParameterNotSupported(tpe: untpd.TypTree)(using Context)
extends SyntaxMsg(ByNameParameterNotSupportedID) {
  def msg = em"By-name parameter type ${tpe} not allowed here."
  def explain =
    em"""|By-name parameters act like functions that are only evaluated when referenced,
         |allowing for lazy evaluation of a parameter.
         |
         |An example of using a by-name parameter would look like:
         |${hl("def func(f: => Boolean) = f // 'f' is evaluated when referenced within the function")}
         |
         |An example of the syntax of passing an actual function as a parameter:
         |${hl("def func(f: (Boolean => Boolean)) = f(true)")}
         |
         |or:
         |
         |${hl("def func(f: Boolean => Boolean) = f(true)")}
         |
         |And the usage could be as such:
         |${hl("func(bool => // do something...)")}
         |"""
}
/** Error: a type application supplies too many or too few type arguments. */
class WrongNumberOfTypeArgs(fntpe: Type, expectedArgs: List[ParamInfo], actual: List[untpd.Tree])(using Context)
extends SyntaxMsg(WrongNumberOfTypeArgsID) {
  private val expectedCount = expectedArgs.length
  private val actualCount = actual.length
  private val msgPrefix = if (actualCount > expectedCount) "Too many" else "Not enough"
  def msg =
    val expectedArgString = expectedArgs
      .map(_.paramName.unexpandedName.show)
      .mkString("[", ", ", "]")
    val actualArgString = actual.map(_.show).mkString("[", ", ", "]")
    // Prefer the full symbol name; showing the raw type is the fallback
    // if symbol lookup throws.
    val prettyName =
      try fntpe.termSymbol match
        case NoSymbol => fntpe.show
        case symbol   => symbol.showFullName
      catch case NonFatal(ex) => fntpe.show
    em"""|$msgPrefix type arguments for $prettyName$expectedArgString
         |expected: $expectedArgString
         |actual: $actualArgString""".stripMargin
  def explain = {
    val tooManyTypeParams =
      """|val tuple2: (Int, String) = (1, "one")
         |val list: List[(Int, String)] = List(tuple2)""".stripMargin
    if (actualCount > expectedCount)
      em"""|You have supplied too many type parameters
           |
           |For example List takes a single type parameter (List[A])
           |If you need to hold more types in a list then you need to combine them
           |into another data type that can contain the number of types you need,
           |In this example one solution would be to use a Tuple:
           |
           |${tooManyTypeParams}"""
    else
      em"""|You have not supplied enough type parameters
           |If you specify one type parameter then you need to specify every type parameter."""
  }
}
/** Error: a variable is bound inside a `p1 | p2` pattern alternative. */
class IllegalVariableInPatternAlternative()(using Context)
extends SyntaxMsg(IllegalVariableInPatternAlternativeID) {
  def msg = "Variables are not allowed in alternative patterns"
  def explain = {
    val varInAlternative =
      """|def g(pair: (Int,Int)): Int = pair match {
         |  case (1, n) | (n, 1) => n
         |  case _ => 0
         |}""".stripMargin
    val fixedVarInAlternative =
      """|def g(pair: (Int,Int)): Int = pair match {
         |  case (1, n) => n
         |  case (n, 1) => n
         |  case _ => 0
         |}""".stripMargin
    em"""|Variables are not allowed within alternate pattern matches. You can workaround
         |this issue by adding additional cases for each alternative. For example, the
         |illegal function:
         |
         |$varInAlternative
         |could be implemented by moving each alternative into a separate case:
         |
         |$fixedVarInAlternative"""
  }
}
/** Error: an identifier was expected but something else was found. */
class IdentifierExpected(identifier: String)(using Context)
extends SyntaxMsg(IdentifierExpectedID) {
  def msg = "identifier expected"
  def explain = {
    val wrongIdentifier = em"def foo: $identifier = {...}"
    val validIdentifier = em"def foo = {...}"
    em"""|An identifier expected, but $identifier found. This could be because
         |$identifier is not a valid identifier. As a workaround, the compiler could
         |infer the type for you. For example, instead of:
         |
         |$wrongIdentifier
         |
         |Write your code like:
         |
         |$validIdentifier
         |
         |"""
  }
}
/** Error: an auxiliary constructor has only an implicit parameter list.
 *
 *  Fix: converted `(implicit ctx: Context)` to `(using Context)` for
 *  consistency with the rest of this file; the context was never named.
 */
class AuxConstructorNeedsNonImplicitParameter()(using Context)
extends SyntaxMsg(AuxConstructorNeedsNonImplicitParameterID) {
  def msg = "Auxiliary constructor needs non-implicit parameter list"
  def explain =
    em"""|Only the primary constructor is allowed an ${hl("implicit")} parameter list;
         |auxiliary constructors need non-implicit parameter lists. When a primary
         |constructor has an implicit argslist, auxiliary constructors that call the
         |primary constructor must specify the implicit value.
         |
         |To resolve this issue check for:
         | - Forgotten parenthesis on ${hl("this")} (${hl("def this() = { ... }")})
         | - Auxiliary constructors specify the implicit value
         |"""
}
/** Error: a malformed literal token. */
class IllegalLiteral()(using Context)
extends SyntaxMsg(IllegalLiteralID) {
  def msg = "Illegal literal"
  def explain =
    em"""|Available literals can be divided into several groups:
         | - Integer literals: 0, 21, 0xFFFFFFFF, -42L
         | - Floating Point Literals: 0.0, 1e30f, 3.14159f, 1.0e-100, .1
         | - Boolean Literals: true, false
         | - Character Literals: 'a', '\u0041', '\n'
         | - String Literals: "Hello, World!"
         | - null
         |"""
}
/** Warning: a constant conversion (e.g. Int literal to Float) loses precision. */
class LossyWideningConstantConversion(sourceType: Type, targetType: Type)(using Context)
extends Message(LossyWideningConstantConversionID):
  def kind = "Lossy Conversion"
  def msg = em"""|Widening conversion from $sourceType to $targetType loses precision.
                 |Write `.to$targetType` instead.""".stripMargin
  def explain = ""
/** Warning: a `match` does not cover all cases; `uncoveredFn` is evaluated lazily. */
class PatternMatchExhaustivity(uncoveredFn: => String, hasMore: Boolean)(using Context)
extends Message(PatternMatchExhaustivityID) {
  def kind = "Pattern Match Exhaustivity"
  // Cache the by-name argument so it is computed at most once.
  lazy val uncovered = uncoveredFn
  def msg =
    val addendum = if hasMore then "(More unmatched cases are elided)" else ""
    em"""|${hl("match")} may not be exhaustive.
         |
         |It would fail on pattern case: $uncovered
         |$addendum"""
  def explain =
    em"""|There are several ways to make the match exhaustive:
         | - Add missing cases as shown in the warning
         | - If an extractor always return ${hl("Some(...)")}, write ${hl("Some[X]")} for its return type
         | - Add a ${hl("case _ => ...")} at the end to match all remaining cases
         |"""
}
/** Warning: a type pattern whose type arguments/refinements are erased at runtime. */
class UncheckedTypePattern(msgFn: => String)(using Context)
extends PatternMatchMsg(UncheckedTypePatternID) {
  def msg = msgFn
  def explain =
    em"""|Type arguments and type refinements are erased during compile time, thus it's
         |impossible to check them at run-time.
         |
         |You can either replace the type arguments by ${hl("_")} or use `@unchecked`.
         |"""
}
/** Warning: a match case can never be selected. */
class MatchCaseUnreachable()(using Context)
extends Message(MatchCaseUnreachableID) {
  def kind = "Match case Unreachable"
  def msg = "Unreachable case"
  def explain = ""
}
/** Warning: a case is only reachable for a `null` scrutinee. */
class MatchCaseOnlyNullWarning()(using Context)
extends PatternMatchMsg(MatchCaseOnlyNullWarningID) {
  def msg = em"""Unreachable case except for ${hl("null")} (if this is intentional, consider writing ${hl("case null =>")} instead)."""
  def explain = ""
}
/** Warning: matching on a value whose type is not known to be `Matchable`.
 *
 *  Fixes: removed the doubled comma after "Matchable" in the message text and
 *  corrected the spelling "paramtetricity" -> "parametricity" in the explanation.
 */
class MatchableWarning(tp: Type, pattern: Boolean)(using Context)
extends TypeMsg(MatchableWarningID) {
  def msg =
    val kind = if pattern then "pattern selector" else "value"
    em"""${kind} should be an instance of Matchable,
        |but it has unmatchable type $tp instead"""
  def explain =
    if pattern then
      em"""A value of type $tp cannot be the selector of a match expression
          |since it is not constrained to be `Matchable`. Matching on unconstrained
          |values is disallowed since it can uncover implementation details that
          |were intended to be hidden and thereby can violate parametricity laws
          |for reasoning about programs.
          |
          |The restriction can be overridden by appending `.asMatchable` to
          |the selector value. `asMatchable` needs to be imported from
          |scala.compiletime. Example:
          |
          |  import compiletime.asMatchable
          |  def f[X](x: X) = x.asMatchable match { ... }"""
    else
      em"""The value can be converted to a `Matchable` by appending `.asMatchable`.
          |`asMatchable` needs to be imported from scala.compiletime."""
}
/** Error: a sequence wildcard (`x*`) appears somewhere other than last position. */
class SeqWildcardPatternPos()(using Context)
extends SyntaxMsg(SeqWildcardPatternPosID) {
  def msg = em"""${hl("*")} can be used only for last argument"""
  def explain = {
    val code =
      """def sumOfTheFirstTwo(list: List[Int]): Int = list match {
        |  case List(first, second, x*) => first + second
        |  case _ => 0
        |}"""
    em"""|Sequence wildcard pattern is expected at the end of an argument list.
         |This pattern matches any remaining elements in a sequence.
         |Consider the following example:
         |
         |$code
         |
         |Calling:
         |
         |${hl("sumOfTheFirstTwo(List(1, 2, 10))")}
         |
         |would give 3 as a result"""
  }
}
/** Error: a token that cannot begin a simple pattern was found.
 *
 *  Fixes: the tuple-pattern example call was missing its closing parenthesis
 *  (`swap(("Luftballons", 99)` -> `swap(("Luftballons", 99))`), and the local
 *  name `tupplePatternsCode` is corrected to `tuplePatternsCode`.
 */
class IllegalStartOfSimplePattern()(using Context)
extends SyntaxMsg(IllegalStartOfSimplePatternID) {
  def msg = "pattern expected"
  def explain = {
    val sipCode =
      """def f(x: Int, y: Int) = x match {
        |  case `y` => ...
        |}
      """
    val constructorPatternsCode =
      """case class Person(name: String, age: Int)
        |
        |def test(p: Person) = p match {
        |  case Person(name, age) => ...
        |}
      """
    val tuplePatternsCode =
      """def swap(tuple: (String, Int)): (Int, String) = tuple match {
        |  case (text, number) => (number, text)
        |}
      """
    val patternSequencesCode =
      """def getSecondValue(list: List[Int]): Int = list match {
        |  case List(_, second, x:_*) => second
        |  case _ => 0
        |}"""
    em"""|Simple patterns can be divided into several groups:
         |- Variable Patterns: ${hl("case x => ...")}.
         |  It matches any value, and binds the variable name to that value.
         |  A special case is the wild-card pattern _ which is treated as if it was a fresh
         |  variable on each occurrence.
         |
         |- Typed Patterns: ${hl("case x: Int => ...")} or ${hl("case _: Int => ...")}.
         |  This pattern matches any value matched by the specified type; it binds the variable
         |  name to that value.
         |
         |- Literal Patterns: ${hl("case 123 => ...")} or ${hl("case 'A' => ...")}.
         |  This type of pattern matches any value that is equal to the specified literal.
         |
         |- Stable Identifier Patterns:
         |
         |  $sipCode
         |
         |  the match succeeds only if the x argument and the y argument of f are equal.
         |
         |- Constructor Patterns:
         |
         |  $constructorPatternsCode
         |
         |  The pattern binds all object's fields to the variable names (name and age, in this
         |  case).
         |
         |- Tuple Patterns:
         |
         |  $tuplePatternsCode
         |
         |  Calling:
         |
         |  ${hl("""swap(("Luftballons", 99))""")}
         |
         |  would give ${hl("""(99, "Luftballons")""")} as a result.
         |
         |- Pattern Sequences:
         |
         |  $patternSequencesCode
         |
         |  Calling:
         |
         |  ${hl("getSecondValue(List(1, 10, 2))")}
         |
         |  would give 10 as a result.
         |  This pattern is possible because a companion object for the List class has a method
         |  with the following signature:
         |
         |  ${hl("def unapplySeq[A](x: List[A]): Some[List[A]]")}
         |"""
  }
}
/** Error: a package clashes with an existing symbol of the same name. */
class PkgDuplicateSymbol(existing: Symbol)(using Context)
extends NamingMsg(PkgDuplicateSymbolID) {
  def msg = em"Trying to define package with same name as $existing"
  def explain = ""
}
/** Error: `T forSome { ... }` existential types were dropped in Scala 3. */
class ExistentialTypesNoLongerSupported()(using Context)
extends SyntaxMsg(ExistentialTypesNoLongerSupportedID) {
  def msg =
    em"""|Existential types are no longer supported -
         |use a wildcard or dependent type instead"""
  def explain =
    em"""|The use of existential types is no longer supported.
         |
         |You should use a wildcard or dependent type instead.
         |
         |For example:
         |
         |Instead of using ${hl("forSome")} to specify a type variable
         |
         |${hl("List[T forSome { type T }]")}
         |
         |Try using a wildcard type variable
         |
         |${hl("List[?]")}
         |"""
}
/** Error: a wildcard type `_` used where no binding position exists for it. */
class UnboundWildcardType()(using Context)
extends SyntaxMsg(UnboundWildcardTypeID) {
  def msg = "Unbound wildcard type"
  def explain =
    em"""|The wildcard type syntax (${hl("_")}) was used where it could not be bound.
         |Replace ${hl("_")} with a non-wildcard type. If the type doesn't matter,
         |try replacing ${hl("_")} with ${hl("Any")}.
         |
         |Examples:
         |
         |- Parameter lists
         |
         |  Instead of:
         |    ${hl("def foo(x: _) = ...")}
         |
         |  Use ${hl("Any")} if the type doesn't matter:
         |    ${hl("def foo(x: Any) = ...")}
         |
         |- Type arguments
         |
         |  Instead of:
         |    ${hl("val foo = List[?](1, 2)")}
         |
         |  Use:
         |    ${hl("val foo = List[Int](1, 2)")}
         |
         |- Type bounds
         |
         |  Instead of:
         |    ${hl("def foo[T <: _](x: T) = ...")}
         |
         |  Remove the bounds if the type doesn't matter:
         |    ${hl("def foo[T](x: T) = ...")}
         |
         |- ${hl("val")} and ${hl("def")} types
         |
         |  Instead of:
         |    ${hl("val foo: _ = 3")}
         |
         |  Use:
         |    ${hl("val foo: Int = 3")}
         |"""
}
/** Error: a path (import or type selection) ends in the keyword `this`. */
class DanglingThisInPath()(using Context) extends SyntaxMsg(DanglingThisInPathID) {
  def msg = em"""Expected an additional member selection after the keyword ${hl("this")}"""
  def explain =
    val contextCode: String =
      """  trait Outer {
        |    val member: Int
        |    type Member
        |    trait Inner {
        |      ...
        |    }
        |  }"""
    val importCode: String =
      """  import Outer.this.member
        |  //               ^^^^^^^"""
    val typeCode: String =
      """  type T = Outer.this.Member
        |  //                 ^^^^^^^"""
    em"""|Paths of imports and type selections must not end with the keyword ${hl("this")}.
         |
         |Maybe you forgot to select a member of ${hl("this")}? As an example, in the
         |following context:
         |${contextCode}
         |
         |- This is a valid import expression using a path
         |${importCode}
         |
         |- This is a valid type using a path
         |${typeCode}
         |"""
}
class OverridesNothing(member: Symbol)(using Context)
extends DeclarationMsg(OverridesNothingID) {
def msg = em"""${member} overrides nothing"""
def explain =
em"""|There must be a field or method with the name ${member.name} in a super
|class of ${member.owner} to override it. Did you misspell it?
|Are you extending the right classes?
|"""
}
class OverridesNothingButNameExists(member: Symbol, existing: List[Denotations.SingleDenotation])(using Context)
extends DeclarationMsg(OverridesNothingButNameExistsID) {
def msg =
val what =
if !existing.exists(_.symbol.hasTargetName(member.targetName))
then "target name"
else "signature"
em"""${member} has a different $what than the overridden declaration"""
def explain =
val existingDecl: String = existing.map(_.showDcl).mkString(" \n")
em"""|There must be a non-final field or method with the name ${member.name} and the
|same parameter list in a super class of ${member.owner} to override it.
|
| ${member.showDcl}
|
|The super classes of ${member.owner} contain the following members
|named ${member.name}:
| ${existingDecl}
|"""
}
  /** Generic override error; the caller supplies the fully rendered `msg`. */
  class OverrideError(override val msg: String) extends DeclarationMsg(OverrideErrorID):
    def explain = ""
  /** Override error caused by a type mismatch between the overriding member type
   *  `memberTp` and the overridden member type `otherTp`; the explanation spells
   *  out why the two types do not match.
   */
  class OverrideTypeMismatchError(override val msg: String, memberTp: Type, otherTp: Type)(using Context)
  extends DeclarationMsg(OverrideTypeMismatchErrorID):
    def explain = err.whyNoMatchStr(memberTp, otherTp)
    override def canExplain = true  // an explanation is always available, even though msg is pre-rendered
class ForwardReferenceExtendsOverDefinition(value: Symbol, definition: Symbol)(using Context)
extends ReferenceMsg(ForwardReferenceExtendsOverDefinitionID) {
def msg = em"${definition.name} is a forward reference extending over the definition of ${value.name}"
def explain =
em"""|${definition.name} is used before you define it, and the definition of ${value.name}
|appears between that use and the definition of ${definition.name}.
|
|Forward references are allowed only, if there are no value definitions between
|the reference and the referred method definition.
|
|Define ${definition.name} before it is used,
|or move the definition of ${value.name} so it does not appear between
|the declaration of ${definition.name} and its use,
|or define ${value.name} as lazy.
|""".stripMargin
}
  /** Syntax error: token `expected` was expected at this position but token
   *  `found` was encountered instead.
   */
  class ExpectedTokenButFound(expected: Token, found: Token)(using Context)
  extends SyntaxMsg(ExpectedTokenButFoundID) {
    // lazy: token rendering is deferred until the message is actually shown
    private lazy val foundText = Tokens.showToken(found)
    def msg =
      val expectedText =
        if (Tokens.isIdentifier(expected)) "an identifier"
        else Tokens.showToken(expected)
      em"""${expectedText} expected, but ${foundText} found"""
    def explain =
      // A hint is only useful when a keyword appeared where an identifier was
      // expected: the keyword can be used as an identifier via backticks.
      if (Tokens.isIdentifier(expected) && Tokens.isKeyword(found))
        s"""
         |If you want to use $foundText as identifier, you may put it in backticks: `${Tokens.tokenString(found)}`.""".stripMargin
      else
        ""
  }
class MixedLeftAndRightAssociativeOps(op1: Name, op2: Name, op2LeftAssoc: Boolean)(using Context)
extends SyntaxMsg(MixedLeftAndRightAssociativeOpsID) {
def msg =
val op1Asso: String = if (op2LeftAssoc) "which is right-associative" else "which is left-associative"
val op2Asso: String = if (op2LeftAssoc) "which is left-associative" else "which is right-associative"
em"${op1} (${op1Asso}) and ${op2} ($op2Asso) have same precedence and may not be mixed"
def explain =
s"""|The operators ${op1} and ${op2} are used as infix operators in the same expression,
|but they bind to different sides:
|${op1} is applied to the operand to its ${if (op2LeftAssoc) "right" else "left"}
|${op2} is applied to the operand to its ${if (op2LeftAssoc) "left" else "right"}
|As both have the same precedence the compiler can't decide which to apply first.
|
|You may use parenthesis to make the application order explicit,
|or use method application syntax operand1.${op1}(operand2).
|
|Operators ending in a colon ${hl(":")} are right-associative. All other operators are left-associative.
|
|Infix operator precedence is determined by the operator's first character. Characters are listed
|below in increasing order of precedence, with characters on the same line having the same precedence.
| (all letters)
| |
| ^
| &
| = !
| < >
| :
| + -
| * / %
| (all other special characters)
|Operators starting with a letter have lowest precedence, followed by operators starting with `|`, etc.
|""".stripMargin
}
class CantInstantiateAbstractClassOrTrait(cls: Symbol, isTrait: Boolean)(using Context)
extends TypeMsg(CantInstantiateAbstractClassOrTraitID) {
private val traitOrAbstract = if (isTrait) "a trait" else "abstract"
def msg = em"""${cls.name} is ${traitOrAbstract}; it cannot be instantiated"""
def explain =
em"""|Abstract classes and traits need to be extended by a concrete class or object
|to make their functionality accessible.
|
|You may want to create an anonymous class extending ${cls.name} with
| ${s"class ${cls.name} { }"}
|
|or add a companion object with
| ${s"object ${cls.name} extends ${cls.name}"}
|
|You need to implement any abstract members in both cases.
|""".stripMargin
}
class UnreducibleApplication(tycon: Type)(using Context) extends TypeMsg(UnreducibleApplicationID):
def msg = em"unreducible application of higher-kinded type $tycon to wildcard arguments"
def explain =
em"""|An abstract type constructor cannot be applied to wildcard arguments.
|Such applications are equivalent to existential types, which are not
|supported in Scala 3."""
class OverloadedOrRecursiveMethodNeedsResultType(cycleSym: Symbol)(using Context)
extends CyclicMsg(OverloadedOrRecursiveMethodNeedsResultTypeID) {
def msg = em"""Overloaded or recursive $cycleSym needs return type"""
def explain =
em"""Case 1: $cycleSym is overloaded
|If there are multiple methods named $cycleSym and at least one definition of
|it calls another, you need to specify the calling method's return type.
|
|Case 2: $cycleSym is recursive
|If $cycleSym calls itself on any path (even through mutual recursion), you need to specify the return type
|of $cycleSym or of a definition it's mutually recursive with.
|""".stripMargin
}
class RecursiveValueNeedsResultType(cycleSym: Symbol)(using Context)
extends CyclicMsg(RecursiveValueNeedsResultTypeID) {
def msg = em"""Recursive $cycleSym needs type"""
def explain =
em"""The definition of $cycleSym is recursive and you need to specify its type.
|""".stripMargin
}
class CyclicReferenceInvolving(denot: SymDenotation)(using Context)
extends CyclicMsg(CyclicReferenceInvolvingID) {
def msg =
val where = if denot.exists then s" involving $denot" else ""
em"Cyclic reference$where"
def explain =
em"""|$denot is declared as part of a cycle which makes it impossible for the
|compiler to decide upon ${denot.name}'s type.
|To avoid this error, try giving ${denot.name} an explicit type.
|""".stripMargin
}
class CyclicReferenceInvolvingImplicit(cycleSym: Symbol)(using Context)
extends CyclicMsg(CyclicReferenceInvolvingImplicitID) {
def msg = em"""Cyclic reference involving implicit $cycleSym"""
def explain =
em"""|$cycleSym is declared as part of a cycle which makes it impossible for the
|compiler to decide upon ${cycleSym.name}'s type.
|This might happen when the right hand-side of $cycleSym's definition involves an implicit search.
|To avoid this error, try giving ${cycleSym.name} an explicit type.
|""".stripMargin
}
class SkolemInInferred(tree: tpd.Tree, pt: Type, argument: tpd.Tree)(using Context)
extends TypeMsg(SkolemInInferredID):
private def argStr =
if argument.isEmpty then ""
else i" from argument of type ${argument.tpe.widen}"
def msg =
em"""Failure to generate given instance for type $pt$argStr)
|
|I found: $tree
|But the part corresponding to `<skolem>` is not a reference that can be generated.
|This might be because resolution yielded as given instance a function that is not
|known to be total and side-effect free."""
def explain =
em"""The part of given resolution that corresponds to `<skolem>` produced a term that
|is not a stable reference. Therefore a given instance could not be generated.
|
|To trouble-shoot the problem, try to supply an explicit expression instead of
|relying on implicit search at this point."""
class SuperQualMustBeParent(qual: untpd.Ident, cls: ClassSymbol)(using Context)
extends ReferenceMsg(SuperQualMustBeParentID) {
def msg = em"""|$qual does not name a parent of $cls"""
def explain =
val parents: Seq[String] = (cls.info.parents map (_.typeSymbol.name.show)).sorted
em"""|When a qualifier ${hl("T")} is used in a ${hl("super")} prefix of the form ${hl("C.super[T]")},
|${hl("T")} must be a parent type of ${hl("C")}.
|
|In this case, the parents of $cls are:
|${parents.mkString(" - ", "\n - ", "")}
|""".stripMargin
}
class VarArgsParamMustComeLast()(using Context)
extends SyntaxMsg(VarArgsParamMustComeLastID) {
def msg = em"""${hl("varargs")} parameter must come last"""
def explain =
em"""|The ${hl("varargs")} field must be the last field in the method signature.
|Attempting to define a field in a method signature after a ${hl("varargs")} field is an error.
|"""
}
import typer.Typer.BindingPrec
class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context)(using Context)
extends ReferenceMsg(AmbiguousReferenceID) {
/** A string which explains how something was bound; Depending on `prec` this is either
* imported by <tree>
* or defined in <symbol>
*/
private def bindingString(prec: BindingPrec, whereFound: Context, qualifier: String = "") = {
val howVisible = prec match {
case BindingPrec.Definition => "defined"
case BindingPrec.Inheritance => "inherited"
case BindingPrec.NamedImport => "imported by name"
case BindingPrec.WildImport => "imported"
case BindingPrec.PackageClause => "found"
case BindingPrec.NothingBound => assert(false)
}
if (prec.isImportPrec) {
ex"""$howVisible$qualifier by ${em"${whereFound.importInfo}"}"""
} else
ex"""$howVisible$qualifier in ${em"${whereFound.owner}"}"""
}
def msg =
i"""|Reference to ${em"$name"} is ambiguous,
|it is both ${bindingString(newPrec, ctx)}
|and ${bindingString(prevPrec, prevCtx, " subsequently")}"""
def explain =
em"""|The compiler can't decide which of the possible choices you
|are referencing with $name: A definition of lower precedence
|in an inner scope, or a definition with higher precedence in
|an outer scope.
|Note:
| - Definitions in an enclosing scope take precedence over inherited definitions
| - Definitions take precedence over imports
| - Named imports take precedence over wildcard imports
| - You may replace a name when imported using
| ${hl("import")} scala.{ $name => ${name.show + "Tick"} }
|"""
}
class MethodDoesNotTakeParameters(tree: tpd.Tree)(using Context)
extends TypeMsg(MethodDoesNotTakeParametersId) {
def methodSymbol: Symbol =
def recur(t: tpd.Tree): Symbol =
val sym = tpd.methPart(t).symbol
if sym == defn.Any_typeCast then
t match
case TypeApply(Select(qual, _), _) => recur(qual)
case _ => sym
else sym
recur(tree)
def msg = {
val more = if (tree.isInstanceOf[tpd.Apply]) " more" else ""
val meth = methodSymbol
val methStr = if (meth.exists) meth.showLocated else "expression"
em"$methStr does not take$more parameters"
}
def explain = {
val isNullary = methodSymbol.info.isInstanceOf[ExprType]
val addendum =
if (isNullary) "\nNullary methods may not be called with parenthesis"
else ""
"You have specified more parameter lists than defined in the method definition(s)." + addendum
}
}
class AmbiguousOverload(tree: tpd.Tree, val alternatives: List[SingleDenotation], pt: Type, addendum: String = "")(
implicit ctx: Context)
extends ReferenceMsg(AmbiguousOverloadID) {
private def all = if (alternatives.length == 2) "both" else "all"
def msg =
em"""|Ambiguous overload. The ${err.overloadedAltsStr(alternatives)}
|$all match ${err.expectedTypeStr(pt)}$addendum""".stripMargin
def explain =
em"""|There are ${alternatives.length} methods that could be referenced as the compiler knows too little
|about the expected type.
|You may specify the expected type e.g. by
|- assigning it to a value with a specified type, or
|- adding a type ascription as in ${hl("instance.myMethod: String => Int")}
|"""
}
class ReassignmentToVal(name: Name)(using Context)
extends TypeMsg(ReassignmentToValID) {
def msg = em"""Reassignment to val $name"""
def explain =
em"""|You can not assign a new value to $name as values can't be changed.
|Keep in mind that every statement has a value, so you may e.g. use
| ${hl("val")} $name ${hl("= if (condition) 2 else 5")}
|In case you need a reassignable name, you can declare it as
|variable
| ${hl("var")} $name ${hl("=")} ...
|""".stripMargin
}
class TypeDoesNotTakeParameters(tpe: Type, params: List[Trees.Tree[Trees.Untyped]])(using Context)
extends TypeMsg(TypeDoesNotTakeParametersID) {
private def fboundsAddendum =
if tpe.typeSymbol.isAllOf(Provisional | TypeParam) then
"\n(Note that F-bounds of type parameters may not be type lambdas)"
else ""
def msg = em"$tpe does not take type parameters$fboundsAddendum"
def explain =
val ps =
if (params.size == 1) s"a type parameter ${params.head}"
else s"type parameters ${params.map(_.show).mkString(", ")}"
i"""You specified ${NoColor(ps)} for ${em"$tpe"}, which is not
|declared to take any.
|"""
}
class ParameterizedTypeLacksArguments(psym: Symbol)(using Context)
extends TypeMsg(ParameterizedTypeLacksArgumentsID) {
def msg = em"Parameterized $psym lacks argument list"
def explain =
em"""The $psym is declared with non-implicit parameters, you may not leave
|out the parameter list when extending it.
|"""
}
class VarValParametersMayNotBeCallByName(name: TermName, mutable: Boolean)(using Context)
extends SyntaxMsg(VarValParametersMayNotBeCallByNameID) {
def varOrVal = if (mutable) em"${hl("var")}" else em"${hl("val")}"
def msg = s"$varOrVal parameters may not be call-by-name"
def explain =
em"""${hl("var")} and ${hl("val")} parameters of classes and traits may no be call-by-name. In case you
|want the parameter to be evaluated on demand, consider making it just a parameter
|and a ${hl("def")} in the class such as
| ${s"class MyClass(${name}Tick: => String) {"}
| ${s" def $name() = ${name}Tick"}
| ${hl("}")}
|"""
}
class MissingTypeParameterFor(tpe: Type)(using Context)
extends SyntaxMsg(MissingTypeParameterForID) {
def msg =
if (tpe.derivesFrom(defn.AnyKindClass)) em"${tpe} cannot be used as a value type"
else em"Missing type parameter for ${tpe}"
def explain = ""
}
class MissingTypeParameterInTypeApp(tpe: Type)(using Context)
extends TypeMsg(MissingTypeParameterInTypeAppID) {
def numParams = tpe.typeParams.length
def parameters = if (numParams == 1) "parameter" else "parameters"
def msg = em"Missing type $parameters for $tpe"
def explain = em"A fully applied type is expected but $tpe takes $numParams $parameters"
}
class DoesNotConformToBound(tpe: Type, which: String, bound: Type)(using Context)
extends TypeMismatchMsg(tpe, bound)(DoesNotConformToBoundID) {
def msg = em"Type argument ${tpe} does not conform to $which bound $bound"
}
class DoesNotConformToSelfType(category: String, selfType: Type, cls: Symbol,
otherSelf: Type, relation: String, other: Symbol)(
implicit ctx: Context)
extends TypeMismatchMsg(selfType, otherSelf)(DoesNotConformToSelfTypeID) {
def msg = em"""$category: self type $selfType of $cls does not conform to self type $otherSelf
|of $relation $other"""
}
class DoesNotConformToSelfTypeCantBeInstantiated(tp: Type, selfType: Type)(
implicit ctx: Context)
extends TypeMismatchMsg(tp, selfType)(DoesNotConformToSelfTypeCantBeInstantiatedID) {
def msg = em"""$tp does not conform to its self type $selfType; cannot be instantiated"""
}
class IllegalParameterInit(found: Type, expected: Type, param: Symbol, cls: Symbol)(using Context)
extends TypeMismatchMsg(found, expected)(IllegalParameterInitID):
def msg =
em"""illegal parameter initialization of $param.
|
| The argument passed for $param has type: $found
| but $cls expects $param to have type: $expected"""
class AbstractMemberMayNotHaveModifier(sym: Symbol, flag: FlagSet)(
implicit ctx: Context)
extends SyntaxMsg(AbstractMemberMayNotHaveModifierID) {
def msg = em"""${hl("abstract")} $sym may not have `${flag.flagsString}` modifier"""
def explain = ""
}
class TopLevelCantBeImplicit(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(TopLevelCantBeImplicitID) {
def msg = em"""${hl("implicit")} modifier cannot be used for top-level definitions"""
def explain = ""
}
class TypesAndTraitsCantBeImplicit()(using Context)
extends SyntaxMsg(TypesAndTraitsCantBeImplicitID) {
def msg = em"""${hl("implicit")} modifier cannot be used for types or traits"""
def explain = ""
}
class OnlyClassesCanBeAbstract(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(OnlyClassesCanBeAbstractID) {
def explain = ""
def msg = em"""${hl("abstract")} modifier can be used only for classes; it should be omitted for abstract members"""
}
class AbstractOverrideOnlyInTraits(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(AbstractOverrideOnlyInTraitsID) {
def msg = em"""${hl("abstract override")} modifier only allowed for members of traits"""
def explain = ""
}
class TraitsMayNotBeFinal(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(TraitsMayNotBeFinalID) {
def msg = em"""$sym may not be ${hl("final")}"""
def explain =
"A trait can never be final since it is abstract and must be extended to be useful."
}
class NativeMembersMayNotHaveImplementation(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(NativeMembersMayNotHaveImplementationID) {
def msg = em"""${hl("@native")} members may not have an implementation"""
def explain = ""
}
class TraitMayNotDefineNativeMethod(sym: Symbol)(
implicit ctx: Context)
extends SyntaxMsg(TraitMayNotDefineNativeMethodID) {
def msg = em"""A trait cannot define a ${hl("@native")} method."""
def explain = ""
}
  /** Syntax error for an abstract (declared but undefined) member `sym`
   *  appearing outside of a class body.
   */
  class OnlyClassesCanHaveDeclaredButUndefinedMembers(sym: Symbol)(
    implicit ctx: Context)
  extends SyntaxMsg(OnlyClassesCanHaveDeclaredButUndefinedMembersID) {
    // For an uninitialized `var`, add a hint that variables must be initialized.
    private def varNote =
      if (sym.is(Mutable)) "Note that variables need to be initialized to be defined."
      else ""
    def msg = em"""Declaration of $sym not allowed here: only classes can have declared but undefined members"""
    def explain = s"$varNote"
  }
class CannotExtendAnyVal(sym: Symbol)(using Context)
extends SyntaxMsg(CannotExtendAnyValID) {
def msg = em"""$sym cannot extend ${hl("AnyVal")}"""
def explain =
em"""Only classes (not traits) are allowed to extend ${hl("AnyVal")}, but traits may extend
|${hl("Any")} to become ${Green("\"universal traits\"")} which may only have ${hl("def")} members.
|Universal traits can be mixed into classes that extend ${hl("AnyVal")}.
|"""
}
class CannotExtendJavaEnum(sym: Symbol)(using Context)
extends SyntaxMsg(CannotExtendJavaEnumID) {
def msg = em"""$sym cannot extend ${hl("java.lang.Enum")}: only enums defined with the ${hl("enum")} syntax can"""
def explain = ""
}
class CannotExtendContextFunction(sym: Symbol)(using Context)
extends SyntaxMsg(CannotExtendFunctionID) {
def msg = em"""$sym cannot extend a context function class"""
def explain = ""
}
class JavaEnumParentArgs(parent: Type)(using Context)
extends TypeMsg(JavaEnumParentArgsID) {
def msg = em"""not enough arguments for constructor Enum: ${hl("(name: String, ordinal: Int)")}: ${hl(parent.show)}"""
def explain = ""
}
class CannotHaveSameNameAs(sym: Symbol, cls: Symbol, reason: CannotHaveSameNameAs.Reason)(using Context)
extends SyntaxMsg(CannotHaveSameNameAsID) {
import CannotHaveSameNameAs._
def reasonMessage: String = reason match {
case CannotBeOverridden => "class definitions cannot be overridden"
case DefinedInSelf(self) =>
s"""cannot define ${sym.showKind} member with the same name as a ${cls.showKind} member in self reference ${self.name}.
|(Note: this can be resolved by using another name)
|""".stripMargin
}
def msg = em"""$sym cannot have the same name as ${cls.showLocated} -- """ + reasonMessage
def explain = ""
}
object CannotHaveSameNameAs {
sealed trait Reason
case object CannotBeOverridden extends Reason
case class DefinedInSelf(self: tpd.ValDef) extends Reason
}
class ValueClassesMayNotDefineInner(valueClass: Symbol, inner: Symbol)(using Context)
extends SyntaxMsg(ValueClassesMayNotDefineInnerID) {
def msg = em"""Value classes may not define an inner class"""
def explain = ""
}
class ValueClassesMayNotDefineNonParameterField(valueClass: Symbol, field: Symbol)(using Context)
extends SyntaxMsg(ValueClassesMayNotDefineNonParameterFieldID) {
def msg = em"""Value classes may not define non-parameter field"""
def explain = ""
}
class ValueClassesMayNotDefineASecondaryConstructor(valueClass: Symbol, constructor: Symbol)(using Context)
extends SyntaxMsg(ValueClassesMayNotDefineASecondaryConstructorID) {
def msg = em"""Value classes may not define a secondary constructor"""
def explain = ""
}
class ValueClassesMayNotContainInitalization(valueClass: Symbol)(using Context)
extends SyntaxMsg(ValueClassesMayNotContainInitalizationID) {
def msg = em"""Value classes may not contain initialization statements"""
def explain = ""
}
class ValueClassesMayNotBeAbstract(valueClass: Symbol)(using Context)
extends SyntaxMsg(ValueClassesMayNotBeAbstractID) {
def msg = em"""Value classes may not be ${hl("abstract")}"""
def explain = ""
}
class ValueClassesMayNotBeContainted(valueClass: Symbol)(using Context)
extends SyntaxMsg(ValueClassesMayNotBeContaintedID) {
private def localOrMember = if (valueClass.owner.isTerm) "local class" else "member of another class"
def msg = s"""Value classes may not be a $localOrMember"""
def explain = ""
}
class ValueClassesMayNotWrapAnotherValueClass(valueClass: Symbol)(using Context)
extends SyntaxMsg(ValueClassesMayNotWrapAnotherValueClassID) {
def msg = """A value class may not wrap another user-defined value class"""
def explain = ""
}
class ValueClassParameterMayNotBeAVar(valueClass: Symbol, param: Symbol)(using Context)
extends SyntaxMsg(ValueClassParameterMayNotBeAVarID) {
def msg = em"""A value class parameter may not be a ${hl("var")}"""
def explain =
em"""A value class must have exactly one ${hl("val")} parameter."""
}
class ValueClassNeedsOneValParam(valueClass: Symbol)(using Context)
extends SyntaxMsg(ValueClassNeedsExactlyOneValParamID) {
def msg = em"""Value class needs one ${hl("val")} parameter"""
def explain = ""
}
class ValueClassParameterMayNotBeCallByName(valueClass: Symbol, param: Symbol)(using Context)
extends SyntaxMsg(ValueClassParameterMayNotBeCallByNameID) {
def msg = s"Value class parameter `${param.name}` may not be call-by-name"
def explain = ""
}
class SuperCallsNotAllowedInlineable(symbol: Symbol)(using Context)
extends SyntaxMsg(SuperCallsNotAllowedInlineableID) {
def msg = em"Super call not allowed in inlineable $symbol"
def explain = "Method inlining prohibits calling superclass methods, as it may lead to confusion about which super is being called."
}
  /** Type error: `tp` was used as a `usage` (the attempted use, in words) but
   *  is rejected because it is not an immutable path.
   */
  class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathID):
    def msg = em"$tp is not a valid $usage, since it is not an immutable path"
    def explain =
      i"""An immutable path is
      | - a reference to an immutable value, or
      | - a reference to `this`, or
      | - a selection of an immutable path with an immutable value."""
class WrongNumberOfParameters(expected: Int)(using Context)
extends SyntaxMsg(WrongNumberOfParametersID) {
def msg = s"Wrong number of parameters, expected: $expected"
def explain = ""
}
class DuplicatePrivateProtectedQualifier()(using Context)
extends SyntaxMsg(DuplicatePrivateProtectedQualifierID) {
def msg = "Duplicate private/protected qualifier"
def explain =
em"It is not allowed to combine `private` and `protected` modifiers even if they are qualified to different scopes"
}
class ExpectedStartOfTopLevelDefinition()(using Context)
extends SyntaxMsg(ExpectedStartOfTopLevelDefinitionID) {
def msg = "Expected start of definition"
def explain =
em"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after qualifiers"
}
class NoReturnFromInlineable(owner: Symbol)(using Context)
extends SyntaxMsg(NoReturnFromInlineableID) {
def msg = em"No explicit ${hl("return")} allowed from inlineable $owner"
def explain =
em"""Methods marked with ${hl("inline")} modifier may not use ${hl("return")} statements.
|Instead, you should rely on the last expression's value being
|returned from a method.
|"""
}
class ReturnOutsideMethodDefinition(owner: Symbol)(using Context)
extends SyntaxMsg(ReturnOutsideMethodDefinitionID) {
def msg = em"${hl("return")} outside method definition"
def explain =
em"""You used ${hl("return")} in ${owner}.
|${hl("return")} is a keyword and may only be used within method declarations.
|"""
}
class ExtendFinalClass(clazz:Symbol, finalClazz: Symbol)(using Context)
extends SyntaxMsg(ExtendFinalClassID) {
def msg = em"$clazz cannot extend ${hl("final")} $finalClazz"
def explain =
em"""A class marked with the ${hl("final")} keyword cannot be extended"""
}
class ExpectedTypeBoundOrEquals(found: Token)(using Context)
extends SyntaxMsg(ExpectedTypeBoundOrEqualsID) {
def msg = em"${hl("=")}, ${hl(">:")}, or ${hl("<:")} expected, but ${Tokens.showToken(found)} found"
def explain =
em"""Type parameters and abstract types may be constrained by a type bound.
|Such type bounds limit the concrete values of the type variables and possibly
|reveal more information about the members of such types.
|
|A lower type bound ${hl("B >: A")} expresses that the type variable ${hl("B")}
|refers to a supertype of type ${hl("A")}.
|
|An upper type bound ${hl("T <: A")} declares that type variable ${hl("T")}
|refers to a subtype of type ${hl("A")}.
|"""
}
class ClassAndCompanionNameClash(cls: Symbol, other: Symbol)(using Context)
extends NamingMsg(ClassAndCompanionNameClashID) {
def msg = em"Name clash: both ${cls.owner} and its companion object defines ${cls.name.stripModuleClassSuffix}"
def explain =
em"""|A ${cls.kindString} and its companion object cannot both define a ${hl("class")}, ${hl("trait")} or ${hl("object")} with the same name:
| - ${cls.owner} defines ${cls}
| - ${other.owner} defines ${other}"""
}
class TailrecNotApplicable(symbol: Symbol)(using Context)
extends SyntaxMsg(TailrecNotApplicableID) {
def msg = {
val reason =
if (!symbol.is(Method)) em"$symbol isn't a method"
else if (symbol.is(Deferred)) em"$symbol is abstract"
else if (!symbol.isEffectivelyFinal) em"$symbol is neither ${hl("private")} nor ${hl("final")} so can be overridden"
else em"$symbol contains no recursive calls"
s"TailRec optimisation not applicable, $reason"
}
def explain = ""
}
class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: List[Symbol], classRoot: Symbol)(using Context)
extends Message(FailureToEliminateExistentialID) {
def kind: String = "Compatibility"
def msg =
val originalType = ctx.printer.dclsText(boundSyms, "; ").show
em"""An existential type that came from a Scala-2 classfile for $classRoot
|cannot be mapped accurately to a Scala-3 equivalent.
|original type : $tp forSome ${originalType}
|reduces to : $tp1
|type used instead: $tp2
|This choice can cause follow-on type errors or hide type errors.
|Proceed at own risk."""
def explain =
em"""Existential types in their full generality are no longer supported.
|Scala-3 does applications of class types to wildcard type arguments.
|Other forms of existential types that come from Scala-2 classfiles
|are only approximated in a best-effort way."""
}
class OnlyFunctionsCanBeFollowedByUnderscore(tp: Type)(using Context)
extends SyntaxMsg(OnlyFunctionsCanBeFollowedByUnderscoreID) {
def msg = em"Only function types can be followed by ${hl("_")} but the current expression has type $tp"
def explain =
em"""The syntax ${hl("x _")} is no longer supported if ${hl("x")} is not a function.
|To convert to a function value, you need to explicitly write ${hl("() => x")}"""
}
class MissingEmptyArgumentList(method: String)(using Context)
extends SyntaxMsg(MissingEmptyArgumentListID) {
def msg = em"$method must be called with ${hl("()")} argument"
def explain = {
val codeExample =
"""def next(): T = ...
|next // is expanded to next()"""
em"""Previously an empty argument list () was implicitly inserted when calling a nullary method without arguments. E.g.
|
|$codeExample
|
|In Dotty, this idiom is an error. The application syntax has to follow exactly the parameter syntax.
|Excluded from this rule are methods that are defined in Java or that override methods defined in Java."""
}
}
class DuplicateNamedTypeParameter(name: Name)(using Context)
extends SyntaxMsg(DuplicateNamedTypeParameterID) {
def msg = em"Type parameter $name was defined multiple times."
def explain = ""
}
class UndefinedNamedTypeParameter(undefinedName: Name, definedNames: List[Name])(using Context)
extends SyntaxMsg(UndefinedNamedTypeParameterID) {
def msg = em"Type parameter $undefinedName is undefined. Expected one of ${definedNames.map(_.show).mkString(", ")}."
def explain = ""
}
class IllegalStartOfStatement(what: String, isModifier: Boolean, isStat: Boolean)(using Context) extends SyntaxMsg(IllegalStartOfStatementID) {
def msg =
if isStat then
"this kind of statement is not allowed here"
else
val addendum = if isModifier then ": this modifier is not allowed here" else ""
s"Illegal start of $what$addendum"
def explain =
i"""A statement is an import or export, a definition or an expression.
|Some statements are only allowed in certain contexts"""
}
class TraitIsExpected(symbol: Symbol)(using Context) extends SyntaxMsg(TraitIsExpectedID) {
def msg = em"$symbol is not a trait"
def explain = {
val errorCodeExample =
"""class A
|class B
|
|val a = new A with B // will fail with a compile error - class B is not a trait""".stripMargin
val codeExample =
"""class A
|trait B
|
|val a = new A with B // compiles normally""".stripMargin
em"""Only traits can be mixed into classes using a ${hl("with")} keyword.
|Consider the following example:
|
|$errorCodeExample
|
|The example mentioned above would fail because B is not a trait.
|But if you make B a trait it will be compiled without any errors:
|
|$codeExample
|"""
}
}
class TraitRedefinedFinalMethodFromAnyRef(method: Symbol)(using Context) extends SyntaxMsg(TraitRedefinedFinalMethodFromAnyRefID) {
def msg = em"Traits cannot redefine final $method from ${hl("class AnyRef")}."
def explain = ""
}
  /** Naming error: `name` being defined in `owner` clashes with the existing
   *  symbol `conflicting`.
   */
  class AlreadyDefined(name: Name, owner: Symbol, conflicting: Symbol)(using Context) extends NamingMsg(AlreadyDefinedID):
    // Locates the conflicting definition: its source/class file for package-level
    // symbols, its owner otherwise; empty when it lives in the same owner.
    private def where: String =
      if conflicting.effectiveOwner.is(Package) && conflicting.associatedFile != null then
        i" in ${conflicting.associatedFile}"
      else if conflicting.owner == owner then ""
      else i" in ${conflicting.owner}"
    // Extra hint shown when methods are involved (overload-grouping rule).
    private def note =
      if owner.is(Method) || conflicting.is(Method) then
        "\n\nNote that overloaded methods must all be defined in the same group of toplevel definitions"
      else ""
    def msg =
      // Term vs. type mismatch: the two are companions that must be defined together.
      if conflicting.isTerm != name.isTermName then
        em"$name clashes with $conflicting$where; the two must be defined together"
      else
        em"$name is already defined as $conflicting$where$note"
    def explain = ""
class PackageNameAlreadyDefined(pkg: Symbol)(using Context) extends NamingMsg(PackageNameAlreadyDefinedID) {
lazy val (where, or) =
if pkg.associatedFile == null then ("", "")
else (s" in ${pkg.associatedFile}", " or delete the containing class file")
def msg = em"""${pkg.name} is the name of $pkg$where.
|It cannot be used at the same time as the name of a package."""
def explain =
em"""An ${hl("object")} or other toplevel definition cannot have the same name as an existing ${hl("package")}.
|Rename either one of them$or."""
}
class UnapplyInvalidNumberOfArguments(qual: untpd.Tree, argTypes: List[Type])(using Context)
extends SyntaxMsg(UnapplyInvalidNumberOfArgumentsID) {
def msg = em"Wrong number of argument patterns for $qual; expected: ($argTypes%, %)"
def explain =
em"""The Unapply method of $qual was used with incorrect number of arguments.
|Expected usage would be something like:
|case $qual(${argTypes.map(_ => '_')}%, %) => ...
|
|where subsequent arguments would have following types: ($argTypes%, %).
|""".stripMargin
}
// Error: the result type of an unapply/unapplySeq does not satisfy the extractor protocol.
// Fix in this revision: the unapplySeq example previously printed `Option[Seq[Char]`
// with a missing closing bracket.
class UnapplyInvalidReturnType(unapplyResult: Type, unapplyName: Name)(using Context)
  extends DeclarationMsg(UnapplyInvalidReturnTypeID) {
  def msg =
    val addendum =
      // Under 3.0 migration mode, old-style unapplySeq extractors get a targeted rewrite hint.
      if Feature.migrateTo3 && unapplyName == nme.unapplySeq
      then "\nYou might want to try to rewrite the extractor to use `unapply` instead."
      else ""
    em"""| ${Red(i"$unapplyResult")} is not a valid result type of an $unapplyName method of an ${Magenta("extractor")}.$addendum"""
  def explain = if (unapplyName.show == "unapply")
    em"""
       |To be used as an extractor, an unapply method has to return a type that either:
       | - has members ${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} (usually an ${Green("Option[S]")})
       | - is a ${Green("Boolean")}
       | - is a ${Green("Product")} (like a ${Magenta("Tuple2[T1, T2]")})
       |
       |class A(val i: Int)
       |
       |object B {
       |  def unapply(a: A): ${Green("Option[Int]")} = Some(a.i)
       |}
       |
       |object C {
       |  def unapply(a: A): ${Green("Boolean")} = a.i == 2
       |}
       |
       |object D {
       |  def unapply(a: A): ${Green("(Int, Int)")} = (a.i, a.i)
       |}
       |
       |object Test {
       |  def test(a: A) = a match {
       |    ${Magenta("case B(1)")} => 1
       |    ${Magenta("case a @ C()")} => 2
       |    ${Magenta("case D(3, 3)")} => 3
       |  }
       |}
       """.stripMargin
  else
    em"""
       |To be used as an extractor, an unapplySeq method has to return a type which has members
       |${Magenta("isEmpty: Boolean")} and ${Magenta("get: S")} where ${Magenta("S <: Seq[V]")} (usually an ${Green("Option[Seq[V]]")}):
       |
       |object CharList {
       |  def unapplySeq(s: String): ${Green("Option[Seq[Char]]")} = Some(s.toList)
       |
       |  "example" match {
       |    ${Magenta("case CharList(c1, c2, c3, c4, _, _, _)")} =>
       |      println(s"$$c1,$$c2,$$c3,$$c4")
       |    case _ =>
       |      println("Expected *exactly* 7 characters!")
       |  }
       |}
       """.stripMargin
}
// Error: an @static member appears outside a static object.
class StaticFieldsOnlyAllowedInObjects(member: Symbol)(using Context) extends SyntaxMsg(StaticFieldsOnlyAllowedInObjectsID) {
  def msg = em"${hl("@static")} $member in ${member.owner} must be defined inside a static ${hl("object")}."
  def explain =
    em"${hl("@static")} members are only allowed inside objects."
}
// Error: an @static field is declared after non-static fields in its object.
// Fix in this revision: user-facing grammar, "Static field are" -> "Static fields are".
class StaticFieldsShouldPrecedeNonStatic(member: Symbol, defns: List[tpd.Tree])(using Context) extends SyntaxMsg(StaticFieldsShouldPrecedeNonStaticID) {
  def msg = em"${hl("@static")} $member in ${member.owner} must be defined before non-static fields."
  def explain = {
    // Show up to three of the non-static vals that currently precede `member`,
    // to make the suggested reordering concrete.
    val nonStatics = defns.takeWhile(_.symbol != member).take(3).filter(_.isInstanceOf[tpd.ValDef])
    val codeExample = s"""object ${member.owner.name.firstPart} {
                  |  @static ${member} = ...
                  |  ${nonStatics.map(m => s"${m.symbol} = ...").mkString("\n ")}
                  |  ...
                  |}"""
    em"""The fields annotated with @static should precede any non @static fields.
        |This ensures that we do not introduce surprises for users in initialization order of this class.
        |Static fields are initialized when class loading the code of Foo.
        |Non static fields are only initialized the first time that Foo is accessed.
        |
        |The definition of ${member.name} should have been before the non ${hl("@static val")}s:
        |$codeExample
        |"""
  }
}
// Error: a class or trait (transitively) extends itself.
// Fix in this revision: grammar in the explanation ("makes it impossible") and
// the missing final period.
class CyclicInheritance(symbol: Symbol, addendum: => String)(using Context) extends SyntaxMsg(CyclicInheritanceID) {
  def msg = em"Cyclic inheritance: $symbol extends itself$addendum"
  def explain = {
    val codeExample = "class A extends A"
    em"""Cyclic inheritance is prohibited in Dotty.
        |Consider the following example:
        |
        |$codeExample
        |
        |The example mentioned above would fail because this type of inheritance hierarchy
        |creates a "cycle" where a not yet defined class A extends itself which makes
        |it impossible to instantiate an object of this class."""
  }
}
// Error: a signature read from a class/tasty file refers to a symbol that is
// missing from (or incompatible with) the current classpath.
class BadSymbolicReference(denot: SymDenotation)(using Context)
  extends ReferenceMsg(BadSymbolicReferenceID) {
  def msg = {
    val denotationOwner = denot.owner
    // Print the name with -Ydebug-names enabled so mangled/unique names remain distinguishable.
    val denotationName = ctx.fresh.setSetting(ctx.settings.YdebugNames, true).printer.nameString(denot.name)
    val file = denot.symbol.associatedFile
    // Fall back to a generic "the signature" when no origin file is known.
    val (location, src) =
      if (file != null) (s" in $file", file.toString)
      else ("", "the signature")
    em"""Bad symbolic reference. A signature$location
        |refers to $denotationName in ${denotationOwner.showKind} ${denotationOwner.showFullName} which is not available.
        |It may be completely missing from the current classpath, or the version on
        |the classpath might be incompatible with the version used when compiling $src."""
  }
  def explain = ""
}

// Error: a sealed class/trait is extended outside its defining source file.
class UnableToExtendSealedClass(pclazz: Symbol)(using Context) extends SyntaxMsg(UnableToExtendSealedClassID) {
  def msg = em"Cannot extend ${hl("sealed")} $pclazz in a different source file"
  def explain = "A sealed class or trait can only be extended in the same file as its declaration"
}

// Error: the version string in a symbol's @migration annotation could not be parsed.
class SymbolHasUnparsableVersionNumber(symbol: Symbol, errorMessage: String)(using Context)
  extends SyntaxMsg(SymbolHasUnparsableVersionNumberID) {
  def msg = em"${symbol.showLocated} has an unparsable version number: $errorMessage"
  def explain =
    em"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics
        |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs
        |whose behavior may have changed since version change."""
}

// Migration warning: a symbol annotated @migration changed semantics at `migrationVersion`.
class SymbolChangedSemanticsInVersion(
  symbol: Symbol,
  migrationVersion: ScalaVersion,
  migrationMessage: String
)(using Context) extends SyntaxMsg(SymbolChangedSemanticsInVersionID) {
  def msg = em"${symbol.showLocated} has changed semantics in version $migrationVersion: $migrationMessage"
  def explain =
    em"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics
        |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs
        |whose behavior may have changed since version change."""
}

// Error: a match annotated @switch could not be compiled to a JVM table/lookup switch.
class UnableToEmitSwitch()(using Context)
  extends SyntaxMsg(UnableToEmitSwitchID) {
  def msg = em"Could not emit switch for ${hl("@switch")} annotated match"
  def explain = {
    val codeExample =
      """val ConstantB = 'B'
        |final val ConstantC = 'C'
        |def tokenMe(ch: Char) = (ch: @switch) match {
        |  case '\t' | '\n' => 1
        |  case 'A' => 2
        |  case ConstantB => 3 // a non-literal may prevent switch generation: this would not compile
        |  case ConstantC => 4 // a constant value is allowed
        |  case _ => 5
        |}""".stripMargin
    em"""If annotated with ${hl("@switch")}, the compiler will verify that the match has been compiled to a
        |tableswitch or lookupswitch and issue an error if it instead compiles into a series of conditional
        |expressions. Example usage:
        |
        |$codeExample
        |
        |The compiler will not apply the optimisation if:
        |- the matched value is not of type ${hl("Int")}, ${hl("Byte")}, ${hl("Short")} or ${hl("Char")}
        |- the matched value is not a constant literal
        |- there are less than three cases"""
  }
}

// Error: an object declaring @static members has no companion class.
class MissingCompanionForStatic(member: Symbol)(using Context)
  extends SyntaxMsg(MissingCompanionForStaticID) {
  def msg = em"${member.owner} does not have a companion class"
  def explain =
    em"An object that contains ${hl("@static")} members must have a companion class."
}

// Error: a polymorphic method in a structural refinement does not override anything in the parent.
class PolymorphicMethodMissingTypeInParent(rsym: Symbol, parentSym: Symbol)(using Context)
  extends SyntaxMsg(PolymorphicMethodMissingTypeInParentID) {
  def msg = em"Polymorphic refinement $rsym without matching type in parent $parentSym is no longer allowed"
  def explain =
    em"""Polymorphic $rsym is not allowed in the structural refinement of $parentSym because
        |$rsym does not override any method in $parentSym. Structural refinement does not allow for
        |polymorphic methods."""
}

// Error: `inline` used on a parameter of a non-inline method.
class ParamsNoInline(owner: Symbol)(using Context)
  extends SyntaxMsg(ParamsNoInlineID) {
  def msg = em"""${hl("inline")} modifier can only be used for parameters of inline methods"""
  def explain = ""
}

// Error: a Java class or package was used in term (value) position.
class JavaSymbolIsNotAValue(symbol: Symbol)(using Context) extends TypeMsg(JavaSymbolIsNotAValueID) {
  def msg = {
    val kind =
      if (symbol is Package) em"$symbol"
      else em"Java defined ${hl("class " + symbol.name)}"
    s"$kind is not a value"
  }
  def explain = ""
}

// Error: two declarations clash — either a plain double definition in the same
// class, or a name clash between a defined and an inherited member (possibly
// only after erasure).
class DoubleDefinition(decl: Symbol, previousDecl: Symbol, base: Symbol)(using Context) extends NamingMsg(DoubleDefinitionID) {
  def msg = {
    def nameAnd = if (decl.name != previousDecl.name) " name and" else ""
    def details(using Context): String =
      if (decl.isRealMethod && previousDecl.isRealMethod) {
        import Signature.MatchDegree._
        // compare the signatures when both symbols represent methods
        decl.signature.matchDegree(previousDecl.signature) match {
          case NoMatch =>
            // If the signatures don't match at all at the current phase, then
            // they might match after erasure.
            if ctx.phase.id <= elimErasedValueTypePhase.id then
              atPhase(elimErasedValueTypePhase.next)(details)
            else
              "" // shouldn't be reachable
          case ParamMatch =>
            "have matching parameter types."
          case MethodNotAMethodMatch =>
            "neither has parameters."
          case FullMatch =>
            // Suggest @targetName only when neither declaration already has one.
            val hint =
              if !decl.hasAnnotation(defn.TargetNameAnnot)
                 && !previousDecl.hasAnnotation(defn.TargetNameAnnot)
              then
                i"""
                   |
                   |Consider adding a @targetName annotation to one of the conflicting definitions
                   |for disambiguation."""
              else ""
            i"have the same$nameAnd type after erasure.$hint"
        }
      }
      else ""
    // Renders "in <owner>" plus a line number when the symbol has its own position.
    def symLocation(sym: Symbol) = {
      val lineDesc =
        if (sym.span.exists && sym.span != sym.owner.span)
          s" at line ${sym.srcPos.line + 1}"
        else ""
      i"in ${sym.owner}${lineDesc}"
    }
    val clashDescription =
      if (decl.owner eq previousDecl.owner)
        "Double definition"
      else if ((decl.owner eq base) || (previousDecl eq base))
        "Name clash between defined and inherited member"
      else
        "Name clash between inherited members"
    em"""$clashDescription:
        |${previousDecl.showDcl} ${symLocation(previousDecl)} and
        |${decl.showDcl} ${symLocation(decl)}
        |""" + details
  }
  def explain = ""
}
// Error: the same identifier is renamed twice in one import clause.
class ImportRenamedTwice(ident: untpd.Ident)(using Context) extends SyntaxMsg(ImportRenamedTwiceID) {
  def msg = s"${ident.show} is renamed twice on the same import line."
  def explain = ""
}

// Warning: an isInstanceOf-style test whose scrutinee type already conforms to the tested type.
class TypeTestAlwaysSucceeds(scrutTp: Type, testTp: Type)(using Context) extends SyntaxMsg(TypeTestAlwaysSucceedsID) {
  def msg = {
    val addendum =
      if (scrutTp != testTp) s" is a subtype of ${testTp.show}"
      else " is the same as the tested type"
    s"The highlighted type test will always succeed since the scrutinee type ${scrutTp.show}" + addendum
  }
  def explain = ""
}
// Relative of CyclicReferenceInvolvingImplicit and RecursiveValueNeedsResultType
// NOTE(review): the doubled "Needs" in the ID below appears to mirror the actual
// spelling of the ErrorMessageID constant — verify before renaming.
class TermMemberNeedsResultTypeForImplicitSearch(cycleSym: Symbol)(using Context)
  extends CyclicMsg(TermMemberNeedsNeedsResultTypeForImplicitSearchID) {
  def msg = em"""$cycleSym needs result type because its right-hand side attempts implicit search"""
  def explain =
    em"""|The right hand-side of $cycleSym's definition requires an implicit search at the highlighted position.
         |To avoid this error, give `$cycleSym` an explicit type.
         |""".stripMargin
}

// Error: a class lists an enum class as a parent.
class ClassCannotExtendEnum(cls: Symbol, parent: Symbol)(using Context) extends SyntaxMsg(ClassCannotExtendEnumID) {
  def msg = em"""$cls in ${cls.owner} extends enum ${parent.name}, but extending enums is prohibited."""
  def explain = ""
}

// Error: a pattern uses a term that defines neither unapply nor unapplySeq.
class NotAnExtractor(tree: untpd.Tree)(using Context) extends SyntaxMsg(NotAnExtractorID) {
  def msg = em"$tree cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method"
  def explain =
    em"""|An ${hl("unapply")} method should be defined in an ${hl("object")} as follow:
         |  - If it is just a test, return a ${hl("Boolean")}. For example ${hl("case even()")}
         |  - If it returns a single sub-value of type T, return an ${hl("Option[T]")}
         |  - If it returns several sub-values T1,...,Tn, group them in an optional tuple ${hl("Option[(T1,...,Tn)]")}
         |
         |Sometimes, the number of sub-values isn't fixed and we would like to return a sequence.
         |For this reason, you can also define patterns through ${hl("unapplySeq")} which returns ${hl("Option[Seq[T]]")}.
         |This mechanism is used for instance in pattern ${hl("case List(x1, ..., xn)")}""".stripMargin
}

// Error: a companion class declares a member with the same name as a @static member.
class MemberWithSameNameAsStatic()(using Context)
  extends SyntaxMsg(MemberWithSameNameAsStaticID) {
  def msg = em"Companion classes cannot define members with same name as a ${hl("@static")} member"
  def explain = ""
}

// Warning: a side-effect-free expression whose value is discarded.
class PureExpressionInStatementPosition(stat: untpd.Tree, val exprOwner: Symbol)(using Context)
  extends Message(PureExpressionInStatementPositionID) {
  def kind = "Potential Issue"
  def msg = "A pure expression does nothing in statement position; you may be omitting necessary parentheses"
  def explain =
    em"""The pure expression $stat doesn't have any side effect and its result is not assigned elsewhere.
        |It can be removed without changing the semantics of the program. This may indicate an error.""".stripMargin
}

// Error: a trait's companion object declares a mutable @static field.
class TraitCompanionWithMutableStatic()(using Context)
  extends SyntaxMsg(TraitCompanionWithMutableStaticID) {
  def msg = em"Companion of traits cannot define mutable @static fields"
  def explain = ""
}

// Error: lazy and @static combined on the same field.
class LazyStaticField()(using Context)
  extends SyntaxMsg(LazyStaticFieldID) {
  def msg = em"Lazy @static fields are not supported"
  def explain = ""
}

// Error: a @static member overrides or implements a non-static one.
class StaticOverridingNonStaticMembers()(using Context)
  extends SyntaxMsg(StaticOverridingNonStaticMembersID) {
  def msg = em"${hl("@static")} members cannot override or implement non-static ones"
  def explain = ""
}

// Error: a structural refinement introduces an overloaded definition.
class OverloadInRefinement(rsym: Symbol)(using Context)
  extends DeclarationMsg(OverloadInRefinementID) {
  def msg = "Refinements cannot introduce overloaded definitions"
  def explain =
    em"""The refinement `$rsym` introduces an overloaded definition.
        |Refinements cannot contain overloaded definitions.""".stripMargin
}

// Error: overload resolution found no alternative matching the expected type.
class NoMatchingOverload(val alternatives: List[SingleDenotation], pt: Type)(using Context)
  extends TypeMsg(NoMatchingOverloadID) {
  def msg =
    em"""None of the ${err.overloadedAltsStr(alternatives)}
        |match ${err.expectedTypeStr(pt)}"""
  def explain = ""
}

// Error: a pattern position requires a stable identifier but got something else.
class StableIdentPattern(tree: untpd.Tree, pt: Type)(using Context)
  extends TypeMsg(StableIdentPatternID) {
  def msg =
    em"""Stable identifier required, but $tree found"""
  def explain = ""
}

// Error: the super-accessor that would be generated for a trait super-call is
// ill-typed because linearization resolves the call to an incompatible member.
class IllegalSuperAccessor(base: Symbol, memberName: Name, targetName: Name,
    acc: Symbol, accTp: Type,
    other: Symbol, otherTp: Type)(using Context) extends DeclarationMsg(IllegalSuperAccessorID) {
  def msg = {
    // The mixin containing a super-call that requires a super-accessor
    val accMixin = acc.owner
    // The class or trait that the super-accessor should resolve too in `base`
    val otherMixin = other.owner
    // The super-call in `accMixin`
    val superCall = hl(i"super.$memberName")
    // The super-call that the super-accesors in `base` forwards to
    val resolvedSuperCall = hl(i"super[${otherMixin.name}].$memberName")
    // The super-call that we would have called if `super` in traits behaved like it
    // does in classes, i.e. followed the linearization of the trait itself.
    val staticSuperCall = {
      val staticSuper = accMixin.asClass.info.parents.reverse
        .find(_.nonPrivateMember(memberName)
          .matchingDenotation(accMixin.thisType, acc.info, targetName).exists)
      val staticSuperName = staticSuper match {
        case Some(parent) =>
          parent.classSymbol.name.show
        case None => // Might be reachable under separate compilation
          "SomeParent"
      }
      hl(i"super[$staticSuperName].$memberName")
    }
    ex"""$base cannot be defined due to a conflict between its parents when
        |implementing a super-accessor for $memberName in $accMixin:
        |
        |1. One of its parent (${accMixin.name}) contains a call $superCall in its body,
        |   and when a super-call in a trait is written without an explicit parent
        |   listed in brackets, it is implemented by a generated super-accessor in
        |   the class that extends this trait based on the linearization order of
        |   the class.
        |2. Because ${otherMixin.name} comes before ${accMixin.name} in the linearization
        |   order of ${base.name}, and because ${otherMixin.name} overrides $memberName,
        |   the super-accessor in ${base.name} is implemented as a call to
        |   $resolvedSuperCall.
        |3. However,
        |   ${otherTp.widenExpr} (the type of $resolvedSuperCall in ${base.name})
        |   is not a subtype of
        |   ${accTp.widenExpr} (the type of $memberName in $accMixin).
        |   Hence, the super-accessor that needs to be generated in ${base.name}
        |   is illegal.
        |
        |Here are two possible ways to resolve this:
        |
        |1. Change the linearization order of ${base.name} such that
        |   ${accMixin.name} comes before ${otherMixin.name}.
        |2. Alternatively, replace $superCall in the body of $accMixin by a
        |   super-call to a specific parent, e.g. $staticSuperCall
        |""".stripMargin
  }
  def explain = ""
}
// Error: a parent type is derived from one of the class's own parameters.
class TraitParameterUsedAsParentPrefix(cls: Symbol)(using Context)
  extends DeclarationMsg(TraitParameterUsedAsParentPrefixID) {
  def msg =
    s"${cls.show} cannot extend from a parent that is derived via its own parameters"
  def explain =
    ex"""
        |The parent class/trait that ${cls.show} extends from is obtained from
        |the parameter of ${cls.show}. This is disallowed in order to prevent
        |outer-related Null Pointer Exceptions in Scala.
        |
        |In order to fix this issue consider directly extending from the parent rather
        |than obtaining it from the parameters of ${cls.show}.
        |""".stripMargin
}

// Error: a qualified visibility modifier (e.g. private[Foo]) names an unknown enclosing scope.
class UnknownNamedEnclosingClassOrObject(name: TypeName)(using Context)
  extends ReferenceMsg(UnknownNamedEnclosingClassOrObjectID) {
  def msg =
    em"""no enclosing class or object is named '${hl(name.show)}'"""
  def explain =
    ex"""
        |The class or object named '${hl(name.show)}' was used as a visibility
        |modifier, but could not be resolved. Make sure that
        |'${hl(name.show)}' is not misspelled and has been imported into the
        |current scope.
        """.stripMargin
}

// Error: a type definition refers back to itself through `lastChecked`.
class IllegalCyclicTypeReference(sym: Symbol, where: String, lastChecked: Type)(using Context)
  extends CyclicMsg(IllegalCyclicTypeReferenceID) {
  def msg =
    // Showing the offending type may itself trip the cycle; fall back to "..." then.
    val lastCheckedStr =
      try lastChecked.show
      catch case ex: CyclicReference => "..."
    i"illegal cyclic type reference: ${where} ${hl(lastCheckedStr)} of $sym refers back to the type itself"
  def explain = ""
}

// Error: `erased` used on a non-function type.
class ErasedTypesCanOnlyBeFunctionTypes()(using Context)
  extends SyntaxMsg(ErasedTypesCanOnlyBeFunctionTypesID) {
  def msg = "Types with erased keyword can only be function types `(erased ...) => ...`"
  def explain = ""
}

// Error: a case class has only implicit parameter lists.
class CaseClassMissingNonImplicitParamList(cdef: untpd.TypeDef)(using Context)
  extends SyntaxMsg(CaseClassMissingNonImplicitParamListID) {
  def msg =
    em"""|A ${hl("case class")} must have at least one non-implicit parameter list"""
  def explain =
    em"""|${cdef.name} must have at least one non-implicit parameter list,
         | if you're aiming to have a case class parametrized only by implicit ones, you should
         | add an explicit ${hl("()")} as a parameter list to ${cdef.name}.""".stripMargin
}

// Error: an enum declares no cases.
class EnumerationsShouldNotBeEmpty(cdef: untpd.TypeDef)(using Context)
  extends SyntaxMsg(EnumerationsShouldNotBeEmptyID) {
  def msg = "Enumerations must contain at least one case"
  def explain =
    em"""|Enumeration ${cdef.name} must contain at least one case
         |Example Usage:
         | ${hl("enum")} ${cdef.name} {
         |    ${hl("case")} Option1, Option2
         | }
         |""".stripMargin
}

// Error: both the enum class and an enum case have type parameters but the case
// has no explicit extends clause.
class TypedCaseDoesNotExplicitlyExtendTypedEnum(enumDef: Symbol, caseDef: untpd.TypeDef)(using Context)
  extends SyntaxMsg(TypedCaseDoesNotExplicitlyExtendTypedEnumID) {
  def msg = i"explicit extends clause needed because both enum case and enum class have type parameters"
  def explain =
    em"""Enumerations where the enum class as well as the enum case have type parameters need
        |an explicit extends.
        |for example:
        | ${hl("enum")} ${enumDef.name}[T] {
        |    ${hl("case")} ${caseDef.name}[U](u: U) ${hl("extends")} ${enumDef.name}[U]
        | }
        |""".stripMargin
}

// Error: a definition shadows a standard Scala core class/trait/type.
class IllegalRedefinitionOfStandardKind(kindType: String, name: Name)(using Context)
  extends SyntaxMsg(IllegalRedefinitionOfStandardKindID) {
  def msg = em"illegal redefinition of standard $kindType $name"
  def explain =
    em"""| "$name" is a standard Scala core `$kindType`
         | Please choose a different name to avoid conflicts
         |""".stripMargin
}

// Error: an extension method definition is not allowed with collective parameters.
class NoExtensionMethodAllowed(mdef: untpd.DefDef)(using Context)
  extends SyntaxMsg(NoExtensionMethodAllowedID) {
  def msg = em"No extension method allowed here, since collective parameters are given"
  def explain =
    em"""|Extension method:
         |  `${mdef}`
         |is defined inside an extension clause which has collective parameters.
         |""".stripMargin
}
// Error: a method inside an extension clause declares its own type parameters
// although the extension clause already has some.
// Fix in this revision: user-facing grammar, "it's own" -> "its own".
class ExtensionMethodCannotHaveTypeParams(mdef: untpd.DefDef)(using Context)
  extends SyntaxMsg(ExtensionMethodCannotHaveTypeParamsID) {
  def msg = i"Extension method cannot have type parameters since some were already given previously"
  def explain =
    em"""|Extension method:
         |  `${mdef}`
         |has type parameters `[${mdef.leadingTypeParams.map(_.show).mkString(",")}]`, while the extension clause has
         |its own type parameters. Please consider moving these to the extension clause's type parameter list.
         |""".stripMargin
}
// Error: an extension clause with collective parameters contains a non-def member.
class ExtensionCanOnlyHaveDefs(mdef: untpd.Tree)(using Context)
  extends SyntaxMsg(ExtensionCanOnlyHaveDefsID) {
  def msg = em"Only methods allowed here, since collective parameters are given"
  def explain =
    em"""Extension clauses can only have `def`s
        | `${mdef.show}` is not a valid expression here.
        |""".stripMargin
}

// Error: a summonFrom case pattern is not of the form `x: T` or `_`.
class UnexpectedPatternForSummonFrom(tree: Tree[_])(using Context)
  extends SyntaxMsg(UnexpectedPatternForSummonFromID) {
  def msg = em"Unexpected pattern for summonFrom. Expected ${hl("`x: T`")} or ${hl("`_`")}"
  def explain =
    em"""|The pattern "${tree.show}" provided in the ${hl("case")} expression of the ${hl("summonFrom")},
         | needs to be of the form ${hl("`x: T`")} or ${hl("`_`")}.
         |
         | Example usage:
         | inline def a = summonFrom {
         |  case x: T => ???
         | }
         |
         | or
         | inline def a = summonFrom {
         |  case _ => ???
         | }
         |""".stripMargin
}

// Error: an anonymous instance (`new { ... }` / extension instance) has an empty body.
class AnonymousInstanceCannotBeEmpty(impl: untpd.Template)(using Context)
  extends SyntaxMsg(AnonymousInstanceCannotBeEmptyID) {
  def msg = i"anonymous instance must implement a type or have at least one extension method"
  def explain =
    em"""|Anonymous instances cannot be defined with an empty body. The block
         |`${impl.show}` should either contain an implemented type or at least one extension method.
         |""".stripMargin
}

// Error: a type splice ($T) used inside a val pattern.
class TypeSpliceInValPattern(expr: untpd.Tree)(using Context)
  extends SyntaxMsg(TypeSpliceInValPatternID) {
  def msg = "Type splices cannot be used in val patterns. Consider using `match` instead."
  def explain =
    em"""|Type splice: `$$${expr.show}` cannot be used in a `val` pattern. Consider rewriting the `val` pattern
         |as a `match` with a corresponding `case` to replace the `val`.
         |""".stripMargin
}

// Error: a modifier flag is not legal on this kind of definition.
class ModifierNotAllowedForDefinition(flag: Flag)(using Context)
  extends SyntaxMsg(ModifierNotAllowedForDefinitionID) {
  def msg = em"Modifier ${hl(flag.flagsString)} is not allowed for this definition"
  def explain = ""
}

// Warning: a modifier flag is already implied and therefore redundant.
class RedundantModifier(flag: Flag)(using Context)
  extends SyntaxMsg(RedundantModifierID) {
  def msg = em"Modifier ${hl(flag.flagsString)} is redundant for this definition"
  def explain = ""
}

// Error: an @implicitNotFound message interpolates a type variable that is not
// a parameter in scope of `owner`.
class InvalidReferenceInImplicitNotFoundAnnotation(typeVar: String, owner: String)(using Context)
  extends ReferenceMsg(InvalidReferenceInImplicitNotFoundAnnotationID) {
  def msg = em"""|Invalid reference to a type variable ${hl(typeVar)} found in the annotation argument.
                 |The variable does not occur as a parameter in the scope of ${hl(owner)}.
                 |""".stripMargin
  def explain = ""
}

// Error: a case class/object is defined inside an inline method or quoted code.
class CaseClassInInlinedCode(tree: tpd.Tree)(using Context)
  extends SyntaxMsg(CaseClassInInlinedCodeID) {
  // "object" vs "class" chosen from the Module flag of the defined symbol.
  def defKind = if tree.symbol.is(Module) then "object" else "class"
  def msg = s"Case $defKind definitions are not allowed in inline methods or quoted code. Use a normal $defKind instead."
  def explain =
    em"""Case class/object definitions generate a considerable fooprint in code size.
        |Inlining such definition would multiply this footprint for each call site.
        |""".stripMargin
}
// Warning: implicit search gave up after trying `limit` expressions.
// Fix in this revision: `showQuery` previously emitted a stray '}' after the
// target type (`... for ${query._2}}`), which leaked into every printed line.
class ImplicitSearchTooLargeWarning(limit: Int, openSearchPairs: List[(Candidate, Type)])(using Context)
  extends TypeMsg(ImplicitSearchTooLargeID):
  override def showAlways = true
  // Renders one open (candidate, expected type) pair of the search stack.
  def showQuery(query: (Candidate, Type)): String =
    i" ${query._1.ref.symbol.showLocated} for ${query._2}"
  def msg =
    em"""Implicit search problem too large.
        |an implicit search was terminated with failure after trying $limit expressions.
        |The root candidate for the search was:
        |
        |${showQuery(openSearchPairs.last)}
        |
        |You can change the behavior by setting the `-Ximplicit-search-limit` value.
        |Smaller values cause the search to fail faster.
        |Larger values might make a very large search problem succeed.
        |"""
  def explain =
    em"""The overflow happened with the following lists of tried expressions and target types,
        |starting with the root query:
        |
        |${openSearchPairs.reverse.map(showQuery)}%\n%
    """
| dotty-staging/dotty | compiler/src/dotty/tools/dotc/reporting/messages.scala | Scala | apache-2.0 | 105,365 |
package com.adendamedia.cornucopia.redis
// A cluster operation consumed from the Redis event channel. `key` is the raw
// message key, `message` a human-readable label, and `ordinal` a stable rank;
// each concrete operation defines its ordinal as its predecessor's + 1.
trait Operation {
  def key: String
  val message: String
  def ordinal: Int
}
// Fallback used for unrecognized message keys.
object UNSUPPORTED extends Operation {
  val key = "dummy"
  val message = "Invalid"
  val ordinal = 0
}
// Event partition operations.
object ADD_MASTER extends Operation {
  val key = "+master"
  val message = "Add master"
  val ordinal = UNSUPPORTED.ordinal + 1
}
object ADD_SLAVE extends Operation {
  val key = "+slave"
  val message = "Add slave"
  val ordinal = ADD_MASTER.ordinal + 1
}
object RESHARD extends Operation {
  val key = "*reshard"
  val message = "Reshard"
  val ordinal = ADD_SLAVE.ordinal + 1
}
object CLUSTER_TOPOLOGY extends Operation {
  val key = "?topology"
  val message = "Cluster topology"
  val ordinal = RESHARD.ordinal + 1
}
// Node removal partition operations.
object REMOVE_MASTER extends Operation {
  val key = "-master"
  val message = "Remove master"
  val ordinal = CLUSTER_TOPOLOGY.ordinal + 1
}
object REMOVE_SLAVE extends Operation {
  val key = "-slave"
  val message = "Remove slave"
  val ordinal = REMOVE_MASTER.ordinal + 1
}
}
| adenda/cornucopia | src/main/scala/com/adendamedia/cornucopia/redis/Operation.scala | Scala | lgpl-3.0 | 1,105 |
package org.json4s.benchmark
import com.google.caliper.SimpleBenchmark
/* from sirthias/scala-benchmarking-template */
// Base trait for Caliper benchmarks: `repeat` runs a snippet `reps` times in a
// tight while-loop. The imperative style is deliberate, to keep measurement
// overhead minimal inside the benchmarked loop.
trait SimpleScalaBenchmark extends SimpleBenchmark {

  // Keeps the last result that differs from the zero value so the JIT cannot
  // dead-code-eliminate the snippet. `0.asInstanceOf[A]` produces the zero
  // value of the @specialized primitive type A.
  def repeat[@specialized A](reps: Int)(snippet: => A) = {
    val zero = 0.asInstanceOf[A]
    var i = 0
    var result = zero
    while (i < reps) {
      val res = snippet
      if (res != zero) result = res
      i = i + 1
    }
    result
  }
} | geggo98/json4s | benchmark/src/main/scala/org/json4s/benchmark/SimpleScalaBenchmark.scala | Scala | apache-2.0 | 424 |
package org.jetbrains.plugins.dotty.codeInspection.deprecated
import com.intellij.codeInspection.ProblemHighlightType.LIKE_DEPRECATED
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.dotty.codeInspection.deprecated.WithTypeInspection._
import org.jetbrains.plugins.dotty.lang.psi.impl.base.types.DottyAndTypeElementImpl
import org.jetbrains.plugins.scala.codeInspection.{AbstractFixOnPsiElement, AbstractInspection, InspectionBundle}
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes.{kWITH, tAND}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createElement
/**
* @author adkozlov
*/
// Flags every `with` keyword inside a Dotty intersection type element as
// deprecated and registers a quick fix replacing it with `&`.
class WithTypeInspection extends AbstractInspection(id, name) {
  override def actionFor(holder: ProblemsHolder): PartialFunction[PsiElement, Unit] = {
    case typeElement: DottyAndTypeElementImpl =>
      typeElement.findChildrenByType(kWITH).foreach { token =>
        holder.registerProblem(token, message, LIKE_DEPRECATED, new ReplaceWithTypeQuickFix(token))
      }
  }
}
// Quick fix: replaces the deprecated `with` token by `&`.
class ReplaceWithTypeQuickFix(token: PsiElement) extends AbstractFixOnPsiElement(name, token) {
  // The previous implementation matched `getElement` with a single guarded case
  // and threw a MatchError when the PSI element was no longer valid (e.g. the
  // file changed before the fix was applied). Now it is a safe no-op instead.
  override def doApplyFix(project: Project): Unit = {
    val element = getElement
    if (element != null && element.isValid)
      element.replace(createElement(tAND.toString, _ => {})(element.getManager))
  }
}
// Inspection metadata: id, display name, and the problem text shown in the editor.
object WithTypeInspection {
  private[codeInspection] val id = "WithTypeDeprecated"
  private[codeInspection] val name = InspectionBundle.message("replace.with.ampersand")
  private[codeInspection] val message = s"With type is deprecated in Dotty. $name"
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/dotty/codeInspection/deprecated/WithTypeInspection.scala | Scala | apache-2.0 | 1,667 |
package com.imaginea.activegrid.core.models
import com.typesafe.scalalogging.Logger
import org.elasticsearch.common.settings.ImmutableSettings
import org.elasticsearch.index.query.{BoolQueryBuilder, QueryBuilders}
import org.elasticsearch.node.NodeBuilder
import org.slf4j.LoggerFactory
import spray.json.RootJsonFormat
import scala.collection.JavaConversions._
/**
* Created by nagulmeeras on 23/11/16.
*/
object EsManager {
val settings = ImmutableSettings.settingsBuilder()
.put("cluster.name", "elasticsearch")
.put("node.name", this.getClass.getProtectionDomain.getCodeSource.getLocation.toURI.getPath + "/config/names.txt")
.build()
val mayBeNode = Option(NodeBuilder.nodeBuilder().local(false).settings(settings).build())
.flatMap(node => Option(node.start()))
val mayBeClient = mayBeNode.flatMap(node => Option(node.client()))
val searchHitsSize = 1000
val logger = Logger(LoggerFactory.getLogger(getClass.getName))
def shutDown(): Unit = {
mayBeNode match {
case Some(node) => node.stop().close()
case None =>
logger.warn("Unable to load the node")
throw new Exception("Unable to load the node")
}
}
def indexEntity[T <: BaseEntity](entity: T, index: String, indexType: String)(implicit formateObj: RootJsonFormat[T]): Unit = {
entity.id.foreach { id =>
Option(formateObj.write(entity)).foreach { jsValue =>
mayBeClient.foreach { client =>
Option(client.prepareIndex(index.toLowerCase, indexType, id.toString))
.flatMap(indexRequest => Option(indexRequest.setSource(jsValue.toString())))
.flatMap(indexRequest => Option(indexRequest.execute()))
.foreach(actionFuture => actionFuture.actionGet())
}
}
}
}
def boolQuery(esSearchQuery: EsSearchQuery): List[EsSearchResponse] = {
val index = esSearchQuery.index.toLowerCase
mayBeClient match {
case Some(client) => val searchRequest = client.prepareSearch(index)
searchRequest.addField(esSearchQuery.outputField)
esSearchQuery.types.foreach(searchType => searchRequest.setTypes(searchType))
val query = new BoolQueryBuilder
esSearchQuery.queryFields.foreach {
queryField =>
val qsQuery = QueryBuilders.queryString(addWildCard(queryField.value))
qsQuery.field(addWildCard(queryField.key))
qsQuery.analyzeWildcard(true)
esSearchQuery.queryType match {
case EsQueryType.OR =>
query.should(qsQuery)
case EsQueryType.AND =>
query.must(qsQuery)
}
}
searchRequest.setQuery(query)
searchRequest.setSize(searchHitsSize)
val response = searchRequest.execute.actionGet
response.getHits.toList.map {
hit =>
EsSearchResponse(hit.getType, esSearchQuery.outputField, hit.getId)
}
case None =>
logger.warn("Unable to load the client")
throw new Exception("Unable to load the client")
}
}
def addWildCard(queryString: String): String = {
if ("_all".equals(queryString)) {
queryString
} else {
val wildCardStr = new StringBuilder
if (!queryString.startsWith("*")) wildCardStr.append("*").append(queryString)
if (!queryString.endsWith("*")) wildCardStr.append(queryString).append("*")
wildCardStr.toString
}
}
def fieldMappings(indexName: String, mappingType: String): Set[String] = {
val index = indexName.toLowerCase
mayBeClient match {
case Some(client) =>
val mayBeClusterState = Option(client.admin())
.flatMap(admin => Option(admin.cluster()))
.flatMap(cluster => Option(cluster.prepareState()))
.flatMap(state => Option(state.setFilterIndices(index)))
.flatMap(filter => Option(filter.execute()))
.flatMap(execution => Option(execution.actionGet()))
.flatMap(action => Option(action.getState))
val mayBeMapping = mayBeClusterState
.flatMap(clusterState => Option(clusterState.getMetaData))
.flatMap(metaData => Option(metaData.index(index)))
.flatMap(index => Option(index.mapping(mappingType)))
mayBeMapping match {
case Some(mapping) => flattenKeys(mapping.getSourceAsMap, mappingType)
case None =>
logger.warn(s"No mappings found with $mappingType")
Set.empty[String]
}
case None =>
logger.warn("Unable to load the client")
throw new Exception("Unable to load the client")
}
}
  /**
   * Recursively flattens an Elasticsearch mapping source into dotted field paths,
   * starting from `parent` (initially the mapping type name).
   *
   * NOTE: `map.contains(...)`, `map(...)` and `.flatMap` on a java.util.Map rely on
   * an implicit Scala collection conversion imported elsewhere in this file — confirm.
   *
   * @param map    the (possibly nested) mapping source as returned by getSourceAsMap
   * @param parent the dotted path accumulated so far
   * @return the set of flattened field paths
   */
  def flattenKeys(map: java.util.Map[String, AnyRef], parent: String): Set[String] = {
    if (map.contains("properties")) {
      val properties = map("properties").asInstanceOf[java.util.Map[String, AnyRef]]
      properties.flatMap {
        case (key, value) =>
          val interMap = value.asInstanceOf[java.util.Map[String, AnyRef]]
          if (interMap.contains("dynamic")) {
            // Nested object field: recurse with the child key appended to the path.
            flattenKeys(interMap, parent + "." + key)
          } else {
            // NOTE(review): the leaf field name `key` is dropped here; this looks
            // like it may have been intended as Set(parent + "." + key) — confirm.
            Set(parent)
          }
      }.toSet
    } else {
      // No "properties" node: this level is a leaf, return the accumulated path.
      Set(parent)
    }
  }
} | eklavya/activeGrid | src/main/scala/com/imaginea/activegrid/core/models/EsManager.scala | Scala | apache-2.0 | 5,174 |
package org.broadinstitute.dsde.firecloud
import org.broadinstitute.dsde.rawls.model.ErrorReport
/** Base exception for FireCloud orchestration errors; mirrors Exception's (message, cause) constructor, both defaulting to null. */
class FireCloudException(message: String = null, cause: Throwable = null) extends Exception(message, cause)
/** FireCloud exception carrying a structured Rawls [[ErrorReport]]; the report's toString becomes the exception message. */
class FireCloudExceptionWithErrorReport(val errorReport: ErrorReport) extends FireCloudException(errorReport.toString)
| broadinstitute/firecloud-orchestration | src/main/scala/org/broadinstitute/dsde/firecloud/FireCloudException.scala | Scala | bsd-3-clause | 327 |
package ml.wolfe.nlp.converters
import edu.arizona.sista.processors.{CorefChains => SISTACorefChains, Document => SISTADocument, Sentence => SISTASent}
import edu.arizona.sista.struct.{Tree => SistaTree}
import ml.wolfe.nlp._
import ml.wolfe.nlp.ie.{CorefAnnotation, EntityMention, CorefMention}
import ml.wolfe.nlp.syntax._
import scala.collection.mutable.ArrayBuffer
/**
* Conversion code for the SISTA processor package.
* @author Sebastian Riedel
*/
object SISTAConverter {
def toWolfeCoreference(chains: SISTACorefChains): Seq[CorefMention] = {
val ret = for ((chain, clusterID) <- chains.getChains.zipWithIndex; mention <- chain) yield {
CorefMention(clusterID, mention.sentenceIndex, mention.startOffset, mention.endOffset, mention.headIndex)
}
ret.toSeq
}
def toWolfeSentence(sentence: SISTASent): Sentence = {
val tokens = for (i <- 0 until sentence.size) yield toWolfeToken(i, sentence)
Sentence(tokens)
}
  /**
   * Builds the Wolfe [[Token]] at position `index` of a SISTA sentence.
   * SISTA stores each annotation layer (tags, lemmas, ...) as an optional parallel array.
   */
  def toWolfeToken(index: Int, sent: SISTASent): Token = {
    // Reads position `index` from an optional parallel annotation array.
    def asOption[T](array:Option[Array[T]]):Option[T] = array.map(_.view.apply(index))
    // Same, but yields null when the whole annotation layer is absent —
    // Token evidently tolerates null for these fields (deliberate here).
    def asNull[T <: AnyRef](array:Option[Array[T]]):T = asOption(array).getOrElse(null.asInstanceOf[T])
    Token(
      word = sent.words(index),
      offsets = CharOffsets(sent.startOffsets(index),sent.endOffsets(index)),
      posTag = asNull(sent.tags),
      lemma = asNull(sent.lemmas)
    )
  }
  /**
   * Converts a SISTA sentence into a fully-annotated Wolfe [[Sentence]]:
   * tokens plus constituency tree, dependency tree, and entity mentions.
   */
  def toFullWolfeSentence(sent: SISTASent): Sentence = {
    val tokens = for (i <- 0 until sent.size) yield toWolfeToken(i, sent)
    val ctree = toWolfeConstituentTree(sent)
    val dtree = toWolfeDependencyTree(sent)
    val entities = toWolfeEntities(sent)
    Sentence(tokens,
      ie = new IEAnnotation(entityMentions = Some(entities)),
      syntax = new SyntaxAnnotation(constituency = Some(ctree), dependencies = Some(dtree)))
  }
  /**
   * Converts a whole SISTA document into a Wolfe [[Document]].
   *
   * @param doc  the SISTA document (sentences plus optional coreference chains)
   * @param text the original raw text; defaults to empty when unavailable
   */
  def sistaToWolfeDocument(doc: SISTADocument, text: String = ""): Document = {
    val sentences = doc.sentences map toFullWolfeSentence
    val corefSeq = doc.coreferenceChains.map(toWolfeCoreference(_).toArray)
    // Coref is optional in SISTA; fall back to an empty annotation when absent.
    Document(text, sentences, coref = corefSeq.map(CorefAnnotation(_)).getOrElse(CorefAnnotation.empty))
  }
def toWolfeConstituentTree(sent: SISTASent): ConstituentTree = {
sent.syntacticTree match {
case Some(tree) => treeToTree(tree)
case _=> ConstituentTree.empty
}
}
def toWolfeDependencyTree(sent: SISTASent): DependencyTree = {
sent.dependencies match {
case Some(dependencies) => {
val tokens = for (i <- 0 until sent.size) yield toWolfeToken(i, sent)
new DependencyTree(tokens, sent.dependencies.get.outgoingEdges.zipWithIndex.flatMap {
case(x, i) => x.map { y => Arc(i, y._1, Some(y._2)) }
})
}
case None => DependencyTree.empty
}
}
  /**
   * Recursively converts a SISTA constituency tree into a Wolfe [[ConstituentTree]],
   * assigning token spans: `leftMost` is the index of the first token covered by
   * this subtree. Preterminals span exactly one token.
   *
   * NOTE(review): `tree.children.get` assumes non-leaf SISTA nodes always carry
   * children — would throw on a malformed tree; confirm against SISTA's invariants.
   */
  def treeToTree(tree: SistaTree, leftMost: Int = 0): ConstituentTree = {
    if (tree.isPreTerminal) {
      new ConstituentTree(new PreterminalNode(start = leftMost, end = leftMost + 1, label = tree.value, word = tree.children.get.head.value))
    }
    else {
      // Thread the running token offset left-to-right through the children:
      // each child starts where the previous one ended.
      var tmpLeftMost = leftMost
      val children = tree.children.get.map { t =>
        val child = treeToTree(t, leftMost = tmpLeftMost)
        tmpLeftMost = child.end
        child
      }
      val rightMost = children.last.end
      new ConstituentTree(new NonterminalNode(start = leftMost, end = rightMost, label = tree.value),
        children = children.toList)
    }
  }
  /**
   * Converts the sentence's per-token entity labels (IO-style, "O" = outside)
   * into [[EntityMention]] spans by scanning for label-change boundaries.
   * Returns an empty sequence when no entity layer is present.
   */
  def toWolfeEntities(sent: SISTASent): IndexedSeq[EntityMention] = {
    sent.entities match {
      case Some(entities) => {
        var lastIndex = -1        // start index of the mention currently being built
        var lastSymbol = "O"      // label seen at the previous token
        val stored = new ArrayBuffer[EntityMention]
        entities.zipWithIndex.foreach { case(label, idx) =>
          // Entity starting at the very first token.
          if (idx == 0 && label != "O") lastIndex = 0
          // Label change while inside a mention: close it with exclusive end `idx`.
          else if (label != lastSymbol && lastSymbol != "O") {
            stored += new EntityMention(lastSymbol, lastIndex, idx)
            if (label != "O") lastIndex = idx
          }
          // Transition from outside into a new mention.
          else if (label != lastSymbol && lastSymbol == "O") {
            lastIndex = idx
          }
          // Mention running to the end of the sentence.
          // NOTE(review): this stores end = idx while the mid-sentence case stores
          // the exclusive end `idx`; the final mention's end may be one short
          // (idx + 1 expected) — confirm against EntityMention's span convention.
          else if (label != "O" && idx+1 == entities.size) {
            stored += new EntityMention(label, lastIndex, idx)
          }
          lastSymbol = label
        }
        stored.toIndexedSeq
      }
      case _ => IndexedSeq()
    }
  }
}
| wolfe-pack/wolfe | wolfe-nlp/src/main/scala/ml/wolfe/nlp/converters/SISTAConverter.scala | Scala | apache-2.0 | 4,371 |
package de.htwg.zeta.generatorControl.start
import java.util.concurrent.TimeUnit
import scala.concurrent.duration.Duration
import scala.concurrent.duration.FiniteDuration
import scala.language.implicitConversions
import akka.actor.ActorSystem
import akka.actor.PoisonPill
import akka.actor.Props
import akka.cluster.singleton.ClusterSingletonManager
import akka.cluster.singleton.ClusterSingletonManagerSettings
import akka.persistence.journal.leveldb.SharedLeveldbStore
import de.htwg.zeta.common.cluster.ClusterManager
import de.htwg.zeta.generatorControl.actors.master.Master
import org.rogach.scallop.ScallopOption
import play.api.libs.ws.ahc.AhcWSClient
/**
*/
/**
 * Boots a cluster-singleton Master actor for the generator-control backend.
 * Creates the actor system, optionally hosts the shared LevelDB journal, and
 * points the system at the journal actor.
 */
class MasterStarter(config: MasterConfig) extends Starter {
  def start(): Unit = {
    debug(MasterStarter.LogStart.format(config.toString))
    // NOTE(review): the second master sleeps a fixed 10s, presumably to let the
    // first master come up and host the shared journal first — confirm; a fixed
    // sleep is fragile coordination.
    if (config.num == 2) {
      Thread.sleep(MasterStarter.MilliSecToWaitForFirstMaster)
    }
    val system = createActor(config)
    startSharedLevelDbOnFirstMaster(config, system)
    val journalAddress = ClusterManager.getJournalPath(config.port, config.seeds)
    debug(MasterStarter.LogJournalAddress.format(journalAddress.toString))
    setSharedJournal(system, journalAddress)
  }
  /** Creates the actor system and starts the Master as a cluster singleton on the backend role. */
  private def createActor(config: MasterConfig) = {
    implicit val system = createActorSystem(MasterStarter.ActorRole, config.seeds, config.port)
    //implicit val mat = ActorMaterializer()
    implicit val client = AhcWSClient()
    system.actorOf(
      ClusterSingletonManager.props(
        Master.props(MasterStarter.WorkerTimeout, MasterStarter.SessionDuration),
        PoisonPill,
        ClusterSingletonManagerSettings(system).withRole(MasterStarter.ActorRole)
      ),
      MasterStarter.ActorName
    )
    system
  }
  /** The first master (no seed nodes configured) hosts the shared LevelDB journal store. */
  private def startSharedLevelDbOnFirstMaster(config: MasterConfig, system: ActorSystem) = {
    if (config.seeds.isEmpty) {
      system.actorOf(Props[SharedLeveldbStore], MasterStarter.ActorSharedLevelDbStore)
    }
  }
}
/** Constants and factory for [[MasterStarter]]. */
object MasterStarter {
  // Actor names / cluster role used when wiring up the singleton Master.
  val ActorName = "master"
  val ActorRole = "backend"
  val ActorSharedLevelDbStore = "store"
  // Log message templates (used with String.format).
  val LogJournalAddress = "Journal Address : %s"
  val LogStart = "Start master actor: %s"
  // How long the second master waits for the first one to come up (ms).
  val MilliSecToWaitForFirstMaster = 10000
  /**
   * This time specify how long the session is, to access the database in a docker container (to execute generator, filter, etc..)
   * Note: This time should be longer as the workTimeout, because if workTimeout was reached, the system should be
   * able to store the log of the docker container
   */
  val SessionDuration: FiniteDuration = Starter.WorkTimeout.plus(Duration(2, TimeUnit.MINUTES))
  /**
   * The time after which a worker will be marked as unreachable
   */
  val WorkerTimeout: FiniteDuration = Duration(5, TimeUnit.MINUTES)
  /**
   * Builds a [[MasterStarter]] from command-line options; yields None when any of
   * port / seeds / num is missing (the ScallopOption for-comprehension short-circuits).
   */
  def apply(cmd: Commands): Option[MasterStarter] = {
    val config: ScallopOption[MasterConfig] = for {
      port <- cmd.masterPort
      seeds <- cmd.masterSeeds
      num <- cmd.masterNum
    } yield {
      MasterConfig(port, seeds, num)
    }
    config.toOption.map(new MasterStarter(_))
  }
}
/** Master startup configuration: cluster port, seed-node addresses, and this master's ordinal number. */
case class MasterConfig(port: Int, seeds: List[String], num: Int)
| Zeta-Project/zeta | api/generatorControl/src/main/scala/de/htwg/zeta/generatorControl/start/MasterStarter.scala | Scala | bsd-2-clause | 3,145 |
package org.bitcoins.crypto
import org.bitcoins.protocol.script.{P2SHScriptSignature, P2SHScriptPubKey, ScriptPubKey}
import org.bitcoins.protocol.transaction.Transaction
import org.bitcoins.script.flag.ScriptFlag
/**
* Created by chris on 4/6/16.
*/
/** Factory for [[TransactionSignatureComponent]]s — the bundle of data needed to hash a transaction for signing. */
trait TransactionSignatureComponentFactory {
  // Private concrete carrier for the trait's fields; only constructed via the factory methods below.
  private sealed case class TransactionSignatureComponentImpl(transaction : Transaction, inputIndex : Int,
    scriptPubKey : ScriptPubKey, flags : Seq[ScriptFlag]) extends TransactionSignatureComponent
  /**
   * Builds a signature component from its parts.
   * @param transaction the transaction being signed
   * @param inputIndex the index of the input whose signature is being checked
   * @param scriptPubKey the script the input is spending
   * @param flags script verification flags in effect
   * @return the assembled [[TransactionSignatureComponent]]
   */
  def factory(transaction : Transaction, inputIndex : Int, scriptPubKey : ScriptPubKey,
    flags : Seq[ScriptFlag]) : TransactionSignatureComponent = {
    TransactionSignatureComponentImpl(transaction,inputIndex,scriptPubKey, flags)
  }
  /**
   * This factory method is used for changing the scriptPubKey inside of a txSignatureComponent
   * @param oldTxSignatureComponent the component whose transaction, input index and flags are reused
   * @param scriptPubKey the replacement script
   * @return a copy of the component with only the scriptPubKey swapped
   */
  def factory(oldTxSignatureComponent : TransactionSignatureComponent, scriptPubKey : ScriptPubKey) : TransactionSignatureComponent = {
    TransactionSignatureComponentImpl(oldTxSignatureComponent.transaction,
      oldTxSignatureComponent.inputIndex,scriptPubKey, oldTxSignatureComponent.flags)
  }
}
object TransactionSignatureComponentFactory extends TransactionSignatureComponentFactory | Christewart/scalacoin | src/main/scala/org/bitcoins/crypto/TransactionSignatureComponentFactory.scala | Scala | mit | 1,331 |
/*
* Copyright 2016 Guy Van den Broeck and Wannes Meert (UCLA and KU Leuven)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.ucla.cs.starai.forclift.bugs
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import org.scalatest.Matchers
import org.scalatest.Spec
import edu.ucla.cs.starai.forclift._
import edu.ucla.cs.starai.forclift.examples.models._
import edu.ucla.cs.starai.forclift.inference.AllMarginalsExact
@RunWith(classOf[JUnitRunner])
class TestBug21 extends ModelBehaviours {
describe("Bug21Model subdomains") {
// val correctLogWMC = math.log(math.pow(math.pow(2,n)-1,n)) +- 0.00001
def model = new MLNModel {
def theoryString = s"""
person = {male, female}
male = 2 {}
female = 3 {}
friends(person,person)
friends(x,y), x in male, y in female.
1 friends(x,y)
"""
}
it("An exception should be thrown when trying to compute all marginals") {
intercept[IllegalArgumentException] {
val allmarginals = new AllMarginalsExact(true)
allmarginals.computeAllMarginals(model.theory)
}
}
}
}
| UCLA-StarAI/Forclift | src/test/scala/edu/ucla/cs/starai/forclift/bugs/TestBug21.scala | Scala | apache-2.0 | 1,606 |
class InputScl4081 {
def foo {
val validations = List(0)
val bar = 1
val as = /*start*/validations.size + bar/*end*/
as
}
}
/*
class InputScl4081 {
def foo {
val validations = List(0)
val bar = 1
val as = /*start*/testMethodName(bar, validations)/*end*/
as
}
def testMethodName(bar: Int, validations: List[Int]): Int = {
validations.size + bar
}
}
*/ | LPTK/intellij-scala | testdata/extractMethod/input/InputScl4081.scala | Scala | apache-2.0 | 399 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.ui
import java.util.Properties
import scala.collection.mutable.ListBuffer
import org.json4s.jackson.JsonMethods._
import org.scalatest.BeforeAndAfter
import org.apache.spark._
import org.apache.spark.LocalSparkContext._
import org.apache.spark.executor.ExecutorMetrics
import org.apache.spark.internal.config
import org.apache.spark.internal.config.Status._
import org.apache.spark.rdd.RDD
import org.apache.spark.resource.ResourceProfile
import org.apache.spark.scheduler._
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
import org.apache.spark.sql.catalyst.util.quietly
import org.apache.spark.sql.connector.{CustomMetric, CustomTaskMetric, RangeInputPartition, SimpleScanBuilder}
import org.apache.spark.sql.connector.read.{InputPartition, PartitionReader, PartitionReaderFactory}
import org.apache.spark.sql.execution.{LeafExecNode, QueryExecution, SparkPlanInfo, SQLExecution}
import org.apache.spark.sql.execution.adaptive.DisableAdaptiveExecution
import org.apache.spark.sql.execution.datasources.v2.BatchScanExec
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
import org.apache.spark.sql.functions.count
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.StaticSQLConf.UI_RETAINED_EXECUTIONS
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.StructType
import org.apache.spark.status.ElementTrackingStore
import org.apache.spark.util.{AccumulatorMetadata, JsonProtocol, LongAccumulator}
import org.apache.spark.util.kvstore.InMemoryStore
class SQLAppStatusListenerSuite extends SharedSparkSession with JsonTestUtils
with BeforeAndAfter {
import testImplicits._
  // Force synchronous status tracking for deterministic assertions:
  // live update period 0 and async tracking disabled (per the config keys set here —
  // exact semantics defined in org.apache.spark.internal.config.Status).
  override protected def sparkConf = {
    super.sparkConf.set(LIVE_ENTITY_UPDATE_PERIOD, 0L).set(ASYNC_TRACKING_ENABLED, false)
  }
private var kvstore: ElementTrackingStore = _
after {
if (kvstore != null) {
kvstore.close()
kvstore = null
}
}
  // Tiny two-row DataFrame with a filter, so the resulting physical plan has
  // more than one node (and thus multiple SQL metrics) for the listener tests.
  private def createTestDataFrame: DataFrame = {
    Seq(
      (1, 1),
      (2, 2)
    ).toDF().filter("_1 > 1")
  }
private def createProperties(executionId: Long): Properties = {
val properties = new Properties()
properties.setProperty(SQLExecution.EXECUTION_ID_KEY, executionId.toString)
properties
}
  // Minimal StageInfo fixture: only stageId/attemptId/numTasks matter to the
  // listener under test; the remaining fields are placeholders.
  private def createStageInfo(stageId: Int, attemptId: Int): StageInfo = {
    new StageInfo(stageId = stageId,
      attemptId = attemptId,
      numTasks = 8,
      // The following fields are not used in tests
      name = "",
      rddInfos = Nil,
      parentIds = Nil,
      details = "",
      resourceProfileId = ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID)
  }
  /**
   * Minimal finished-task fixture. The task is marked FINISHED and carries the
   * given accumulator id -> value updates (as AccumulableInfos).
   */
  private def createTaskInfo(
      taskId: Int,
      attemptNumber: Int,
      accums: Map[Long, Long] = Map.empty): TaskInfo = {
    val info = new TaskInfo(
      taskId = taskId,
      attemptNumber = attemptNumber,
      index = taskId.toInt,
      // The following fields are not used in tests
      launchTime = 0,
      executorId = "",
      host = "",
      taskLocality = null,
      speculative = false)
    info.markFinished(TaskState.FINISHED, 1L)
    info.setAccumulables(createAccumulatorInfos(accums))
    info
  }
  // Wraps raw (accumulator id -> value) pairs as AccumulableInfos, the shape the
  // listener receives from task events. All accumulators are modeled as Long.
  private def createAccumulatorInfos(accumulatorUpdates: Map[Long, Long]): Seq[AccumulableInfo] = {
    accumulatorUpdates.map { case (id, value) =>
      val acc = new LongAccumulator
      // Register the desired id manually (no name, not counting failed values).
      acc.metadata = AccumulatorMetadata(id, None, false)
      acc.toInfo(Some(value), None)
    }.toSeq
  }
private def assertJobs(
exec: Option[SQLExecutionUIData],
running: Seq[Int] = Nil,
completed: Seq[Int] = Nil,
failed: Seq[Int] = Nil): Unit = {
val actualRunning = new ListBuffer[Int]()
val actualCompleted = new ListBuffer[Int]()
val actualFailed = new ListBuffer[Int]()
exec.get.jobs.foreach { case (jobId, jobStatus) =>
jobStatus match {
case JobExecutionStatus.RUNNING => actualRunning += jobId
case JobExecutionStatus.SUCCEEDED => actualCompleted += jobId
case JobExecutionStatus.FAILED => actualFailed += jobId
case _ => fail(s"Unexpected status $jobStatus")
}
}
assert(actualRunning.sorted === running)
assert(actualCompleted.sorted === completed)
assert(actualFailed.sorted === failed)
}
  // Builds a fresh in-memory status store with a live listener. The store is
  // kept in the `kvstore` field so the `after` hook can close it between tests.
  private def createStatusStore(): SQLAppStatusStore = {
    val conf = sparkContext.conf
    kvstore = new ElementTrackingStore(new InMemoryStore, conf)
    val listener = new SQLAppStatusListener(conf, kvstore, live = true)
    new SQLAppStatusStore(kvstore, Some(listener))
  }
test("basic") {
def checkAnswer(actual: Map[Long, String], expected: Map[Long, Long]): Unit = {
assert(actual.size == expected.size)
expected.foreach { case (id, value) =>
// The values in actual can be SQL metrics meaning that they contain additional formatting
// when converted to string. Verify that they start with the expected value.
assert(actual.contains(id))
val v = actual(id).trim
if (v.contains("\\n")) {
// The actual value can be "total (max, ...)\\n6 ms (5 ms, ...)".
assert(v.split("\\n")(1).startsWith(value.toString), s"Wrong value for accumulator $id")
} else {
assert(v.startsWith(value.toString), s"Wrong value for accumulator $id")
}
}
}
val statusStore = createStatusStore()
val listener = statusStore.listener.get
val executionId = 0
val df = createTestDataFrame
val accumulatorIds =
SparkPlanGraph(SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan))
.allNodes.flatMap(_.metrics.map(_.accumulatorId))
// Assume all accumulators are long
var accumulatorValue = 0L
val accumulatorUpdates = accumulatorIds.map { id =>
accumulatorValue += 1L
(id, accumulatorValue)
}.toMap
listener.onOtherEvent(SparkListenerSQLExecutionStart(
executionId,
"test",
"test",
df.queryExecution.toString,
SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan),
System.currentTimeMillis()))
listener.onJobStart(SparkListenerJobStart(
jobId = 0,
time = System.currentTimeMillis(),
stageInfos = Seq(
createStageInfo(0, 0),
createStageInfo(1, 0)
),
createProperties(executionId)))
listener.onStageSubmitted(SparkListenerStageSubmitted(createStageInfo(0, 0)))
listener.onTaskStart(SparkListenerTaskStart(0, 0, createTaskInfo(0, 0)))
listener.onTaskStart(SparkListenerTaskStart(0, 0, createTaskInfo(1, 0)))
assert(statusStore.executionMetrics(executionId).isEmpty)
listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate("", Seq(
// (task id, stage id, stage attempt, accum updates)
(0L, 0, 0, createAccumulatorInfos(accumulatorUpdates)),
(1L, 0, 0, createAccumulatorInfos(accumulatorUpdates))
)))
checkAnswer(statusStore.executionMetrics(executionId),
accumulatorUpdates.mapValues(_ * 2).toMap)
// Driver accumulator updates don't belong to this execution should be filtered and no
// exception will be thrown.
listener.onOtherEvent(SparkListenerDriverAccumUpdates(0, Seq((999L, 2L))))
checkAnswer(statusStore.executionMetrics(executionId),
accumulatorUpdates.mapValues(_ * 2).toMap)
listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate("", Seq(
// (task id, stage id, stage attempt, accum updates)
(0L, 0, 0, createAccumulatorInfos(accumulatorUpdates)),
(1L, 0, 0, createAccumulatorInfos(accumulatorUpdates.mapValues(_ * 2).toMap))
)))
checkAnswer(statusStore.executionMetrics(executionId),
accumulatorUpdates.mapValues(_ * 3).toMap)
// Retrying a stage should reset the metrics
listener.onStageSubmitted(SparkListenerStageSubmitted(createStageInfo(0, 1)))
listener.onTaskStart(SparkListenerTaskStart(0, 1, createTaskInfo(0, 0)))
listener.onTaskStart(SparkListenerTaskStart(0, 1, createTaskInfo(1, 0)))
listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate("", Seq(
// (task id, stage id, stage attempt, accum updates)
(0L, 0, 1, createAccumulatorInfos(accumulatorUpdates)),
(1L, 0, 1, createAccumulatorInfos(accumulatorUpdates))
)))
checkAnswer(statusStore.executionMetrics(executionId),
accumulatorUpdates.mapValues(_ * 2).toMap)
// Ignore the task end for the first attempt
listener.onTaskEnd(SparkListenerTaskEnd(
stageId = 0,
stageAttemptId = 0,
taskType = "",
reason = null,
createTaskInfo(0, 0, accums = accumulatorUpdates.mapValues(_ * 100).toMap),
new ExecutorMetrics,
null))
checkAnswer(statusStore.executionMetrics(executionId),
accumulatorUpdates.mapValues(_ * 2).toMap)
// Finish two tasks
listener.onTaskEnd(SparkListenerTaskEnd(
stageId = 0,
stageAttemptId = 1,
taskType = "",
reason = null,
createTaskInfo(0, 0, accums = accumulatorUpdates.mapValues(_ * 2).toMap),
new ExecutorMetrics,
null))
listener.onTaskEnd(SparkListenerTaskEnd(
stageId = 0,
stageAttemptId = 1,
taskType = "",
reason = null,
createTaskInfo(1, 0, accums = accumulatorUpdates.mapValues(_ * 3).toMap),
new ExecutorMetrics,
null))
checkAnswer(statusStore.executionMetrics(executionId),
accumulatorUpdates.mapValues(_ * 5).toMap)
// Summit a new stage
listener.onStageSubmitted(SparkListenerStageSubmitted(createStageInfo(1, 0)))
listener.onTaskStart(SparkListenerTaskStart(1, 0, createTaskInfo(0, 0)))
listener.onTaskStart(SparkListenerTaskStart(1, 0, createTaskInfo(1, 0)))
listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate("", Seq(
// (task id, stage id, stage attempt, accum updates)
(0L, 1, 0, createAccumulatorInfos(accumulatorUpdates)),
(1L, 1, 0, createAccumulatorInfos(accumulatorUpdates))
)))
checkAnswer(statusStore.executionMetrics(executionId),
accumulatorUpdates.mapValues(_ * 7).toMap)
// Finish two tasks
listener.onTaskEnd(SparkListenerTaskEnd(
stageId = 1,
stageAttemptId = 0,
taskType = "",
reason = null,
createTaskInfo(0, 0, accums = accumulatorUpdates.mapValues(_ * 3).toMap),
new ExecutorMetrics,
null))
listener.onTaskEnd(SparkListenerTaskEnd(
stageId = 1,
stageAttemptId = 0,
taskType = "",
reason = null,
createTaskInfo(1, 0, accums = accumulatorUpdates.mapValues(_ * 3).toMap),
new ExecutorMetrics,
null))
checkAnswer(statusStore.executionMetrics(executionId),
accumulatorUpdates.mapValues(_ * 11).toMap)
assertJobs(statusStore.execution(executionId), running = Seq(0))
listener.onJobEnd(SparkListenerJobEnd(
jobId = 0,
time = System.currentTimeMillis(),
JobSucceeded
))
listener.onOtherEvent(SparkListenerSQLExecutionEnd(
executionId, System.currentTimeMillis()))
assertJobs(statusStore.execution(executionId), completed = Seq(0))
checkAnswer(statusStore.executionMetrics(executionId),
accumulatorUpdates.mapValues(_ * 11).toMap)
}
test("control a plan explain mode in listeners via SQLConf") {
def checkPlanDescription(mode: String, expected: Seq[String]): Unit = {
var checkDone = false
val listener = new SparkListener {
override def onOtherEvent(event: SparkListenerEvent): Unit = {
event match {
case SparkListenerSQLExecutionStart(_, _, _, planDescription, _, _) =>
assert(expected.forall(planDescription.contains))
checkDone = true
case _ => // ignore other events
}
}
}
spark.sparkContext.addSparkListener(listener)
withSQLConf(SQLConf.UI_EXPLAIN_MODE.key -> mode) {
createTestDataFrame.collect()
try {
spark.sparkContext.listenerBus.waitUntilEmpty()
assert(checkDone)
} finally {
spark.sparkContext.removeSparkListener(listener)
}
}
}
Seq(("simple", Seq("== Physical Plan ==")),
("extended", Seq("== Parsed Logical Plan ==", "== Analyzed Logical Plan ==",
"== Optimized Logical Plan ==", "== Physical Plan ==")),
("codegen", Seq("WholeStageCodegen subtrees")),
("cost", Seq("== Optimized Logical Plan ==", "Statistics(sizeInBytes")),
("formatted", Seq("== Physical Plan ==", "Output", "Arguments"))).foreach {
case (mode, expected) =>
checkPlanDescription(mode, expected)
}
}
test("onExecutionEnd happens before onJobEnd(JobSucceeded)") {
val statusStore = createStatusStore()
val listener = statusStore.listener.get
val executionId = 0
val df = createTestDataFrame
listener.onOtherEvent(SparkListenerSQLExecutionStart(
executionId,
"test",
"test",
df.queryExecution.toString,
SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan),
System.currentTimeMillis()))
listener.onJobStart(SparkListenerJobStart(
jobId = 0,
time = System.currentTimeMillis(),
stageInfos = Nil,
createProperties(executionId)))
listener.onOtherEvent(SparkListenerSQLExecutionEnd(
executionId, System.currentTimeMillis()))
listener.onJobEnd(SparkListenerJobEnd(
jobId = 0,
time = System.currentTimeMillis(),
JobSucceeded
))
assertJobs(statusStore.execution(executionId), completed = Seq(0))
}
test("onExecutionEnd happens before multiple onJobEnd(JobSucceeded)s") {
val statusStore = createStatusStore()
val listener = statusStore.listener.get
val executionId = 0
val df = createTestDataFrame
listener.onOtherEvent(SparkListenerSQLExecutionStart(
executionId,
"test",
"test",
df.queryExecution.toString,
SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan),
System.currentTimeMillis()))
listener.onJobStart(SparkListenerJobStart(
jobId = 0,
time = System.currentTimeMillis(),
stageInfos = Nil,
createProperties(executionId)))
listener.onJobEnd(SparkListenerJobEnd(
jobId = 0,
time = System.currentTimeMillis(),
JobSucceeded
))
listener.onJobStart(SparkListenerJobStart(
jobId = 1,
time = System.currentTimeMillis(),
stageInfos = Nil,
createProperties(executionId)))
listener.onOtherEvent(SparkListenerSQLExecutionEnd(
executionId, System.currentTimeMillis()))
listener.onJobEnd(SparkListenerJobEnd(
jobId = 1,
time = System.currentTimeMillis(),
JobSucceeded
))
assertJobs(statusStore.execution(executionId), completed = Seq(0, 1))
}
test("onExecutionEnd happens before onJobEnd(JobFailed)") {
val statusStore = createStatusStore()
val listener = statusStore.listener.get
val executionId = 0
val df = createTestDataFrame
listener.onOtherEvent(SparkListenerSQLExecutionStart(
executionId,
"test",
"test",
df.queryExecution.toString,
SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan),
System.currentTimeMillis()))
listener.onJobStart(SparkListenerJobStart(
jobId = 0,
time = System.currentTimeMillis(),
stageInfos = Seq.empty,
createProperties(executionId)))
listener.onOtherEvent(SparkListenerSQLExecutionEnd(
executionId, System.currentTimeMillis()))
listener.onJobEnd(SparkListenerJobEnd(
jobId = 0,
time = System.currentTimeMillis(),
JobFailed(new RuntimeException("Oops"))
))
assertJobs(statusStore.execution(executionId), failed = Seq(0))
}
test("onJobStart happens after onExecutionEnd shouldn't overwrite kvstore") {
val statusStore = createStatusStore()
val listener = statusStore.listener.get
val executionId = 0
val df = createTestDataFrame
listener.onOtherEvent(SparkListenerSQLExecutionStart(
executionId,
"test",
"test",
df.queryExecution.toString,
SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan),
System.currentTimeMillis()))
listener.onOtherEvent(SparkListenerSQLExecutionEnd(
executionId, System.currentTimeMillis()))
listener.onJobStart(SparkListenerJobStart(
jobId = 0,
time = System.currentTimeMillis(),
stageInfos = Seq(createStageInfo(0, 0)),
createProperties(executionId)))
listener.onStageSubmitted(SparkListenerStageSubmitted(createStageInfo(0, 0)))
listener.onJobEnd(SparkListenerJobEnd(
jobId = 0,
time = System.currentTimeMillis(),
JobFailed(new RuntimeException("Oops"))))
assert(listener.noLiveData())
assert(statusStore.execution(executionId).get.completionTime.nonEmpty)
}
test("handle one execution with multiple jobs") {
val statusStore = createStatusStore()
val listener = statusStore.listener.get
val executionId = 0
val df = createTestDataFrame
listener.onOtherEvent(SparkListenerSQLExecutionStart(
executionId,
"test",
"test",
df.queryExecution.toString,
SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan),
System.currentTimeMillis()))
var stageId = 0
def twoStageJob(jobId: Int): Unit = {
val stages = Seq(stageId, stageId + 1).map { id => createStageInfo(id, 0)}
stageId += 2
listener.onJobStart(SparkListenerJobStart(
jobId = jobId,
time = System.currentTimeMillis(),
stageInfos = stages,
createProperties(executionId)))
stages.foreach { s =>
listener.onStageSubmitted(SparkListenerStageSubmitted(s))
listener.onStageCompleted(SparkListenerStageCompleted(s))
}
listener.onJobEnd(SparkListenerJobEnd(
jobId = jobId,
time = System.currentTimeMillis(),
JobSucceeded
))
}
// submit two jobs with the same executionId
twoStageJob(0)
twoStageJob(1)
listener.onOtherEvent(SparkListenerSQLExecutionEnd(
executionId, System.currentTimeMillis()))
assertJobs(statusStore.execution(0), completed = 0 to 1)
assert(statusStore.execution(0).get.stages === (0 to 3).toSet)
}
test("SPARK-11126: no memory leak when running non SQL jobs") {
val listener = spark.sharedState.statusStore.listener.get
// At the beginning of this test case, there should be no live data in the listener.
assert(listener.noLiveData())
spark.sparkContext.parallelize(1 to 10).foreach(i => ())
spark.sparkContext.listenerBus.waitUntilEmpty()
// Listener should ignore the non-SQL stages, as the stage data are only removed when SQL
// execution ends, which will not be triggered for non-SQL jobs.
assert(listener.noLiveData())
}
test("driver side SQL metrics") {
val statusStore = spark.sharedState.statusStore
val oldCount = statusStore.executionsList().size
val expectedAccumValue = 12345L
val expectedAccumValue2 = 54321L
val physicalPlan = MyPlan(sqlContext.sparkContext, expectedAccumValue, expectedAccumValue2)
val dummyQueryExecution = new QueryExecution(spark, LocalRelation()) {
override lazy val sparkPlan = physicalPlan
override lazy val executedPlan = physicalPlan
}
SQLExecution.withNewExecutionId(dummyQueryExecution) {
physicalPlan.execute().collect()
}
// Wait until the new execution is started and being tracked.
while (statusStore.executionsCount() < oldCount) {
Thread.sleep(100)
}
// Wait for listener to finish computing the metrics for the execution.
while (statusStore.executionsList().isEmpty ||
statusStore.executionsList().last.metricValues == null) {
Thread.sleep(100)
}
val execId = statusStore.executionsList().last.executionId
val metrics = statusStore.executionMetrics(execId)
val driverMetric = physicalPlan.metrics("dummy")
val driverMetric2 = physicalPlan.metrics("dummy2")
val expectedValue = SQLMetrics.stringValue(driverMetric.metricType,
Array(expectedAccumValue), Array.empty[Long])
val expectedValue2 = SQLMetrics.stringValue(driverMetric2.metricType,
Array(expectedAccumValue2), Array.empty[Long])
assert(metrics.contains(driverMetric.id))
assert(metrics(driverMetric.id) === expectedValue)
assert(metrics.contains(driverMetric2.id))
assert(metrics(driverMetric2.id) === expectedValue2)
}
test("roundtripping SparkListenerDriverAccumUpdates through JsonProtocol (SPARK-18462)") {
val event = SparkListenerDriverAccumUpdates(1L, Seq((2L, 3L)))
val json = JsonProtocol.sparkEventToJson(event)
assertValidDataInJson(json,
parse("""
|{
| "Event": "org.apache.spark.sql.execution.ui.SparkListenerDriverAccumUpdates",
| "executionId": 1,
| "accumUpdates": [[2,3]]
|}
""".stripMargin))
JsonProtocol.sparkEventFromJson(json) match {
case SparkListenerDriverAccumUpdates(executionId, accums) =>
assert(executionId == 1L)
accums.foreach { case (a, b) =>
assert(a == 2L)
assert(b == 3L)
}
}
// Test a case where the numbers in the JSON can only fit in longs:
val longJson = parse(
"""
|{
| "Event": "org.apache.spark.sql.execution.ui.SparkListenerDriverAccumUpdates",
| "executionId": 4294967294,
| "accumUpdates": [[4294967294,3]]
|}
""".stripMargin)
JsonProtocol.sparkEventFromJson(longJson) match {
case SparkListenerDriverAccumUpdates(executionId, accums) =>
assert(executionId == 4294967294L)
accums.foreach { case (a, b) =>
assert(a == 4294967294L)
assert(b == 3L)
}
}
}
// Eviction order must follow execution *completion* time, not execution id:
// with a retained-executions cap of 2, starting a third execution evicts the
// execution that finished first (id 2 here), not the one with the lowest id.
test("eviction should respect execution completion time") {
  // Cap the store at two retained executions so starting a third forces eviction.
  val conf = sparkContext.conf.clone().set(UI_RETAINED_EXECUTIONS.key, "2")
  kvstore = new ElementTrackingStore(new InMemoryStore, conf)
  val listener = new SQLAppStatusListener(conf, kvstore, live = true)
  val statusStore = new SQLAppStatusStore(kvstore, Some(listener))

  // Use a hand-advanced clock so event ordering is fully deterministic.
  var time = 0
  val df = createTestDataFrame
  // Start execution 1 and execution 2
  time += 1
  listener.onOtherEvent(SparkListenerSQLExecutionStart(
    1,
    "test",
    "test",
    df.queryExecution.toString,
    SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan),
    time))
  time += 1
  listener.onOtherEvent(SparkListenerSQLExecutionStart(
    2,
    "test",
    "test",
    df.queryExecution.toString,
    SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan),
    time))
  // Stop execution 2 before execution 1
  time += 1
  listener.onOtherEvent(SparkListenerSQLExecutionEnd(2, time))
  time += 1
  listener.onOtherEvent(SparkListenerSQLExecutionEnd(1, time))
  // Start execution 3 and execution 2 should be evicted.
  time += 1
  listener.onOtherEvent(SparkListenerSQLExecutionStart(
    3,
    "test",
    "test",
    df.queryExecution.toString,
    SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan),
    time))
  assert(statusStore.executionsCount === 2)
  // Execution 2 completed earliest, so it is the one that was dropped.
  assert(statusStore.execution(2) === None)
}
// SPARK-29894: the codegen stage id must be embedded in the SparkPlanInfo node
// name (e.g. "WholeStageCodegen (2)"). AQE is disabled because it applies the
// WSCG rule later, when running QueryStageExec.
test("SPARK-29894 test Codegen Stage Id in SparkPlanInfo",
  DisableAdaptiveExecution("WSCG rule is applied later in AQE")) {
  // with AQE on, the WholeStageCodegen rule is applied when running QueryStageExec.
  val df = createTestDataFrame.select(count("*"))
  val sparkPlanInfo = SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan)
  assert(sparkPlanInfo.nodeName === "WholeStageCodegen (2)")
}
// SPARK-32615 / SPARK-33016: when AQE replaces the plan mid-execution
// (SparkListenerSQLAdaptiveExecutionUpdate), metrics already collected for the
// old plan must be kept and metrics for the new plan appended, so the final
// aggregate covers both job 0 (old plan) and job 1 (new plan).
test("SPARK-32615,SPARK-33016: SQLMetrics validation after sparkPlanInfo updated in AQE") {
  val statusStore = createStatusStore()
  val listener = statusStore.listener.get
  val executionId = 0
  val df = createTestDataFrame
  // oldPlan SQLMetrics
  // SQLPlanMetric(duration,0,timing)
  // SQLPlanMetric(number of output rows,1,sum)
  // SQLPlanMetric(number of output rows,2,sum)
  val oldPlan = SparkPlanInfo.fromSparkPlan(df.queryExecution.executedPlan)
  val oldAccumulatorIds =
    SparkPlanGraph(oldPlan)
      .allNodes.flatMap(_.metrics.map(_.accumulatorId))
  // Drive the listener through a full job 0 lifecycle against the old plan.
  listener.onOtherEvent(SparkListenerSQLExecutionStart(
    executionId,
    "test",
    "test",
    df.queryExecution.toString,
    oldPlan,
    System.currentTimeMillis()))
  listener.onJobStart(SparkListenerJobStart(
    jobId = 0,
    time = System.currentTimeMillis(),
    stageInfos = Seq(createStageInfo(0, 0)),
    createProperties(executionId)))
  listener.onStageSubmitted(SparkListenerStageSubmitted(createStageInfo(0, 0)))
  listener.onTaskStart(SparkListenerTaskStart(0, 0, createTaskInfo(0, 0)))
  // No accumulator updates have arrived yet.
  assert(statusStore.executionMetrics(executionId).isEmpty)
  // update old metrics with Id 1 & 2, since 0 is timing metrics,
  // timing metrics has a complicated string presentation so we don't test it here.
  val oldMetricsValueMap = oldAccumulatorIds.sorted.tail.map(id => (id, 100L)).toMap
  listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate("", Seq(
    (0L, 0, 0, createAccumulatorInfos(oldMetricsValueMap))
  )))
  assert(statusStore.executionMetrics(executionId).size == 2)
  statusStore.executionMetrics(executionId).foreach { m =>
    assert(m._2 == "100")
  }
  listener.onTaskEnd(SparkListenerTaskEnd(
    stageId = 0,
    stageAttemptId = 0,
    taskType = "",
    reason = null,
    createTaskInfo(0, 0),
    new ExecutorMetrics,
    null))
  listener.onStageCompleted(SparkListenerStageCompleted(createStageInfo(0, 0)))
  listener.onJobEnd(SparkListenerJobEnd(
    jobId = 0,
    time = System.currentTimeMillis(),
    JobSucceeded
  ))
  // Build a second, different plan to stand in for the AQE re-optimized plan.
  val df2 = createTestDataFrame.filter("_2 > 2")
  // newPlan SQLMetrics
  // SQLPlanMetric(duration,3,timing)
  // SQLPlanMetric(number of output rows,4,sum)
  // SQLPlanMetric(number of output rows,5,sum)
  val newPlan = SparkPlanInfo.fromSparkPlan(df2.queryExecution.executedPlan)
  val newAccumulatorIds =
    SparkPlanGraph(newPlan)
      .allNodes.flatMap(_.metrics.map(_.accumulatorId))
  // Assume that AQE update sparkPlanInfo with newPlan
  // ExecutionMetrics will be appended using newPlan's SQLMetrics
  listener.onOtherEvent(SparkListenerSQLAdaptiveExecutionUpdate(
    executionId,
    "test",
    newPlan))
  listener.onJobStart(SparkListenerJobStart(
    jobId = 1,
    time = System.currentTimeMillis(),
    stageInfos = Seq(createStageInfo(1, 0)),
    createProperties(executionId)))
  listener.onStageSubmitted(SparkListenerStageSubmitted(createStageInfo(1, 0)))
  listener.onTaskStart(SparkListenerTaskStart(1, 0, createTaskInfo(0, 0)))
  // historical metrics will be kept despite of the newPlan updated.
  assert(statusStore.executionMetrics(executionId).size == 2)
  // update new metrics with Id 4 & 5, since 3 is timing metrics,
  // timing metrics has a complicated string presentation so we don't test it here.
  val newMetricsValueMap = newAccumulatorIds.sorted.tail.map(id => (id, 500L)).toMap
  listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate("", Seq(
    (0L, 1, 0, createAccumulatorInfos(newMetricsValueMap))
  )))
  // Two old metrics ("100") plus two new metrics ("500").
  assert(statusStore.executionMetrics(executionId).size == 4)
  statusStore.executionMetrics(executionId).foreach { m =>
    assert(m._2 == "100" || m._2 == "500")
  }
  listener.onTaskEnd(SparkListenerTaskEnd(
    stageId = 1,
    stageAttemptId = 0,
    taskType = "",
    reason = null,
    createTaskInfo(0, 0),
    new ExecutorMetrics,
    null))
  listener.onStageCompleted(SparkListenerStageCompleted(createStageInfo(1, 0)))
  listener.onJobEnd(SparkListenerJobEnd(
    jobId = 1,
    time = System.currentTimeMillis(),
    JobSucceeded
  ))
  // aggregateMetrics should contains all metrics from job 0 and job 1
  val aggregateMetrics = listener.liveExecutionMetrics(executionId)
  if (aggregateMetrics.isDefined) {
    assert(aggregateMetrics.get.keySet.size == 4)
  }
  listener.onOtherEvent(SparkListenerSQLExecutionEnd(
    executionId, System.currentTimeMillis()))
}
// SPARK-34338: custom metrics reported by a DSv2 scan must reach the status
// store. Each of the two partitions reports the constant 12345 (see
// CustomMetricReaderFactory), and SimpleCustomMetric joins them with ", ".
test("SPARK-34338: Report metrics from Datasource v2 scan") {
  val statusStore = spark.sharedState.statusStore
  val oldCount = statusStore.executionsList().size
  val schema = new StructType().add("i", "int").add("j", "int")
  val physicalPlan = BatchScanExec(schema.toAttributes, new CustomMetricScanBuilder())
  // Wrap the scan in a dummy QueryExecution so withNewExecutionId can track it.
  val dummyQueryExecution = new QueryExecution(spark, LocalRelation()) {
    override lazy val sparkPlan = physicalPlan
    override lazy val executedPlan = physicalPlan
  }
  SQLExecution.withNewExecutionId(dummyQueryExecution) {
    physicalPlan.execute().collect()
  }
  // Wait until the new execution is started and being tracked.
  // NOTE(review): `< oldCount` is satisfied immediately since the count never
  // decreases — presumably this was meant to be `< oldCount + 1`; confirm
  // whether the second wait loop below is what actually provides the sync.
  while (statusStore.executionsCount() < oldCount) {
    Thread.sleep(100)
  }
  // Wait for listener to finish computing the metrics for the execution.
  while (statusStore.executionsList().isEmpty ||
    statusStore.executionsList().last.metricValues == null) {
    Thread.sleep(100)
  }
  val execId = statusStore.executionsList().last.executionId
  val metrics = statusStore.executionMetrics(execId)
  val expectedMetric = physicalPlan.metrics("custom_metric")
  // Two partitions, each reporting 12345, aggregated by SimpleCustomMetric.
  val expectedValue = "custom_metric: 12345, 12345"
  assert(metrics.contains(expectedMetric.id))
  assert(metrics(expectedMetric.id) === expectedValue)
}
}
/**
* A dummy [[org.apache.spark.sql.execution.SparkPlan]] that updates a [[SQLMetrics]]
* on the driver.
*/
private case class MyPlan(sc: SparkContext, expectedValue: Long, expectedValue2: Long)
  extends LeafExecNode {
  override def sparkContext: SparkContext = sc
  // Produces no columns; this plan exists purely to post driver-side metrics.
  override def output: Seq[Attribute] = Seq()

  // Two driver-side SQLMetrics, looked up by name in doExecute().
  override val metrics: Map[String, SQLMetric] = Map(
    "dummy" -> SQLMetrics.createMetric(sc, "dummy"),
    "dummy2" -> SQLMetrics.createMetric(sc, "dummy2"))

  override def doExecute(): RDD[InternalRow] = {
    // Bump both metrics on the driver before posting the updates.
    longMetric("dummy") += expectedValue
    longMetric("dummy2") += expectedValue2
    // postDriverMetricUpdates may happen multiple time in a query.
    // (normally from different operators, but for the sake of testing, from one operator)
    SQLMetrics.postDriverMetricUpdates(
      sc,
      sc.getLocalProperty(SQLExecution.EXECUTION_ID_KEY),
      Seq(metrics("dummy")))
    SQLMetrics.postDriverMetricUpdates(
      sc,
      sc.getLocalProperty(SQLExecution.EXECUTION_ID_KEY),
      Seq(metrics("dummy2")))
    // No rows are produced; callers only observe the metric side effects.
    sc.emptyRDD
  }
}
/**
 * Stress-tests the SQL status listener for leaks: after many successful and
 * failed executions, the store must respect its retention cap and the listener
 * must hold no live (in-flight) data.
 */
class SQLAppStatusListenerMemoryLeakSuite extends SparkFunSuite {

  test("no memory leak") {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("test")
      .set(config.TASK_MAX_FAILURES, 1) // Don't retry the tasks to run this test quickly
      .set(UI_RETAINED_EXECUTIONS.key, "50") // Set it to 50 to run this test quickly
      .set(ASYNC_TRACKING_ENABLED, false)
    withSpark(new SparkContext(conf)) { sc =>
      quietly {
        val spark = new SparkSession(sc)
        import spark.implicits._
        // Run 100 successful executions and 100 failed executions.
        // Each execution only has one job and one stage.
        for (i <- 0 until 100) {
          val df = Seq(
            (1, 1),
            (2, 2)
          ).toDF()
          df.collect()
          try {
            // Force a failed execution alongside each successful one.
            df.foreach(_ => throw new RuntimeException("Oops"))
          } catch {
            case e: SparkException => // This is expected for a failed job
          }
        }
        // Drain the listener bus so all 200 executions have been processed.
        sc.listenerBus.waitUntilEmpty()
        val statusStore = spark.sharedState.statusStore
        assert(statusStore.executionsCount() <= 50)
        assert(statusStore.planGraphCount() <= 50)
        // No live data should be left behind after all executions end.
        assert(statusStore.listener.get.noLiveData())
      }
    }
  }
}
/**
 * Minimal [[CustomMetric]] used by the DSv2 metric tests: aggregation simply
 * lists every task's raw value after a fixed "custom_metric: " prefix.
 */
class SimpleCustomMetric extends CustomMetric {
  override def name(): String = "custom_metric"

  override def description(): String = "a simple custom metric"

  override def aggregateTaskMetrics(taskMetrics: Array[Long]): String = {
    val joined = taskMetrics.mkString(", ")
    s"custom_metric: $joined"
  }
}
// The followings are for custom metrics of V2 data source.
object CustomMetricReaderFactory extends PartitionReaderFactory {
  /** Builds a reader that yields rows (i, -i) for every i in [start, end). */
  override def createReader(partition: InputPartition): PartitionReader[InternalRow] = {
    val RangeInputPartition(start, end) = partition
    new PartitionReader[InternalRow] {
      // Start one below `start` so the first next() call lands on `start`.
      private var current = start - 1

      override def next(): Boolean = {
        current += 1
        current < end
      }

      override def get(): InternalRow = InternalRow(current, -current)

      override def close(): Unit = {}

      // Each task reports the constant 12345; the listener feeds these into
      // SimpleCustomMetric.aggregateTaskMetrics for display.
      override def currentMetricsValues(): Array[CustomTaskMetric] = {
        val metric = new CustomTaskMetric {
          override def name(): String = "custom_metric"
          override def value(): Long = 12345
        }
        Array(metric)
      }
    }
  }
}
/**
 * Scan builder for the DSv2 custom-metric tests: two fixed range partitions
 * ([0, 5) and [5, 10)), one supported custom metric, and a reader factory that
 * reports a constant task metric.
 */
class CustomMetricScanBuilder extends SimpleScanBuilder {
  override def planInputPartitions(): Array[InputPartition] =
    Array[InputPartition](
      RangeInputPartition(0, 5),
      RangeInputPartition(5, 10))

  override def supportedCustomMetrics(): Array[CustomMetric] =
    Array[CustomMetric](new SimpleCustomMetric)

  override def createReaderFactory(): PartitionReaderFactory =
    CustomMetricReaderFactory
}
| BryanCutler/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala | Scala | apache-2.0 | 34,612 |
package component
import core._
import Roles._
import akka.actor.{Actor, ActorRefFactory, ActorLogging, ActorRef, Props}
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model.headers.Accept
import akka.http.scaladsl.server.{Directive1, Route, RouteResult}
import akka.http.scaladsl.server.Directives._
import akka.pattern.ask
import akka.util.Timeout
import com.github.rjeschke.txtmark.Processor
import java.util.UUID
import org.joda.time.DateTime
import scala.concurrent.Future
import scala.concurrent.duration._
// Protocol messages for BlogService.
// UseModel(Some(ref)) switches the service onto a model actor; UseModel(None)
// detaches it again.
case class UseModel(modelActor: Option[ActorRef])
// Request the routing tree for the (possibly anonymous) user.
case class GetServiceRoute(optUser: Option[User])
// Request the hypermedia links for the (possibly anonymous) user.
case class GetServiceLinks(optUser: Option[User])
/**
 * Actor that serves the blog route once a model actor has been attached.
 *
 * States:
 *  - `receive` (initial): waits for `UseModel(Some(model))` to attach a model.
 *  - `process(model)`: answers `GetServiceRoute` with a route built over the
 *    model, and detaches on `UseModel(None)`.
 *
 * NOTE(review): `GetServiceLinks` is declared in this file but matched in
 * neither state, so it is logged as an unknown message — confirm whether that
 * is intentional.
 */
class BlogService(prefix: String) extends Actor with ActorLogging {
  import scala.concurrent.ExecutionContext.Implicits.global

  // Timeout applied to the `modelActor ? _` asks performed inside the route.
  implicit val timeout = Timeout(3.seconds)

  def receive = {
    case UseModel(Some(modelActor)) => context.become(process(modelActor))
    case msg => log.warning("Unknown message: {}", msg)
  }

  def process(modelActor: ActorRef): Receive = {
    case GetServiceRoute(optUser) =>
      // Build the route with the model exposed as an ask-based Model.Function.
      sender ! BlogDirectives.blogService(prefix, modelActor ? _)(optUser)
    case UseModel(None) => context.become(receive)
    case msg => log.warning("Unknown message: {}", msg)
  }
}
object BlogService {
  /** Creates a BlogService actor for `prefix` in the given actor factory. */
  def apply(prefix: String)(implicit factory: ActorRefFactory) =
    factory.actorOf(Props(new BlogService(prefix)))
}
/**
 * Routing and hypermedia-link directives for the blog resource.
 *
 * Routes handle the collection (`/blogs`), the new-item template
 * (`/blogs/new`), individual blogs (`/blogs/:blogId`) and nested comments
 * (`/blogs/:blogId/comments/...`). Access is gated per HTTP method via the
 * role checks in `Right`.
 */
object BlogDirectives extends CommentDirectives
  with CommonDirectives with BlogFormats with CommentFormats
{
  import scala.concurrent.ExecutionContext.Implicits.global

  // Entry point used by BlogService: builds the full route under `prefix`
  // for the given (possibly anonymous) user, backed by `modelFunction`.
  val blogService = (prefix: String, modelFunction: Model.Function) =>
    (optUser: Option[User]) => pathPrefix(prefix) {
      handleBlogs(modelFunction)(optUser)
    }

  // Collection endpoint: advertises links filtered by the user's roles, then
  // serves HEAD and the blog list; also dispatches to /new and /:blogId.
  def handleBlogs(modelFunction: Model.Function)(optUser: Option[User]) = pathEnd {
    // Methods the user may invoke on an individual item, by role.
    val itemMethods = Right.mapActions(optUser, Map(
      GET -> Authenticated, PUT -> RoleModifyAll, DELETE -> RoleDeleteAll))
    // Hypermedia links the user is allowed to see.
    val links = Right.mapActions(optUser, Map(
      blogListLink("self") -> Everybody,
      blogItemLink("item", methods = itemMethods) -> Authenticated,
      blogItemLink("new", "new") -> RoleAddNew))
    respondWithLinks(links:_*) {
      headComplete ~
      getList[Blog](modelFunction, Blog)()()
    }
  } ~
  handleNewBlogs(optUser) ~
  pathPrefix(Segment)(handleBlog(modelFunction)(optUser))

  // GET /blogs/new: returns an empty blog template with a fresh UUID, owned
  // by the current user, linking to the PUT endpoint that will create it.
  def handleNewBlogs(optUser: Option[User]) = path("new") {
    get {
      Right.checkRight(optUser, RoleAddNew) {
        val uuid = UUID.randomUUID.toString
        val accountId = optUser.flatMap(_.login).getOrElse("")
        respondWithLinks(blogItemLink("self", uuid, methods = List(PUT))) {
          complete(Blog(uuid, accountId, DateTime.now, "", ""))
        }
      }
    }
  }

  // Item endpoint: GET (optionally rendered markdown), PUT and DELETE with
  // owner-or-admin checks, plus the nested comments sub-resource.
  def handleBlog(modelFunction: Model.Function)(optUser: Option[User])(blogId: String) =
    pathEnd
    {
      getBlogDirective(modelFunction)(blogId) {
        case Some(blog: Blog) => {
          // Ownership check: the logged-in account wrote this blog.
          val isOwnBlog = CustomRight(
            () => optUser.flatMap(_.login).getOrElse("") == blog.accountId)
          val putRight = (RoleModifyAll or (RoleModifyOwn and isOwnBlog))
          val deleteRight = (RoleDeleteAll or (RoleDeleteOwn and isOwnBlog))
          val itemMethods = Right.mapActions(optUser, Map(
            GET -> Authenticated,
            PUT -> putRight,
            DELETE -> deleteRight))
          respondBlogLinks(blogId, itemMethods:_*) {
            headComplete ~
            Right.checkRight(optUser, Authenticated) {
              get {
                // forEdit=true returns the raw markdown source; otherwise the
                // note is rendered to HTML via txtmark before completion.
                parameters('forEdit ? false) { (forEdit: Boolean) =>
                  ctx => (modelFunction(GetEntity(blogId))) flatMap {
                    case Some(entity: Blog) => if (forEdit) {
                      ctx.complete(entity)
                    } else {
                      val blog = entity.copy(note = Processor.process(entity.note))
                      ctx.complete(blog)
                    }
                    case None => ctx.reject()
                  }
                }
              }
              //getEntity[Blog](modelBlog, blogId)
            } ~
            Right.checkRight(optUser, putRight) {
              putEntity[Blog](modelFunction, _.copy(id = blogId), blogId)()
            } ~
            Right.checkRight(optUser, deleteRight) {
              deleteEntity[Blog](modelFunction, blogId)()
            }
          }
        }
        case None =>
          // Unknown id: a user with the add-new role may create it via PUT.
          Right.checkRight(optUser, RoleAddNew) {
            respondBlogLinks(blogId, PUT) {
              headComplete ~
              putEntity[Blog](modelFunction, _.copy(id = blogId), blogId)()
            }
          }
      }
    } ~
    pathPrefix("comments") {
      blogLinks(optUser) { handleComments(optUser, blogId) } ~
      pathPrefix(Segment)(handleComment(blogId) _)
    }

  // Standard link header set attached to item responses.
  def respondBlogLinks(blogId: String, methods: HttpMethod*) = respondWithLinks(
    blogListLink("blogs"),
    blogItemLink("self", blogId, methods.toList),
    blogItemLink("edit", blogId + "?forEdit=true", List(GET)),
    commentListLink(blogId, "comments"),
    commentItemLink(blogId, "new", "new", List(GET))
  )

  // Looks up a blog by id and provides it (or None) to the inner route.
  def getBlogDirective(modelFunction: Model.Function)(blogId: String):
    Directive1[Option[Blog]] =
  {
    onSuccess(modelFunction(GetEntity(blogId))) flatMap {
      case Some(blog: Blog) => provide(Some(blog))
      case None => provide(None)
    }
  }

  // Link to the blog collection.
  def blogListLink(rel: String, methods: List[HttpMethod] = List(GET)) =
    collectionLink("/blogs", rel, "List Blogs",
      "title accountId date:date", methods:_*)

  // Link to a single blog item (defaults to the :blogId URI template).
  def blogItemLink(rel: String, blogId: String = ":blogId",
    methods: List[HttpMethod] = List(GET)) =
    mtLink(s"/blogs/$blogId", rel, `application/vnd.enpassant.blog+json`,
      methods:_*)

  // Collection link attached to comment sub-routes.
  def blogLinks(optUser: Option[User]) = {
    respondWithLinks(
      collectionLink("/blogs", "blogs", "List Blogs",
        "title accountId date:date", GET)
    )
  }
}
| enpassant/jeeves | src/main/scala/component/BlogDirectives.scala | Scala | apache-2.0 | 5,910 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package collection
/** View defined in terms of indexing a range */
/** A `SeqView` specialized for indexed sequences: every transformation below
  * returns an `IndexedSeqView` so constant-time `apply`/`length` are kept. */
trait IndexedSeqView[+A] extends IndexedSeqOps[A, View, View[A]] with SeqView[A] { self =>

  override def view: IndexedSeqView[A] = this

  @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0")
  override def view(from: Int, until: Int): IndexedSeqView[A] = view.slice(from, until)

  // Index-based iterators avoid building intermediate collections.
  override def iterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewIterator(this)
  override def reverseIterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewReverseIterator(this)

  // Each operation is overridden to return the indexed (not plain Seq) view wrapper.
  override def appended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Appended(this, elem)
  override def prepended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Prepended(elem, this)
  override def take(n: Int): IndexedSeqView[A] = new IndexedSeqView.Take(this, n)
  override def takeRight(n: Int): IndexedSeqView[A] = new IndexedSeqView.TakeRight(this, n)
  override def drop(n: Int): IndexedSeqView[A] = new IndexedSeqView.Drop(this, n)
  override def dropRight(n: Int): IndexedSeqView[A] = new IndexedSeqView.DropRight(this, n)
  override def map[B](f: A => B): IndexedSeqView[B] = new IndexedSeqView.Map(this, f)
  override def reverse: IndexedSeqView[A] = new IndexedSeqView.Reverse(this)
  override def slice(from: Int, until: Int): IndexedSeqView[A] = new IndexedSeqView.Slice(this, from, until)
  // tapEach is a Map whose function applies the side effect and returns the element.
  override def tapEach[U](f: A => U): IndexedSeqView[A] = new IndexedSeqView.Map(this, { (a: A) => f(a); a})

  def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix)
  def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix)
  def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(prefix, this)

  override protected[this] def stringPrefix: String = "IndexedSeqView"
}
object IndexedSeqView {

  /** Forward iterator over an indexed view: walks by index, tracking how many
    * elements remain so `knownSize` stays exact after `drop`/`slice`. */
  @SerialVersionUID(3L)
  private final class IndexedSeqViewIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable {
    private[this] var current = 0
    private[this] var remainder = self.size
    override def knownSize: Int = remainder
    def hasNext = remainder > 0
    def next(): A =
      if (hasNext) {
        val r = self.apply(current)
        current += 1
        remainder -= 1
        r
      } else Iterator.empty.next()

    // O(1) drop: just advance the index, clamping the remainder at zero.
    override def drop(n: Int): Iterator[A] = {
      if (n > 0) {
        current += n
        remainder = Math.max(0, remainder - n)
      }
      this
    }

    override protected def sliceIterator(from: Int, until: Int): Iterator[A] = {
      // Clamp both bounds into [0, remainder] before repositioning.
      def formatRange(value : Int) : Int = if (value < 0) 0 else if (value > remainder) remainder else value
      val formatFrom = formatRange(from)
      val formatUntil = formatRange(until)
      remainder = Math.max(0, formatUntil - formatFrom)
      current = current + formatFrom
      this
    }
  }

  /** Reverse iterator: walks indices downward from the last element. */
  @SerialVersionUID(3L)
  private final class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable {
    private[this] var pos = self.size - 1
    private[this] var remainder = self.size
    def hasNext: Boolean = remainder > 0
    def next(): A =
      if (pos < 0) throw new NoSuchElementException
      else {
        val r = self(pos)
        pos -= 1
        remainder -= 1
        r
      }

    // O(1) drop in reverse direction: move the cursor down.
    override def drop(n: Int): Iterator[A] = {
      if (n > 0) {
        pos -= n
        remainder = Math.max(0, remainder - n)
      }
      this
    }

    override def sliceIterator(from: Int, until: Int): Iterator[A] = {
      // Translate forward slice bounds into the downward-walking cursor:
      // startCutoff is the current (highest) index, untilCutoff the lowest
      // index still reachable; negative arguments mean "unbounded".
      val startCutoff = pos
      val untilCutoff = startCutoff - remainder + 1
      val nextStartCutoff = if (from < 0) startCutoff else if (startCutoff - from < 0) 0 else startCutoff - from
      val nextUntilCutoff = if (until < 0) startCutoff else if (startCutoff - until < untilCutoff) untilCutoff else startCutoff - until + 1
      remainder = Math.max(0, nextStartCutoff - nextUntilCutoff + 1)
      pos = nextStartCutoff
      this
    }
  }

  /** An `IndexedSeqOps` whose collection type and collection type constructor are unknown */
  type SomeIndexedSeqOps[A] = IndexedSeqOps[A, AnyConstr, _]

  // The classes below reuse the SeqView implementations and only mix in
  // IndexedSeqView so the indexed overrides (iterator, slice, ...) apply.
  @SerialVersionUID(3L)
  class Id[+A](underlying: SomeIndexedSeqOps[A])
    extends SeqView.Id(underlying) with IndexedSeqView[A]

  @SerialVersionUID(3L)
  class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A)
    extends SeqView.Appended(underlying, elem) with IndexedSeqView[A]

  @SerialVersionUID(3L)
  class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A])
    extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A]

  @SerialVersionUID(3L)
  class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A])
    extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A]

  @SerialVersionUID(3L)
  class Take[A](underlying: SomeIndexedSeqOps[A], n: Int)
    extends SeqView.Take(underlying, n) with IndexedSeqView[A]

  @SerialVersionUID(3L)
  class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int)
    extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A]

  @SerialVersionUID(3L)
  class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int)
    extends SeqView.Drop[A](underlying, n) with IndexedSeqView[A]

  @SerialVersionUID(3L)
  class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int)
    extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A]

  @SerialVersionUID(3L)
  class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B)
    extends SeqView.Map(underlying, f) with IndexedSeqView[B]

  @SerialVersionUID(3L)
  class Reverse[A](underlying: SomeIndexedSeqOps[A]) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] {
    // Reversing a reverse unwraps to the original indexed view when possible.
    override def reverse: IndexedSeqView[A] = underlying match {
      case x: IndexedSeqView[A] => x
      case _ => super.reverse
    }
  }

  @SerialVersionUID(3L)
  class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int) extends AbstractIndexedSeqView[A] {
    // Bounds are clamped once at construction; indexing is then offset by lo.
    protected val lo = from max 0
    protected val hi = (until max 0) min underlying.length
    protected val len = (hi - lo) max 0
    @throws[IndexOutOfBoundsException]
    def apply(i: Int): A = underlying(lo + i)
    def length: Int = len
  }
}
/** Explicit instantiation of the `IndexedSeqView` trait to reduce class file size in subclasses. */
@SerialVersionUID(3L)
abstract class AbstractIndexedSeqView[+A] extends AbstractSeqView[A] with IndexedSeqView[A]
| martijnhoekstra/scala | src/library/scala/collection/IndexedSeqView.scala | Scala | apache-2.0 | 6,874 |
package compiler
import soot.tagkit._
import soot.jimple.toolkits.annotation.tags._
/**
 * Mutable soot tag carrying a single Int, serialized big-endian into the
 * four-byte payload returned by `getValue`.
 */
class FreshTag extends Tag {
  var value = 0

  override def getName = "FreshTag"

  // Big-endian encoding: byte i holds bits (24 - 8*i)..(31 - 8*i) of `value`.
  override def getValue = {
    val v = value
    Array.tabulate[Byte](4)(i => ((v >>> (24 - 8 * i)) & 0xff).toByte)
  }
}
/** Marker tag for box operations; carries a single zero byte as its payload. */
object BoxTag extends Tag with OneByteCodeTag {
  override def getName = "BoxTag"
  override def getValue = Array(0)
}
/** Marker tag for unbox operations; carries a single zero byte as its payload. */
object UnboxTag extends Tag with OneByteCodeTag {
  // NOTE(review): the name string is "UnBoxTag" (capital B) while the object is
  // UnboxTag and BoxTag's string matches its object exactly — confirm the
  // capitalization mismatch is intentional before matching on this name.
  override def getName = "UnBoxTag"
  override def getValue = Array(0)
} | JamesIry/SootPrototype | src/main/scala/compiler/Tags.scala | Scala | apache-2.0 | 657 |
package io.jvm.uuid
private[uuid] object RichUUID {
  /** Upper-case hexadecimal translation lookup. */
  private val UppercaseLookup: Array[Char] = "0123456789ABCDEF".toCharArray

  /** Lower-case hexadecimal translation lookup. */
  private val LowercaseLookup: Array[Char] = "0123456789abcdef".toCharArray

  /** Oracle optimized toString in 9, no sense to compete with future versions */
  // Any failure (missing property, non-numeric version string) falls back to
  // the hand-rolled lookup-table implementation.
  private val UseNativeToString: Boolean =
    try {
      sys.props("java.vendor") == "Oracle Corporation" &&
      sys.props("java.specification.version").toInt >= 9
    } catch {
      case _: Exception => false
    }

  /** Char buffer to be used by the optimized .string method */
  // One 36-char scratch buffer per thread avoids allocation on each call.
  private val charBuffer: ThreadLocal[Array[Char]] = new ThreadLocal[Array[Char]] {
    override def initialValue(): Array[Char] = new Array[Char](36)
  }
}
/** Pimp-my-library pattern, wrapping the underlying `java.util.UUID`.
*
* This class extends AnyVal, making all the extension methods have
* little-to-no runtime overhead.
*
* The pimp is complete through an implicit conversion in the
* [[Imports]] trait or the [[io.jvm.uuid.package uuid]] package object. */
final class RichUUID private[uuid](private val uuid: UUID) extends AnyVal with Ordered[UUID] {
  /** Returns the most significant 64 bits of this `UUID`. */
  @inline def mostSigBits: Long = uuid.getMostSignificantBits

  /** Returns the least significant 64 bits of this `UUID`. */
  @inline def leastSigBits: Long = uuid.getLeastSignificantBits

  /** Encodes this `UUID` as a `Long` array with 2 elements. */
  def longArray: Array[Long] = {
    val buffer = new Array[Long](2)
    toLongArray(buffer, 0)
    buffer
  }

  /** Writes this `UUID` to the provided `Long` array.
    * Element layout: buffer(offset) = msb, buffer(offset + 1) = lsb. */
  @inline def toLongArray(buffer: Array[Long], offset: Int): Unit = {
    buffer(offset    ) = uuid.getMostSignificantBits
    buffer(offset + 1) = uuid.getLeastSignificantBits
  }

  /** Encodes this `UUID` as an `Int` array with 4 elements. */
  def intArray: Array[Int] = {
    val buffer = new Array[Int](4)
    toIntArray(buffer, 0)
    buffer
  }

  /** Writes this `UUID` to the provided `Int` array, big-endian (msb first). */
  @inline def toIntArray(buffer: Array[Int], offset: Int): Unit = {
    val msb = uuid.getMostSignificantBits
    buffer(offset    ) = (msb >> 32).toInt
    buffer(offset + 1) = msb.toInt
    val lsb = uuid.getLeastSignificantBits
    buffer(offset + 2) = (lsb >> 32).toInt
    buffer(offset + 3) = lsb.toInt
  }

  /** Encodes this `UUID` as a `Short` array with 8 elements. */
  def shortArray: Array[Short] = {
    val buffer = new Array[Short](8)
    toShortArray(buffer, 0)
    buffer
  }

  /** Writes this `UUID` to the provided `Short` array, big-endian (msb first). */
  @inline def toShortArray(buffer: Array[Short], offset: Int): Unit = {
    val msb = uuid.getMostSignificantBits
    val msbh = (msb >> 32).toInt
    buffer(offset    ) = (msbh >> 16).toShort
    buffer(offset + 1) = msbh.toShort
    val msbl = msb.toInt
    buffer(offset + 2) = (msbl >> 16).toShort
    buffer(offset + 3) = msbl.toShort
    val lsb = uuid.getLeastSignificantBits
    val lsbh = (lsb >> 32).toInt
    buffer(offset + 4) = (lsbh >> 16).toShort
    buffer(offset + 5) = lsbh.toShort
    val lsbl = lsb.toInt
    buffer(offset + 6) = (lsbl >> 16).toShort
    buffer(offset + 7) = lsbl.toShort
  }

  /** Encodes this `UUID` as a `Byte` array with 16 elements. */
  def byteArray: Array[Byte] = {
    val buffer = new Array[Byte](16)
    toByteArray(buffer, 0)
    buffer
  }

  /** Writes this `UUID` to the provided `Byte` array, big-endian (msb first). */
  @inline def toByteArray(buffer: Array[Byte], offset: Int): Unit = {
    val msb = uuid.getMostSignificantBits
    buffer(offset     ) = (msb >>> 56).toByte
    buffer(offset +  1) = (msb >>> 48).toByte
    buffer(offset +  2) = (msb >>> 40).toByte
    buffer(offset +  3) = (msb >>> 32).toByte
    buffer(offset +  4) = (msb >>> 24).toByte
    buffer(offset +  5) = (msb >>> 16).toByte
    buffer(offset +  6) = (msb >>>  8).toByte
    buffer(offset +  7) = (msb       ).toByte
    val lsb = uuid.getLeastSignificantBits
    buffer(offset +  8) = (lsb >>> 56).toByte
    buffer(offset +  9) = (lsb >>> 48).toByte
    buffer(offset + 10) = (lsb >>> 40).toByte
    buffer(offset + 11) = (lsb >>> 32).toByte
    buffer(offset + 12) = (lsb >>> 24).toByte
    buffer(offset + 13) = (lsb >>> 16).toByte
    buffer(offset + 14) = (lsb >>>  8).toByte
    buffer(offset + 15) = (lsb       ).toByte
  }

  /** Encodes this `UUID` as a `Char` array with 36 elements in `xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx` format. */
  def charArray: Array[Char] = {
    val buffer = new Array[Char](36)
    toCharArrayViaLookup(buffer, 0, RichUUID.LowercaseLookup)
    buffer
  }

  /** Writes this `UUID` to the provided `Char` array in `xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx` format. */
  def toCharArray(buffer: Array[Char], offset: Int): Unit =
    toCharArrayViaLookup(buffer, offset, RichUUID.LowercaseLookup)

  /** Serializes this `UUID` to the provided `Char` array via a translation matrix.
    * Each nibble (4 bits) indexes the lookup table; dashes are written at the
    * canonical positions 8, 13, 18 and 23 (relative to `offset`). */
  @inline private[this] def toCharArrayViaLookup(buffer: Array[Char], offset: Int, lookup: Array[Char]): Unit = {
    val msb = uuid.getMostSignificantBits
    val msbh = (msb >>> 32).toInt
    buffer(offset     ) = lookup((msbh >>> 28)      )
    buffer(offset +  1) = lookup((msbh >>> 24) & 0xf)
    buffer(offset +  2) = lookup((msbh >>> 20) & 0xf)
    buffer(offset +  3) = lookup((msbh >>> 16) & 0xf)
    buffer(offset +  4) = lookup((msbh >>> 12) & 0xf)
    buffer(offset +  5) = lookup((msbh >>>  8) & 0xf)
    buffer(offset +  6) = lookup((msbh >>>  4) & 0xf)
    buffer(offset +  7) = lookup((msbh       ) & 0xf)
    buffer(offset +  8) = '-'
    val msbl = msb.toInt
    buffer(offset +  9) = lookup((msbl >>> 28)      )
    buffer(offset + 10) = lookup((msbl >>> 24) & 0xf)
    buffer(offset + 11) = lookup((msbl >>> 20) & 0xf)
    buffer(offset + 12) = lookup((msbl >>> 16) & 0xf)
    buffer(offset + 13) = '-'
    buffer(offset + 14) = lookup((msbl >>> 12) & 0xf)
    buffer(offset + 15) = lookup((msbl >>>  8) & 0xf)
    buffer(offset + 16) = lookup((msbl >>>  4) & 0xf)
    buffer(offset + 17) = lookup((msbl       ) & 0xf)
    buffer(offset + 18) = '-'
    val lsb = uuid.getLeastSignificantBits
    val lsbh = (lsb >>> 32).toInt
    buffer(offset + 19) = lookup((lsbh >>> 28)      )
    buffer(offset + 20) = lookup((lsbh >>> 24) & 0xf)
    buffer(offset + 21) = lookup((lsbh >>> 20) & 0xf)
    buffer(offset + 22) = lookup((lsbh >>> 16) & 0xf)
    buffer(offset + 23) = '-'
    buffer(offset + 24) = lookup((lsbh >>> 12) & 0xf)
    buffer(offset + 25) = lookup((lsbh >>>  8) & 0xf)
    buffer(offset + 26) = lookup((lsbh >>>  4) & 0xf)
    buffer(offset + 27) = lookup((lsbh       ) & 0xf)
    val lsbl = lsb.toInt
    buffer(offset + 28) = lookup((lsbl >>> 28)      )
    buffer(offset + 29) = lookup((lsbl >>> 24) & 0xf)
    buffer(offset + 30) = lookup((lsbl >>> 20) & 0xf)
    buffer(offset + 31) = lookup((lsbl >>> 16) & 0xf)
    buffer(offset + 32) = lookup((lsbl >>> 12) & 0xf)
    buffer(offset + 33) = lookup((lsbl >>>  8) & 0xf)
    buffer(offset + 34) = lookup((lsbl >>>  4) & 0xf)
    buffer(offset + 35) = lookup((lsbl       ) & 0xf)
  }

  /** Returns this `UUID` as a `String` in `xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx` format.
    * Hexadecimal characters will be lower-cased.
    * This method is an optimized drop in replacement for the legacy `toString` method. */
  def string: String =
    if (RichUUID.UseNativeToString) {
      uuid.toString
    } else {
      toStringViaLookup(RichUUID.LowercaseLookup)
    }

  /** Alias for `string` which implicitly returns a lower-cased `String`. */
  @inline def toLowerCase: String = string

  /** Returns this `UUID` as a `String` in `xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx` format.
    * Hexadecimal characters will be upper-cased. */
  def toUpperCase: String = toStringViaLookup(RichUUID.UppercaseLookup)

  /** Translate this `UUID` to a `String` via the provided lookup.
    * This method should be inlined, to constant-fold the offset. */
  @inline private[this] def toStringViaLookup(lookup: Array[Char]): String = {
    // The thread-local buffer is only borrowed: the String constructor copies it.
    val buffer = RichUUID.charBuffer.get()
    toCharArrayViaLookup(buffer, 0, lookup)
    new String(buffer) // return ownership of the buffer to ThreadLocal
  }

  /** WARNING: JVM sorts UUIDs differently to the rest of the world (languages and databases).
    * This is due to default signed Long ordering and has been marked as a Will Not Fix
    * due to legacy code: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=7025832 */
  override def compareTo(that: UUID): Int = uuid compareTo that

  /** This comparison allows for sanity compatible unsigned ordering */
  // Adding Long.MinValue flips the sign bit, turning signed comparison of the
  // shifted values into unsigned comparison of the originals.
  override def compare(that: UUID): Int = {
    val umsb = uuid.getMostSignificantBits
    val tmsb = that.getMostSignificantBits
    if (umsb != tmsb) {
      if (umsb + Long.MinValue < tmsb + Long.MinValue) -1 else 1
    } else {
      val ulsb = uuid.getLeastSignificantBits
      val tlsb = that.getLeastSignificantBits
      if (ulsb != tlsb) {
        if (ulsb + Long.MinValue < tlsb + Long.MinValue) -1 else 1
      } else {
        0
      }
    }
  }
}
| melezov/scala-uuid | src/main/scala/io/jvm/uuid/RichUUID.scala | Scala | bsd-3-clause | 9,184 |
package at.iem.point.eh.sketches
import annotation.tailrec
/**
* A chord made of a sequence of notes. Notes must be in ascending order with respect to their
* pitches.
*/
/**
 * A chord made of a sequence of notes. Notes must be in ascending order with
 * respect to their pitches.
 */
final case class Chord(notes: IIdxSeq[OffsetNote]) {
  require(notes.isSortedBy(_.pitch))

  /** Earliest note onset in the chord. */
  def minOffset: Double = notes.iterator.map(_.offset).min

  /** Latest note release in the chord. */
  def maxStop: Double = notes.iterator.map(_.stop).max

  /** The number of notes in the chord. */
  def size: Int = notes.length

  /** Arithmetic mean of all note onsets. */
  def avgOffset: Double = notes.iterator.map(_.offset).sum / notes.size

  /** Arithmetic mean of all note releases. */
  def avgStop: Double = notes.iterator.map(_.stop).sum / notes.size

  /** __Geometric__ mean of all note durations. */
  def avgDuration: Double = {
    val prod = notes.iterator.map(_.duration).product
    Math.pow(prod, 1.0 / notes.size)
  }

  /** The pitches of the chord, in ascending order. */
  def pitches: IIdxSeq[Pitch] = notes.map(_.pitch)

  /** The framing interval, i.e. the interval between the chord's highest and
    * lowest pitch. */
  def frameInterval: UndirectedInterval = {
    val lowest  = notes.head.pitch
    val highest = notes.last.pitch
    (highest interval lowest).undirected
  }

  /** The intervals between each pair of adjacent pitches, bottom to top. */
  def layeredIntervals: IIdxSeq[UndirectedInterval] =
    pitches.intervals.map(_.undirected)

  /** All pairwise intervals between the chord's pitches, in ascending order. */
  def allIntervals: IIdxSeq[Interval] = {
    val ps = pitches
    val pairs = for {
      i      <- ps.indices
      higher <- ps.drop(i + 1)
    } yield higher interval ps(i)
    pairs.sorted
  }
}
package konstructs
import scala.collection.JavaConverters._
import scala.collection.mutable
import akka.actor.{ Actor, ActorRef, Props }
import konstructs.api.{ Position, BlockFactory, BoxQuery, BoxData, BlockTypeId,
BoxQueryRawResult, BoxQueryResult, ReplaceBlocks }
object Db {
  // Chunks are cubes of ChunkSize^3 blocks.
  val ChunkSize = 32
  // Shards are cubes of ShardSize^3 chunks (see ShardPosition.apply).
  val ShardSize = 8
  // Size in bytes of the header of the stored chunk format.
  // NOTE(review): presumably version byte + one more byte — confirm in ShardActor.
  val Header = 2
  // Current on-disk chunk format version.
  val Version = 1.toByte
}
case class ShardPosition(m: Int, n: Int, o: Int)
object ShardPosition {

  /** Shard that contains the given chunk. Uses floored division so that
    * negative chunk coordinates map to the shard "below" (e.g. chunk -1
    * belongs to shard -1, not shard 0). */
  def apply(c: ChunkPosition): ShardPosition = {
    def floorDiv(coordinate: Int): Int = {
      val adjusted = if (coordinate < 0) coordinate - Db.ShardSize + 1 else coordinate
      adjusted / Db.ShardSize
    }
    ShardPosition(floorDiv(c.p), floorDiv(c.q), floorDiv(c.k))
  }

  /** Shard that contains the given block position. */
  def apply(p: Position): ShardPosition =
    ShardPosition(ChunkPosition(p))
}
class DbActor(universe: ActorRef, generator: ActorRef, binaryStorage: ActorRef,
              blockFactory: BlockFactory)
    extends Actor {
  import DbActor._

  /** Deterministic child-actor name for a given shard position. */
  def shardActorId(r: ShardPosition) = s"shard-${r.m}-${r.n}-${r.o}"

  def getShardActor(pos: Position): ActorRef =
    getShardActor(ShardPosition(pos))

  def getShardActor(chunk: ChunkPosition): ActorRef =
    getShardActor(ShardPosition(chunk))

  /** Looks up the shard actor by name, creating it lazily on first access. */
  def getShardActor(shard: ShardPosition): ActorRef = {
    val name = shardActorId(shard)
    context.child(name) getOrElse {
      context.actorOf(ShardActor.props(self, shard, binaryStorage, generator, blockFactory), name)
    }
  }

  def receive = {
    // Block-level operations are forwarded (original sender preserved) to the
    // shard that owns the position.
    case put: DbActor.PutBlock =>
      getShardActor(put.pos) forward put
    case remove: DbActor.RemoveBlock =>
      getShardActor(remove.pos) forward remove
    case view: DbActor.ViewBlock =>
      getShardActor(view.pos) forward view
    case blocksRequest: SendBlocks =>
      getShardActor(blocksRequest.chunk) forward blocksRequest
    case query: BoxQuery =>
      // Fan the box out per chunk; a dedicated collector actor reassembles the
      // partial results and replies to the original sender.
      val chunkBoxes = query.box.chunked
      val collector = context.actorOf(BoxQueryResultActor.props(sender, query, chunkBoxes, blockFactory))
      chunkBoxes.foreach { box =>
        getShardActor(box.start).tell(BoxQuery(box), collector)
      }
    case ReplaceBlocks(filter, blocks) =>
      // Partition the block map by owning chunk and forward one message per chunk.
      for ((chunk, chunkBlocks) <- splitList[BlockTypeId](blocks))
        getShardActor(chunk) forward ShardActor.ReplaceBlocks(chunk, filter, chunkBlocks)
    case blockList: BlockList =>
      universe ! blockList
  }
}
object DbActor {
  // Protocol messages (kept byte-identical — they are the public interface).
  case class SendBlocks(chunk: ChunkPosition)
  case class BlockList(chunk: ChunkPosition, data: ChunkData)
  case class PutBlock(pos: Position, w: Int, initiator: ActorRef)
  case class UnableToPut(pos: Position, w: Int, initiator: ActorRef)
  case class RemoveBlock(pos: Position, initiator: ActorRef)
  case class BlockRemoved(pos: Position, w: Int, initiator: ActorRef)
  case class ViewBlock(pos: Position, initiator: ActorRef)
  case class BlockViewed(pos: Position, w: Int, intitator: ActorRef)

  /** Groups a Java map of placed blocks by the chunk that contains each position. */
  def splitList[T](placed: java.util.Map[Position, T]):
      Map[ChunkPosition, Map[Position, T]] =
    placed.asScala.toSeq
      .groupBy { case (position, _) => ChunkPosition(position) }
      .map { case (chunk, entries) => chunk -> entries.toMap }

  def props(universe: ActorRef, generator: ActorRef, binaryStorage: ActorRef,
            blockFactory: BlockFactory) =
    Props(classOf[DbActor], universe, generator, binaryStorage, blockFactory)
}
/** Collects per-chunk raw box results and, once every expected box has arrived,
  * assembles the full typed result and replies to the original requester.
  * One instance is spawned per BoxQuery and stops itself after replying. */
class BoxQueryResultActor(initiator: ActorRef, blockFactory: BlockFactory,
                          query: BoxQuery, boxes: Set[Box])
    extends Actor {
  // Partial results received so far (one BoxData per chunk-box).
  var receivedBoxes: Set[BoxData[Int]] = Set()
  def receive = {
    case r: BoxQueryRawResult =>
      receivedBoxes += r.result
      // Complete once the set of received boxes equals the expected set.
      if(receivedBoxes.map(_.box) == boxes) {
        val data = new Array[BlockTypeId](query.box.blocks)
        for(subData <- receivedBoxes) {
          // Translate raw integer block codes to BlockTypeIds, placing each
          // value at its index within the overall query box.
          for((position, typeId) <- subData.toPlaced.asScala) {
            data(query.box.index(position)) = blockFactory.wMapping(typeId)
          }
        }
        initiator ! BoxQueryResult(BoxData(query.box, java.util.Arrays.asList(data:_*)))
        context.stop(self)
      }
  }
}
object BoxQueryResultActor {
  // NOTE: the Props argument order (initiator, blockFactory, query, boxes)
  // intentionally differs from this method's parameter order — it must match
  // the BoxQueryResultActor constructor. Keep them in sync.
  def props(initiator: ActorRef, query: BoxQuery, boxes: Set[Box],
            blockFactory: BlockFactory) =
    Props(classOf[BoxQueryResultActor], initiator, blockFactory, query, boxes)
}
| Henningstone/server | src/main/scala/konstructs/db.scala | Scala | mit | 4,557 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.log._
import java.io.File
import org.I0Itec.zkclient.ZkClient
import org.scalatest.junit.JUnit3Suite
import org.easymock.EasyMock
import org.junit._
import org.junit.Assert._
import kafka.common._
import kafka.cluster.Replica
import kafka.utils.{SystemTime, KafkaScheduler, TestUtils, MockTime, Utils}
import java.util.concurrent.atomic.AtomicBoolean
class HighwatermarkPersistenceTest extends JUnit3Suite {
  // Two broker configs so leader/follower replicas can be simulated.
  val configs = TestUtils.createBrokerConfigs(2).map(new KafkaConfig(_))
  val topic = "foo"
  // One LogManager per broker config, backed by that config's log dirs and a mock clock.
  val logManagers = configs.map(config => new LogManager(logDirs = config.logDirs.map(new File(_)).toArray,
                                                         topicConfigs = Map(),
                                                         defaultConfig = LogConfig(),
                                                         cleanerConfig = CleanerConfig(),
                                                         flushCheckMs = 30000,
                                                         flushCheckpointMs = 10000L,
                                                         retentionCheckMs = 30000,
                                                         scheduler = new KafkaScheduler(1),
                                                         time = new MockTime))
@After
def teardown() {
for(manager <- logManagers; dir <- manager.logDirs)
Utils.rm(dir)
}
  /** Verifies that a single partition's high watermark is checkpointed to disk:
    * initially 0, and updated after the local leader replica's HW changes. */
  def testHighWatermarkPersistenceSinglePartition() {
    // mock zkclient
    val zkClient = EasyMock.createMock(classOf[ZkClient])
    EasyMock.replay(zkClient)
    // create kafka scheduler
    val scheduler = new KafkaScheduler(2)
    scheduler.startup
    // create replica manager
    val replicaManager = new ReplicaManager(configs.head, new MockTime(), zkClient, scheduler, logManagers(0), new AtomicBoolean(false))
    replicaManager.startup()
    // before any replica exists, the checkpointed HW must read back as 0
    replicaManager.checkpointHighWatermarks()
    var fooPartition0Hw = hwmFor(replicaManager, topic, 0)
    assertEquals(0L, fooPartition0Hw)
    val partition0 = replicaManager.getOrCreatePartition(topic, 0, 1)
    // create leader and follower replicas
    val log0 = logManagers(0).createLog(TopicAndPartition(topic, 0), LogConfig())
    val leaderReplicaPartition0 = new Replica(configs.head.brokerId, partition0, SystemTime, 0, Some(log0))
    partition0.addReplicaIfNotExists(leaderReplicaPartition0)
    val followerReplicaPartition0 = new Replica(configs.last.brokerId, partition0, SystemTime)
    partition0.addReplicaIfNotExists(followerReplicaPartition0)
    replicaManager.checkpointHighWatermarks()
    fooPartition0Hw = hwmFor(replicaManager, topic, 0)
    assertEquals(leaderReplicaPartition0.highWatermark, fooPartition0Hw)
    // reading the HW of a remote (log-less) follower replica must fail
    try {
      followerReplicaPartition0.highWatermark
      fail("Should fail with KafkaException")
    }catch {
      case e: KafkaException => // this is ok
    }
    // set the highwatermark for local replica
    partition0.getReplica().get.highWatermark = 5L
    replicaManager.checkpointHighWatermarks()
    // the new HW value must survive the checkpoint round-trip
    fooPartition0Hw = hwmFor(replicaManager, topic, 0)
    assertEquals(leaderReplicaPartition0.highWatermark, fooPartition0Hw)
    EasyMock.verify(zkClient)
  }
  /** Verifies that high watermarks for multiple partitions (across two topics)
    * are checkpointed independently and read back with their latest values. */
  def testHighWatermarkPersistenceMultiplePartitions() {
    val topic1 = "foo1"
    val topic2 = "foo2"
    // mock zkclient
    val zkClient = EasyMock.createMock(classOf[ZkClient])
    EasyMock.replay(zkClient)
    // create kafka scheduler
    val scheduler = new KafkaScheduler(2)
    scheduler.startup
    // create replica manager
    val replicaManager = new ReplicaManager(configs.head, new MockTime(), zkClient, scheduler, logManagers(0), new AtomicBoolean(false))
    replicaManager.startup()
    // topic1, partition 0: starts with HW 0 before any replica exists
    replicaManager.checkpointHighWatermarks()
    var topic1Partition0Hw = hwmFor(replicaManager, topic1, 0)
    assertEquals(0L, topic1Partition0Hw)
    val topic1Partition0 = replicaManager.getOrCreatePartition(topic1, 0, 1)
    // create leader log
    val topic1Log0 = logManagers(0).createLog(TopicAndPartition(topic1, 0), LogConfig())
    // create a local replica for topic1
    val leaderReplicaTopic1Partition0 = new Replica(configs.head.brokerId, topic1Partition0, SystemTime, 0, Some(topic1Log0))
    topic1Partition0.addReplicaIfNotExists(leaderReplicaTopic1Partition0)
    replicaManager.checkpointHighWatermarks()
    topic1Partition0Hw = hwmFor(replicaManager, topic1, 0)
    assertEquals(leaderReplicaTopic1Partition0.highWatermark, topic1Partition0Hw)
    // set the highwatermark for local replica
    topic1Partition0.getReplica().get.highWatermark = 5L
    replicaManager.checkpointHighWatermarks()
    topic1Partition0Hw = hwmFor(replicaManager, topic1, 0)
    assertEquals(5L, leaderReplicaTopic1Partition0.highWatermark)
    assertEquals(5L, topic1Partition0Hw)
    // add another partition and set highwatermark
    val topic2Partition0 = replicaManager.getOrCreatePartition(topic2, 0, 1)
    // create leader log
    val topic2Log0 = logManagers(0).createLog(TopicAndPartition(topic2, 0), LogConfig())
    // create a local replica for topic2
    val leaderReplicaTopic2Partition0 = new Replica(configs.head.brokerId, topic2Partition0, SystemTime, 0, Some(topic2Log0))
    topic2Partition0.addReplicaIfNotExists(leaderReplicaTopic2Partition0)
    replicaManager.checkpointHighWatermarks()
    var topic2Partition0Hw = hwmFor(replicaManager, topic2, 0)
    assertEquals(leaderReplicaTopic2Partition0.highWatermark, topic2Partition0Hw)
    // set the highwatermark for local replica
    topic2Partition0.getReplica().get.highWatermark = 15L
    assertEquals(15L, leaderReplicaTopic2Partition0.highWatermark)
    // change the highwatermark for topic1
    topic1Partition0.getReplica().get.highWatermark = 10L
    assertEquals(10L, leaderReplicaTopic1Partition0.highWatermark)
    // a single checkpoint call must persist both topics' latest values
    replicaManager.checkpointHighWatermarks()
    // verify checkpointed hw for topic 2
    topic2Partition0Hw = hwmFor(replicaManager, topic2, 0)
    assertEquals(15L, topic2Partition0Hw)
    // verify checkpointed hw for topic 1
    topic1Partition0Hw = hwmFor(replicaManager, topic1, 0)
    assertEquals(10L, topic1Partition0Hw)
    EasyMock.verify(zkClient)
  }
def hwmFor(replicaManager: ReplicaManager, topic: String, partition: Int): Long = {
replicaManager.highWatermarkCheckpoints(new File(replicaManager.config.logDirs(0)).getAbsolutePath).read.getOrElse(TopicAndPartition(topic, partition), 0L)
}
} | unix1986/universe | tool/kafka-0.8.1.1-src/core/src/test/scala/unit/kafka/server/HighwatermarkPersistenceTest.scala | Scala | bsd-2-clause | 7,232 |
package jp.sf.amateras
import org.apache.poi.ss.usermodel._
import scala.language.implicitConversions
package object poi4s {
  // Enrichment conversions: wrap POI's Workbook/Sheet/Cell in the Rich*
  // classes so their extra operations are available implicitly.
  implicit def RichWorkbook(workbook: Workbook): RichWorkbook = new RichWorkbook(workbook)
  implicit def RichSheet(sheet: Sheet): RichSheet = new RichSheet(sheet)
  implicit def RichCell(cell: Cell): RichCell = new RichCell(cell)
}
| takezoe/poi4s | src/main/scala/jp/sf/amateras/poi4s/package.scala | Scala | apache-2.0 | 364 |
package scredis.commands
import scredis.io.NonBlockingConnection
import scredis.protocol.requests.PubSubRequests._
import scredis.serialization.Writer
import scala.concurrent.Future
/**
* This trait implements pub/sub commands.
*
* @define e [[scredis.exceptions.RedisErrorResponseException]]
* @define none `None`
* @define true '''true'''
* @define false '''false'''
*/
trait PubSubCommands { self: NonBlockingConnection =>
  /**
   * Publishes a message to a channel.
   *
   * The message payload is serialized with the implicit
   * [[scredis.serialization.Writer]] for `W`.
   *
   * @param channel the name of the channel
   * @param message the message payload
   * @return the number of clients that received the message
   *
   * @since 2.0.0
   */
  def publish[W: Writer](channel: String, message: W): Future[Long] = send(
    Publish(channel, message)
  )
  /**
   * Lists the currently active channels. An active channel is a Pub/Sub channel with one or more
   * subscribers (not including clients subscribed to patterns).
   *
   * @note If no pattern is specified, all the channels are listed, otherwise if pattern is
   * specified only channels matching the specified glob-style pattern are listed.
   *
   * @param patternOpt optional glob-style pattern to filter returned channels
   * @return the currently active channels, optionally matching the specified pattern
   *
   * @since 2.8.0
   */
  def pubSubChannels(patternOpt: Option[String] = None): Future[List[String]] = send(
    PubSubChannels[List](patternOpt)
  )
  /**
   * Returns the number of subscribers (not counting clients subscribed to patterns) for the
   * specified channels.
   *
   * @param channels channel name(s)
   * @return a map of channels to number of subscribers for every provided channel
   *
   * @since 2.8.0
   */
  def pubSubNumSub(channels: String*): Future[Map[String, Int]] = send(
    PubSubNumSub[Map](channels: _*)
  )
  /**
   * Returns the number of subscriptions to patterns (that are performed using the
   * PSUBSCRIBE command).
   *
   * @note Note that this is not just the count of clients subscribed to patterns but the total
   * number of patterns all the clients are subscribed to.
   *
   * @return the number of subscriptions to patterns
   *
   * @since 2.8.0
   */
  def pubSubNumPat(): Future[Long] = send(PubSubNumPat())
} | Livestream/scredis | src/main/scala/scredis/commands/PubSubCommands.scala | Scala | apache-2.0 | 2,262 |
package com.github.mdr.mash.inference
import java.time.{ Instant, LocalDate }
import java.util.IdentityHashMap
import com.github.mdr.mash.classes._
import com.github.mdr.mash.functions._
import com.github.mdr.mash.ns.collections.GroupClass
import com.github.mdr.mash.ns.core._
import com.github.mdr.mash.ns.time.{ DateClass, DateTimeClass }
import com.github.mdr.mash.runtime._
import scala.collection.immutable.ListMap
import scala.collection.mutable.ArrayBuffer
object ValueTypeDetector {
  /** Convenience entry point: detects the type of a single runtime value. */
  def getType(x: MashValue): Type = new ValueTypeDetector().getType(x)
  // Global cache of already-computed types keyed by object identity; shared by
  // all detector instances and guarded by ValueTypeDetector.synchronized in getType.
  private val visitedMap: IdentityHashMap[MashValue, Type] = new IdentityHashMap
}
/** Detect the type of runtime values **/
class ValueTypeDetector {
import ValueTypeDetector._
def buildBindings(bindings: Map[String, MashValue]): Map[String, Type] =
for ((k, v) ← bindings)
yield k -> getType(v)
private val visitingMap: IdentityHashMap[MashValue, Boolean] = new IdentityHashMap
  /** Detects the type of a runtime value, with caching and cycle protection.
    *
    * The whole computation is serialised on the companion-object lock because
    * the companion's visitedMap cache is a shared, unsynchronised IdentityHashMap.
    * A value already present in visitingMap is on the current call stack, so we
    * answer Type.Any to terminate the recursion.
    */
  def getType(x: MashValue): Type = ValueTypeDetector.synchronized {
    Option(visitedMap.get(x)).getOrElse {
      if (visitingMap containsKey x)
        Type.Any
      else {
        visitingMap.put(x, true)
        try {
          val type_ = getType_(x)
          visitedMap.put(x, type_)
          type_
        } finally
          visitingMap.remove(x)
      }
    }
  }
  // Uncached type computation. NOTE: case order matters — the user-defined
  // function/class cases must precede the generic MashFunction/MashClass cases,
  // and tagged string/number cases are distinguished by their tag class.
  private def getType_(x: MashValue): Type = x match {
    case MashNull                                          ⇒ NullClass
    case f: AnonymousFunction                              ⇒ getUserFunctionType(f)
    case f: UserDefinedFunction                            ⇒ getUserFunctionType(f)
    case f: MashFunction                                   ⇒ Type.BuiltinFunction(f)
    case BoundMethod(target, method: UserDefinedMethod, _) ⇒ getBoundMethodType(target, method)
    case BoundMethod(target, method, _)                    ⇒ Type.BoundBuiltinMethod(getType(target), method)
    case MashString(_, None)                               ⇒ StringClass
    case MashString(_, Some(tagClass))                     ⇒ StringClass taggedWith tagClass
    case MashNumber(_, None)                               ⇒ NumberClass
    case MashNumber(_, Some(tagClass))                     ⇒ NumberClass taggedWith tagClass
    case _: MashBoolean                                    ⇒ BooleanClass
    case MashWrapped(_: Instant)                           ⇒ DateTimeClass
    case MashWrapped(_: LocalDate)                         ⇒ DateClass
    case userClass: UserDefinedClass                       ⇒ getUserClassType(userClass)
    case _: MashClass                                      ⇒ ClassClass
    case MashUnit                                          ⇒ UnitClass
    // Sequence type is inferred from the first element only.
    case xs: MashList                                      ⇒ xs.elements.headOption.map(getType).getOrElse(Type.Any).seq
    case obj@MashObject(_, None)                           ⇒ getSimpleObjectType(obj)
    // Group and TimedResult instances get generic-parameterised types.
    case obj@MashObject(_, Some(GroupClass))               ⇒ getTypeOfGroup(obj)
    case obj@MashObject(_, Some(TimedResultClass))         ⇒ getTypeOfTimedResult(obj)
    case MashObject(_, Some(userClass: UserDefinedClass))  ⇒ Type.UserClassInstance(getUserClassType(userClass))
    case MashObject(_, Some(klass))                        ⇒ klass
    case _                                                 ⇒ Type.Any
  }
private def getSimpleObjectType(obj: MashObject) = {
val fields = for { (field, value) ← obj.stringFields } yield field -> getType(value)
Type.Object(fields)
}
  /** Types an anonymous function, capturing the types of its enclosing bindings. */
  private def getUserFunctionType(function: AnonymousFunction): Type.UserDefinedFunction = {
    val AnonymousFunction(parameterModel, body, context) = function
    val functionBindings = buildBindings(context.scopeStack.bindings)
    Type.UserDefinedFunction(docCommentOpt = None, isPrivate = false, None, parameterModel, body, functionBindings)
  }
  /** Types a named user-defined function, capturing its enclosing bindings. */
  private def getUserFunctionType(function: UserDefinedFunction): Type.UserDefinedFunction = {
    val UserDefinedFunction(docCommentOpt, name, parameterModel, body, context, _) = function
    val functionBindings = buildBindings(context.scopeStack.bindings)
    Type.UserDefinedFunction(docCommentOpt, isPrivate = false, Some(name), parameterModel, body, functionBindings)
  }
  /** Types a user-defined class, including the types of all its methods. */
  private def getUserClassType(userClass: UserDefinedClass): Type.UserClass = {
    val UserDefinedClass(_, name, _, params, methods) = userClass
    Type.UserClass(name, params, getMethodTypes(methods))
  }
  /** Types a user-defined method bound to a target value. */
  private def getBoundMethodType(target: MashValue, method: UserDefinedMethod) = {
    val UserDefinedMethod(docCommentOpt, name, params, _, body, context, isPrivate, _, _) = method
    val bindings = buildBindings(context.scopeStack.bindings)
    val methodType = Type.UserDefinedFunction(docCommentOpt, isPrivate, Some(name), params, body, bindings)
    Type.BoundUserDefinedMethod(getType(target), methodType)
  }
  /** Instance type for a user-defined class (the type of its objects). */
  def instanceType(userClass: UserDefinedClass): Type.UserClassInstance = {
    val UserDefinedClass(_, name, _, params, methods) = userClass
    Type.UserClassInstance(Type.UserClass(name, params, getMethodTypes(methods)))
  }
  /** Types a class's methods in declaration order.
    *
    * Methods are processed sequentially so that each method's bindings include
    * all previously typed methods (and their aliases); an alias maps to the
    * same function type as its primary name. Result preserves insertion order.
    */
  private def getMethodTypes(methods: Seq[UserDefinedMethod]): ListMap[String, Type.UserDefinedFunction] = {
    var methodBindings = Map[String, Type]() // TODO: Should also include parent methods
    val methodNameTypePairs: ArrayBuffer[(String, Type.UserDefinedFunction)] = ArrayBuffer()
    for (method ← methods) {
      val bindings = methodBindings ++ buildBindings(method.context.scopeStack.bindings)
      val functionType = Type.UserDefinedFunction(method.docCommentOpt, method.isPrivate, Some(method.name),
        method.params, method.body, bindings)
      for (name ← method.name +: method.aliases) {
        methodNameTypePairs += name -> functionType
        methodBindings += name -> functionType
      }
    }
    ListMap(methodNameTypePairs: _*)
  }
  /** TimedResult type parameterised by the type of its Result field (Any if absent). */
  private def getTypeOfTimedResult(obj: MashObject): Type = {
    val resultType = obj.get(TimedResultClass.Fields.Result).map(getType) getOrElse Type.Any
    TimedResultClass.withGenerics(resultType)
  }
  /** Group type parameterised by key and element types.
    * Key type comes from the Key field; value type is the element type of the
    * Values sequence. Falls back to Group[Any, Any] when fields are missing. */
  private def getTypeOfGroup(obj: MashObject): Type = {
    val groupTypeOpt =
      for {
        key ← obj.get(GroupClass.Fields.Key)
        keyType = getType(key)
        values ← obj.get(GroupClass.Fields.Values)
        valueType = getType(values) match {
          case Type.Seq(elementType) ⇒ elementType
          case _                     ⇒ Type.Any
        }
      } yield GroupClass.withGenerics(keyType, valueType)
    groupTypeOpt.getOrElse(GroupClass.withGenerics(Type.Any, Type.Any))
  }
} | mdr/mash | src/main/scala/com/github/mdr/mash/inference/ValueTypeDetector.scala | Scala | mit | 6,653 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2013 Association du Paris Java User Group.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package models
import library.Redis
import models.ConferenceDescriptor.ConferenceProposalConfigurations
/**
* Approve or reject a proposal
* Created by Nicolas Martignole on 29/01/2014.
*/
object ApprovedProposal {

  // Total number of available slots per proposal type, keyed by i18n label.
  val getTotal: Map[String, Int] = Map(
    ("conf.label", ConferenceProposalConfigurations.CONF.slotsCount)
    , ("uni.label", ConferenceProposalConfigurations.UNI.slotsCount)
    , ("tia.label", ConferenceProposalConfigurations.TIA.slotsCount)
    , ("lab.label", ConferenceProposalConfigurations.LAB.slotsCount)
    , ("quick.label", ConferenceProposalConfigurations.QUICK.slotsCount)
    , ("bof.label", ConferenceProposalConfigurations.BOF.slotsCount)
    , ("key.label", ConferenceProposalConfigurations.KEY.slotsCount)
    , ("hack.label", ConferenceProposalConfigurations.HACK.slotsCount)
    , ("other.label", ConferenceProposalConfigurations.OTHER.slotsCount)
  )

  /** Number of approved proposals for the given talk type ("all" sums several types). */
  def countApproved(talkType: String): Long = Redis.pool.withClient {
    client =>
      talkType match {
        case null => 0
        case "all" =>
          // NOTE(review): the "all" sum omits hack/other even though getTotal lists them — confirm intended.
          client.scard("Approved:conf") + client.scard("Approved:lab") + client.scard("Approved:bof") + client.scard("Approved:key") + client.scard("Approved:tia") + client.scard("Approved:uni") + client.scard("Approved:quick")
        case other =>
          client.scard(s"Approved:$talkType")
      }
  }

  /** Number of refused proposals for the given talk type ("all" sums several types). */
  def countRefused(talkType: String): Long = Redis.pool.withClient {
    client =>
      talkType match {
        case null => 0
        case "all" =>
          // NOTE(review): unlike countApproved, "all" here also omits key — confirm intended.
          client.scard("Refused:conf") + client.scard("Refused:lab") + client.scard("Refused:bof") + client.scard("Refused:tia") + client.scard("Refused:uni") + client.scard("Refused:quick")
        case other =>
          client.scard(s"Refused:$talkType")
      }
  }

  /** Re-syncs the Approved/Refused indexes after a proposal's talk type changed. */
  def reflectProposalChanges(proposal: Proposal) = Redis.pool.withClient {
    implicit client =>
      changeTalkType(proposal.id, proposal.talkType.id)
      recomputeAcceptedSpeakers()
  }

  /** Rebuilds every ApprovedSpeakers:* set from scratch based on all approved proposals. */
  def recomputeAcceptedSpeakers() = Redis.pool.withClient {
    implicit client =>
      val allSpeakerIDs = client.keys("ApprovedSpeakers:*")
      val tx = client.multi()
      allSpeakerIDs.foreach {
        speakerId =>
          tx.del(s"$speakerId")
      }
      allApproved().map {
        proposal =>
          tx.sadd("ApprovedSpeakers:" + proposal.mainSpeaker, proposal.id.toString)
          proposal.secondarySpeaker.map(secondarySpeaker => tx.sadd("ApprovedSpeakers:" + secondarySpeaker, proposal.id.toString))
          proposal.otherSpeakers.foreach {
            otherSpeaker: String =>
              tx.sadd("ApprovedSpeakers:" + otherSpeaker, proposal.id.toString)
          }
      }
      tx.exec()
  }

  // Update Approved or Refused total by conference type: moves the proposal id
  // from its old talk-type set to the new one in both indexes.
  def changeTalkType(proposalId: String, newTalkType: String) = Redis.pool.withClient {
    client =>
      ConferenceDescriptor.ConferenceProposalTypes.ALL.foreach {
        proposalType =>
          if (client.sismember(s"Approved:${proposalType.id}", proposalId)) {
            val tx = client.multi()
            tx.srem(s"Approved:${proposalType.id}", proposalId)
            tx.sadd(s"Approved:$newTalkType", proposalId)
            tx.exec()
          }
          if (client.sismember(s"Refused:${proposalType.id}", proposalId)) {
            val tx = client.multi()
            tx.srem(s"Refused:${proposalType.id}", proposalId)
            tx.sadd(s"Refused:$newTalkType", proposalId)
            tx.exec()
          }
      }
  }

  def isApproved(proposal: Proposal): Boolean = {
    isApproved(proposal.id, proposal.talkType.id)
  }

  def isApproved(proposalId: String, talkType: String): Boolean = Redis.pool.withClient {
    client =>
      client.sismember(s"Approved:$talkType", proposalId)
  }

  // This is only for Attic controller, to fix an old bug on data (bug #159)
  // The bug was that a conference is approved, but then the speaker changes the
  // format to quickie, then the Approved:conf collection is not updated correctly
  def _loadApprovedCategoriesForTalk(proposal: Proposal): List[String] = {
    ConferenceDescriptor.ConferenceProposalConfigurations.ALL.filter { pc =>
      isApproved(proposal.id, pc.id)
    }.map(_.id)
  }

  def isRefused(proposal: Proposal): Boolean = {
    isRefused(proposal.id, proposal.talkType.id)
  }

  def isRefused(proposalId: String, talkType: String): Boolean = Redis.pool.withClient {
    client =>
      client.sismember(s"Refused:$talkType", proposalId)
  }

  /** Slots still available for a talk type (all types when the type is unknown). */
  def remainingSlots(talkType: String): Long = {
    // val (was a var): parse result is never reassigned
    val propType = ProposalConfiguration.parse(talkType)
    if (propType == ProposalConfiguration.UNKNOWN) {
      ProposalConfiguration.totalSlotsCount - countApproved("all")
    } else {
      propType.slotsCount - countApproved(talkType)
    }
  }

  /** Marks a proposal approved: updates the global, per-type and per-speaker indexes. */
  def approve(proposal: Proposal) = Redis.pool.withClient {
    implicit client =>
      val tx = client.multi()
      tx.sadd("ApprovedById:", proposal.id.toString)
      tx.sadd("Approved:" + proposal.talkType.id, proposal.id.toString)
      tx.sadd("ApprovedSpeakers:" + proposal.mainSpeaker, proposal.id.toString)
      proposal.secondarySpeaker.map(secondarySpeaker => tx.sadd("ApprovedSpeakers:" + secondarySpeaker, proposal.id.toString))
      proposal.otherSpeakers.foreach {
        otherSpeaker: String =>
          tx.sadd("ApprovedSpeakers:" + otherSpeaker, proposal.id.toString)
      }
      tx.exec()
  }

  /** Marks a proposal refused, first removing any previous approval. */
  def refuse(proposal: Proposal) = Redis.pool.withClient {
    implicit client =>
      cancelApprove(proposal)
      val tx = client.multi()
      tx.sadd("RefusedById:", proposal.id.toString)
      tx.sadd("Refused:" + proposal.talkType.id, proposal.id.toString)
      tx.sadd("RefusedSpeakers:" + proposal.mainSpeaker, proposal.id.toString)
      proposal.secondarySpeaker.map(secondarySpeaker => tx.sadd("RefusedSpeakers:" + secondarySpeaker, proposal.id.toString))
      proposal.otherSpeakers.foreach {
        otherSpeaker: String =>
          tx.sadd("RefusedSpeakers:" + otherSpeaker, proposal.id.toString)
      }
      tx.exec()
  }

  /** Removes a proposal from all approval indexes (including a legacy misspelled key). */
  def cancelApprove(proposal: Proposal) = Redis.pool.withClient {
    implicit client =>
      val tx = client.multi()
      tx.srem("ApprovedById:", proposal.id.toString)
      tx.srem("Approved:" + proposal.talkType.id, proposal.id.toString)
      // Buggy without a 'S' — cleans up entries written under the old, misspelled key
      tx.srem("ApprovedSpeaker:" + proposal.mainSpeaker, proposal.id.toString)
      // Correct
      tx.srem("ApprovedSpeakers:" + proposal.mainSpeaker, proposal.id.toString)
      proposal.secondarySpeaker.map {
        secondarySpeaker: String =>
          // Buggy without a 'S'
          tx.srem("ApprovedSpeaker:" + secondarySpeaker, proposal.id.toString)
          // Correct
          tx.srem("ApprovedSpeakers:" + secondarySpeaker, proposal.id.toString)
      }
      proposal.otherSpeakers.foreach {
        otherSpeaker: String =>
          // Buggy without a 'S'
          tx.srem("ApprovedSpeaker:" + otherSpeaker, proposal.id.toString)
          // and the correct one
          tx.srem("ApprovedSpeakers:" + otherSpeaker, proposal.id.toString)
      }
      tx.exec()
  }

  /** Removes a proposal from all refusal indexes. */
  def cancelRefuse(proposal: Proposal) = Redis.pool.withClient {
    implicit client =>
      val tx = client.multi()
      tx.srem("RefusedById:", proposal.id.toString)
      tx.srem("Refused:" + proposal.talkType.id, proposal.id.toString)
      tx.srem("RefusedSpeakers:" + proposal.mainSpeaker, proposal.id.toString)
      proposal.secondarySpeaker.map {
        secondarySpeaker: String =>
          tx.srem("RefusedSpeakers:" + secondarySpeaker, proposal.id.toString)
      }
      proposal.otherSpeakers.foreach {
        otherSpeaker: String =>
          tx.srem("RefusedSpeakers:" + otherSpeaker, proposal.id.toString)
      }
      tx.exec()
  }

  def allRefusedSpeakerIDs(): Set[String] = Redis.pool.withClient {
    implicit client =>
      client.keys("RefusedSpeakers:*").map {
        key =>
          val speakerUUID = key.substring("RefusedSpeakers:".length)
          speakerUUID
      }
  }

  /** Proposals that are both still submitted and refused. */
  def onlySubmittedRefused(): Iterable[Proposal] = Redis.pool.withClient {
    implicit client =>
      val proposalIDs = client.sinter(s"Proposals:ByState:${ProposalState.SUBMITTED.code}", "RefusedById:")
      Proposal.loadAndParseProposals(proposalIDs).values
  }

  /** Submitted proposals that have been neither refused nor approved yet. */
  def onlySubmittedNotRefused(): Iterable[Proposal] = Redis.pool.withClient {
    implicit client =>
      val proposalIDs = client.sdiff(s"Proposals:ByState:${ProposalState.SUBMITTED.code}", "RefusedById:", "ApprovedById:")
      Proposal.loadAndParseProposals(proposalIDs).values
  }

  /** Approved proposals of a talk type, excluding archived ones. */
  def allApprovedByTalkType(talkType: String): List[Proposal] = Redis.pool.withClient {
    implicit client =>
      val allProposalIDs = client.smembers("Approved:" + talkType).diff(client.smembers(s"Proposals:ByState:${ProposalState.ARCHIVED.code}"))
      val allProposalWithVotes = Proposal.loadAndParseProposals(allProposalIDs.toSet)
      allProposalWithVotes.values.toList
  }

  /** Refused proposals of a talk type, excluding archived ones. */
  def allRefusedByTalkType(talkType: String): List[Proposal] = Redis.pool.withClient {
    implicit client =>
      val allProposalIDs = client.smembers("Refused:" + talkType).diff(client.smembers(s"Proposals:ByState:${ProposalState.ARCHIVED.code}"))
      val allProposalWithVotes = Proposal.loadAndParseProposals(allProposalIDs.toSet)
      allProposalWithVotes.values.toList
  }

  /** Every approved, non-archived proposal across all talk types. */
  def allApproved(): Set[Proposal] = Redis.pool.withClient {
    implicit client =>
      val allKeys = client.keys("Approved:*")
      allKeys.flatMap {
        key =>
          val allProposalIDs = client.smembers(key).diff(client.smembers(s"Proposals:ByState:${ProposalState.ARCHIVED.code}")).toList
          Proposal.loadAndParseProposals(allProposalIDs.toSet).values.toList
      }
  }

  def allApprovedProposalIDs() = Redis.pool.withClient {
    implicit client =>
      client.smembers("ApprovedById:")
  }

  def allRefusedProposalIDs() = Redis.pool.withClient {
    implicit client =>
      client.smembers("RefusedById:")
  }

  def allApprovedSpeakers(): Set[Speaker] = Redis.pool.withClient {
    implicit client =>
      client.keys("ApprovedSpeakers:*").flatMap {
        key =>
          val speakerUUID = key.substring("ApprovedSpeakers:".length)
          for (speaker <- Speaker.findByUUID(speakerUUID)) yield speaker
      }
  }

  def allApprovedSpeakerIDs(): Set[String] = Redis.pool.withClient {
    implicit client =>
      client.keys("ApprovedSpeakers:*").map {
        key =>
          val speakerUUID = key.substring("ApprovedSpeakers:".length)
          speakerUUID
      }
  }

  def allApprovedTalksForSpeaker(speakerId: String): Iterable[Proposal] = Redis.pool.withClient {
    implicit client =>
      val allApprovedProposals = client.smembers("ApprovedSpeakers:" + speakerId)
      val mapOfProposals = Proposal.loadAndParseProposals(allApprovedProposals)
      mapOfProposals.values
  }

  /** Approved proposals of a talk type whose state is already ACCEPTED. */
  def allAcceptedByTalkType(talkType: String): List[Proposal] = Redis.pool.withClient {
    implicit client =>
      val allProposalIDs = client.smembers("Approved:" + talkType)
      val allProposalWithVotes = Proposal.loadAndParseProposals(allProposalIDs.toSet)
      allProposalWithVotes.values.filter(_.state == ProposalState.ACCEPTED).toList
  }

  /** Approved speakers having at least one approved talk whose type grants a free pass. */
  def allApprovedSpeakersWithFreePass(): Set[Speaker] = Redis.pool.withClient {
    implicit client =>
      val allSpeakers = client.keys("ApprovedSpeakers:*").flatMap {
        key =>
          val speakerUUID = key.substring("ApprovedSpeakers:".length)
          for (speaker <- Speaker.findByUUID(speakerUUID)) yield {
            (speaker,
              Proposal.loadAndParseProposals(client.smembers(key)).values.filter(p => ConferenceDescriptor.ConferenceProposalConfigurations.doesItGivesSpeakerFreeEntrance(p.talkType))
              )
          }
      }
      val setOfSpeakers = allSpeakers.filterNot(_._2.isEmpty).map(_._1)
      setOfSpeakers
  }
}
| rasata/cfp-devoxx | app/models/ApprovedProposal.scala | Scala | mit | 13,159 |
/*
* Copyright 2019 ACINQ SAS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.acinq.eclair.router
import akka.actor.ActorSystem
import akka.actor.typed.scaladsl.adapter.actorRefAdapter
import akka.testkit.{TestKit, TestProbe}
import fr.acinq.bitcoin.Crypto.PrivateKey
import fr.acinq.bitcoin.Script.{pay2wsh, write}
import fr.acinq.bitcoin.{Block, SatoshiLong, Transaction, TxOut}
import fr.acinq.eclair.TestConstants.Alice
import fr.acinq.eclair._
import fr.acinq.eclair.blockchain.bitcoind.ZmqWatcher.{UtxoStatus, ValidateRequest, ValidateResult}
import fr.acinq.eclair.crypto.TransportHandler
import fr.acinq.eclair.io.Peer.PeerRoutingMessage
import fr.acinq.eclair.router.Announcements.{makeChannelAnnouncement, makeChannelUpdate, makeNodeAnnouncement}
import fr.acinq.eclair.router.Router._
import fr.acinq.eclair.transactions.Scripts
import fr.acinq.eclair.wire.protocol.Color
import org.scalatest.funsuite.AnyFunSuiteLike
import scodec.bits._
import scala.concurrent.duration._
/**
 * Tests for the front/back router split: one back [[Router]] runs in the test's own
 * actor system, while several [[FrontRouter]] instances run in separate actor systems
 * (`front-system-1/2/3`) and relay gossip between peer connections and the back router.
 *
 * TestProbes play three roles here:
 *  - `watcher` stands in for the blockchain watcher validating channel announcements,
 *  - `pipeN` probes sit between a front router and the back router to observe and
 *    relay their messages,
 *  - `peerConnectionXy` probes act as peer connections feeding gossip to the fronts.
 */
class FrontRouterSpec extends TestKit(ActorSystem("test")) with AnyFunSuiteLike {

  import FrontRouterSpec._

  test("correctly dispatch valid gossip") {
    val nodeParams = Alice.nodeParams
    val watcher = TestProbe()
    val router = system.actorOf(Router.props(nodeParams, watcher.ref))
    // Each front router lives in its own actor system, mimicking separate processes.
    val system1 = ActorSystem("front-system-1")
    val system2 = ActorSystem("front-system-2")
    val system3 = ActorSystem("front-system-3")
    // we use those to control messages exchanged between front and back routers
    val pipe1 = TestProbe()
    val pipe2 = TestProbe()
    val pipe3 = TestProbe()
    val front1 = system1.actorOf(FrontRouter.props(nodeParams.routerConf, pipe1.ref))
    val front2 = system2.actorOf(FrontRouter.props(nodeParams.routerConf, pipe2.ref))
    val front3 = system3.actorOf(FrontRouter.props(nodeParams.routerConf, pipe3.ref))
    // Initial handshake: each front asks for the routing state stream; we relay the
    // request to the back router and forward its reply back to the front.
    pipe1.expectMsg(GetRoutingStateStreaming)
    pipe1.send(router, GetRoutingStateStreaming)
    pipe1.expectMsg(RoutingStateStreamingUpToDate)
    pipe1.forward(front1)
    pipe2.expectMsg(GetRoutingStateStreaming)
    pipe2.send(router, GetRoutingStateStreaming)
    pipe2.expectMsg(RoutingStateStreamingUpToDate)
    pipe2.forward(front2)
    pipe3.expectMsg(GetRoutingStateStreaming)
    pipe3.send(router, GetRoutingStateStreaming)
    pipe3.expectMsg(RoutingStateStreamingUpToDate)
    pipe3.forward(front3)
    // Peer connections: two on front1, one each on front2 and front3.
    val peerConnection1a = TestProbe()
    val peerConnection1b = TestProbe()
    val peerConnection2a = TestProbe()
    val peerConnection3a = TestProbe()
    // Rebroadcasts are published on each front system's event stream.
    system1.eventStream.subscribe(peerConnection1a.ref, classOf[Rebroadcast])
    system1.eventStream.subscribe(peerConnection1b.ref, classOf[Rebroadcast])
    system2.eventStream.subscribe(peerConnection2a.ref, classOf[Rebroadcast])
    system3.eventStream.subscribe(peerConnection3a.ref, classOf[Rebroadcast])
    val origin1a = RemoteGossip(peerConnection1a.ref, randomKey().publicKey)
    val origin1b = RemoteGossip(peerConnection1b.ref, randomKey().publicKey)
    val origin2a = RemoteGossip(peerConnection2a.ref, randomKey().publicKey)
    // First occurrence of chan_ab on front1: forwarded to the back router, which asks
    // the watcher to validate the announcement.
    peerConnection1a.send(front1, PeerRoutingMessage(peerConnection1a.ref, origin1a.nodeId, chan_ab))
    pipe1.expectMsg(PeerRoutingMessage(front1, origin1a.nodeId, chan_ab))
    pipe1.send(router, PeerRoutingMessage(pipe1.ref, origin1a.nodeId, chan_ab))
    assert(watcher.expectMsgType[ValidateRequest].ann === chan_ab)
    // Duplicate from a second peer on the same front: deduplicated, not forwarded.
    peerConnection1b.send(front1, PeerRoutingMessage(peerConnection1b.ref, origin1b.nodeId, chan_ab))
    pipe1.expectNoMessage()
    // Same announcement through a different front: forwarded, but the back router
    // acks immediately (validation is already pending).
    peerConnection2a.send(front2, PeerRoutingMessage(peerConnection2a.ref, origin2a.nodeId, chan_ab))
    pipe2.expectMsg(PeerRoutingMessage(front2, origin2a.nodeId, chan_ab))
    pipe2.send(router, PeerRoutingMessage(pipe2.ref, origin2a.nodeId, chan_ab))
    pipe2.expectMsg(TransportHandler.ReadAck(chan_ab))
    pipe1.expectNoMessage()
    pipe2.expectNoMessage()
    // Watcher confirms the funding tx is unspent -> announcement accepted and the
    // resulting events are relayed back to each front.
    watcher.send(router, ValidateResult(chan_ab, Right((Transaction(version = 0, txIn = Nil, txOut = TxOut(1000000 sat, write(pay2wsh(Scripts.multiSig2of2(funding_a, funding_b)))) :: Nil, lockTime = 0), UtxoStatus.Unspent))))
    pipe1.expectMsg(TransportHandler.ReadAck(chan_ab))
    pipe1.expectMsg(GossipDecision.Accepted(chan_ab))
    pipe1.forward(front1)
    pipe1.expectMsg(ChannelsDiscovered(Seq(SingleChannelDiscovered(chan_ab, 1000000 sat, None, None))))
    pipe1.forward(front1)
    pipe2.expectMsg(GossipDecision.Accepted(chan_ab))
    pipe2.forward(front2)
    pipe2.expectMsg(ChannelsDiscovered(Seq(SingleChannelDiscovered(chan_ab, 1000000 sat, None, None))))
    pipe2.forward(front2)
    pipe3.expectMsg(ChannelsDiscovered(Seq(SingleChannelDiscovered(chan_ab, 1000000 sat, None, None))))
    pipe3.forward(front3)
    pipe1.expectNoMessage()
    pipe2.expectNoMessage()
    pipe3.expectNoMessage()
    // Every peer that sent the announcement gets an ack followed by the decision.
    peerConnection1a.expectMsg(TransportHandler.ReadAck(chan_ab))
    peerConnection1b.expectMsg(TransportHandler.ReadAck(chan_ab))
    peerConnection2a.expectMsg(TransportHandler.ReadAck(chan_ab))
    peerConnection1a.expectMsg(GossipDecision.Accepted(chan_ab))
    peerConnection1b.expectMsg(GossipDecision.Accepted(chan_ab))
    peerConnection2a.expectMsg(GossipDecision.Accepted(chan_ab))
    // we have to wait 2 times the broadcast interval because there is an additional per-peer delay
    val maxBroadcastDelay = 2 * nodeParams.routerConf.routerBroadcastInterval + 1.second
    // Each front rebroadcasts with the set of local origins, so peers that sent the
    // announcement themselves are excluded from its rebroadcast.
    peerConnection1a.expectMsg(maxBroadcastDelay, Rebroadcast(channels = Map(chan_ab -> Set(origin1a, origin1b)), updates = Map.empty, nodes = Map.empty))
    peerConnection1b.expectMsg(maxBroadcastDelay, Rebroadcast(channels = Map(chan_ab -> Set(origin1a, origin1b)), updates = Map.empty, nodes = Map.empty))
    peerConnection2a.expectMsg(maxBroadcastDelay, Rebroadcast(channels = Map(chan_ab -> Set(origin2a)), updates = Map.empty, nodes = Map.empty))
    peerConnection3a.expectMsg(maxBroadcastDelay, Rebroadcast(channels = Map(chan_ab -> Set.empty), updates = Map.empty, nodes = Map.empty))
  }

  test("aggregate gossip") {
    val nodeParams = Alice.nodeParams
    val watcher = TestProbe()
    val router = system.actorOf(Router.props(nodeParams, watcher.ref))
    val system1 = ActorSystem("front-system-1")
    val system2 = ActorSystem("front-system-2")
    val system3 = ActorSystem("front-system-3")
    // Unlike the previous test, fronts talk to the back router directly (no pipes).
    val front1 = system1.actorOf(FrontRouter.props(nodeParams.routerConf, router))
    val front2 = system2.actorOf(FrontRouter.props(nodeParams.routerConf, router))
    val front3 = system3.actorOf(FrontRouter.props(nodeParams.routerConf, router))
    val peerConnection1a = TestProbe("peerconn-1a")
    val peerConnection1b = TestProbe("peerconn-1b")
    val peerConnection2a = TestProbe("peerconn-2a")
    val peerConnection3a = TestProbe("peerconn-3a")
    system1.eventStream.subscribe(peerConnection1a.ref, classOf[Rebroadcast])
    system1.eventStream.subscribe(peerConnection1b.ref, classOf[Rebroadcast])
    system2.eventStream.subscribe(peerConnection2a.ref, classOf[Rebroadcast])
    system3.eventStream.subscribe(peerConnection3a.ref, classOf[Rebroadcast])
    val origin1a = RemoteGossip(peerConnection1a.ref, randomKey().publicKey)
    val origin1b = RemoteGossip(peerConnection1b.ref, randomKey().publicKey)
    val origin2a = RemoteGossip(peerConnection2a.ref, randomKey().publicKey)
    val origin3a = RemoteGossip(peerConnection3a.ref, randomKey().publicKey)
    // chan_ab arrives from three peers across two fronts; validation starts once.
    peerConnection1a.send(front1, PeerRoutingMessage(peerConnection1a.ref, origin1a.nodeId, chan_ab))
    assert(watcher.expectMsgType[ValidateRequest].ann === chan_ab)
    peerConnection1b.send(front1, PeerRoutingMessage(peerConnection1b.ref, origin1b.nodeId, chan_ab))
    peerConnection2a.send(front2, PeerRoutingMessage(peerConnection2a.ref, origin2a.nodeId, chan_ab))
    // node_announcement for a node with no known channel is rejected immediately.
    peerConnection1a.send(front1, PeerRoutingMessage(peerConnection1a.ref, origin1a.nodeId, ann_c))
    peerConnection1a.expectMsg(TransportHandler.ReadAck(ann_c))
    peerConnection1a.expectMsg(GossipDecision.NoKnownChannel(ann_c))
    // ann_a and channelUpdate_ba are stashed until chan_ab is validated;
    // channelUpdate_bc references an unknown channel and is rejected right away.
    peerConnection3a.send(front3, PeerRoutingMessage(peerConnection3a.ref, origin3a.nodeId, ann_a))
    peerConnection3a.send(front3, PeerRoutingMessage(peerConnection3a.ref, origin3a.nodeId, channelUpdate_ba))
    peerConnection3a.send(front3, PeerRoutingMessage(peerConnection3a.ref, origin3a.nodeId, channelUpdate_bc))
    peerConnection3a.expectMsg(TransportHandler.ReadAck(channelUpdate_bc))
    peerConnection3a.expectMsg(GossipDecision.NoRelatedChannel(channelUpdate_bc))
    // Validation succeeds -> chan_ab accepted, then the stashed update/announcement too.
    watcher.send(router, ValidateResult(chan_ab, Right((Transaction(version = 0, txIn = Nil, txOut = TxOut(1000000 sat, write(pay2wsh(Scripts.multiSig2of2(funding_a, funding_b)))) :: Nil, lockTime = 0), UtxoStatus.Unspent))))
    peerConnection1a.expectMsg(TransportHandler.ReadAck(chan_ab))
    peerConnection1b.expectMsg(TransportHandler.ReadAck(chan_ab))
    peerConnection2a.expectMsg(TransportHandler.ReadAck(chan_ab))
    peerConnection1a.expectMsg(GossipDecision.Accepted(chan_ab))
    peerConnection1b.expectMsg(GossipDecision.Accepted(chan_ab))
    peerConnection2a.expectMsg(GossipDecision.Accepted(chan_ab))
    peerConnection3a.expectMsg(TransportHandler.ReadAck(channelUpdate_ba))
    peerConnection3a.expectMsg(GossipDecision.Accepted(channelUpdate_ba))
    peerConnection3a.expectMsg(TransportHandler.ReadAck(ann_a))
    peerConnection3a.expectMsg(GossipDecision.Accepted(ann_a))
    peerConnection1b.send(front1, PeerRoutingMessage(peerConnection1b.ref, origin1b.nodeId, channelUpdate_ab))
    peerConnection1b.expectMsg(TransportHandler.ReadAck(channelUpdate_ab))
    peerConnection1b.expectMsg(GossipDecision.Accepted(channelUpdate_ab))
    peerConnection3a.send(front3, PeerRoutingMessage(peerConnection3a.ref, origin3a.nodeId, ann_b))
    peerConnection3a.expectMsg(TransportHandler.ReadAck(ann_b))
    peerConnection3a.expectMsg(GossipDecision.Accepted(ann_b))
    // we have to wait 2 times the broadcast interval because there is an additional per-peer delay
    val maxBroadcastDelay = 2 * nodeParams.routerConf.routerBroadcastInterval + 1.second
    // Each front's rebroadcast carries the origins local to that front only.
    peerConnection1a.expectMsg(maxBroadcastDelay, Rebroadcast(channels = Map(chan_ab -> Set(origin1a, origin1b)), updates = Map(channelUpdate_ab -> Set(origin1b), channelUpdate_ba -> Set.empty), nodes = Map(ann_a -> Set.empty, ann_b -> Set.empty)))
    peerConnection1b.expectMsg(maxBroadcastDelay, Rebroadcast(channels = Map(chan_ab -> Set(origin1a, origin1b)), updates = Map(channelUpdate_ab -> Set(origin1b), channelUpdate_ba -> Set.empty), nodes = Map(ann_a -> Set.empty, ann_b -> Set.empty)))
    peerConnection2a.expectMsg(maxBroadcastDelay, Rebroadcast(channels = Map(chan_ab -> Set(origin2a)), updates = Map(channelUpdate_ab -> Set.empty, channelUpdate_ba -> Set.empty), nodes = Map(ann_a -> Set.empty, ann_b -> Set.empty)))
    peerConnection3a.expectMsg(maxBroadcastDelay, Rebroadcast(channels = Map(chan_ab -> Set.empty), updates = Map(channelUpdate_ab -> Set.empty, channelUpdate_ba -> Set(origin3a)), nodes = Map(ann_a -> Set(origin3a), ann_b -> Set(origin3a))))
  }

  test("do not forward duplicate gossip") {
    val nodeParams = Alice.nodeParams
    // Here the back router itself is a probe, so we fully control its replies.
    val router = TestProbe()
    val system1 = ActorSystem("front-system-1")
    val front1 = system1.actorOf(FrontRouter.props(nodeParams.routerConf, router.ref))
    router.expectMsg(GetRoutingStateStreaming)
    router.send(front1, RoutingStateStreamingUpToDate)
    val peerConnection1 = TestProbe()
    system1.eventStream.subscribe(peerConnection1.ref, classOf[Rebroadcast])
    val origin1 = RemoteGossip(peerConnection1.ref, randomKey().publicKey)
    peerConnection1.send(front1, PeerRoutingMessage(peerConnection1.ref, origin1.nodeId, chan_ab))
    router.expectMsg(PeerRoutingMessage(front1, origin1.nodeId, chan_ab))
    router.send(front1, TransportHandler.ReadAck(chan_ab))
    peerConnection1.expectNoMessage()
    router.send(front1, GossipDecision.Accepted(chan_ab))
    peerConnection1.expectMsg(TransportHandler.ReadAck(chan_ab))
    peerConnection1.expectMsg(GossipDecision.Accepted(chan_ab))
    router.send(front1, ChannelsDiscovered(SingleChannelDiscovered(chan_ab, 0.sat, None, None) :: Nil))
    peerConnection1.send(front1, PeerRoutingMessage(peerConnection1.ref, origin1.nodeId, chan_ab))
    router.expectNoMessage() // announcement is pending rebroadcast
    peerConnection1.expectMsg(TransportHandler.ReadAck(chan_ab))
    router.send(front1, TickBroadcast)
    peerConnection1.expectMsg(Rebroadcast(channels = Map(chan_ab -> Set(origin1)), updates = Map.empty, nodes = Map.empty))
    peerConnection1.send(front1, PeerRoutingMessage(peerConnection1.ref, origin1.nodeId, chan_ab))
    router.expectNoMessage() // announcement is already known
    peerConnection1.expectMsg(TransportHandler.ReadAck(chan_ab))
  }

  test("acknowledge duplicate gossip") {
    val nodeParams = Alice.nodeParams
    val router = TestProbe()
    val system1 = ActorSystem("front-system-1")
    val front1 = system1.actorOf(FrontRouter.props(nodeParams.routerConf, router.ref))
    router.expectMsg(GetRoutingStateStreaming)
    router.send(front1, RoutingStateStreamingUpToDate)
    val peerConnection1 = TestProbe()
    system1.eventStream.subscribe(peerConnection1.ref, classOf[Rebroadcast])
    val origin1 = RemoteGossip(peerConnection1.ref, randomKey().publicKey)
    // first message arrives and is forwarded to router
    peerConnection1.send(front1, PeerRoutingMessage(peerConnection1.ref, origin1.nodeId, chan_ab))
    router.expectMsg(PeerRoutingMessage(front1, origin1.nodeId, chan_ab))
    peerConnection1.expectNoMessage()
    // duplicate message is immediately acknowledged
    peerConnection1.send(front1, PeerRoutingMessage(peerConnection1.ref, origin1.nodeId, chan_ab))
    peerConnection1.expectMsg(TransportHandler.ReadAck(chan_ab))
    // router acknowledges the first message
    router.send(front1, TransportHandler.ReadAck(chan_ab))
    // but we still wait for the decision before acking the original message
    peerConnection1.expectNoMessage()
    // decision arrives, message is acknowledged
    router.send(front1, GossipDecision.Accepted(chan_ab))
    peerConnection1.expectMsg(TransportHandler.ReadAck(chan_ab))
    peerConnection1.expectMsg(GossipDecision.Accepted(chan_ab))
  }

  test("do not rebroadcast channel_update for private channels") {
    val nodeParams = Alice.nodeParams
    val router = TestProbe()
    val system1 = ActorSystem("front-system-1")
    val front1 = system1.actorOf(FrontRouter.props(nodeParams.routerConf, router.ref))
    router.expectMsg(GetRoutingStateStreaming)
    router.send(front1, RoutingStateStreamingUpToDate)
    val peerConnection1 = TestProbe()
    system1.eventStream.subscribe(peerConnection1.ref, classOf[Rebroadcast])
    val origin1 = RemoteGossip(peerConnection1.ref, randomKey().publicKey)
    // channel_update arrives and is forwarded to router (there is no associated channel, because it is private)
    peerConnection1.send(front1, PeerRoutingMessage(peerConnection1.ref, origin1.nodeId, channelUpdate_ab))
    router.expectMsg(PeerRoutingMessage(front1, origin1.nodeId, channelUpdate_ab))
    peerConnection1.expectNoMessage()
    // router acknowledges the message
    router.send(front1, TransportHandler.ReadAck(channelUpdate_ab))
    // but we still wait for the decision before acking the original message
    peerConnection1.expectNoMessage()
    // decision arrives, message is acknowledged
    router.send(front1, GossipDecision.Accepted(channelUpdate_ab))
    peerConnection1.expectMsg(TransportHandler.ReadAck(channelUpdate_ab))
    peerConnection1.expectMsg(GossipDecision.Accepted(channelUpdate_ab))
    // then the event arrives
    front1 ! ChannelUpdatesReceived(channelUpdate_ab :: Nil)
    // rebroadcast
    front1 ! TickBroadcast
    // The private channel_update must NOT be included in any rebroadcast.
    peerConnection1.expectNoMessage()
  }
}
/**
 * Shared fixtures for [[FrontRouterSpec]]: six nodes a..f with their node keys,
 * funding keys, node announcements, channel announcements (a-b, b-c, c-d, e-f) and
 * channel updates in both directions. All announcements are properly signed for the
 * regtest chain, so they pass signature checks in the router.
 */
object FrontRouterSpec {
  val (priv_a, priv_b, priv_c, priv_d, priv_e, priv_f) = (randomKey(), randomKey(), randomKey(), randomKey(), randomKey(), randomKey())
  val (a, b, c, d, e, f) = (priv_a.publicKey, priv_b.publicKey, priv_c.publicKey, priv_d.publicKey, priv_e.publicKey, priv_f.publicKey)
  val (priv_funding_a, priv_funding_b, priv_funding_c, priv_funding_d, priv_funding_e, priv_funding_f) = (randomKey(), randomKey(), randomKey(), randomKey(), randomKey(), randomKey())
  val (funding_a, funding_b, funding_c, funding_d, funding_e, funding_f) = (priv_funding_a.publicKey, priv_funding_b.publicKey, priv_funding_c.publicKey, priv_funding_d.publicKey, priv_funding_e.publicKey, priv_funding_f.publicKey)

  // Node announcements (name, color, no addresses); a and c advertise var_onion_optin.
  val ann_a = makeNodeAnnouncement(priv_a, "node-A", Color(15, 10, -70), Nil, Features(Features.VariableLengthOnion -> FeatureSupport.Optional))
  val ann_b = makeNodeAnnouncement(priv_b, "node-B", Color(50, 99, -80), Nil, Features.empty)
  val ann_c = makeNodeAnnouncement(priv_c, "node-C", Color(123, 100, -40), Nil, Features(Features.VariableLengthOnion -> FeatureSupport.Optional))
  val ann_d = makeNodeAnnouncement(priv_d, "node-D", Color(-120, -20, 60), Nil, Features.empty)
  val ann_e = makeNodeAnnouncement(priv_e, "node-E", Color(-50, 0, 10), Nil, Features.empty)
  val ann_f = makeNodeAnnouncement(priv_f, "node-F", Color(30, 10, -50), Nil, Features.empty)

  // All channels sit in the same block (420000), distinguished by tx index.
  val channelId_ab = ShortChannelId(BlockHeight(420000), 1, 0)
  val channelId_bc = ShortChannelId(BlockHeight(420000), 2, 0)
  val channelId_cd = ShortChannelId(BlockHeight(420000), 3, 0)
  val channelId_ef = ShortChannelId(BlockHeight(420000), 4, 0)

  /**
   * Builds a fully signed channel_announcement for the regtest chain: the witness is
   * signed by both node keys and both funding keys.
   */
  def channelAnnouncement(shortChannelId: ShortChannelId, node1_priv: PrivateKey, node2_priv: PrivateKey, funding1_priv: PrivateKey, funding2_priv: PrivateKey) = {
    val witness = Announcements.generateChannelAnnouncementWitness(Block.RegtestGenesisBlock.hash, shortChannelId, node1_priv.publicKey, node2_priv.publicKey, funding1_priv.publicKey, funding2_priv.publicKey, Features.empty)
    val node1_sig = Announcements.signChannelAnnouncement(witness, node1_priv)
    val funding1_sig = Announcements.signChannelAnnouncement(witness, funding1_priv)
    val node2_sig = Announcements.signChannelAnnouncement(witness, node2_priv)
    val funding2_sig = Announcements.signChannelAnnouncement(witness, funding2_priv)
    makeChannelAnnouncement(Block.RegtestGenesisBlock.hash, shortChannelId, node1_priv.publicKey, node2_priv.publicKey, funding1_priv.publicKey, funding2_priv.publicKey, node1_sig, node2_sig, funding1_sig, funding2_sig)
  }

  val chan_ab = channelAnnouncement(channelId_ab, priv_a, priv_b, priv_funding_a, priv_funding_b)
  val chan_bc = channelAnnouncement(channelId_bc, priv_b, priv_c, priv_funding_b, priv_funding_c)
  val chan_cd = channelAnnouncement(channelId_cd, priv_c, priv_d, priv_funding_c, priv_funding_d)
  val chan_ef = channelAnnouncement(channelId_ef, priv_e, priv_f, priv_funding_e, priv_funding_f)

  // Channel updates for both directions of each channel (e.g. ab = a->b, ba = b->a).
  val channelUpdate_ab = makeChannelUpdate(Block.RegtestGenesisBlock.hash, priv_a, b, channelId_ab, CltvExpiryDelta(7), htlcMinimumMsat = 0 msat, feeBaseMsat = 10 msat, feeProportionalMillionths = 10, htlcMaximumMsat = 500000000 msat)
  val channelUpdate_ba = makeChannelUpdate(Block.RegtestGenesisBlock.hash, priv_b, a, channelId_ab, CltvExpiryDelta(7), htlcMinimumMsat = 0 msat, feeBaseMsat = 10 msat, feeProportionalMillionths = 10, htlcMaximumMsat = 500000000 msat)
  val channelUpdate_bc = makeChannelUpdate(Block.RegtestGenesisBlock.hash, priv_b, c, channelId_bc, CltvExpiryDelta(5), htlcMinimumMsat = 0 msat, feeBaseMsat = 10 msat, feeProportionalMillionths = 1, htlcMaximumMsat = 500000000 msat)
  val channelUpdate_cb = makeChannelUpdate(Block.RegtestGenesisBlock.hash, priv_c, b, channelId_bc, CltvExpiryDelta(5), htlcMinimumMsat = 0 msat, feeBaseMsat = 10 msat, feeProportionalMillionths = 1, htlcMaximumMsat = 500000000 msat)
  val channelUpdate_cd = makeChannelUpdate(Block.RegtestGenesisBlock.hash, priv_c, d, channelId_cd, CltvExpiryDelta(3), htlcMinimumMsat = 0 msat, feeBaseMsat = 10 msat, feeProportionalMillionths = 4, htlcMaximumMsat = 500000000 msat)
  val channelUpdate_dc = makeChannelUpdate(Block.RegtestGenesisBlock.hash, priv_d, c, channelId_cd, CltvExpiryDelta(3), htlcMinimumMsat = 0 msat, feeBaseMsat = 10 msat, feeProportionalMillionths = 4, htlcMaximumMsat = 500000000 msat)
  val channelUpdate_ef = makeChannelUpdate(Block.RegtestGenesisBlock.hash, priv_e, f, channelId_ef, CltvExpiryDelta(9), htlcMinimumMsat = 0 msat, feeBaseMsat = 10 msat, feeProportionalMillionths = 8, htlcMaximumMsat = 500000000 msat)
  val channelUpdate_fe = makeChannelUpdate(Block.RegtestGenesisBlock.hash, priv_f, e, channelId_ef, CltvExpiryDelta(9), htlcMinimumMsat = 0 msat, feeBaseMsat = 10 msat, feeProportionalMillionths = 8, htlcMaximumMsat = 500000000 msat)
}
| ACINQ/eclair | eclair-front/src/test/scala/fr/acinq/eclair/router/FrontRouterSpec.scala | Scala | apache-2.0 | 21,209 |
// IntelliJ Scala plugin type-inference test fixture (SCL-10857). The markers below
// (/*start*/.../*end*/ and the trailing // comment at end of file) are part of the
// fixture format — do not reformat or remove them.
object ListC {
  class T1[A]
  class T2[A]
  // NOTE(review): returns null on purpose — only the declared compound type matters
  // for the type-inference test; the value is never used at runtime.
  implicit def getT[A]: T1[A] with T2[A] = null
}
class Test {
  import ListC._
  // Summons an implicit T[A] for a higher-kinded T; the compound implicit in ListC
  // can satisfy T = T2 (or T1), which is what the test exercises.
  def foo[T[_], A](implicit a: T[A]): T[A] = a
  // The expression between the markers is the one whose inferred type is checked.
  val baz: T2[Int] = /*start*/foo/*end*/
}
//ListC.T2[Int] | ilinum/intellij-scala | testdata/typeInference/higherKinds/SCL10857_2.scala | Scala | apache-2.0 | 229 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
import org.apache.spark.sql.types._
/**
 * Verifies that Catalyst's analyzer produces the expected type-check errors (or
 * succeeds via implicit casts) for expressions over a relation with one attribute
 * per interesting data type.
 *
 * Each helper analyzes `SELECT &lt;expr&gt; AS c FROM testRelation` and either expects
 * success or an AnalysisException whose message contains both the standard
 * "cannot resolve ... due to data type mismatch" prefix and a test-specific fragment.
 */
class ExpressionTypeCheckingSuite extends SparkFunSuite {

  // One attribute of each type exercised by the tests below.
  val testRelation = LocalRelation(
    Symbol("intField").int,
    Symbol("stringField").string,
    Symbol("booleanField").boolean,
    Symbol("decimalField").decimal(8, 0),
    Symbol("arrayField").array(StringType),
    Symbol("mapField").map(StringType, LongType))

  /** Expects analysis to fail with the standard mismatch prefix plus `errorMessage`. */
  def assertError(expr: Expression, errorMessage: String): Unit = {
    val e = intercept[AnalysisException] {
      assertSuccess(expr)
    }
    assert(e.getMessage.contains(
      s"cannot resolve '${expr.sql}' due to data type mismatch:"))
    assert(e.getMessage.contains(errorMessage))
  }

  /** Expects `expr` to analyze cleanly when projected from the test relation. */
  def assertSuccess(expr: Expression): Unit = {
    val analyzed = testRelation.select(expr.as("c")).analyze
    SimpleAnalyzer.checkAnalysis(analyzed)
  }

  /** Shorthand for the "differing types" mismatch message. */
  def assertErrorForDifferingTypes(expr: Expression): Unit = {
    assertError(expr,
      s"differing types in '${expr.sql}'")
  }

  test("check types for unary arithmetic") {
    assertError(BitwiseNot(Symbol("stringField")), "requires integral type")
  }

  test("check types for binary arithmetic") {
    // We will cast String to Double for binary arithmetic
    assertSuccess(Add(Symbol("intField"), Symbol("stringField")))
    assertSuccess(Subtract(Symbol("intField"), Symbol("stringField")))
    assertSuccess(Multiply(Symbol("intField"), Symbol("stringField")))
    assertSuccess(Divide(Symbol("intField"), Symbol("stringField")))
    assertSuccess(Remainder(Symbol("intField"), Symbol("stringField")))
    // checkAnalysis(BitwiseAnd(Symbol("intField"), Symbol("stringField")))

    // int vs boolean has no common arithmetic type -> "differing types".
    assertErrorForDifferingTypes(Add(Symbol("intField"), Symbol("booleanField")))
    assertErrorForDifferingTypes(Subtract(Symbol("intField"), Symbol("booleanField")))
    assertErrorForDifferingTypes(Multiply(Symbol("intField"), Symbol("booleanField")))
    assertErrorForDifferingTypes(Divide(Symbol("intField"), Symbol("booleanField")))
    assertErrorForDifferingTypes(Remainder(Symbol("intField"), Symbol("booleanField")))
    assertErrorForDifferingTypes(BitwiseAnd(Symbol("intField"), Symbol("booleanField")))
    assertErrorForDifferingTypes(BitwiseOr(Symbol("intField"), Symbol("booleanField")))
    assertErrorForDifferingTypes(BitwiseXor(Symbol("intField"), Symbol("booleanField")))

    // Same (boolean) type on both sides -> per-operator input-type error instead.
    assertError(Add(Symbol("booleanField"), Symbol("booleanField")),
      "requires (numeric or interval or day-time interval or year-month interval) type")
    assertError(Subtract(Symbol("booleanField"), Symbol("booleanField")),
      "requires (numeric or interval or day-time interval or year-month interval) type")
    assertError(Multiply(Symbol("booleanField"), Symbol("booleanField")), "requires numeric type")
    assertError(Divide(Symbol("booleanField"), Symbol("booleanField")),
      "requires (double or decimal) type")
    assertError(Remainder(Symbol("booleanField"), Symbol("booleanField")), "requires numeric type")
    assertError(BitwiseAnd(Symbol("booleanField"), Symbol("booleanField")),
      "requires integral type")
    assertError(BitwiseOr(Symbol("booleanField"), Symbol("booleanField")), "requires integral type")
    assertError(BitwiseXor(Symbol("booleanField"), Symbol("booleanField")),
      "requires integral type")
  }

  test("check types for predicates") {
    // We will cast String to Double for binary comparison
    assertSuccess(EqualTo(Symbol("intField"), Symbol("stringField")))
    assertSuccess(EqualNullSafe(Symbol("intField"), Symbol("stringField")))
    assertSuccess(LessThan(Symbol("intField"), Symbol("stringField")))
    assertSuccess(LessThanOrEqual(Symbol("intField"), Symbol("stringField")))
    assertSuccess(GreaterThan(Symbol("intField"), Symbol("stringField")))
    assertSuccess(GreaterThanOrEqual(Symbol("intField"), Symbol("stringField")))

    // We will transform EqualTo with numeric and boolean types to CaseKeyWhen
    assertSuccess(EqualTo(Symbol("intField"), Symbol("booleanField")))
    assertSuccess(EqualNullSafe(Symbol("intField"), Symbol("booleanField")))

    assertErrorForDifferingTypes(EqualTo(Symbol("intField"), Symbol("mapField")))
    assertErrorForDifferingTypes(EqualNullSafe(Symbol("intField"), Symbol("mapField")))
    assertErrorForDifferingTypes(LessThan(Symbol("intField"), Symbol("booleanField")))
    assertErrorForDifferingTypes(LessThanOrEqual(Symbol("intField"), Symbol("booleanField")))
    assertErrorForDifferingTypes(GreaterThan(Symbol("intField"), Symbol("booleanField")))
    assertErrorForDifferingTypes(GreaterThanOrEqual(Symbol("intField"), Symbol("booleanField")))

    // Map types have no ordering, so comparisons (and equality) on maps are rejected.
    assertError(EqualTo(Symbol("mapField"), Symbol("mapField")),
      "EqualTo does not support ordering on type map")
    assertError(EqualNullSafe(Symbol("mapField"), Symbol("mapField")),
      "EqualNullSafe does not support ordering on type map")
    assertError(LessThan(Symbol("mapField"), Symbol("mapField")),
      "LessThan does not support ordering on type map")
    assertError(LessThanOrEqual(Symbol("mapField"), Symbol("mapField")),
      "LessThanOrEqual does not support ordering on type map")
    assertError(GreaterThan(Symbol("mapField"), Symbol("mapField")),
      "GreaterThan does not support ordering on type map")
    assertError(GreaterThanOrEqual(Symbol("mapField"), Symbol("mapField")),
      "GreaterThanOrEqual does not support ordering on type map")

    // Conditional expressions: predicate must be boolean, branches must be coercible.
    assertError(If(Symbol("intField"), Symbol("stringField"), Symbol("stringField")),
      "type of predicate expression in If should be boolean")
    assertErrorForDifferingTypes(
      If(Symbol("booleanField"), Symbol("intField"), Symbol("booleanField")))

    assertError(
      CaseWhen(Seq((Symbol("booleanField").attr, Symbol("intField").attr),
        (Symbol("booleanField").attr, Symbol("mapField").attr))),
      "THEN and ELSE expressions should all be same type or coercible to a common type")
    assertError(
      CaseKeyWhen(Symbol("intField"), Seq(Symbol("intField"), Symbol("stringField"),
        Symbol("intField"), Symbol("mapField"))),
      "THEN and ELSE expressions should all be same type or coercible to a common type")
    assertError(
      CaseWhen(Seq((Symbol("booleanField").attr, Symbol("intField").attr),
        (Symbol("intField").attr, Symbol("intField").attr))),
      "WHEN expressions in CaseWhen should all be boolean type")
  }

  test("check types for aggregates") {
    // We use AggregateFunction directly at here because the error will be thrown from it
    // instead of from AggregateExpression, which is the wrapper of an AggregateFunction.

    // We will cast String to Double for sum and average
    assertSuccess(Sum(Symbol("stringField")))
    assertSuccess(Average(Symbol("stringField")))
    assertSuccess(Min(Symbol("arrayField")))
    assertSuccess(new BoolAnd(Symbol("booleanField")))
    assertSuccess(new BoolOr(Symbol("booleanField")))

    assertError(Min(Symbol("mapField")), "min does not support ordering on type")
    assertError(Max(Symbol("mapField")), "max does not support ordering on type")
    assertError(Sum(Symbol("booleanField")), "function sum requires numeric or interval types")
    assertError(Average(Symbol("booleanField")),
      "function average requires numeric or interval types")
  }

  test("check types for others") {
    assertError(CreateArray(Seq(Symbol("intField"), Symbol("booleanField"))),
      "input to function array should all be the same type")
    assertError(Coalesce(Seq(Symbol("intField"), Symbol("booleanField"))),
      "input to function coalesce should all be the same type")
    assertError(Coalesce(Nil), "function coalesce requires at least one argument")
    assertError(new Murmur3Hash(Nil), "function hash requires at least one argument")
    assertError(new XxHash64(Nil), "function xxhash64 requires at least one argument")
    assertError(Explode(Symbol("intField")),
      "input to function explode should be array or map type")
    assertError(PosExplode(Symbol("intField")),
      "input to function explode should be array or map type")
  }

  test("check types for CreateNamedStruct") {
    // Arguments alternate (name, value); names must be foldable non-null strings.
    assertError(
      CreateNamedStruct(Seq("a", "b", 2.0)), "even number of arguments")
    assertError(
      CreateNamedStruct(Seq(1, "a", "b", 2.0)),
      "Only foldable string expressions are allowed to appear at odd position")
    assertError(
      CreateNamedStruct(Seq(Symbol("a").string.at(0), "a", "b", 2.0)),
      "Only foldable string expressions are allowed to appear at odd position")
    assertError(
      CreateNamedStruct(Seq(Literal.create(null, StringType), "a")),
      "Field name should not be null")
  }

  test("check types for CreateMap") {
    // Arguments alternate (key, value); keys and values must each be homogeneous.
    assertError(CreateMap(Seq("a", "b", 2.0)), "even number of arguments")
    assertError(
      CreateMap(Seq(Symbol("intField"), Symbol("stringField"),
        Symbol("booleanField"), Symbol("stringField"))),
      "keys of function map should all be the same type")
    assertError(
      CreateMap(Seq(Symbol("stringField"), Symbol("intField"),
        Symbol("stringField"), Symbol("booleanField"))),
      "values of function map should all be the same type")
  }

  test("check types for ROUND/BROUND") {
    // The scale argument must be a foldable int; the value must be numeric.
    assertSuccess(Round(Literal(null), Literal(null)))
    assertSuccess(Round(Symbol("intField"), Literal(1)))

    assertError(Round(Symbol("intField"), Symbol("intField")),
      "Only foldable Expression is allowed")
    assertError(Round(Symbol("intField"), Symbol("booleanField")), "requires int type")
    assertError(Round(Symbol("intField"), Symbol("mapField")), "requires int type")
    assertError(Round(Symbol("booleanField"), Symbol("intField")), "requires numeric type")

    assertSuccess(BRound(Literal(null), Literal(null)))
    assertSuccess(BRound(Symbol("intField"), Literal(1)))

    assertError(BRound(Symbol("intField"), Symbol("intField")),
      "Only foldable Expression is allowed")
    assertError(BRound(Symbol("intField"), Symbol("booleanField")), "requires int type")
    assertError(BRound(Symbol("intField"), Symbol("mapField")), "requires int type")
    assertError(BRound(Symbol("booleanField"), Symbol("intField")), "requires numeric type")
  }

  test("check types for Greatest/Least") {
    // Both variadic comparators share the same constraints, so test them together.
    for (operator <- Seq[(Seq[Expression] => Expression)](Greatest, Least)) {
      assertError(operator(Seq(Symbol("booleanField"))), "requires at least two arguments")
      assertError(operator(Seq(Symbol("intField"), Symbol("stringField"))),
        "should all have the same type")
      assertError(operator(Seq(Symbol("mapField"), Symbol("mapField"))),
        "does not support ordering")
    }
  }
}
| BryanCutler/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala | Scala | apache-2.0 | 11,811 |
package constants
import julienrf.variants.Variants
import play.api.libs.json._
/**
 * Sealed hierarchy describing the Vimeo encoding state of a video, with Play JSON
 * (de)serialization derived automatically for the whole hierarchy.
 *
 * author: pereira
 * since: 2016-01-27
 */
object VimeoEncodingStatusSystem {

  // Base type of all encoding states; `name` is a stable string identifier for each state.
  sealed trait VimeoEncodingStatus {
    val name: String
  }

  // Encoding has started but has not finished yet.
  case object IN_PROGRESS extends VimeoEncodingStatus {
    override val name: String = "IN_PROGRESS"
  }

  // Encoding finished successfully.
  case object DONE extends VimeoEncodingStatus {
    override val name: String = "DONE"
  }

  // Reads/Writes derived by the julienrf "variants" macro from the sealed trait,
  // so adding a new case object automatically extends the JSON codec.
  implicit val reads: Reads[VimeoEncodingStatus] = Variants.reads[VimeoEncodingStatus]
  implicit val writes: Writes[VimeoEncodingStatus] = Variants.writes[VimeoEncodingStatus]
} | indarium/hbbTVPlugin | app/constants/VimeoEncodingStatusSystem.scala | Scala | agpl-3.0 | 609 |
package redis.protocol
import akka.util.ByteString
import org.scalacheck.Properties
import org.scalacheck.Prop.forAll
/**
 * Round-trip properties for `ParseNumber`: parsing the decimal rendering of an
 * arbitrary number must yield the original value back.
 */
object ParseNumberScalaCheck extends Properties("ParseNumber") {

  // Every Long survives a toString / parseLong round trip.
  property("parse long") = forAll { (value: Long) =>
    val rendered = value.toString
    ParseNumber.parseLong(ByteString(rendered)) == rendered.toLong
  }

  // Every Int survives a toString / parseInt round trip.
  property("parse int") = forAll { (value: Int) =>
    val rendered = value.toString
    ParseNumber.parseInt(ByteString(rendered)) == rendered.toInt
  }
}
| mspielberg/rediscala | src/test/scala/redis/protocol/ParseNumberScalaCheck.scala | Scala | apache-2.0 | 442 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.memory.cqengine.utils
import com.googlecode.cqengine.query.{Query, QueryFactory => QF}
import com.vividsolutions.jts.geom.Geometry
import org.geotools.filter.text.ecql.ECQL
import org.junit.runner.RunWith
import org.locationtech.geomesa.memory.cqengine.query.Intersects
import org.locationtech.geomesa.memory.cqengine.utils.SampleFeatures._
import org.locationtech.geomesa.utils.text.WKTUtils
import org.opengis.feature.simple.SimpleFeature
import org.opengis.filter.Filter
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import org.specs2.specification.core.Fragments
@RunWith(classOf[JUnitRunner])
/**
 * Table-driven checks that CQEngineQueryVisitor translates GeoTools/ECQL
 * filters into the expected CQEngine queries.
 */
class CQEngineQueryVisitorTest extends Specification {

  sequential

  // Visitor under test, built against the shared test feature type.
  val visitor = new CQEngineQueryVisitor(sft)

  val whoAttr = cq.lookup[String]("Who")
  val whereAttr = cq.lookup[Geometry]("Where")

  // Each entry pairs an input filter with the query it should translate to.
  val testFilters: Seq[QueryTest] = Seq(
    // A BBOX filter is expected to become an Intersects over the bbox polygon.
    QueryTest(
      ECQL.toFilter("BBOX(Where, 0, 0, 180, 90)"),
      new Intersects(whereAttr, WKTUtils.read("POLYGON((0 0, 0 90, 180 90, 180 0, 0 0))"))
    ),
    QueryTest(
      ECQL.toFilter("INTERSECTS(Where, POLYGON((0 0, 0 90, 180 90, 180 0, 0 0)))"),
      new Intersects(whereAttr, WKTUtils.read("POLYGON((0 0, 0 90, 180 90, 180 0, 0 0))"))
    ),
    // An IN list is expected to become a disjunction of equality queries.
    QueryTest(
      ECQL.toFilter("Who IN('Addams', 'Bierce')"),
      QF.or(
        QF.equal[SimpleFeature, String](whoAttr, "Addams"),
        QF.equal[SimpleFeature, String](whoAttr, "Bierce"))
    ),
    // Conjunctions are expected to be translated recursively.
    QueryTest(
      ECQL.toFilter("INTERSECTS(Where, POLYGON((0 0, 0 90, 180 90, 180 0, 0 0))) AND Who IN('Addams', 'Bierce')"),
      QF.and(
        new Intersects(whereAttr, WKTUtils.read("POLYGON((0 0, 0 90, 180 90, 180 0, 0 0))")),
        QF.or(
          QF.equal[SimpleFeature, String](whoAttr, "Addams"),
          QF.equal[SimpleFeature, String](whoAttr, "Bierce")))
    ),
    // A function-call predicate is expected to map to the match-everything query.
    QueryTest(
      ECQL.toFilter("strToUpperCase(Who) = 'ADDAMS'"),
      QF.all(classOf[SimpleFeature])
    )
  )

  "CQEngineQueryVisitor" should {
    // One example per table row, named by its index.
    val examples = for (idx <- testFilters.indices) yield {
      ("query_" + idx.toString) >> {
        val testCase = testFilters(idx)
        val translated = testCase.filter.accept(visitor, null)
        translated must equalTo(testCase.expectedQuery)
      }
    }
    Fragments(examples: _*)
  }
}
case class QueryTest(filter: Filter, expectedQuery: Query[SimpleFeature])
| ddseapy/geomesa | geomesa-memory/geomesa-cqengine/src/test/scala/org/locationtech/geomesa/memory/cqengine/utils/CQEngineQueryVisitorTest.scala | Scala | apache-2.0 | 2,824 |
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.docgen
import org.neo4j.cypher.ExecutionResult
/**
 * Generates the "Patterns" article of the Cypher documentation. The base
 * class renders [[text]] and executes the embedded `###...###` query
 * snippets against the sample graph described below.
 * NOTE(review): placeholders such as %A% appear to be replaced with node ids
 * by ArticleTest — confirm against the base class.
 */
class PatternTest extends ArticleTest {

  override val indexProps: List[String] = List("name")

  // Intentionally a no-op: this article does not assert on query results.
  def assert(name: String, result: ExecutionResult): Unit = {}

  // Sample graph: Anders knows Becky, Cesar and Dilshad; Becky and Cesar know
  // Emil; Dilshad knows Filipa.
  val graphDescription = List("A KNOWS B", "A KNOWS C", "A KNOWS D", "B KNOWS E", "C KNOWS E", "D KNOWS F")

  override val properties = Map(
    "A" -> Map("name" -> "Anders"),
    "B" -> Map("name" -> "Becky"),
    "C" -> Map("name" -> "Cesar"),
    "D" -> Map("name" -> "Dilshad"),
    "E" -> Map("name" -> "Emil"),
    "F" -> Map("name" -> "Filipa")
  )

  val title = "Pattern"
  val section = "Introduction"

  // The article body (AsciiDoc). This is runtime data consumed by the doc
  // generator and must not be reformatted.
  val text =
    """
Patterns
========
Patterns are at the very core of Cypher, and are used in a lot of different places.
Using patterns, you describe the shape of the data that you are looking for.
Patterns are used in the `MATCH` clause. Path patterns are expressions.
Since these expressions are collections, they can also be used as predicates (a non-empty collection signifies true).
They are also used to `CREATE`/`CREATE UNIQUE` the graph.
So, understanding patterns is important, to be able to be effective with Cypher.
You describe the pattern, and Cypher will figure out how to get that data for you. The idea is for you to draw your
query on a whiteboard, naming the interesting parts of the pattern, so you can then use values from these parts
to create the result set you are looking for.
Patterns have bound points, or starting points. They are the parts of the pattern that are already ``bound'' to a set of
graph nodes or relationships. All parts of the pattern must be directly or indirectly connected to a starting point -- a pattern
where parts of the pattern are not reachable from any starting point will be rejected.
[options="header", cols=">s,^,^,^,^,^", width="100%"]
|===================
|Clause|Optional|Multiple rel. types|Varlength|Paths|Maps
|Match|Yes|Yes|Yes|Yes|-
|Create|-|-|-|Yes|Yes
|Create Unique|-|-|-|Yes|Yes
|Expressions|-|Yes|Yes|-|-
|===================
== Patterns for related nodes ==
The description of the pattern is made up of one or more paths, separated by commas. A path is a sequence of nodes and
relationships that always start and end in nodes. An example path would be:
+`(a)-->(b)`+
This is a path starting from the pattern node `a`, with an outgoing relationship from it to pattern node `b`.
Paths can be of arbitrary length, and the same node may appear in multiple places in the path.
Node identifiers can be used with or without surrounding parenthesis. The following match is semantically identical to
the one we saw above -- the difference is purely aesthetic.
+`a-->b`+
If you don't care about a node, you don't need to name it. Empty parenthesis are used for these nodes, like so:
+`a-->()<--b`+
== Working with relationships ==
If you need to work with the relationship between two nodes, you can name it.
+`a-[r]->b`+
If you don't care about the direction of the relationship, you can omit the arrow at either end of the relationship, like this:
+`a--b`+
Relationships have types. When you are only interested in a specific relationship type, you can specify this like so:
+`a-[:REL_TYPE]->b`+
If multiple relationship types are acceptable, you can list them, separating them with the pipe symbol `|` like this:
+`a-[r:TYPE1|TYPE2]->b`+
This pattern matches a relationship of type +TYPE1+ or +TYPE2+, going from `a` to `b`. The relationship is named `r`.
Multiple relationship types can not be used with `CREATE` or `CREATE UNIQUE`.
== Optional relationships ==
An optional relationship is matched when it is found, but replaced by a `null` otherwise.
Normally, if no matching relationship is found, that sub-graph is not matched.
Optional relationships could be called the Cypher equivalent of the outer join in SQL.
They can only be used in `MATCH`.
Optional relationships are marked with a question mark.
They allow you to write queries like this one:
###no-results
START me=node(*)
MATCH me-->friend-[?]->friend_of_friend
RETURN friend, friend_of_friend###
The query above says ``for every person, give me all their friends, and their friends friends, if they have any.''
Optionality is transitive -- if a part of the pattern can only be reached from a bound point through an optional relationship,
that part is also optional. In the pattern above, the only bound point in the pattern is `me`. Since the relationship
between `friend` and `children` is optional, `children` is an optional part of the graph.
Also, named paths that contain optional parts are also optional -- if any part of the path is
`null`, the whole path is `null`.
In the following examples, `b` and `p` are all optional and can contain `null`:
###no-results
START a=node(%A%)
MATCH p = a-[?]->b
RETURN b###
###no-results
START a=node(%A%)
MATCH p = a-[?*]->b
RETURN b###
###no-results
START a=node(%A%)
MATCH p = a-[?]->x-->b
RETURN b###
###no-results
START a=node(%A%), x=node(%F%)
MATCH p = shortestPath( a-[?*]->x )
RETURN p###
== Controlling depth ==
A pattern relationship can span multiple graph relationships. These are called variable length relationships, and are
marked as such using an asterisk (`*`):
+`(a)-[*]->(b)`+
This signifies a path starting on the pattern node `a`, following only outgoing relationships, until it reaches pattern
node `b`. Any number of relationships can be followed searching for a path to `b`, so this can be a very expensive query,
depending on what your graph looks like.
You can set a minimum set of steps that can be taken, and/or the maximum number of steps:
+`(a)-[*3..5]->(b)`+
This is a variable length relationship containing at least three graph relationships, and at most five.
Variable length relationships can not be used with `CREATE` and `CREATE UNIQUE`.
As a simple example, let's take the query below:
###
START me=node(%F%)
MATCH me-[:KNOWS*1..2]-remote_friend
RETURN remote_friend###
This query starts from one node, and follows `KNOWS` relationships two or three steps out, and then stops.
== Assigning to path identifiers ==
In a graph database, a path is a very important concept. A path is a collection of nodes and relationships,
that describe a path in the graph. To assign a path to a path identifier, you simply assign a path pattern to an
identifier, like so:
+`p = (a)-[*3..5]->(b)`+
You can do this in `MATCH`, `CREATE` and `CREATE UNIQUE`, but not when using patterns as expressions. Example of the
three in a single query:
###no-results
START me=node(%F%)
MATCH p1 = me-[*2]-friendOfFriend
CREATE p2 = me-[:MARRIED_TO]-(wife {name:"Gunhild"})
CREATE UNIQUE p3 = wife-[:KNOWS]-friendOfFriend
RETURN p1,p2,p3###
== Setting properties ==
Nodes and relationships are important, but Neo4j uses properties on both of these to allow for far denser graphs models.
Properties are expressed in patterns using the map-construct, which is simply curly brackets surrounding a number of
key-expression pairs, separated by commas, e.g. `{ name: "Andres", sport: "BJJ" }`. If the map is supplied through a
parameter, the normal parameter expression is used: `{ paramName }`.
Maps are only used by `CREATE` and `CREATE UNIQUE`. In `CREATE` they are used to set the properties on the newly created
nodes and relationships.
When used with `CREATE UNIQUE`, they are used to try to match a pattern element with the corresponding graph element.
The match is successful if the properties on the pattern element can be matched exactly against properties on the graph
elements. The graph element can have additional properties, and they do not affect the match. If Neo4j fails to find
matching graph elements, the maps is used to set the properties on the newly created elements.
    """
}
| dksaputra/community | cypher/src/test/scala/org/neo4j/cypher/docgen/PatternTest.scala | Scala | gpl-3.0 | 8,571 |
/*
* EMWithBPTest.scala
* Tests for the EM algorithm
*
* Created By: Michael Howard (mhoward@cra.com)
* Creation Date: Jun 6, 2013
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.test.algorithm.learning
import org.scalatest.Matchers
import org.scalatest.{ PrivateMethodTester, WordSpec }
import com.cra.figaro.algorithm._
import com.cra.figaro.algorithm.factored._
import com.cra.figaro.algorithm.sampling._
import com.cra.figaro.algorithm.learning._
import com.cra.figaro.library.atomic.continuous._
import com.cra.figaro.library.atomic.discrete.Binomial
import com.cra.figaro.library.compound._
import com.cra.figaro.language._
import com.cra.figaro.language.Universe._
import com.cra.figaro.util._
import com.cra.figaro.util.random
import scala.math.abs
import java.io._
import com.cra.figaro.test.tags.NonDeterministic
/**
 * Tests for parameter learning with Expectation Maximization, using belief
 * propagation as the inference engine for the expectation step.
 */
class EMWithBPTest extends WordSpec with PrivateMethodTester with Matchers {

  "Expectation Maximization with belief propagation" when {

    "used to estimate a Beta parameter" should {

      "detect bias after a large enough number of trials" in {
        val universe = Universe.createNew
        val beta = BetaParameter(2, 2)
        // Seven positive and three negative coin-flip observations.
        for (i <- 1 to 7) {
          val flip = Flip(beta)
          flip.observe(true)
        }
        for (i <- 1 to 3) {
          val flip = Flip(beta)
          flip.observe(false)
        }
        val em = EMWithBP(15, 10, beta)(universe)
        em.start
        val learned = beta.getLearnedElement
        em.kill
        learned.prob should be(0.6666 +- 0.01)
      }

      "take the prior concentration parameters into account" in {
        val universe = Universe.createNew
        // An asymmetric (3, 7) prior pulls the estimate below the data ratio.
        val beta = BetaParameter(3.0, 7.0)
        for (i <- 1 to 7) {
          val flip = Flip(beta)
          flip.observe(true)
        }
        for (i <- 1 to 3) {
          val flip = Flip(beta)
          flip.observe(false)
        }
        val em = EMWithBP(15, 10, beta)(universe)
        em.start
        val learned = beta.getLearnedElement
        em.kill
        learned.prob should be(0.50 +- 0.01)
      }

      "learn the bias from observations of binomial elements" in {
        val universe = Universe.createNew
        val beta = BetaParameter(2, 2)
        // Seven successes out of ten trials, reported through two binomials.
        val bin1 = Binomial(7, beta)
        bin1.observe(6)
        val bin2 = Binomial(3, beta)
        bin2.observe(1)
        val em = EMWithBP(15, 10, beta)(universe)
        em.start
        val learned = beta.getLearnedElement
        em.kill
        learned.prob should be(0.6666 +- 0.01)
      }
    }

    "correctly use a uniform prior" in {
      val universe = Universe.createNew
      // With a flat Beta(1, 1) prior the estimate is the raw frequency 7/10.
      val beta = BetaParameter(1, 1)
      val bin1 = Binomial(7, beta)
      bin1.observe(6)
      val bin2 = Binomial(3, beta)
      bin2.observe(1)
      val em = EMWithBP(15, 10, beta)(universe)
      em.start
      val learned = beta.getLearnedElement
      em.kill
      learned.prob should be(0.7 +- 0.01)
    }

    "used to estimate a Dirichlet parameter with two concentration parameters" should {

      "detect bias after a large enough number of trials" in {
        val universe = Universe.createNew
        val dirichlet = DirichletParameter(2, 2)
        for (i <- 1 to 7) {
          val sel = Select(dirichlet, true, false)
          sel.observe(true)
        }
        for (i <- 1 to 3) {
          val sel = Select(dirichlet, true, false)
          sel.observe(false)
        }
        val em = EMWithBP(15, 10, dirichlet)(universe)
        em.start
        val learned = dirichlet.getLearnedElement(List(true, false))
        em.kill
        learned.probs(0) should be(0.6666 +- 0.01)
      }

      "take the prior concentration parameters into account" in {
        val universe = Universe.createNew
        val dirichlet = DirichletParameter(3, 7)
        for (i <- 1 to 7) {
          val sel = Select(dirichlet, true, false)
          sel.observe(true)
        }
        for (i <- 1 to 3) {
          val sel = Select(dirichlet, true, false)
          sel.observe(false)
        }
        val em = EMWithBP(15, 10, dirichlet)(universe)
        em.start
        val learned = dirichlet.getLearnedElement(List(true, false))
        em.kill
        learned.probs(0) should be(0.50 +- 0.01)
      }
    }

    "used to estimate a Dirichlet parameter with three concentration parameters" should {

      "calculate sufficient statistics in the correct order for long lists of concentration parameters" in {
        val universe = Universe.createNew
        // Twenty-two identical concentration parameters.
        val alphas = Seq.fill(22)(0.0476)
        val dirichlet = DirichletParameter(alphas: _*)
        val outcomes = (2 to 23).toList
        // Not observed: the element only has to exist in the universe.
        val outcome = Select(dirichlet, outcomes: _*)
        val em = EMWithBP(5, 10, dirichlet)
        em.start
        val learned = dirichlet.getLearnedElement(outcomes)
        em.kill
        // With no evidence every outcome should remain near-uniform.
        for (i <- outcomes.indices) {
          learned.probs(i) should be(0.04 +- 0.01)
        }
      }

      "calculate sufficient statistics in the correct order for long lists of concentration parameters, taking into account a condition" in {
        val universe = Universe.createNew
        val alphas = Seq.fill(5)(1.0476)
        val dirichlet = DirichletParameter(alphas: _*)
        val outcomes = (2 to 6).toList
        // Ten conditioned observations: the first outcome (2) is ruled out.
        for (i <- 1 to 10) {
          val outcome = Select(dirichlet, outcomes: _*)
          outcome.addCondition(x => x >= 3 && x <= 6)
        }
        val em = EMWithBP(2, 10, dirichlet)
        em.start
        val learned = dirichlet.getLearnedElement(outcomes)
        em.kill
        learned.probs(0) should be(0.0 +- 0.01)
        // Remaining mass is spread evenly over the four allowed outcomes.
        for (i <- 1 to 4) {
          learned.probs(i) should be(0.25 +- 0.01)
        }
      }

      "detect bias after a large enough number of trials" in {
        val universe = Universe.createNew
        val dirichlet = DirichletParameter(2, 2, 2)
        val outcomes = List(1, 2, 3)
        val errorTolerance = 0.01
        // Eight, six and two observations of the three outcomes.
        for (i <- 1 to 8) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(1)
        }
        for (i <- 1 to 6) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(2)
        }
        for (i <- 1 to 2) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(3)
        }
        val em = EMWithBP(15, 10, dirichlet)(universe)
        em.start
        val learned = dirichlet.getLearnedElement(outcomes)
        em.kill
        // Expected proportions: 9/19, 7/19 and 3/19.
        learned.probs(0) should be(0.47 +- errorTolerance)
        learned.probs(1) should be(0.36 +- errorTolerance)
        learned.probs(2) should be(0.15 +- errorTolerance)
      }

      "take the prior concentration parameters into account" in {
        val universe = Universe.createNew
        // The (2, 3, 2) prior balances the skewed 3/2/3 observations.
        val dirichlet = DirichletParameter(2.0, 3.0, 2.0)
        val outcomes = List(1, 2, 3)
        for (i <- 1 to 3) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(1)
        }
        for (i <- 1 to 2) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(2)
        }
        for (i <- 1 to 3) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(3)
        }
        val em = EMWithBP(3, 10, dirichlet)(universe)
        em.start
        val learned = dirichlet.getLearnedElement(outcomes)
        em.kill
        learned.probs(0) should be(0.33 +- 0.01)
        learned.probs(1) should be(0.33 +- 0.01)
        learned.probs(2) should be(0.33 +- 0.01)
      }

      "correctly use a uniform prior" in {
        val universe = Universe.createNew
        val dirichlet = DirichletParameter(1.0, 1.0, 1.0)
        val outcomes = List(1, 2, 3)
        // Three observations of each outcome: the posterior stays uniform.
        for (i <- 1 to 3) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(1)
        }
        for (i <- 1 to 3) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(2)
        }
        for (i <- 1 to 3) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(3)
        }
        val em = EMWithBP(3, 10, dirichlet)(universe)
        em.start
        val learned = dirichlet.getLearnedElement(outcomes)
        em.kill
        learned.probs(0) should be(0.33 +- 0.01)
        learned.probs(1) should be(0.33 +- 0.01)
        learned.probs(2) should be(0.33 +- 0.01)
      }
    }

    "used to estimate multiple parameters" should {

      "leave parameters having no observations unchanged" in {
        val universe = Universe.createNew
        val dirichlet = DirichletParameter(2.0, 4.0, 2.0)
        val beta = BetaParameter(2.0, 2.0)
        val outcomes = List(1, 2, 3)
        for (i <- 1 to 4) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(1)
        }
        for (i <- 1 to 2) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(2)
        }
        for (i <- 1 to 4) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(3)
        }
        val em = EMWithBP(100, 10, dirichlet, beta)(universe)
        em.start
        val learned = dirichlet.getLearnedElement(outcomes)
        em.kill
        learned.probs(0) should be(0.33 +- 0.01)
        learned.probs(1) should be(0.33 +- 0.01)
        learned.probs(2) should be(0.33 +- 0.01)
        // The Beta parameter received no evidence, so it stays at exactly 0.5.
        val learnedBeta = beta.getLearnedElement
        learnedBeta.prob should be(0.5)
      }

      "correctly estimate all parameters with observations" in {
        val universe = Universe.createNew
        val dirichlet = DirichletParameter(2.0, 3.0, 2.0)
        val beta = BetaParameter(3.0, 7.0)
        val outcomes = List(1, 2, 3)
        for (i <- 1 to 3) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(1)
        }
        for (i <- 1 to 2) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(2)
        }
        for (i <- 1 to 3) {
          val sel = Select(dirichlet, outcomes: _*)
          sel.observe(3)
        }
        for (i <- 1 to 7) {
          val flip = Flip(beta)
          flip.observe(true)
        }
        for (i <- 1 to 3) {
          val flip = Flip(beta)
          flip.observe(false)
        }
        val em = EMWithBP(5, 10, beta, dirichlet)(universe)
        em.start
        val learned = dirichlet.getLearnedElement(outcomes)
        learned.probs(0) should be(0.33 +- 0.01)
        learned.probs(1) should be(0.33 +- 0.01)
        learned.probs(2) should be(0.33 +- 0.01)
        val learnedBeta = beta.getLearnedElement
        learnedBeta.prob should be(0.5 +- 0.01)
      }
    }

    // -----------------------------------------------------------------------
    // Fixtures for the end-to-end learning experiment below.
    // -----------------------------------------------------------------------

    // Probability that each individual variable of a datum is observed.
    val observationProbability = 0.7
    val trainingSetSize = 100
    val testSetSize = 100
    val minScale = 10
    val maxScale = 10
    val scaleStep = 2

    /** The nine probability parameters driving the test network. */
    abstract class Parameters(val universe: Universe) {
      val b1: Element[Double]
      val b2: Element[Double]
      val b3: Element[Double]
      val b4: Element[Double]
      val b5: Element[Double]
      val b6: Element[Double]
      val b7: Element[Double]
      val b8: Element[Double]
      val b9: Element[Double]
    }

    // Ground-truth parameter values used to generate data.
    val trueB1 = 0.1
    val trueB2 = 0.2
    val trueB3 = 0.3
    val trueB4 = 0.4
    val trueB5 = 0.5
    val trueB6 = 0.6
    val trueB7 = 0.7
    val trueB8 = 0.8
    val trueB9 = 0.9

    val trueUniverse = new Universe

    /** The ground-truth parameters, wrapped as constant elements. */
    object TrueParameters extends Parameters(trueUniverse) {
      val b1 = Constant(trueB1)("b1", universe)
      val b2 = Constant(trueB2)("b2", universe)
      val b3 = Constant(trueB3)("b3", universe)
      val b4 = Constant(trueB4)("b4", universe)
      val b5 = Constant(trueB5)("b5", universe)
      val b6 = Constant(trueB6)("b6", universe)
      val b7 = Constant(trueB7)("b7", universe)
      val b8 = Constant(trueB8)("b8", universe)
      val b9 = Constant(trueB9)("b9", universe)
    }

    /** Learnable counterparts: a uniform Beta(1, 1) prior for every parameter. */
    class LearnableParameters(universe: Universe) extends Parameters(universe) {
      val b1 = BetaParameter(1, 1)("b1", universe)
      val b2 = BetaParameter(1, 1)("b2", universe)
      val b3 = BetaParameter(1, 1)("b3", universe)
      val b4 = BetaParameter(1, 1)("b4", universe)
      val b5 = BetaParameter(1, 1)("b5", universe)
      val b6 = BetaParameter(1, 1)("b6", universe)
      val b7 = BetaParameter(1, 1)("b7", universe)
      val b8 = BetaParameter(1, 1)("b8", universe)
      val b9 = BetaParameter(1, 1)("b9", universe)
    }

    // Counter giving each Model instance uniquely named elements.
    var id = 0

    /**
     * A small network over four observable booleans x, y, z and w, whose
     * conditional probabilities come from the nine parameters.
     */
    class Model(val parameters: Parameters, flipConstructor: (Element[Double], String, Universe) => Flip) {
      id += 1
      val universe = parameters.universe
      val x = flipConstructor(parameters.b1, "x_" + id, universe)
      val f2 = flipConstructor(parameters.b2, "f2_" + id, universe)
      val f3 = flipConstructor(parameters.b3, "f3_" + id, universe)
      val f4 = flipConstructor(parameters.b4, "f4_" + id, universe)
      val f5 = flipConstructor(parameters.b5, "f5_" + id, universe)
      val f6 = flipConstructor(parameters.b6, "f6_" + id, universe)
      val f7 = flipConstructor(parameters.b7, "f7_" + id, universe)
      val f8 = flipConstructor(parameters.b8, "f8_" + id, universe)
      val f9 = flipConstructor(parameters.b9, "f9_" + id, universe)
      val y = If(x, f2, f3)("y_" + id, universe)
      val z = If(x, f4, f5)("z_" + id, universe)
      val w = CPD(y, z, (true, true) -> f6, (true, false) -> f7,
        (false, true) -> f8, (false, false) -> f9)("w_" + id, universe)
    }

    // Builds plain compound flips (used with the true, constant parameters).
    def normalFlipConstructor(parameter: Element[Double], name: String, universe: Universe) = new CompoundFlip(name, parameter, universe)

    // Builds parameterized flips that EM can learn from; only Beta parameters are accepted.
    def learningFlipConstructor(parameter: Element[Double], name: String, universe: Universe) = {
      parameter match {
        case p: AtomicBeta => new ParameterizedFlip(name, p, universe)
        case _ => throw new IllegalArgumentException("Not a beta parameter")
      }
    }

    object TrueModel extends Model(TrueParameters, normalFlipConstructor)

    /** One sampled joint assignment of the four observable variables. */
    case class Datum(x: Boolean, y: Boolean, z: Boolean, w: Boolean)

    // Samples a datum by forward-generating the true model.
    def generateDatum(): Datum = {
      val model = TrueModel
      model.universe.generateAll()
      Datum(model.x.value, model.y.value, model.z.value, model.w.value)
    }

    // Observes each variable of the datum independently with probability
    // observationProbability, simulating partially missing data.
    def observe(model: Model, datum: Datum): Unit = {
      if (random.nextDouble() < observationProbability) model.x.observe(datum.x)
      if (random.nextDouble() < observationProbability) model.y.observe(datum.y)
      if (random.nextDouble() < observationProbability) model.z.observe(datum.z)
      if (random.nextDouble() < observationProbability) model.w.observe(datum.w)
    }

    // Round-robin selector for which variable is held out and predicted.
    var nextSkip = 0

    // Probability the model assigns to the held-out variable's true value,
    // given the other three variables as evidence.
    def predictionAccuracy(model: Model, datum: Datum): Double = {
      model.x.unobserve()
      model.y.unobserve()
      model.z.unobserve()
      model.w.unobserve()
      val result = nextSkip match {
        case 0 =>
          model.y.observe(datum.y)
          model.z.observe(datum.z)
          model.w.observe(datum.w)
          val alg = VariableElimination(model.x)(model.universe)
          alg.start()
          alg.probability(model.x, datum.x)
        case 1 =>
          model.x.observe(datum.x)
          model.z.observe(datum.z)
          model.w.observe(datum.w)
          val alg = VariableElimination(model.y)(model.universe)
          alg.start()
          alg.probability(model.y, datum.y)
        case 2 =>
          model.x.observe(datum.x)
          model.y.observe(datum.y)
          model.w.observe(datum.w)
          val alg = VariableElimination(model.z)(model.universe)
          alg.start()
          alg.probability(model.z, datum.z)
        case 3 =>
          model.x.observe(datum.x)
          model.y.observe(datum.y)
          model.z.observe(datum.z)
          val alg = VariableElimination(model.w)(model.universe)
          alg.start()
          alg.probability(model.w, datum.w)
      }
      nextSkip = (nextSkip + 1) % 4
      result
    }

    // Mean absolute error of the model's parameter values against the truth.
    def parameterError(model: Model): Double = {
      val parameters = model.parameters
      (abs(parameters.b1.value - trueB1) + abs(parameters.b2.value - trueB2) + abs(parameters.b3.value - trueB3) +
        abs(parameters.b4.value - trueB4) + abs(parameters.b5.value - trueB5) + abs(parameters.b6.value - trueB6) +
        abs(parameters.b7.value - trueB7) + abs(parameters.b8.value - trueB8) + abs(parameters.b9.value - trueB9)) / 9.0
    }

    // Returns (parameter error, average prediction accuracy) over the test set.
    def assessModel(model: Model, testSet: Seq[Datum]): (Double, Double) = {
      val paramErr = parameterError(model)
      nextSkip = 0
      var totalPredictionAccuracy = 0.0
      for (datum <- testSet) {
        totalPredictionAccuracy += predictionAccuracy(model, datum)
      }
      val predAcc = totalPredictionAccuracy / testSet.length
      (paramErr, predAcc)
    }

    // Attaches the training data as observations, runs the learning algorithm,
    // and returns a model built from the learned values plus the training time.
    def train(trainingSet: List[Datum], parameters: Parameters, algorithmCreator: Parameters => Algorithm, valueGetter: (Algorithm, Element[Double]) => Double,
      flipConstructor: (Element[Double], String, Universe) => Flip): (Model, Double) = {
      for (datum <- trainingSet) observe(new Model(parameters, flipConstructor), datum)
      val time0 = System.currentTimeMillis()
      val algorithm = algorithmCreator(parameters)
      algorithm.start()
      val resultUniverse = new Universe
      // Freezes a learned parameter as a constant in the result universe.
      // (The original matched on AtomicBeta but both branches were identical.)
      def extractParameter(parameter: Element[Double], name: String) =
        Constant(valueGetter(algorithm, parameter))(name, resultUniverse)
      val learnedParameters = new Parameters(resultUniverse) {
        val b1 = extractParameter(parameters.b1, "b1"); b1.generate()
        val b2 = extractParameter(parameters.b2, "b2"); b2.generate()
        val b3 = extractParameter(parameters.b3, "b3"); b3.generate()
        val b4 = extractParameter(parameters.b4, "b4"); b4.generate()
        val b5 = extractParameter(parameters.b5, "b5"); b5.generate()
        val b6 = extractParameter(parameters.b6, "b6"); b6.generate()
        val b7 = extractParameter(parameters.b7, "b7"); b7.generate()
        val b8 = extractParameter(parameters.b8, "b8"); b8.generate()
        val b9 = extractParameter(parameters.b9, "b9"); b9.generate()
      }
      algorithm.kill()
      val time1 = System.currentTimeMillis()
      val totalTime = (time1 - time0) / 1000.0
      println("Training time: " + totalTime + " seconds")
      (new Model(learnedParameters, normalFlipConstructor), totalTime)
    }

    "derive parameters within a reasonable accuracy for random data" taggedAs(NonDeterministic) in {
      val numEMIterations = 5
      val testSet = List.fill(testSetSize)(generateDatum())
      val trainingSet = List.fill(trainingSetSize)(generateDatum())
      // EM learner over all nine Beta parameters.
      def learner(parameters: Parameters): Algorithm = {
        parameters match {
          case ps: LearnableParameters => EMWithBP(numEMIterations, 10, ps.b1, ps.b2, ps.b3, ps.b4, ps.b5, ps.b6, ps.b7, ps.b8, ps.b9)(parameters.universe)
          case _ => throw new IllegalArgumentException("Not learnable parameters")
        }
      }
      // Reads a learned parameter's MAP value after the algorithm has run.
      def parameterGetter(algorithm: Algorithm, parameter: Element[Double]): Double = {
        parameter match {
          case p: Parameter[Double] => p.MAPValue
          case _ => throw new IllegalArgumentException("Not a learnable parameter")
        }
      }
      val (trueParamErr, truePredAcc) = assessModel(TrueModel, testSet)
      val (learnedModel, learningTime) = train(trainingSet, new LearnableParameters(new Universe), learner, parameterGetter, learningFlipConstructor)
      val (learnedParamErr, learnedPredAcc) = assessModel(learnedModel, testSet)
      println(learnedParamErr)
      println(learnedPredAcc)
      // The learned model should be close to the truth in both parameter
      // values and held-out prediction accuracy.
      learnedParamErr should be(0.00 +- 0.12)
      learnedPredAcc should be(truePredAcc +- 0.12)
    }
  }
} | bruttenberg/figaro | Figaro/src/test/scala/com/cra/figaro/test/algorithm/learning/EMWithBPTest.scala | Scala | bsd-3-clause | 23,701 |
/*
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package play.api.data
/**
 * Contains the validation API used by `Form`.
 *
 * Constraints report either `Valid` or `Invalid` when applied to a value.
 *
 * For example, to define a custom constraint:
 * {{{
 * val negative = Constraint[Int] {
 *   case i if i < 0 => Valid
 *   case _ => Invalid("Must be a negative number.")
 * }
 * }}}
 */
package object validation
| easel/play-json-extra | play-json-extra/js/src/main/scala/play/api/data/validation/package.scala | Scala | apache-2.0 | 374 |
package org.littlewings.javaee7.rest
import org.jboss.resteasy.plugins.server.netty.NettyJaxrsServer
import org.jboss.resteasy.spi.ResteasyDeployment
import org.scalatest.FunSpec
import org.scalatest.Matchers._
import scala.io.Source
/**
 * Exercises RESTEasy's embedded Netty 4 server: boots a server on port 8080,
 * performs a real HTTP GET against the calculator resource and checks the
 * response body. NOTE(review): requires port 8080 to be free.
 */
class RestEasyNetty4Spec extends FunSpec {

  describe("RestEasyNetty4Spec") {

    it("calc/add, using Application") {
      // Register resources indirectly, via a JAX-RS Application class.
      val server = new NettyJaxrsServer
      val deployment = server.getDeployment
      // Building a ResteasyDeployment by hand would also work here.
      deployment.setApplicationClass(classOf[JaxrsApplication].getName)
      server.setRootResourcePath("")
      server.setPort(8080)
      server.setDeployment(deployment)
      server.start()

      val source = Source.fromURL("http://localhost:8080/calc/add?a=5&b=3")
      source.mkString.toInt should be(8)
      source.close()

      server.stop()
    }

    it("calc/add, Resource only") {
      val server = new NettyJaxrsServer
      val deployment = server.getDeployment
      server.setRootResourcePath("")
      server.setPort(8080)
      server.setDeployment(deployment)
      server.start()
      // Register the resource class directly, after the server has started.
      deployment.getRegistry.addPerRequestResource(classOf[CalcResource])

      val source = Source.fromURL("http://localhost:8080/calc/add?a=5&b=3")
      source.mkString.toInt should be(8)
      source.close()

      server.stop()
    }
  }
}
| kazuhira-r/javaee7-scala-examples | resteasy-embedded-netty4/src/test/scala/org/littlewings/javaee7/rest/RestEasyNetty4Spec.scala | Scala | mit | 1,349 |
// A generator that ignores the RNG state and always produces `a`.
def unit[A](a: => A): Gen[A] =
  Gen(State.unit(a))
// A generator of uniformly random booleans.
def boolean: Gen[Boolean] =
  Gen(State(RNG.boolean))
// A generator of integers in [start, stopExclusive): a non-negative sample
// is mapped into the requested interval with modulo arithmetic.
def choose(start: Int, stopExclusive: Int): Gen[Int] =
  Gen(State(RNG.nonNegativeInt).map(n => start + n % (stopExclusive-start)))
// A generator of lists of exactly `n` values, each drawn independently from `g`.
def listOfN[A](n: Int, g: Gen[A]): Gen[List[A]] =
  Gen(State.sequence(List.fill(n)(g.sample))) | lucaviolanti/scala-redbook | answerkey/testing/05.answer.scala | Scala | mit | 346 |
package im.actor.utils.http
import java.math.BigInteger
import java.nio.file.Files
import java.security.MessageDigest
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{ Span, Seconds }
import org.scalatest.{ FlatSpec, Matchers }
/**
 * Integration check for DownloadManager: fetches a small file over HTTPS and
 * verifies both the reported size and the MD5 digest of the content.
 * NOTE(review): needs network access and assumes the remote file is unchanged.
 */
class DownloadManagerSpec extends FlatSpec with ScalaFutures with Matchers {

  it should "Download https files" in e1

  // Downloads can take a while; wait up to ten seconds before failing.
  override implicit def patienceConfig: PatienceConfig =
    new PatienceConfig(timeout = Span(10, Seconds))

  implicit val system = ActorSystem()
  implicit val materializer = ActorMaterializer()

  val downloadManager = new DownloadManager()

  def e1() = {
    whenReady(downloadManager.download("https://ajax.googleapis.com/ajax/libs/webfont/1.5.18/webfont.js")) {
      case (path, size) =>
        val bytes = Files.readAllBytes(path)
        // The reported size must match what actually landed on disk.
        bytes.length shouldEqual size
        // The payload must hash to the known MD5 of webfont.js 1.5.18.
        val digest = MessageDigest.getInstance("MD5")
        val hex = new BigInteger(1, digest.digest(bytes)).toString(16)
        hex shouldEqual "593e60ad549e46f8ca9a60755336c7df"
    }
  }
}
| boneyao/actor-platform | actor-server/actor-utils-http/src/test/scala/im/actor/utils/http/DownloadManagerSpec.scala | Scala | mit | 1,137 |
package charactor.core.messages
import charactor.core.model.persistance._
/** Immutable message carrying a point-in-time snapshot of the simulation:
  * the recorded state of every charactor and every food source.
  *
  * @param charactorsState  snapshots of all charactors
  * @param foodSourcesState snapshots of all food sources
  */
class SnapshotMessage(
    val charactorsState: List[CharactorSnapshot],
    val foodSourcesState: List[FoodSourceSnapshot])
| PiotrTrzpil/charactor | src/charactor/core/messages/SnapshotMessage.scala | Scala | apache-2.0 | 196 |
/*
* Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
* Copyright (C) 2017-2018 Alexis Seigneurin.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.scala
package kstream
import org.apache.kafka.common.serialization.Serde
import org.apache.kafka.common.utils.Bytes
import org.apache.kafka.streams.kstream.{KTable => KTableJ, _}
import org.apache.kafka.streams.scala.ImplicitConversions._
import org.apache.kafka.streams.scala.FunctionConversions._
import org.apache.kafka.streams.state.KeyValueStore
/**
 * Wraps the Java class [[org.apache.kafka.streams.kstream.KTable]] and delegates method calls to the underlying Java object.
 *
 * @tparam K Type of keys
 * @tparam V Type of values
 * @param inner The underlying Java abstraction for KTable
 *
 * @see `org.apache.kafka.streams.kstream.KTable`
 */
class KTable[K, V](val inner: KTableJ[K, V]) {
  /**
   * Create a new [[KTable]] that consists all records of this [[KTable]] which satisfies the given
   * predicate
   *
   * @param predicate a filter that is applied to each record
   * @return a [[KTable]] that contains only those records that satisfy the given predicate
   * @see `org.apache.kafka.streams.kstream.KTable#filter`
   */
  def filter(predicate: (K, V) => Boolean): KTable[K, V] =
    inner.filter(predicate.asPredicate)
  /**
   * Create a new [[KTable]] that consists all records of this [[KTable]] which satisfies the given
   * predicate
   *
   * @param predicate a filter that is applied to each record
   * @param materialized  a `Materialized` that describes how the `StateStore` for the resulting [[KTable]]
   *                      should be materialized.
   * @return a [[KTable]] that contains only those records that satisfy the given predicate
   * @see `org.apache.kafka.streams.kstream.KTable#filter`
   */
  def filter(predicate: (K, V) => Boolean, materialized: Materialized[K, V, ByteArrayKeyValueStore]): KTable[K, V] =
    inner.filter(predicate.asPredicate, materialized)
  /**
   * Create a new [[KTable]] that consists all records of this [[KTable]] which do <em>not</em> satisfy the given
   * predicate
   *
   * @param predicate a filter that is applied to each record
   * @return a [[KTable]] that contains only those records that do <em>not</em> satisfy the given predicate
   * @see `org.apache.kafka.streams.kstream.KTable#filterNot`
   */
  def filterNot(predicate: (K, V) => Boolean): KTable[K, V] =
    inner.filterNot(predicate.asPredicate)
  /**
   * Create a new [[KTable]] that consists all records of this [[KTable]] which do <em>not</em> satisfy the given
   * predicate
   *
   * @param predicate a filter that is applied to each record
   * @param materialized  a `Materialized` that describes how the `StateStore` for the resulting [[KTable]]
   *                      should be materialized.
   * @return a [[KTable]] that contains only those records that do <em>not</em> satisfy the given predicate
   * @see `org.apache.kafka.streams.kstream.KTable#filterNot`
   */
  def filterNot(predicate: (K, V) => Boolean, materialized: Materialized[K, V, ByteArrayKeyValueStore]): KTable[K, V] =
    inner.filterNot(predicate.asPredicate, materialized)
  /**
   * Create a new [[KTable]] by transforming the value of each record in this [[KTable]] into a new value
   * (with possible new type) in the new [[KTable]].
   * <p>
   * The provided `mapper`, a function `V => VR` is applied to each input record value and computes a new value for it
   *
   * @param mapper, a function `V => VR` that computes a new output value
   * @return a [[KTable]] that contains records with unmodified key and new values (possibly of different type)
   * @see `org.apache.kafka.streams.kstream.KTable#mapValues`
   */
  def mapValues[VR](mapper: V => VR): KTable[K, VR] =
    inner.mapValues[VR](mapper.asValueMapper)
  /**
   * Create a new [[KTable]] by transforming the value of each record in this [[KTable]] into a new value
   * (with possible new type) in the new [[KTable]].
   * <p>
   * The provided `mapper`, a function `V => VR` is applied to each input record value and computes a new value for it
   *
   * @param mapper, a function `V => VR` that computes a new output value
   * @param materialized  a `Materialized` that describes how the `StateStore` for the resulting [[KTable]]
   *                      should be materialized.
   * @return a [[KTable]] that contains records with unmodified key and new values (possibly of different type)
   * @see `org.apache.kafka.streams.kstream.KTable#mapValues`
   */
  def mapValues[VR](mapper: V => VR, materialized: Materialized[K, VR, ByteArrayKeyValueStore]): KTable[K, VR] =
    inner.mapValues[VR](mapper.asValueMapper, materialized)
  /**
   * Create a new [[KTable]] by transforming the value of each record in this [[KTable]] into a new value
   * (with possible new type) in the new [[KTable]].
   * <p>
   * The provided `mapper`, a function `(K, V) => VR` is applied to each input record value and computes a new value for it
   *
   * @param mapper, a function `(K, V) => VR` that computes a new output value
   * @return a [[KTable]] that contains records with unmodified key and new values (possibly of different type)
   * @see `org.apache.kafka.streams.kstream.KTable#mapValues`
   */
  def mapValues[VR](mapper: (K, V) => VR): KTable[K, VR] =
    inner.mapValues[VR](mapper.asValueMapperWithKey)
  /**
   * Create a new [[KTable]] by transforming the value of each record in this [[KTable]] into a new value
   * (with possible new type) in the new [[KTable]].
   * <p>
   * The provided `mapper`, a function `(K, V) => VR` is applied to each input record value and computes a new value for it
   *
   * @param mapper, a function `(K, V) => VR` that computes a new output value
   * @param materialized  a `Materialized` that describes how the `StateStore` for the resulting [[KTable]]
   *                      should be materialized.
   * @return a [[KTable]] that contains records with unmodified key and new values (possibly of different type)
   * @see `org.apache.kafka.streams.kstream.KTable#mapValues`
   */
  def mapValues[VR](mapper: (K, V) => VR, materialized: Materialized[K, VR, ByteArrayKeyValueStore]): KTable[K, VR] =
    // BUG FIX: previously the `materialized` argument was silently dropped,
    // delegating to the non-materialized Java overload, so the resulting
    // KTable was never materialized into the requested state store.
    inner.mapValues[VR](mapper.asValueMapperWithKey, materialized)
  /**
   * Convert this changelog stream to a [[KStream]].
   *
   * @return a [[KStream]] that contains the same records as this [[KTable]]
   * @see `org.apache.kafka.streams.kstream.KTable#toStream`
   */
  def toStream: KStream[K, V] = inner.toStream
  /**
   * Convert this changelog stream to a [[KStream]] using the given key/value mapper to select the new key
   *
   * @param mapper a function that computes a new key for each record
   * @return a [[KStream]] that contains the same records as this [[KTable]]
   * @see `org.apache.kafka.streams.kstream.KTable#toStream`
   */
  def toStream[KR](mapper: (K, V) => KR): KStream[KR, V] =
    inner.toStream[KR](mapper.asKeyValueMapper)
  /**
   * Create a new `KTable` by transforming the value of each record in this `KTable` into a new value, (with possibly new type).
   * Transform the value of each input record into a new value (with possible new type) of the output record.
   * A `ValueTransformerWithKey` (provided by the given `ValueTransformerWithKeySupplier`) is applied to each input
   * record value and computes a new value for it.
   * This is similar to `#mapValues(ValueMapperWithKey)`, but more flexible, allowing access to additional state-stores,
   * and to the `ProcessorContext`.
   * If the downstream topology uses aggregation functions, (e.g. `KGroupedTable#reduce`, `KGroupedTable#aggregate`, etc),
   * care must be taken when dealing with state, (either held in state-stores or transformer instances), to ensure correct
   * aggregate results.
   * In contrast, if the resulting KTable is materialized, (cf. `#transformValues(ValueTransformerWithKeySupplier, Materialized, String...)`),
   * such concerns are handled for you.
   * In order to assign a state, the state must be created and registered
   * beforehand via stores added via `addStateStore` or `addGlobalStore` before they can be connected to the `Transformer`
   *
   * @param valueTransformerWithKeySupplier an instance of `ValueTransformerWithKeySupplier` that generates a `ValueTransformerWithKey`.
   *                                        At least one transformer instance will be created per streaming task.
   *                                        Transformer implementations do not need to be thread-safe.
   * @param stateStoreNames                 the names of the state stores used by the processor
   * @return a [[KStream]] that contains records with unmodified key and new values (possibly of different type)
   * @see `org.apache.kafka.streams.kstream.KStream#transformValues`
   */
  def transformValues[VR](valueTransformerWithKeySupplier: ValueTransformerWithKeySupplier[K, V, VR],
                          stateStoreNames: String*): KTable[K, VR] =
    inner.transformValues[VR](valueTransformerWithKeySupplier, stateStoreNames: _*)
  /**
   * Create a new `KTable` by transforming the value of each record in this `KTable` into a new value, (with possibly new type).
   * A `ValueTransformer` (provided by the given `ValueTransformerSupplier`) is applied to each input
   * record value and computes a new value for it.
   * This is similar to `#mapValues(ValueMapperWithKey)`, but more flexible, allowing stateful, rather than stateless,
   * record-by-record operation, access to additional state-stores, and access to the `ProcessorContext`.
   * In order to assign a state, the state must be created and registered
   * beforehand via stores added via `addStateStore` or `addGlobalStore` before they can be connected to the `Transformer`
   * The resulting `KTable` is materialized into another state store (additional to the provided state store names)
   * as specified by the user via `Materialized` parameter, and is queryable through its given name.
   *
   * @param valueTransformerWithKeySupplier an instance of `ValueTransformerWithKeySupplier` that generates a `ValueTransformerWithKey`
   *                                        At least one transformer instance will be created per streaming task.
   *                                        Transformer implementations do not need to be thread-safe.
   * @param materialized                    an instance of `Materialized` used to describe how the state store of the
   *                                        resulting table should be materialized.
   * @param stateStoreNames                 the names of the state stores used by the processor
   * @return a [[KStream]] that contains records with unmodified key and new values (possibly of different type)
   * @see `org.apache.kafka.streams.kstream.KStream#transformValues`
   */
  def transformValues[VR](valueTransformerWithKeySupplier: ValueTransformerWithKeySupplier[K, V, VR],
                          materialized: Materialized[K, VR, KeyValueStore[Bytes, Array[Byte]]],
                          stateStoreNames: String*): KTable[K, VR] =
    inner.transformValues[VR](valueTransformerWithKeySupplier, materialized, stateStoreNames: _*)
  /**
   * Re-groups the records of this [[KTable]] using the provided key/value mapper
   * and `Serde`s as specified by `Serialized`.
   *
   * @param selector   a function that computes a new grouping key and value to be aggregated
   * @param serialized the `Serialized` instance used to specify `Serdes`
   * @return a [[KGroupedTable]] that contains the re-grouped records of the original [[KTable]]
   * @see `org.apache.kafka.streams.kstream.KTable#groupBy`
   */
  def groupBy[KR, VR](selector: (K, V) => (KR, VR))(implicit serialized: Serialized[KR, VR]): KGroupedTable[KR, VR] =
    inner.groupBy(selector.asKeyValueMapper, serialized)
  /**
   * Join records of this [[KTable]] with another [[KTable]]'s records using non-windowed inner equi join.
   *
   * @param other  the other [[KTable]] to be joined with this [[KTable]]
   * @param joiner a function that computes the join result for a pair of matching records
   * @return a [[KTable]] that contains join-records for each key and values computed by the given joiner,
   *         one for each matched record-pair with the same key
   * @see `org.apache.kafka.streams.kstream.KTable#join`
   */
  def join[VO, VR](other: KTable[K, VO])(joiner: (V, VO) => VR): KTable[K, VR] =
    inner.join[VO, VR](other.inner, joiner.asValueJoiner)
  /**
   * Join records of this [[KTable]] with another [[KTable]]'s records using non-windowed inner equi join.
   *
   * @param other        the other [[KTable]] to be joined with this [[KTable]]
   * @param joiner       a function that computes the join result for a pair of matching records
   * @param materialized a `Materialized` that describes how the `StateStore` for the resulting [[KTable]]
   *                     should be materialized.
   * @return a [[KTable]] that contains join-records for each key and values computed by the given joiner,
   *         one for each matched record-pair with the same key
   * @see `org.apache.kafka.streams.kstream.KTable#join`
   */
  def join[VO, VR](other: KTable[K, VO], materialized: Materialized[K, VR, ByteArrayKeyValueStore])(
    joiner: (V, VO) => VR
  ): KTable[K, VR] =
    inner.join[VO, VR](other.inner, joiner.asValueJoiner, materialized)
  /**
   * Join records of this [[KTable]] with another [[KTable]]'s records using non-windowed left equi join.
   *
   * @param other  the other [[KTable]] to be joined with this [[KTable]]
   * @param joiner a function that computes the join result for a pair of matching records
   * @return a [[KTable]] that contains join-records for each key and values computed by the given joiner,
   *         one for each matched record-pair with the same key
   * @see `org.apache.kafka.streams.kstream.KTable#leftJoin`
   */
  def leftJoin[VO, VR](other: KTable[K, VO])(joiner: (V, VO) => VR): KTable[K, VR] =
    inner.leftJoin[VO, VR](other.inner, joiner.asValueJoiner)
  /**
   * Join records of this [[KTable]] with another [[KTable]]'s records using non-windowed left equi join.
   *
   * @param other        the other [[KTable]] to be joined with this [[KTable]]
   * @param joiner       a function that computes the join result for a pair of matching records
   * @param materialized a `Materialized` that describes how the `StateStore` for the resulting [[KTable]]
   *                     should be materialized.
   * @return a [[KTable]] that contains join-records for each key and values computed by the given joiner,
   *         one for each matched record-pair with the same key
   * @see `org.apache.kafka.streams.kstream.KTable#leftJoin`
   */
  def leftJoin[VO, VR](other: KTable[K, VO], materialized: Materialized[K, VR, ByteArrayKeyValueStore])(
    joiner: (V, VO) => VR
  ): KTable[K, VR] =
    inner.leftJoin[VO, VR](other.inner, joiner.asValueJoiner, materialized)
  /**
   * Join records of this [[KTable]] with another [[KTable]]'s records using non-windowed outer equi join.
   *
   * @param other  the other [[KTable]] to be joined with this [[KTable]]
   * @param joiner a function that computes the join result for a pair of matching records
   * @return a [[KTable]] that contains join-records for each key and values computed by the given joiner,
   *         one for each matched record-pair with the same key
   * @see `org.apache.kafka.streams.kstream.KTable#outerJoin`
   */
  def outerJoin[VO, VR](other: KTable[K, VO])(joiner: (V, VO) => VR): KTable[K, VR] =
    inner.outerJoin[VO, VR](other.inner, joiner.asValueJoiner)
  /**
   * Join records of this [[KTable]] with another [[KTable]]'s records using non-windowed outer equi join.
   *
   * @param other        the other [[KTable]] to be joined with this [[KTable]]
   * @param joiner       a function that computes the join result for a pair of matching records
   * @param materialized a `Materialized` that describes how the `StateStore` for the resulting [[KTable]]
   *                     should be materialized.
   * @return a [[KTable]] that contains join-records for each key and values computed by the given joiner,
   *         one for each matched record-pair with the same key
   * @see `org.apache.kafka.streams.kstream.KTable#outerJoin`
   */
  def outerJoin[VO, VR](other: KTable[K, VO], materialized: Materialized[K, VR, ByteArrayKeyValueStore])(
    joiner: (V, VO) => VR
  ): KTable[K, VR] =
    inner.outerJoin[VO, VR](other.inner, joiner.asValueJoiner, materialized)
  /**
   * Get the name of the local state store used that can be used to query this [[KTable]].
   *
   * @return the underlying state store name, or `null` if this [[KTable]] cannot be queried.
   */
  def queryableStoreName: String = inner.queryableStoreName
}
| Esquive/kafka | streams/streams-scala/src/main/scala/org/apache/kafka/streams/scala/kstream/KTable.scala | Scala | apache-2.0 | 17,548 |
package memnets.models.chaos
import memnets.model._
/** Duffing oscillator registered into an implicit [[DynamicSystem]].
  *
  * Declares state variables x and x' plus tunable coefficients, then wires
  * them together with the memnets `-->`/`tie` DSL and a cubic nonlinearity
  * -beta * x^3. Original reference comment: "Chaos", pp. 28-30.
  * NOTE(review): the exact semantics of `a --> b tie = p` (which term feeds
  * which equation) come from the memnets DSL — confirm against its docs.
  *
  * @param tau time constant given to both state variables
  * @param mn  the dynamic system the variables are created in (implicit)
  */
class Duffing(val tau: Double = 30.0)(implicit mn: DynamicSystem) {
  import GREEK._
  // Tunable coefficients of the oscillator, exposed as Params with UI ranges.
  val alpha = Param(ALPHA, max = 1.5, init = 1)
  val beta = Param(BETA, max = 5, init = 1)
  val delta = Param(DELTA, max = -0.5, init = -0.2)
  // State variables: x and its derivative x', sharing the same time constant.
  val x = Y("x", tau = tau)
  val dx = Y("x'", tau = tau)
  // p. 28-30 Chaos
  dx --> x tie = alpha
  dx --> dx tie = delta
  // Cubic term: evaluated each tick as -beta * x^3 (label shows the formula).
  dx.f(s"-${beta.name} * x^3", x) { t =>
    -beta.getValue * x * x * x
  }
  // Identity (reference) equality — behaviorally the same as AnyRef's default
  // equals. NOTE(review): `default` below is just a catch-all variable pattern
  // (not a keyword) and, since every value matches `AnyRef` except null, the
  // second case is effectively only reached for null.
  override def equals(obj: scala.Any): Boolean = obj match {
    case that: AnyRef =>
      this eq that
    case default =>
      false
  }
}
| MemoryNetworks/memnets | models/src/main/scala/memnets/models/chaos/Duffing.scala | Scala | apache-2.0 | 629 |
package org.krazykat.ontapi
import akka.actor.ActorSystem
/** Entry point: boots the actor system and blocks until it terminates. */
object ApplicationMain extends App {
  val system = ActorSystem("Scala-Ontapi")
  // Blocks the main thread until the system shuts down.
  // NOTE(review): awaitTermination is deprecated in Akka 2.4+ in favour of
  // Await.result(system.whenTerminated, ...) — TODO confirm the Akka version.
  system.awaitTermination()
} | ehudkaldor/scala-ontapi | src/main/scala/org/krazykat/ontapi/ApplicationMain.scala | Scala | gpl-2.0 | 169 |
package org.wquery.lang.operations
import org.wquery.lang.Context
import org.wquery.model.WordNet
import scalaz.Scalaz._
import scalaz._
/** Algebra operator that applies a WQuery [[Function]] to an optional argument operator.
  *
  * Pure delegation: evaluation and every type/size/pattern query is forwarded
  * to the wrapped function, passing the optional argument operator along.
  *
  * @param function the function being applied
  * @param args     the operator producing the function's arguments, if any
  */
case class FunctionOp(function: Function, args: Option[AlgebraOp]) extends AlgebraOp {
  def evaluate(wordNet: WordNet, bindings: Bindings, context: Context) = function.evaluate(args, wordNet, bindings, context)
  def leftType(pos: Int) = function.leftType(args, pos)
  def rightType(pos: Int) = function.rightType(args, pos)
  val minTupleSize = function.minTupleSize(args)
  val maxTupleSize = function.maxTupleSize(args)
  def bindingsPattern = function.bindingsPattern(args)
  // Variables referenced by the argument operator; scalaz's orZero yields the
  // empty value when there is no argument.
  val referencedVariables = args.map(_.referencedVariables).orZero
}
| marekkubis/wquery | src/main/scala/org/wquery/lang/operations/FunctionOp.scala | Scala | bsd-3-clause | 697 |
package io.cumulus.controllers.bodyParsers
import io.cumulus.controllers.ErrorSupport
import io.cumulus.validation.AppError
import play.api.libs.json._
import play.api.mvc._
/**
* Trait to provided an easy to use JSON parser for a specified payload type.
*/
/**
 * Trait providing a ready-to-use JSON body parser that deserializes the
 * request body into a payload of type `A`.
 */
trait BodyParserJson extends ErrorSupport { self: BaseController =>
  def parseJson[A](implicit reader: Reads[A]): BodyParser[A] =
    BodyParser("JSON reader") { implicit request =>
      val rawJson = self.parse.json.apply(request)
      rawJson.map { parsed =>
        parsed match {
          case Left(_) =>
            // The body could not even be parsed as JSON.
            Left(AppError.validation("api-error.bad-format").toResult)
          case Right(jsValue) =>
            // Valid JSON: now try to deserialize it into the payload type.
            jsValue.validate(reader) match {
              case JsSuccess(value, _) =>
                Right(value)
              case jsError: JsError =>
                Left(AppError.validation(jsError).toResult)
            }
        }
      }(self.defaultExecutionContext)
    }
}
| Cumulus-Cloud/cumulus | server/cumulus-core/src/main/scala/io/cumulus/controllers/bodyParsers/BodyParserJson.scala | Scala | mit | 872 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.view.timeseries
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.view.rewrite.TestUtil
class TestMVTimeSeriesQueryRollUp extends QueryTest with BeforeAndAfterAll {
// scalastyle:off lineLength
override def beforeAll(): Unit = {
drop()
createTable()
loadData("maintable")
}
override def afterAll(): Unit = {
drop()
}
test("test timeseries query rollup with simple projection") {
val result = sql("select timeseries(projectjoindate,'day'),projectcode from maintable")
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
sql(
"create materialized view mv1 as " +
"select timeseries(projectjoindate,'second'),projectcode from maintable")
sql(
"create materialized view mv2 as " +
"select timeseries(projectjoindate,'hour'),projectcode from maintable")
val df = sql("select timeseries(projectjoindate,'day'),projectcode from maintable")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv2"))
checkAnswer(result, df)
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
}
test("test timeseries query rollup with simple projection with group by - scenario-1") {
val result = sql("select timeseries(projectjoindate,'day'),projectcode from maintable group by timeseries(projectjoindate,'day'),projectcode")
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
sql(
"create materialized view mv1 as " +
"select timeseries(projectjoindate,'second'),projectcode from maintable group by timeseries(projectjoindate,'second'),projectcode")
sql(
"create materialized view mv2 as " +
"select timeseries(projectjoindate,'hour'),projectcode from maintable group by timeseries(projectjoindate,'hour'),projectcode")
var df = sql("select timeseries(projectjoindate,'day'),projectcode from maintable group by timeseries(projectjoindate,'day'),projectcode")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv2"))
checkAnswer(result, df)
df = sql("select timeseries(projectjoindate,'second'),projectcode from maintable group by timeseries(projectjoindate,'second'),projectcode")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv1"))
df = sql("select timeseries(projectjoindate,'second') from maintable group by timeseries(projectjoindate,'second')")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv1"))
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
}
test("test timeseries query rollup with simple projection with group by - scenario-1 with single materialized view ") {
val result = sql("select timeseries(projectjoindate,'day'),projectcode from maintable group by timeseries(projectjoindate,'day'),projectcode")
sql("drop materialized view if exists mv1")
sql(
"create materialized view mv1 as " +
"select timeseries(projectjoindate,'second'),projectcode from maintable group by timeseries(projectjoindate,'second'),projectcode")
var df = sql("select timeseries(projectjoindate,'day'),projectcode from maintable group by timeseries(projectjoindate,'day'),projectcode")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv1"))
checkAnswer(result, df)
df = sql("select timeseries(projectjoindate,'second'),projectcode from maintable group by timeseries(projectjoindate,'second'),projectcode")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv1"))
df = sql("select timeseries(projectjoindate,'second') from maintable group by timeseries(projectjoindate,'second')")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv1"))
sql("drop materialized view if exists mv1")
}
test("test timeseries query rollup with simple projection with group by - scenario-2") {
val result = sql("select timeseries(projectjoindate,'day'),sum(projectcode) from maintable group by timeseries(projectjoindate,'day')")
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
sql(
"create materialized view mv1 as " +
"select timeseries(projectjoindate,'second'),sum(projectcode) from maintable group by timeseries(projectjoindate,'second')")
sql(
"create materialized view mv2 as " +
"select timeseries(projectjoindate,'hour'),sum(projectcode) from maintable group by timeseries(projectjoindate,'hour')")
val df = sql("select timeseries(projectjoindate,'day'),sum(projectcode) from maintable group by timeseries(projectjoindate,'day')")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv2"))
checkAnswer(result, df)
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
}
test("test timeseries query rollup with simple projection with filter") {
val result = sql("select timeseries(projectjoindate,'day'),projectcode from maintable where projectcode=8")
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
sql(
"create materialized view mv1 as " +
"select timeseries(projectjoindate,'second'),projectcode from maintable")
sql(
"create materialized view mv2 as " +
"select timeseries(projectjoindate,'hour'),projectcode from maintable")
val df = sql("select timeseries(projectjoindate,'day'),projectcode from maintable where projectcode=8")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv2"))
checkAnswer(result, df)
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
}
test("test timeseries query rollup with simple projection with group by & filter - scenario 1") {
val result = sql("select timeseries(projectjoindate,'day'),projectcode from maintable where projectcode=8 " +
"group by timeseries(projectjoindate,'day'),projectcode")
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
sql("create materialized view mv1 as " +
"select timeseries(projectjoindate,'second'),projectcode from maintable group by " +
"timeseries(projectjoindate,'second'),projectcode")
sql("create materialized view mv2 as " +
"select timeseries(projectjoindate,'hour'),projectcode from maintable group by timeseries" +
"(projectjoindate,'hour'),projectcode")
val df = sql("select timeseries(projectjoindate,'day'),projectcode from maintable where projectcode=8 " +
"group by timeseries(projectjoindate,'day'),projectcode")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv2"))
checkAnswer(result, df)
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
}
test("test timeseries query rollup with simple projection with group by & filter - scenario 2") {
val result = sql("select timeseries(projectjoindate,'day'),projectcode from maintable where projectcode=8 group by timeseries(projectjoindate,'day'),projectcode")
sql("drop materialized view if exists mv1")
sql(
"create materialized view mv1 as " +
"select timeseries(projectjoindate,'second'),projectcode from maintable where projectcode=1 group by timeseries(projectjoindate,'second'),projectcode")
val df = sql("select timeseries(projectjoindate,'day'),projectcode from maintable where projectcode=8 group by timeseries(projectjoindate,'day'),projectcode")
assert(!TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv1"))
checkAnswer(result, df)
sql("drop materialized view if exists mv1")
}
test("test timeseries query rollup with simple projection with alias- scenario 1") {
val result = sql("select timeseries(projectjoindate,'day') as a,projectcode as b from maintable")
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
sql(
"create materialized view mv1 as " +
"select timeseries(projectjoindate,'second'),projectcode from maintable")
sql(
"create materialized view mv2 as " +
"select timeseries(projectjoindate,'hour'),projectcode from maintable")
val df = sql("select timeseries(projectjoindate,'day') as a,projectcode as b from maintable")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv2"))
checkAnswer(result, df)
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
}
test("test timeseries query rollup with simple projection with alias- scenario 2") {
val result = sql("select timeseries(projectjoindate,'day'),projectcode from maintable")
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
sql(
"create materialized view mv1 as " +
"select timeseries(projectjoindate,'second') as a,projectcode as b from maintable")
sql(
"create materialized view mv2 as " +
"select timeseries(projectjoindate,'hour') as a,projectcode as b from maintable")
val df = sql("select timeseries(projectjoindate,'day'),projectcode from maintable")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv2"))
checkAnswer(result, df)
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
}
test("test timeseries query rollup with projection with alias and group by- scenario 1") {
val result = sql("select timeseries(projectjoindate,'day') as a,sum(projectcode) as b from maintable group by timeseries(projectjoindate,'day')")
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
sql(
"create materialized view mv1 as " +
"select timeseries(projectjoindate,'second'),sum(projectcode) from maintable group by timeseries(projectjoindate,'second')")
sql(
"create materialized view mv2 as " +
"select timeseries(projectjoindate,'hour'),sum(projectcode) from maintable group by timeseries(projectjoindate,'hour')")
val df = sql("select timeseries(projectjoindate,'day') as a,sum(projectcode) as b from maintable group by timeseries(projectjoindate,'day')")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv2"))
checkAnswer(result, df)
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
}
test("test timeseries query rollup with projection with alias and group by- scenario 2") {
val result = sql("select timeseries(projectjoindate,'day'),sum(projectcode) from maintable group by timeseries(projectjoindate,'day')")
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
sql(
"create materialized view mv1 as " +
"select timeseries(projectjoindate,'second') as a,sum(projectcode) as b from maintable group by timeseries(projectjoindate,'second')")
sql(
"create materialized view mv2 as " +
"select timeseries(projectjoindate,'hour') as a,sum(projectcode) as b from maintable group by timeseries(projectjoindate,'hour')")
val df = sql("select timeseries(projectjoindate,'day'),sum(projectcode) from maintable group by timeseries(projectjoindate,'day')")
assert(TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv2"))
checkAnswer(result, df)
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
}
test("rollup not supported for join queries") {
sql("drop table if exists maintable1")
sql("CREATE TABLE maintable1 (empno int,empname string, projectcode int, projectjoindate " +
"Timestamp,salary double) STORED AS CARBONDATA")
loadData("maintable1")
val result = sql("select timeseries(t1.projectjoindate,'day'),count(timeseries(t2.projectjoindate,'day')),sum(t2.projectcode) from maintable t1 inner join maintable1 t2 " +
"on (timeseries(t1.projectjoindate,'day')=timeseries(t2.projectjoindate,'day')) group by timeseries(t1.projectjoindate,'day')")
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
sql("create materialized view mv1 as " +
"select timeseries(t1.projectjoindate,'second'),count(timeseries(t2.projectjoindate,'second')),sum(t2.projectcode) from maintable t1 inner join maintable1 t2 " +
"on (timeseries(t1.projectjoindate,'second')=timeseries(t2.projectjoindate,'second')) group by timeseries(t1.projectjoindate,'second')")
sql("create materialized view mv2 as " +
"select timeseries(t1.projectjoindate,'hour'),count(timeseries(t2.projectjoindate,'hour')),sum(t2.projectcode) from maintable t1 inner join maintable1 t2 " +
"on (timeseries(t1.projectjoindate,'hour')=timeseries(t2.projectjoindate,'hour')) group by timeseries(t1.projectjoindate,'hour')")
val df = sql("select timeseries(t1.projectjoindate,'day'),count(timeseries(t2.projectjoindate,'day')),sum(t2.projectcode) from maintable t1 inner join maintable1 t2 " +
"on (timeseries(t1.projectjoindate,'day')=timeseries(t2.projectjoindate,'day')) group by timeseries(t1.projectjoindate,'day')")
assert(!TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv2"))
checkAnswer(result, df)
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
}
test("rollup not supported for timeseries udf in filter") {
val result = sql("select timeseries(projectjoindate,'day'),sum(projectcode) from maintable where timeseries(projectjoindate,'day')='2016-02-23 00:00:00' group by timeseries(projectjoindate,'day')")
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
sql(
"create materialized view mv1 as " +
"select timeseries(projectjoindate,'second'),sum(projectcode) from maintable group by timeseries(projectjoindate,'second')")
sql(
"create materialized view mv2 as " +
"select timeseries(projectjoindate,'hour'),sum(projectcode) from maintable group by timeseries(projectjoindate,'hour')")
val df = sql("select timeseries(projectjoindate,'day'),sum(projectcode) from maintable where timeseries(projectjoindate,'day')='2016-02-23 00:00:00' group by timeseries(projectjoindate,'day')")
assert(!TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv2"))
checkAnswer(result, df)
sql("drop materialized view if exists mv1")
sql("drop materialized view if exists mv2")
}
test("test rollup for timeseries column of Date type") {
drop()
sql("CREATE TABLE maintable (empno int,empname string, projectcode int, projectjoindate " +
"date,salary double) STORED AS CARBONDATA")
sql("insert into maintable select 11,'joey',2,'2016-02-23',300")
sql("insert into maintable select 13,'pheobe',1,'2016-02-23',450")
sql("insert into maintable select 22,'cathy',1,'2016-02-25',450.5")
sql("drop materialized view if exists mv1")
val result = sql("select timeseries(projectjoindate,'week'),sum(projectcode) from maintable group by timeseries(projectjoindate,'week')")
sql("create materialized view mv1 as select timeseries(projectjoindate,'day'),sum(projectcode) from maintable group by timeseries(projectjoindate,'day')")
val dayDF = sql("select timeseries(projectjoindate,'day'),sum(projectcode) from maintable group by timeseries(projectjoindate,'day')")
assert(TestUtil.verifyMVHit(dayDF.queryExecution.optimizedPlan, "mv1"))
val weekDF = sql("select timeseries(projectjoindate,'week'),sum(projectcode) from maintable group by timeseries(projectjoindate,'week')")
assert(TestUtil.verifyMVHit(weekDF.queryExecution.optimizedPlan, "mv1"))
checkAnswer(result, weekDF)
sql("drop materialized view if exists mv1")
drop()
}
// Drops the shared test table (if present) so each test starts clean.
def drop(): Unit = {
sql("drop table if exists maintable")
}
// Creates the shared test table with a Timestamp-typed timeseries column
// (projectjoindate), stored in CarbonData format.
def createTable(): Unit = {
sql(
"CREATE TABLE maintable (empno int,empname string, projectcode int, projectjoindate " +
"Timestamp,salary double) STORED AS CARBONDATA")
}
// Loads the sample CSV fixture (mv_sampledata.csv under resourcesPath) into
// the given table using comma delimiter and double-quote quoting.
def loadData(table: String): Unit = {
sql(
s"""LOAD DATA local inpath '$resourcesPath/mv_sampledata.csv' INTO TABLE $table OPTIONS
|('DELIMITER'= ',', 'QUOTECHAR'= '"')""".stripMargin)
}
// scalastyle:on lineLength
}
| zzcclp/carbondata | integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestMVTimeSeriesQueryRollUp.scala | Scala | apache-2.0 | 17,350 |
// #Sireum
/*
Copyright (c) 2017-2021, Robby, Kansas State University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sireum
// Companion utilities for the persistent (immutable) union-find structure.
// NOTE: this is Sireum Slang (// #Sireum) code; it must stay within the
// verified Slang subset.
object UnionFind {
// Elements are addressed internally by a dense integer index.
type Index = Z
// Index-level core operations; public methods translate elements to
// indices before delegating here.
object Internal {
// Follows parent links from `e` until reaching a self-parented node,
// i.e. the representative (root) of e's set. No structural change.
@pure def find[T](ds: UnionFind[T], e: Index): Index = {
var root = e
while (ds.parentOf(root) != root) {
root = ds.parentOf(root)
}
return root
}
// Like find, but additionally performs path halving: each visited node
// is re-pointed to its grandparent. Returns the updated (compressed)
// structure together with the root index.
@pure def findCompress[T](ds: UnionFind[T], e: Index): (UnionFind[T], Index) = {
var root = e
var newParentOf = ds.parentOf
while (newParentOf(root) != root) {
// root ~> grandparent(root): halve the path as we walk up.
newParentOf = newParentOf(root ~> newParentOf(newParentOf(root)))
root = newParentOf(root)
}
return (ds(parentOf = newParentOf), root)
}
// Unions the sets containing e1 and e2 using union-by-size: the root of
// the smaller tree is attached under the root of the larger, and the
// surviving root's size becomes the sum of both. Both lookups use
// findCompress, so the returned structure is also path-compressed.
@pure def merge[T](ds: UnionFind[T], e1: Index, e2: Index): UnionFind[T] = {
var newDs = ds
val rootN: Index = {
val pe1 = findCompress(newDs, e1)
newDs = pe1._1
pe1._2
}
val rootM: Index = {
val pe2 = findCompress(newDs, e2)
newDs = pe2._1
pe2._2
}
val (rep, other): (Index, Index) =
if (newDs.sizeOf(rootM) > newDs.sizeOf(rootN)) (rootM, rootN) else (rootN, rootM)
return newDs(
parentOf = newDs.parentOf(other ~> rep),
sizeOf = newDs.sizeOf(rep ~> (newDs.sizeOf(rep) + newDs.sizeOf(other)))
)
}
}
// Builds a union-find where every element starts as its own singleton set:
// each index is its own parent and every tree has size 1.
@pure def create[T](elements: ISZ[T]): UnionFind[T] = {
val size = elements.size
var es = HashSMap.emptyInit[T, Index](size)
for (e <- elements) {
es = es + e ~> es.size
}
val parentOf: IS[Index, Index] = for (i <- z"0" until size) yield i
val sizeOf = IS.create[Index, Index](size, 1)
return UnionFind(es, elements, parentOf, sizeOf)
}
}
// Persistent (immutable) union-find / disjoint-set over elements of type T.
// `elements` maps an element to its dense index, `elementsInverse` is the
// reverse mapping, `parentOf` holds the parent links, and `sizeOf` holds the
// size of the tree rooted at each index (meaningful only for roots).
@datatype class UnionFind[T](
val elements: HashSMap[T, UnionFind.Index],
val elementsInverse: IS[UnionFind.Index, T],
val parentOf: IS[UnionFind.Index, UnionFind.Index],
val sizeOf: IS[UnionFind.Index, UnionFind.Index]
) {
// Number of elements tracked.
@pure def size: Z = {
return elements.size
}
// Hash only by size; structural equality is defined by isEqual below.
@pure override def hash: Z = {
return size
}
// Two union-finds are equal when they contain the same elements and induce
// the same partition (the same pairs of elements grouped together),
// regardless of internal parent-link shape.
@pure def isEqual(other: UnionFind[T]): B = {
if (elementsInverse.size != other.elementsInverse.size) {
return F
}
if ((HashSSet ++ elementsInverse) != (HashSSet ++ other.elementsInverse)) {
return F
}
var seen = HashSSet.emptyInit[(T, T)](size)
for (element1 <- elementsInverse; element2 <- elementsInverse if element1 != element2) {
val p = (element1, element2)
if (!seen.contains(p)) {
seen = seen + p + ((element2, element1))
// BUG FIX: the partition must be compared against `other`. The
// original compared `inSameSet(element1, element2)` with itself,
// which is vacuously equal, so different partitions compared equal.
if (inSameSet(element1, element2) != other.inSameSet(element1, element2)) {
return F
}
}
}
return T
}
// True iff both elements currently belong to the same set.
@pure def inSameSet(element1: T, element2: T): B = {
return UnionFind.Internal.find(this, elements.get(element1).get) == UnionFind.Internal
.find(this, elements.get(element2).get)
}
// Same-set test that also returns a path-compressed copy of the structure.
@pure def inSameSetCompress(element1: T, element2: T): (UnionFind[T], B) = {
val e1 = elements.get(element1).get
val e2 = elements.get(element2).get
var newDs = this
val rep1: UnionFind.Index = {
val p1 = UnionFind.Internal.findCompress(newDs, e1)
newDs = p1._1
p1._2
}
val rep2: UnionFind.Index = {
val p2 = UnionFind.Internal.findCompress(newDs, e2)
newDs = p2._1
p2._2
}
return (newDs, rep1 == rep2)
}
// Returns the representative element of the set containing `element`.
@pure def find(element: T): T = {
val n = elements.get(element).get
val rep = UnionFind.Internal.find(this, n)
return elementsInverse(rep)
}
// Representative lookup that also returns a path-compressed copy.
@pure def findCompress(element: T): (UnionFind[T], T) = {
val n = elements.get(element).get
val (newDs, rep) = UnionFind.Internal.findCompress(this, n)
return (newDs, elementsInverse(rep))
}
// Returns a new structure in which the sets of the two elements are merged.
@pure def merge(element1: T, element2: T): UnionFind[T] = {
val e1 = elements.get(element1).get
val e2 = elements.get(element2).get
return UnionFind.Internal.merge(this, e1, e2)
}
// Pretty-prints the partition as a set of sets, rendering each element
// with the supplied formatter `f`.
@pure def toST(f: T => ST): ST = {
var map = HashSMap.emptyInit[UnionFind.Index, ISZ[ST]](size)
for (element <- elementsInverse) {
val rep = UnionFind.Internal.find(this, elements.get(element).get)
map = map + rep ~> (map.get(rep).getOrElse(ISZ[ST]()) :+ f(element))
}
val sets: ISZ[ST] = for (sts <- map.values) yield st"""{
| ${(sts, ",\\n")}
|}"""
val r =
st"""{
| ${(sets, ",\\n")}
|}"""
return r
}
@pure override def string: String = {
return toST(e => st"$e").render
}
}
| sireum/v3-logika-runtime | library/shared/src/main/scala/org/sireum/UnionFind.scala | Scala | bsd-2-clause | 5,745 |
package singleton.twoface
import singleton.twoface.math._
import org.scalacheck.Properties
import shapeless.test.illTyped
import singleton.TestUtils._
import singleton.ops._
// Property-based tests for TwoFace.Long: creation, arithmetic, comparison,
// conversion, and min/max/abs operations against every numeric TwoFace peer
// type, in all safe (compile-time literal) / unsafe (runtime value, wrapped
// in `us(...)`) combinations.
// FIX: the min/max property groups previously reused identical names
// ("Safe Long, Safe Long", ...), so eight properties collided pairwise in
// test reports; they are now prefixed with the operation under test.
class TwoFaceLongSpec extends Properties("TwoFace.Long") {
// --- Creation ---
property("Implicit Creation[]") = {
val a = implicitly[TwoFace.Long[W.`2L`.T]]
a.getValue == 2L && a.isLiteral
}
property("Safe Creation[]") = {
val a = TwoFace.Long[W.`2L`.T]
a.getValue == 2L && a.isLiteral
}
property("Safe Creation()") = {
val a = TwoFace.Long(2L)
a.getValue == 2L && a.isLiteral
}
property("Unsafe Creation()") = {
val a = TwoFace.Long(us(2L))
a.getValue == 2L && !a.isLiteral
}
property("Safe ifThenElse") = verifyTFLong(ifThenElse(true, 1L, 2L), 1L)
property("Unsafe ifThenElse") = verifyTFLong(ifThenElse(us(false), 1L, 2L), us(2L))
// --- Addition ---
property("Safe Long + Safe Char") = verifyTFLong(TwoFace.Long(2L) + TwoFace.Char('\\u0001'), 3L)
property("Safe Long + Unsafe Char") = verifyTFLong(TwoFace.Long(2L) + TwoFace.Char(us('\\u0001')), us(3L))
property("Unsafe Long + Safe Char") = verifyTFLong(TwoFace.Long(us(2L)) + TwoFace.Char('\\u0001'), us(3L))
property("Unsafe Long + Unsafe Char") = verifyTFLong(TwoFace.Long(us(2L)) + TwoFace.Char(us('\\u0001')), us(3L))
property("Safe Long + Safe Int") = verifyTFLong(TwoFace.Long(2L) + TwoFace.Int(1), 3L)
property("Safe Long + Unsafe Int") = verifyTFLong(TwoFace.Long(2L) + TwoFace.Int(us(1)), us(3L))
property("Unsafe Long + Safe Int") = verifyTFLong(TwoFace.Long(us(2L)) + TwoFace.Int(1), us(3L))
property("Unsafe Long + Unsafe Int") = verifyTFLong(TwoFace.Long(us(2L)) + TwoFace.Int(us(1)), us(3L))
property("Safe Long + Safe Long") = verifyTFLong(TwoFace.Long(2L) + TwoFace.Long(1L), 3L)
property("Safe Long + Unsafe Long") = verifyTFLong(TwoFace.Long(2L) + TwoFace.Long(us(1L)), us(3L))
property("Unsafe Long + Safe Long") = verifyTFLong(TwoFace.Long(us(2L)) + TwoFace.Long(1L), us(3L))
property("Unsafe Long + Unsafe Long") = verifyTFLong(TwoFace.Long(us(2L)) + TwoFace.Long(us(1L)), us(3L))
property("Safe Long + Safe Float") = verifyTFFloat(TwoFace.Long(2L) + TwoFace.Float(1.0f), 3.0f)
property("Safe Long + Unsafe Float") = verifyTFFloat(TwoFace.Long(2L) + TwoFace.Float(us(1.0f)), us(3.0f))
property("Unsafe Long + Safe Float") = verifyTFFloat(TwoFace.Long(us(2L)) + TwoFace.Float(1.0f), us(3.0f))
property("Unsafe Long + Unsafe Float") = verifyTFFloat(TwoFace.Long(us(2L)) + TwoFace.Float(us(1.0f)), us(3.0f))
property("Safe Long + Safe Double") = verifyTFDouble(TwoFace.Long(2L) + TwoFace.Double(1.0), 3.0)
property("Safe Long + Unsafe Double") = verifyTFDouble(TwoFace.Long(2L) + TwoFace.Double(us(1.0)), us(3.0))
property("Unsafe Long + Safe Double") = verifyTFDouble(TwoFace.Long(us(2L)) + TwoFace.Double(1.0), us(3.0))
property("Unsafe Long + Unsafe Double") = verifyTFDouble(TwoFace.Long(us(2L)) + TwoFace.Double(us(1.0)), us(3.0))
// --- Subtraction ---
property("Safe Long - Safe Char") = verifyTFLong(TwoFace.Long(2L) - TwoFace.Char('\\u0001'), 1L)
property("Safe Long - Unsafe Char") = verifyTFLong(TwoFace.Long(2L) - TwoFace.Char(us('\\u0001')), us(1L))
property("Unsafe Long - Safe Char") = verifyTFLong(TwoFace.Long(us(2L)) - TwoFace.Char('\\u0001'), us(1L))
property("Unsafe Long - Unsafe Char") = verifyTFLong(TwoFace.Long(us(2L)) - TwoFace.Char(us('\\u0001')), us(1L))
property("Safe Long - Safe Int") = verifyTFLong(TwoFace.Long(2L) - TwoFace.Int(1), 1L)
property("Safe Long - Unsafe Int") = verifyTFLong(TwoFace.Long(2L) - TwoFace.Int(us(1)), us(1L))
property("Unsafe Long - Safe Int") = verifyTFLong(TwoFace.Long(us(2L)) - TwoFace.Int(1), us(1L))
property("Unsafe Long - Unsafe Int") = verifyTFLong(TwoFace.Long(us(2L)) - TwoFace.Int(us(1)), us(1L))
property("Safe Long - Safe Long") = verifyTFLong(TwoFace.Long(2L) - TwoFace.Long(1L), 1L)
property("Safe Long - Unsafe Long") = verifyTFLong(TwoFace.Long(2L) - TwoFace.Long(us(1L)), us(1L))
property("Unsafe Long - Safe Long") = verifyTFLong(TwoFace.Long(us(2L)) - TwoFace.Long(1L), us(1L))
property("Unsafe Long - Unsafe Long") = verifyTFLong(TwoFace.Long(us(2L)) - TwoFace.Long(us(1L)), us(1L))
property("Safe Long - Safe Float") = verifyTFFloat(TwoFace.Long(2L) - TwoFace.Float(1.0f), 1.0f)
property("Safe Long - Unsafe Float") = verifyTFFloat(TwoFace.Long(2L) - TwoFace.Float(us(1.0f)), us(1.0f))
property("Unsafe Long - Safe Float") = verifyTFFloat(TwoFace.Long(us(2L)) - TwoFace.Float(1.0f), us(1.0f))
property("Unsafe Long - Unsafe Float") = verifyTFFloat(TwoFace.Long(us(2L)) - TwoFace.Float(us(1.0f)), us(1.0f))
property("Safe Long - Safe Double") = verifyTFDouble(TwoFace.Long(2L) - TwoFace.Double(1.0), 1.0)
property("Safe Long - Unsafe Double") = verifyTFDouble(TwoFace.Long(2L) - TwoFace.Double(us(1.0)), us(1.0))
property("Unsafe Long - Safe Double") = verifyTFDouble(TwoFace.Long(us(2L)) - TwoFace.Double(1.0), us(1.0))
property("Unsafe Long - Unsafe Double") = verifyTFDouble(TwoFace.Long(us(2L)) - TwoFace.Double(us(1.0)), us(1.0))
// --- Multiplication ---
property("Safe Long * Safe Char") = verifyTFLong(TwoFace.Long(2L) * TwoFace.Char('\\u0001'), 2L)
property("Safe Long * Unsafe Char") = verifyTFLong(TwoFace.Long(2L) * TwoFace.Char(us('\\u0001')), us(2L))
property("Unsafe Long * Safe Char") = verifyTFLong(TwoFace.Long(us(2L)) * TwoFace.Char('\\u0001'), us(2L))
property("Unsafe Long * Unsafe Char") = verifyTFLong(TwoFace.Long(us(2L)) * TwoFace.Char(us('\\u0001')), us(2L))
property("Safe Long * Safe Int") = verifyTFLong(TwoFace.Long(2L) * TwoFace.Int(1), 2L)
property("Safe Long * Unsafe Int") = verifyTFLong(TwoFace.Long(2L) * TwoFace.Int(us(1)), us(2L))
property("Unsafe Long * Safe Int") = verifyTFLong(TwoFace.Long(us(2L)) * TwoFace.Int(1), us(2L))
property("Unsafe Long * Unsafe Int") = verifyTFLong(TwoFace.Long(us(2L)) * TwoFace.Int(us(1)), us(2L))
property("Safe Long * Safe Long") = verifyTFLong(TwoFace.Long(2L) * TwoFace.Long(1L), 2L)
property("Safe Long * Unsafe Long") = verifyTFLong(TwoFace.Long(2L) * TwoFace.Long(us(1L)), us(2L))
property("Unsafe Long * Safe Long") = verifyTFLong(TwoFace.Long(us(2L)) * TwoFace.Long(1L), us(2L))
property("Unsafe Long * Unsafe Long") = verifyTFLong(TwoFace.Long(us(2L)) * TwoFace.Long(us(1L)), us(2L))
property("Safe Long * Safe Float") = verifyTFFloat(TwoFace.Long(2L) * TwoFace.Float(1.0f), 2.0f)
property("Safe Long * Unsafe Float") = verifyTFFloat(TwoFace.Long(2L) * TwoFace.Float(us(1.0f)), us(2.0f))
property("Unsafe Long * Safe Float") = verifyTFFloat(TwoFace.Long(us(2L)) * TwoFace.Float(1.0f), us(2.0f))
property("Unsafe Long * Unsafe Float") = verifyTFFloat(TwoFace.Long(us(2L)) * TwoFace.Float(us(1.0f)), us(2.0f))
property("Safe Long * Safe Double") = verifyTFDouble(TwoFace.Long(2L) * TwoFace.Double(1.0), 2.0)
property("Safe Long * Unsafe Double") = verifyTFDouble(TwoFace.Long(2L) * TwoFace.Double(us(1.0)), us(2.0))
property("Unsafe Long * Safe Double") = verifyTFDouble(TwoFace.Long(us(2L)) * TwoFace.Double(1.0), us(2.0))
property("Unsafe Long * Unsafe Double") = verifyTFDouble(TwoFace.Long(us(2L)) * TwoFace.Double(us(1.0)), us(2.0))
// --- Division ---
property("Safe Long / Safe Char") = verifyTFLong(TwoFace.Long(6L) / TwoFace.Char('\\u0002'), 3L)
property("Safe Long / Unsafe Char") = verifyTFLong(TwoFace.Long(6L) / TwoFace.Char(us('\\u0002')), us(3L))
property("Unsafe Long / Safe Char") = verifyTFLong(TwoFace.Long(us(6L)) / TwoFace.Char('\\u0002'), us(3L))
property("Unsafe Long / Unsafe Char") = verifyTFLong(TwoFace.Long(us(6L)) / TwoFace.Char(us('\\u0002')), us(3L))
property("Safe Long / Safe Int") = verifyTFLong(TwoFace.Long(6L) / TwoFace.Int(2), 3L)
property("Safe Long / Unsafe Int") = verifyTFLong(TwoFace.Long(6L) / TwoFace.Int(us(2)), us(3L))
property("Unsafe Long / Safe Int") = verifyTFLong(TwoFace.Long(us(6L)) / TwoFace.Int(2), us(3L))
property("Unsafe Long / Unsafe Int") = verifyTFLong(TwoFace.Long(us(6L)) / TwoFace.Int(us(2)), us(3L))
property("Safe Long / Safe Long") = verifyTFLong(TwoFace.Long(6L) / TwoFace.Long(2L), 3L)
property("Safe Long / Unsafe Long") = verifyTFLong(TwoFace.Long(6L) / TwoFace.Long(us(2L)), us(3L))
property("Unsafe Long / Safe Long") = verifyTFLong(TwoFace.Long(us(6L)) / TwoFace.Long(2L), us(3L))
property("Unsafe Long / Unsafe Long") = verifyTFLong(TwoFace.Long(us(6L)) / TwoFace.Long(us(2L)), us(3L))
property("Safe Long / Safe Float") = verifyTFFloat(TwoFace.Long(6L) / TwoFace.Float(2.0f), 3.0f)
property("Safe Long / Unsafe Float") = verifyTFFloat(TwoFace.Long(6L) / TwoFace.Float(us(2.0f)), us(3.0f))
property("Unsafe Long / Safe Float") = verifyTFFloat(TwoFace.Long(us(6L)) / TwoFace.Float(2.0f), us(3.0f))
property("Unsafe Long / Unsafe Float") = verifyTFFloat(TwoFace.Long(us(6L)) / TwoFace.Float(us(2.0f)), us(3.0f))
property("Safe Long / Safe Double") = verifyTFDouble(TwoFace.Long(6L) / TwoFace.Double(2.0), 3.0)
property("Safe Long / Unsafe Double") = verifyTFDouble(TwoFace.Long(6L) / TwoFace.Double(us(2.0)), us(3.0))
property("Unsafe Long / Safe Double") = verifyTFDouble(TwoFace.Long(us(6L)) / TwoFace.Double(2.0), us(3.0))
property("Unsafe Long / Unsafe Double") = verifyTFDouble(TwoFace.Long(us(6L)) / TwoFace.Double(us(2.0)), us(3.0))
// --- Modulo ---
property("Safe Long % Safe Char") = verifyTFLong(TwoFace.Long(7L) % TwoFace.Char('\\u0004'), 3L)
property("Safe Long % Unsafe Char") = verifyTFLong(TwoFace.Long(7L) % TwoFace.Char(us('\\u0004')), us(3L))
property("Unsafe Long % Safe Char") = verifyTFLong(TwoFace.Long(us(7L)) % TwoFace.Char('\\u0004'), us(3L))
property("Unsafe Long % Unsafe Char") = verifyTFLong(TwoFace.Long(us(7L)) % TwoFace.Char(us('\\u0004')), us(3L))
property("Safe Long % Safe Int") = verifyTFLong(TwoFace.Long(7L) % TwoFace.Int(4), 3L)
property("Safe Long % Unsafe Int") = verifyTFLong(TwoFace.Long(7L) % TwoFace.Int(us(4)), us(3L))
property("Unsafe Long % Safe Int") = verifyTFLong(TwoFace.Long(us(7L)) % TwoFace.Int(4), us(3L))
property("Unsafe Long % Unsafe Int") = verifyTFLong(TwoFace.Long(us(7L)) % TwoFace.Int(us(4)), us(3L))
property("Safe Long % Safe Long") = verifyTFLong(TwoFace.Long(7L) % TwoFace.Long(4L), 3L)
property("Safe Long % Unsafe Long") = verifyTFLong(TwoFace.Long(7L) % TwoFace.Long(us(4L)), us(3L))
property("Unsafe Long % Safe Long") = verifyTFLong(TwoFace.Long(us(7L)) % TwoFace.Long(4L), us(3L))
property("Unsafe Long % Unsafe Long") = verifyTFLong(TwoFace.Long(us(7L)) % TwoFace.Long(us(4L)), us(3L))
property("Safe Long % Safe Float") = verifyTFFloat(TwoFace.Long(7L) % TwoFace.Float(4.0f), 3.0f)
property("Safe Long % Unsafe Float") = verifyTFFloat(TwoFace.Long(7L) % TwoFace.Float(us(4.0f)), us(3.0f))
property("Unsafe Long % Safe Float") = verifyTFFloat(TwoFace.Long(us(7L)) % TwoFace.Float(4.0f), us(3.0f))
property("Unsafe Long % Unsafe Float") = verifyTFFloat(TwoFace.Long(us(7L)) % TwoFace.Float(us(4.0f)), us(3.0f))
property("Safe Long % Safe Double") = verifyTFDouble(TwoFace.Long(7L) % TwoFace.Double(4.0), 3.0)
property("Safe Long % Unsafe Double") = verifyTFDouble(TwoFace.Long(7L) % TwoFace.Double(us(4.0)), us(3.0))
property("Unsafe Long % Safe Double") = verifyTFDouble(TwoFace.Long(us(7L)) % TwoFace.Double(4.0), us(3.0))
property("Unsafe Long % Unsafe Double") = verifyTFDouble(TwoFace.Long(us(7L)) % TwoFace.Double(us(4.0)), us(3.0))
// --- Comparisons ---
property("Safe Long < Safe Char") = verifyTFBoolean(TwoFace.Long(7L) < TwoFace.Char('\\u0004'), false)
property("Safe Long < Unsafe Char") = verifyTFBoolean(TwoFace.Long(7L) < TwoFace.Char(us('\\u0004')), us(false))
property("Unsafe Long < Safe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) < TwoFace.Char('\\u0004'), us(false))
property("Unsafe Long < Unsafe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) < TwoFace.Char(us('\\u0004')), us(false))
property("Safe Long < Safe Int") = verifyTFBoolean(TwoFace.Long(7L) < TwoFace.Int(4), false)
property("Safe Long < Unsafe Int") = verifyTFBoolean(TwoFace.Long(7L) < TwoFace.Int(us(4)), us(false))
property("Unsafe Long < Safe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) < TwoFace.Int(4), us(false))
property("Unsafe Long < Unsafe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) < TwoFace.Int(us(4)), us(false))
property("Safe Long < Safe Long") = verifyTFBoolean(TwoFace.Long(7L) < TwoFace.Long(4L), false)
property("Safe Long < Unsafe Long") = verifyTFBoolean(TwoFace.Long(7L) < TwoFace.Long(us(4L)), us(false))
property("Unsafe Long < Safe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) < TwoFace.Long(4L), us(false))
property("Unsafe Long < Unsafe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) < TwoFace.Long(us(4L)), us(false))
property("Safe Long < Safe Float") = verifyTFBoolean(TwoFace.Long(7L) < TwoFace.Float(4.0f), false)
property("Safe Long < Unsafe Float") = verifyTFBoolean(TwoFace.Long(7L) < TwoFace.Float(us(4.0f)), us(false))
property("Unsafe Long < Safe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) < TwoFace.Float(4.0f), us(false))
property("Unsafe Long < Unsafe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) < TwoFace.Float(us(4.0f)), us(false))
property("Safe Long < Safe Double") = verifyTFBoolean(TwoFace.Long(7L) < TwoFace.Double(4.0), false)
property("Safe Long < Unsafe Double") = verifyTFBoolean(TwoFace.Long(7L) < TwoFace.Double(us(4.0)), us(false))
property("Unsafe Long < Safe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) < TwoFace.Double(4.0), us(false))
property("Unsafe Long < Unsafe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) < TwoFace.Double(us(4.0)), us(false))
property("Safe Long > Safe Char") = verifyTFBoolean(TwoFace.Long(7L) > TwoFace.Char('\\u0004'), true)
property("Safe Long > Unsafe Char") = verifyTFBoolean(TwoFace.Long(7L) > TwoFace.Char(us('\\u0004')), us(true))
property("Unsafe Long > Safe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) > TwoFace.Char('\\u0004'), us(true))
property("Unsafe Long > Unsafe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) > TwoFace.Char(us('\\u0004')), us(true))
property("Safe Long > Safe Int") = verifyTFBoolean(TwoFace.Long(7L) > TwoFace.Int(4), true)
property("Safe Long > Unsafe Int") = verifyTFBoolean(TwoFace.Long(7L) > TwoFace.Int(us(4)), us(true))
property("Unsafe Long > Safe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) > TwoFace.Int(4), us(true))
property("Unsafe Long > Unsafe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) > TwoFace.Int(us(4)), us(true))
property("Safe Long > Safe Long") = verifyTFBoolean(TwoFace.Long(7L) > TwoFace.Long(4L), true)
property("Safe Long > Unsafe Long") = verifyTFBoolean(TwoFace.Long(7L) > TwoFace.Long(us(4L)), us(true))
property("Unsafe Long > Safe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) > TwoFace.Long(4L), us(true))
property("Unsafe Long > Unsafe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) > TwoFace.Long(us(4L)), us(true))
property("Safe Long > Safe Float") = verifyTFBoolean(TwoFace.Long(7L) > TwoFace.Float(4.0f), true)
property("Safe Long > Unsafe Float") = verifyTFBoolean(TwoFace.Long(7L) > TwoFace.Float(us(4.0f)), us(true))
property("Unsafe Long > Safe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) > TwoFace.Float(4.0f), us(true))
property("Unsafe Long > Unsafe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) > TwoFace.Float(us(4.0f)), us(true))
property("Safe Long > Safe Double") = verifyTFBoolean(TwoFace.Long(7L) > TwoFace.Double(4.0), true)
property("Safe Long > Unsafe Double") = verifyTFBoolean(TwoFace.Long(7L) > TwoFace.Double(us(4.0)), us(true))
property("Unsafe Long > Safe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) > TwoFace.Double(4.0), us(true))
property("Unsafe Long > Unsafe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) > TwoFace.Double(us(4.0)), us(true))
property("Safe Long <= Safe Char") = verifyTFBoolean(TwoFace.Long(7L) <= TwoFace.Char('\\u0004'), false)
property("Safe Long <= Unsafe Char") = verifyTFBoolean(TwoFace.Long(7L) <= TwoFace.Char(us('\\u0004')), us(false))
property("Unsafe Long <= Safe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) <= TwoFace.Char('\\u0004'), us(false))
property("Unsafe Long <= Unsafe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) <= TwoFace.Char(us('\\u0004')), us(false))
property("Safe Long <= Safe Int") = verifyTFBoolean(TwoFace.Long(7L) <= TwoFace.Int(4), false)
property("Safe Long <= Unsafe Int") = verifyTFBoolean(TwoFace.Long(7L) <= TwoFace.Int(us(4)), us(false))
property("Unsafe Long <= Safe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) <= TwoFace.Int(4), us(false))
property("Unsafe Long <= Unsafe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) <= TwoFace.Int(us(4)), us(false))
property("Safe Long <= Safe Long") = verifyTFBoolean(TwoFace.Long(7L) <= TwoFace.Long(4L), false)
property("Safe Long <= Unsafe Long") = verifyTFBoolean(TwoFace.Long(7L) <= TwoFace.Long(us(4L)), us(false))
property("Unsafe Long <= Safe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) <= TwoFace.Long(4L), us(false))
property("Unsafe Long <= Unsafe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) <= TwoFace.Long(us(4L)), us(false))
property("Safe Long <= Safe Float") = verifyTFBoolean(TwoFace.Long(7L) <= TwoFace.Float(4.0f), false)
property("Safe Long <= Unsafe Float") = verifyTFBoolean(TwoFace.Long(7L) <= TwoFace.Float(us(4.0f)), us(false))
property("Unsafe Long <= Safe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) <= TwoFace.Float(4.0f), us(false))
property("Unsafe Long <= Unsafe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) <= TwoFace.Float(us(4.0f)), us(false))
property("Safe Long <= Safe Double") = verifyTFBoolean(TwoFace.Long(7L) <= TwoFace.Double(4.0), false)
property("Safe Long <= Unsafe Double") = verifyTFBoolean(TwoFace.Long(7L) <= TwoFace.Double(us(4.0)), us(false))
property("Unsafe Long <= Safe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) <= TwoFace.Double(4.0), us(false))
property("Unsafe Long <= Unsafe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) <= TwoFace.Double(us(4.0)), us(false))
property("Safe Long >= Safe Char") = verifyTFBoolean(TwoFace.Long(7L) >= TwoFace.Char('\\u0004'), true)
property("Safe Long >= Unsafe Char") = verifyTFBoolean(TwoFace.Long(7L) >= TwoFace.Char(us('\\u0004')), us(true))
property("Unsafe Long >= Safe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) >= TwoFace.Char('\\u0004'), us(true))
property("Unsafe Long >= Unsafe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) >= TwoFace.Char(us('\\u0004')), us(true))
property("Safe Long >= Safe Int") = verifyTFBoolean(TwoFace.Long(7L) >= TwoFace.Int(4), true)
property("Safe Long >= Unsafe Int") = verifyTFBoolean(TwoFace.Long(7L) >= TwoFace.Int(us(4)), us(true))
property("Unsafe Long >= Safe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) >= TwoFace.Int(4), us(true))
property("Unsafe Long >= Unsafe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) >= TwoFace.Int(us(4)), us(true))
property("Safe Long >= Safe Long") = verifyTFBoolean(TwoFace.Long(7L) >= TwoFace.Long(4L), true)
property("Safe Long >= Unsafe Long") = verifyTFBoolean(TwoFace.Long(7L) >= TwoFace.Long(us(4L)), us(true))
property("Unsafe Long >= Safe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) >= TwoFace.Long(4L), us(true))
property("Unsafe Long >= Unsafe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) >= TwoFace.Long(us(4L)), us(true))
property("Safe Long >= Safe Float") = verifyTFBoolean(TwoFace.Long(7L) >= TwoFace.Float(4.0f), true)
property("Safe Long >= Unsafe Float") = verifyTFBoolean(TwoFace.Long(7L) >= TwoFace.Float(us(4.0f)), us(true))
property("Unsafe Long >= Safe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) >= TwoFace.Float(4.0f), us(true))
property("Unsafe Long >= Unsafe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) >= TwoFace.Float(us(4.0f)), us(true))
property("Safe Long >= Safe Double") = verifyTFBoolean(TwoFace.Long(7L) >= TwoFace.Double(4.0), true)
property("Safe Long >= Unsafe Double") = verifyTFBoolean(TwoFace.Long(7L) >= TwoFace.Double(us(4.0)), us(true))
property("Unsafe Long >= Safe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) >= TwoFace.Double(4.0), us(true))
property("Unsafe Long >= Unsafe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) >= TwoFace.Double(us(4.0)), us(true))
// --- Equality ---
property("Safe Long == Safe Char") = verifyTFBoolean(TwoFace.Long(7L) == TwoFace.Char('\\u0007'), true)
property("Safe Long == Unsafe Char") = verifyTFBoolean(TwoFace.Long(7L) == TwoFace.Char(us('\\u0007')), us(true))
property("Unsafe Long == Safe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) == TwoFace.Char('\\u0007'), us(true))
property("Unsafe Long == Unsafe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) == TwoFace.Char(us('\\u0007')), us(true))
property("Safe Long == Safe Int") = verifyTFBoolean(TwoFace.Long(7L) == TwoFace.Int(7), true)
property("Safe Long == Unsafe Int") = verifyTFBoolean(TwoFace.Long(7L) == TwoFace.Int(us(7)), us(true))
property("Unsafe Long == Safe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) == TwoFace.Int(7), us(true))
property("Unsafe Long == Unsafe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) == TwoFace.Int(us(7)), us(true))
property("Safe Long == Safe Long") = {
// Also pins the compile-time result type to the literal `true`.
val result = TwoFace.Long(7L) == TwoFace.Long(7L)
implicitly[result.Out =:= W.`true`.T]
result.getValue
}
property("Safe Long == Unsafe Long") = verifyTFBoolean(TwoFace.Long(7L) == TwoFace.Long(us(7L)), us(true))
property("Unsafe Long == Safe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) == TwoFace.Long(7L), us(true))
property("Unsafe Long == Unsafe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) == TwoFace.Long(us(7L)), us(true))
property("Safe Long == Safe Float") = verifyTFBoolean(TwoFace.Long(7L) == TwoFace.Float(7.0f), true)
property("Safe Long == Unsafe Float") = verifyTFBoolean(TwoFace.Long(7L) == TwoFace.Float(us(7.0f)), us(true))
property("Unsafe Long == Safe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) == TwoFace.Float(7.0f), us(true))
property("Unsafe Long == Unsafe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) == TwoFace.Float(us(7.0f)), us(true))
property("Safe Long == Safe Double") = verifyTFBoolean(TwoFace.Long(7L) == TwoFace.Double(7.0), true)
property("Safe Long == Unsafe Double") = verifyTFBoolean(TwoFace.Long(7L) == TwoFace.Double(us(7.0)), us(true))
property("Unsafe Long == Safe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) == TwoFace.Double(7.0), us(true))
property("Unsafe Long == Unsafe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) == TwoFace.Double(us(7.0)), us(true))
property("Safe Long != Safe Char") = verifyTFBoolean(TwoFace.Long(7L) != TwoFace.Char('\\u0007'), false)
property("Safe Long != Unsafe Char") = verifyTFBoolean(TwoFace.Long(7L) != TwoFace.Char(us('\\u0007')), us(false))
property("Unsafe Long != Safe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) != TwoFace.Char('\\u0007'), us(false))
property("Unsafe Long != Unsafe Char") = verifyTFBoolean(TwoFace.Long(us(7L)) != TwoFace.Char(us('\\u0007')), us(false))
property("Safe Long != Safe Int") = verifyTFBoolean(TwoFace.Long(7L) != TwoFace.Int(7), false)
property("Safe Long != Unsafe Int") = verifyTFBoolean(TwoFace.Long(7L) != TwoFace.Int(us(7)), us(false))
property("Unsafe Long != Safe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) != TwoFace.Int(7), us(false))
property("Unsafe Long != Unsafe Int") = verifyTFBoolean(TwoFace.Long(us(7L)) != TwoFace.Int(us(7)), us(false))
property("Safe Long != Safe Long") = verifyTFBoolean(TwoFace.Long(7L) != TwoFace.Long(7L), false)
property("Safe Long != Unsafe Long") = verifyTFBoolean(TwoFace.Long(7L) != TwoFace.Long(us(7L)), us(false))
property("Unsafe Long != Safe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) != TwoFace.Long(7L), us(false))
property("Unsafe Long != Unsafe Long") = verifyTFBoolean(TwoFace.Long(us(7L)) != TwoFace.Long(us(7L)), us(false))
property("Safe Long != Safe Float") = verifyTFBoolean(TwoFace.Long(7L) != TwoFace.Float(7.0f), false)
property("Safe Long != Unsafe Float") = verifyTFBoolean(TwoFace.Long(7L) != TwoFace.Float(us(7.0f)), us(false))
property("Unsafe Long != Safe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) != TwoFace.Float(7.0f), us(false))
property("Unsafe Long != Unsafe Float") = verifyTFBoolean(TwoFace.Long(us(7L)) != TwoFace.Float(us(7.0f)), us(false))
property("Safe Long != Safe Double") = verifyTFBoolean(TwoFace.Long(7L) != TwoFace.Double(7.0), false)
property("Safe Long != Unsafe Double") = verifyTFBoolean(TwoFace.Long(7L) != TwoFace.Double(us(7.0)), us(false))
property("Unsafe Long != Safe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) != TwoFace.Double(7.0), us(false))
property("Unsafe Long != Unsafe Double") = verifyTFBoolean(TwoFace.Long(us(7L)) != TwoFace.Double(us(7.0)), us(false))
// --- min/max (names disambiguated with the operation under test) ---
property("min(Safe Long, Safe Long)") = verifyTFLong(min(TwoFace.Long(2L), TwoFace.Long(1L)), 1L)
property("min(Safe Long, Unsafe Long)") = verifyTFLong(min(TwoFace.Long(2L), TwoFace.Long(us(1L))), us(1L))
property("min(Unsafe Long, Safe Long)") = verifyTFLong(min(TwoFace.Long(us(2L)), TwoFace.Long(1L)), us(1L))
property("min(Unsafe Long, Unsafe Long)") = verifyTFLong(min(TwoFace.Long(us(2L)), TwoFace.Long(us(1L))), us(1L))
property("max(Safe Long, Safe Long)") = verifyTFLong(max(TwoFace.Long(2L), TwoFace.Long(1L)), 2L)
property("max(Safe Long, Unsafe Long)") = verifyTFLong(max(TwoFace.Long(2L), TwoFace.Long(us(1L))), us(2L))
property("max(Unsafe Long, Safe Long)") = verifyTFLong(max(TwoFace.Long(us(2L)), TwoFace.Long(1L)), us(2L))
property("max(Unsafe Long, Unsafe Long)") = verifyTFLong(max(TwoFace.Long(us(2L)), TwoFace.Long(us(1L))), us(2L))
// --- Unary ops and conversions ---
property("Safe Negate") = verifyTFLong(-TwoFace.Long(-1L), 1L)
property("Unsafe Negate") = verifyTFLong(-TwoFace.Long(us(1L)), us(-1L))
property("Safe toNat") = wellTyped {
val nat = TwoFace.Long(3L).toNat
verifyOp[nat.N, shapeless.Nat._3]
}
property("Safe toChar") = verifyTFChar(TwoFace.Long(1L).toChar, '\\u0001')
property("Unsafe toChar") = verifyTFChar(TwoFace.Long(us(1L)).toChar, us('\\u0001'))
property("Safe toInt") = verifyTFInt(TwoFace.Long(1L).toInt, 1)
property("Unsafe toInt") = verifyTFInt(TwoFace.Long(us(1L)).toInt, us(1))
property("Safe toFloat") = verifyTFFloat(TwoFace.Long(1L).toFloat, 1.0f)
property("Unsafe toFloat") = verifyTFFloat(TwoFace.Long(us(1L)).toFloat, us(1.0f))
property("Safe toDouble") = verifyTFDouble(TwoFace.Long(1L).toDouble, 1.0)
property("Unsafe toDouble") = verifyTFDouble(TwoFace.Long(us(1L)).toDouble, us(1.0))
property("Safe toStringTF") = verifyTFString(TwoFace.Long(1L).toStringTF, "1")
property("Unsafe toStringTF") = verifyTFString(TwoFace.Long(us(1L)).toStringTF, us("1"))
property("Safe abs") = verifyTFLong(abs(TwoFace.Long(-1L)), 1L)
property("Unsafe abs") = verifyTFLong(abs(TwoFace.Long(us(-1L))), us(1L))
property("Safe numberOfLeadingZeros") = verifyTFInt(TwoFace.Long.numberOfLeadingZeros(TwoFace.Long(1L)), 63)
property("Unsafe numberOfLeadingZeros") = verifyTFInt(TwoFace.Long.numberOfLeadingZeros(TwoFace.Long(us(1L))), us(63))
// --- Implicit conversion behavior ---
property("Implicit Conversions") = wellTyped {
val a : TwoFace.Long[W.`3L`.T] = implicitly[TwoFace.Long[W.`2L`.T + W.`1L`.T]]
val b : TwoFace.Long[W.`3L`.T + W.`0L`.T] = implicitly[TwoFace.Long[W.`2L`.T + W.`1L`.T]]
val c : TwoFace.Long[W.`3L`.T + W.`0L`.T] = implicitly[TwoFace.Long[W.`3L`.T]]
val d : W.`3L`.T = TwoFace.Long(3L)
val e : Long = TwoFace.Long(us(3L))
val f : TwoFace.Long[Long] = 3L
}
property("Wrong Implicit Conversions") = wellTyped {
illTyped("""val impl = implicitly[TwoFace.Long[W.`2L`.T + W.`2L`.T]]; val a : TwoFace.Long[W.`3L`.T] = impl""")
illTyped("""val impl = implicitly[TwoFace.Long[W.`2L`.T + W.`2L`.T]]; val b : TwoFace.Long[W.`3L`.T + W.`0L`.T] = impl""")
illTyped("""val impl = implicitly[TwoFace.Long[W.`4L`.T]]; val c : TwoFace.Long[W.`3L`.T + W.`0L`.T] = impl""")
}
property("ToString") = {
TwoFace.Long[W.`1L`.T].toString() == "1"
}
type Fin = W.`3L`.T
final val fin = 3L
property("Extracting from an Upper Bounded Numeric") = wellTyped {
def foo[W](width: TwoFace.Long[W]) = width
def foo2[R <: Long](r: R) = foo(r)
val a = foo2(W(fin).value)
implicitly[a.Out =:= Fin]
val b = foo2(us(fin))
implicitly[b.Out =:= Long]
}
property("Extracting from Safe TwoFace") = {
val a = me(TwoFace.Long(fin))
val ret = shapeless.the[Id[a.T]]
implicitly[ret.Out =:= Fin]
ret.value == fin
}
def noImplFoo[W](w : TwoFace.Long[W]) = -w //Missing twoface shell implicit
property("Unavailable Implicit Safe TwoFace Shell") = {
val ret = noImplFoo(2L)
implicitly[ret.Out <:< Negate[W.`2L`.T]]
val retSimple = ret.simplify
implicitly[retSimple.Out <:< W.`-2L`.T]
retSimple.getValue == -2L
}
property("Unavailable Implicit Unsafe TwoFace Shell") = {
val ret = noImplFoo(us(2L))
implicitly[ret.Out <:< Negate[Long]]
val retSimple = ret.simplify
implicitly[retSimple.Out <:< Long]
retSimple.getValue == -2L
}
}
package deaktator.pops
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import com.eharmony.aloha.score.Scores.Score
import com.eharmony.aloha.score.Scores.Score.{ModelId, ScoreError}
import com.google.protobuf.GeneratedMessage
import deaktator.pops.msgs.ProtoOps
import org.scalatest._
import scala.collection.JavaConversions.collectionAsScalaIterable
/**
* @author deaktator
*/
class ProtoTest extends FlatSpec with Matchers {
  import ProtoTest._

  "Runtime ProtoOps instances" should "be serializable" in {
    checkSerializable(ProtoOps.runtime(classOf[Score]))
  }

  "Macro ProtoOps instances" should "operate the same as runtime ProtoOps instances" in {
    val macroOps = ProtoOps[Score]
    val runtimeOps = ProtoOps.runtime(classOf[Score])
    val bytes = someNonDefaultProto.toByteArray
    val m = macroOps.parseFrom(bytes)
    val r = runtimeOps.parseFrom(bytes)
    m should be (r)
  }

  they should "be instantiable explicitly" in {
    Proto[Score].isInstanceOf[ProtoOps[Score]] should be (true)
  }

  they should "be serializable" in {
    checkSerializable(ProtoOps[Score])
  }

  they should "be able to deserialize Protos" in {
    assertSomeNonDefaultProtoIsCorrect(Proto[Score].parseFrom(someNonDefaultProto.toByteArray))
  }

  they should "be able to be generated and passed around implicitly" in {
    assertSomeNonDefaultProtoIsCorrect(new Converter[Score].decode(someNonDefaultProto.toByteArray))
  }

  they should "be at least as fast than runtime-based ProtoOps instances" in {
    val bytes = someNonDefaultProto.toByteArray
    var i = 0
    var sum = 0L
    val n = 10000
    val runtimeProtoOps = ProtoOps.runtime[Score](classOf[Score])
    // BUG FIX: the second timing loop previously reused `runtimeProtoOps`, so this
    // test benchmarked the runtime instance against itself and never exercised the
    // macro-generated instance it claims to compare.
    val macroProtoOps = ProtoOps[Score]
    val (sR, tR) = time {
      while (i < n) {
        sum += runtimeProtoOps.parseFrom(bytes).getError.getModel.getId
        i += 1
      }
      sum
    }
    i = 0
    sum = 0
    val (sM, tM) = time {
      while (i < n) {
        sum += macroProtoOps.parseFrom(bytes).getError.getModel.getId
        i += 1
      }
      sum
    }
    // Each parsed Score carries model id 1, so each loop's checksum equals n.
    sM should be (n)
    sR should be (n)
    // NOTE(review): wall-clock comparison; may be flaky on a loaded machine.
    tM should be < (tR)
  }

  /**
   * Round-trips `ops` through Java serialization and verifies the deserialized
   * instance can still parse a protobuf correctly.
   */
  def checkSerializable(ops: ProtoOps[Score]): Unit = {
    val baos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(baos)
    oos.writeObject(ops)
    oos.close()
    val ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray))
    val deser = ois.readObject().asInstanceOf[ProtoOps[Score]]
    val s: Score = deser.parseFrom(someNonDefaultProto.toByteArray)
    ois.close()
    assertSomeNonDefaultProtoIsCorrect(s)
  }

  /** Asserts the fields set by [[ProtoTest.someNonDefaultProto]] survived parsing. */
  def assertSomeNonDefaultProtoIsCorrect(p: Score): Unit = {
    p.getError.getModel.getId should be (1)
    p.getError.getModel.getName should be ("model")
    p.getError.getMessagesList.toList should be (List("fail"))
  }
}
object ProtoTest {

  /** Helper that receives a [[ProtoOps]] instance implicitly and uses it to decode bytes. */
  class Converter[A <: GeneratedMessage](implicit ops: ProtoOps[A]) {
    def decode(a: Array[Byte]): A = ops.parseFrom(a)
  }

  /** Times the evaluation of `a`, returning its result and the elapsed time in seconds. */
  def time[A](a: => A) = {
    val t1 = System.nanoTime()
    val r = a
    val t2 = System.nanoTime()
    (r, (1.0e-9 * (t2 - t1)).toFloat)
  }

  /** Builds a Score with non-default fields: model id 1, name "model", one error message "fail". */
  def someNonDefaultProto: Score = {
    val m = ModelId.newBuilder.setId(1).setName("model")
    Score.newBuilder().setError(ScoreError.newBuilder.setModel(m).addMessages("fail")).build()
  }
}
| deaktator/pops | pops-2.4.1/src/test/scala/deaktator/pops/ProtoTest.scala | Scala | mit | 3,322 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package connectors
import config.FrontendAppConfig
import enums.IdentityVerificationResult.IdentityVerificationResult
import javax.inject.Inject
import play.api.libs.json.Json
import services.MetricsService
import uk.gov.hmrc.http.{HeaderCarrier, HttpReadsInstances, HttpResponse}
import uk.gov.hmrc.play.bootstrap.http.DefaultHttpClient
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
/**
 * Connector to the identity-verification service: fetches the result of an
 * identity-verification journey by its journey id.
 */
class IdentityVerificationConnector @Inject() (appConfig: FrontendAppConfig, http: DefaultHttpClient)
                                              (implicit executionContext: ExecutionContext){

  // Base URL of the identity-verification service, resolved from configuration.
  val serviceUrl = appConfig.servicesConfig.baseUrl("identity-verification")

  private def url(journeyId: String) = s"$serviceUrl/mdtp/journey/journeyId/$journeyId"

  // Shape of the JSON body returned by the service: a single `result` field.
  private[connectors] case class IdentityVerificationResponse(result: IdentityVerificationResult)
  private implicit val formats = Json.format[IdentityVerificationResponse]

  // HttpReads that turns error responses into thrown exceptions (legacy http-verbs behaviour).
  implicit val legacyRawReads = HttpReadsInstances.throwOnFailure(HttpReadsInstances.readEitherOf(HttpReadsInstances.readRaw))

  /**
   * Fetches and deserialises the identity-verification result for `journeyId`.
   * Fails the future with [[JsonValidationException]] when the body cannot be parsed.
   * NOTE(review): the metrics timer context is stopped only when the HTTP call
   * succeeds; a failed call never stops it — confirm this is intended.
   */
  def identityVerificationResponse(journeyId: String)(implicit hc: HeaderCarrier): Future[IdentityVerificationResult] = {
    val context = MetricsService.identityVerificationTimer.time()
    val ivFuture = http.GET[HttpResponse](url(journeyId)).flatMap { httpResponse =>
      context.stop()
      httpResponse.json.validate[IdentityVerificationResponse].fold(
        errs => Future.failed(new JsonValidationException(s"Unable to deserialise: $errs")),
        valid => Future.successful(valid.result)
      )
    }
    // Failures are counted on a dedicated counter; successes need no extra action here.
    ivFuture.onComplete {
      case Failure(_) => MetricsService.identityVerificationFailedCounter.inc()
      case Success(_) =>
    }
    ivFuture
  }

  private[connectors] class JsonValidationException(message: String) extends Exception(message)
}
| hmrc/pensions-lifetime-allowance-frontend | app/connectors/IdentityVerificationConnector.scala | Scala | apache-2.0 | 2,471 |
package org.judal.examples.scala.jdbc
import org.junit.Test
import java.text.SimpleDateFormat
import java.util.Date
import org.judal.storage.EngineFactory
import org.judal.storage.table.RecordSet
import org.judal.storage.table.IndexableView
import org.judal.examples.scala.model.Course
import org.judal.Using._
import scala.collection.JavaConverters._
/**
* Example of how to print a RecordSet as XML o JSON
*/
class E31_ConvertToXMLJSON {

  /** Demonstrates rendering a single record and a fetched RecordSet as XML/JSON. */
  @Test
  def demo() : Unit = {
    E31_ConvertToXMLJSON.setUp()
    val c = new Course()
    c.load(1)
    // Attach a timestamp attribute to the XML root element.
    val attribs = Map("timestamp" -> new Date().toString).asJava
    // NOTE(review): pattern "yyyy-MM-DD" uses day-of-year (DD); "yyyy-MM-dd" may be intended.
    val dateFormat = new SimpleDateFormat("yyyy-MM-DD")
    val courseAsXML = c.toXML(" ", attribs, dateFormat, null, null)
    var viw : IndexableView = null
    // NOTE(review): `using` receives `viw` while it is still null and the view is only
    // assigned inside the block — confirm org.judal.Using evaluates its resource
    // argument lazily (by name); otherwise nothing gets closed.
    using (viw) {
      viw = EngineFactory.getDefaultRelationalDataSource.openIndexedView(c)
      // Fetch courses whose id_course lies in [1, 4] and render them as XML and JSON.
      val courses : RecordSet[Course] = viw.fetch(c.fetchGroup, "id_course", new Integer(1), new Integer(4))
      val coursesAsXML = courses.toXML(" ", dateFormat, null, null)
      val coursesAsJSON = courses.toJSON
    }
    E31_ConvertToXMLJSON.tearDown()
  }
}
object E31_ConvertToXMLJSON {
  /** Prepares the schema and loads the course fixtures used by the demo. */
  def setUp() : Unit = {
    E10_WriteCSVDataIntoTheDatabase.setUp()
    E10_WriteCSVDataIntoTheDatabase.insertCoursesIntoDatabase()
  }
  /** Tears down the schema created by [[setUp]]. */
  def tearDown() : Unit = {
    E10_WriteCSVDataIntoTheDatabase.tearDown()
  }
} | sergiomt/judal | aexample/src/main/scala/org/judal/examples/scala/jdbc/E31_ConvertToXMLJSON.scala | Scala | apache-2.0 | 1,383 |
package org.genericConfig.admin.shared.user
import play.api.libs.json.Json
/**
* Copyright (C) 2016 Gennadi Heimann genaheimann@gmail.com
*
* Created by Gennadi Heimann 16.03.2020
*/
/** Payload of a user-update request: the current credentials plus the desired new ones. */
case class UserUpdateDTO(
  oldUsername : String,
  newUsername : String,
  oldPassword : String,
  newPassword : String
)

object UserUpdateDTO {
  // Play JSON (de)serializer derived from the case class.
  implicit val format = Json.format[UserUpdateDTO]
}
| gennadij/admin | shared/src/main/scala/org/genericConfig/admin/shared/user/UserUpdateDTO.scala | Scala | apache-2.0 | 510 |
/*
* Copyright 2013 Akiyoshi Sugiki, University of Tsukuba
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kumoi.core.group.clock
import java.net.{InetAddress, InetSocketAddress}
/**
* Lamport clock.
* @author Akiyoshi SUGIKI
*/
//@serializable
//@serializable
class LamportClock(val addr: InetSocketAddress, val count: Long) extends Clock[LamportClock] {

  def this(name: String, port: Int, count: Long) = this(new InetSocketAddress(name, port), count)
  def this(name: String, port: Int) = this(name, port, 0L)
  def this(addr: InetSocketAddress) = this(addr, 0)
  //def this() = this(new InetSocketAddress(InetAddress.getLocalHost, 0))

  /** Returns a new clock one tick ahead; instances are immutable. */
  def advance = { new LamportClock(addr, count + 1) }

  override def hashCode = 41 * ((41 * addr.hashCode) + count.toInt) // TODO: need to be optimized

  override def equals(that: Any) = that match {
    case that: LamportClock =>
      // BUG FIX: previously compared `count == count` (always true), so any two
      // clocks at the same address compared equal regardless of their tick counts.
      (that canEqual this) && (addr.equals(that.addr)) && count == that.count
    case _ => false
  }

  def canEqual(that: Any) = that.isInstanceOf[LamportClock]

  /**
   * Total order: counts are compared first; ties are broken by the raw IP
   * address bytes and finally by port, so distinct endpoints never tie.
   */
  def compare(that: LamportClock) = {
    if (count != that.count) {
      // Counts differ, so one side strictly dominates (the old `else 0` branch
      // here was unreachable).
      if (count > that.count) 1 else -1
    } else {
      val cp = compare(addr.getAddress.getAddress.toList, that.addr.getAddress.getAddress.toList)
      if (cp == 0) addr.getPort - that.addr.getPort
      else cp
    }
  }

  /** Lexicographic comparison of two byte lists; a longer list wins a shared prefix. */
  private def compare(a: List[Byte], b: List[Byte]): Int = {
    (a, b) match {
      case (ha :: ra, hb :: rb) =>
        val cp = ha.compare(hb)
        cp match {
          case 0 => compare(ra, rb)
          case _ => cp
        }
      case (_ :: _, Nil) => 1
      case (Nil, _ :: _) => -1
      case (Nil, Nil) => 0
    }
  }

  override def toString = addr.getHostName + ":" + addr.getPort + ":" + count
}
| axi-sugiki/kumoi | src/kumoi/core/group/clock/LamportClock.scala | Scala | apache-2.0 | 2,239 |
package com.twitter.finagle.service
import com.twitter.finagle._
import com.twitter.finagle.tracing.Trace
import com.twitter.util.{Future, Duration, Timer}
object TimeoutFilter {
  val TimeoutAnnotation = "finagle.timeout"

  val role = new Stack.Role("RequestTimeout")

  /**
   * A class eligible for configuring a [[com.twitter.finagle.Stackable]]
   * [[com.twitter.finagle.service.TimeoutFilter]] module.
   */
  case class Param(timeout: Duration)
  implicit object Param extends Stack.Param[Param] {
    val default = Param(Duration.Top)
  }

  /**
   * Creates a [[com.twitter.finagle.Stackable]] [[com.twitter.finagle.service.TimeoutFilter]].
   */
  def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] =
    new Stack.Module2[TimeoutFilter.Param, param.Timer, ServiceFactory[Req, Rep]] {
      val role = TimeoutFilter.role
      val description = "Apply a timeout to requests"

      def make(_param: Param, _timer: param.Timer, next: ServiceFactory[Req, Rep]) = {
        val Param(requestTimeout) = _param
        val param.Timer(requestTimer) = _timer
        // An unbounded timeout needs no filtering at all.
        if (requestTimeout.isFinite) {
          val timeoutExc = new IndividualRequestTimeoutException(requestTimeout)
          new TimeoutFilter(requestTimeout, timeoutExc, requestTimer) andThen next
        } else {
          next
        }
      }
    }
}
/**
* A [[com.twitter.finagle.Filter]] that applies a global timeout to requests.
*
* @param timeout the timeout to apply to requests
* @param exception an exception object to return in cases of timeout exceedance
* @param timer a `Timer` object used to track elapsed time
*/
class TimeoutFilter[Req, Rep](
    timeout: Duration,
    exception: RequestTimeoutException,
    timer: Timer)
  extends SimpleFilter[Req, Rep] {

  def this(timeout: Duration, timer: Timer) =
    this(timeout, new IndividualRequestTimeoutException(timeout), timer)

  def apply(request: Req, service: Service[Req, Rep]): Future[Rep] = {
    val response = service(request)
    // Bound the response with the configured timeout. On expiry: interrupt the
    // still-pending response, record a trace annotation, and fail the request
    // with the configured timeout exception.
    response.within(timer, timeout).rescue {
      case timedOut: java.util.concurrent.TimeoutException =>
        response.raise(timedOut)
        Trace.record(TimeoutFilter.TimeoutAnnotation)
        Future.exception(exception)
    }
  }
}
| kristofa/finagle | finagle-core/src/main/scala/com/twitter/finagle/service/TimeoutFilter.scala | Scala | apache-2.0 | 2,134 |
package com.wavesplatform.features.api
case class ActivationStatus(height: Int, votingInterval: Int, votingThreshold: Int, nextCheck: Int, features: Seq[FeatureActivationStatus])
| wavesplatform/Waves | node/src/main/scala/com/wavesplatform/features/api/ActivationStatus.scala | Scala | mit | 180 |
package com.crealytics.spark.excel
import scala.util.{Success, Try}
object Utils {

  /** Adds an `Either` view to [[scala.util.Try]]. */
  implicit class RichTry[T](t: Try[T]) {
    /** `Success(v)` becomes `Right(v)`; `Failure(e)` becomes `Left(e)`. */
    def toEither: Either[Throwable, T] =
      t.transform(
        value => Success(Right(value)),
        error => Success(Left(error))
      ).get
  }

  /**
   * Extractor matching maps that contain every key in `keys`; yields the values
   * for `keys` plus one `Option` per key in `optionally`.
   */
  case class MapIncluding[K](keys: Seq[K], optionally: Seq[K] = Seq()) {
    def unapply[V](m: Map[K, V]): Option[(Seq[V], Seq[Option[V]])] = {
      val hasAllRequired = keys.forall(m.contains)
      if (hasAllRequired) Some((keys.map(m), optionally.map(m.get)))
      else None
    }
  }

  /** A requirement on a map's keys together with the result type it extracts. */
  sealed trait MapRequirements[K] {
    type ResultType[V]
    def unapplySeq[V](m: Map[K, V]): Option[ResultType[V]]
  }

  /** Requires all of `keys` to be present; extracts their values in order. */
  case class RequiredKeys[K](keys: K*) extends MapRequirements[K] {
    type ResultType[V] = Seq[V]
    def unapplySeq[V](m: Map[K, V]): Option[Seq[V]] =
      if (keys.exists(key => !m.contains(key))) None
      else Some(keys.map(m))
  }

  /** Never fails to match; extracts one `Option` per key. */
  case class OptionalKeys[K](keys: K*) extends MapRequirements[K] {
    type ResultType[V] = Seq[Option[V]]
    def unapplySeq[V](m: Map[K, V]): Option[Seq[Option[V]]] = Some(keys.map(m.get))
  }

  /** Combines a required and an optional key set into a single map extractor. */
  case class MapWith[K](
    requiredKeys: RequiredKeys[K] = RequiredKeys[K](),
    optionalKeys: OptionalKeys[K] = OptionalKeys[K]()
  ) {
    def unapply[V](m: Map[K, V]): Option[(requiredKeys.ResultType[V], optionalKeys.ResultType[V])] =
      requiredKeys.unapplySeq(m).flatMap { required =>
        optionalKeys.unapplySeq(m).map(optional => (required, optional))
      }
  }
}
| crealytics/spark-excel | src/main/scala/com/crealytics/spark/excel/Utils.scala | Scala | apache-2.0 | 1,447 |
package com.seanshubin.learn.datomic.domain
case class ThrowableValue(message:String, cause:Option[ThrowableValue], stackTrace:Seq[StackTraceElementValue])
object ThrowableValue {
def fromThrowable(throwable:Throwable):ThrowableValue = {
val message = throwable.getMessage
val cause = Option(throwable.getCause).map(fromThrowable)
val stackTrace = throwable.getStackTrace.map(StackTraceElementValue.fromStackTraceElement)
ThrowableValue(message, cause, stackTrace)
}
} | SeanShubin/learn-datomic | domain/src/main/scala/com/seanshubin/learn/datomic/domain/ThrowableValue.scala | Scala | unlicense | 490 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.recorder.util
private[recorder] object collection {

  implicit class RichSeq[T](val elts: Seq[T]) extends AnyVal {

    // See ScenarioSpec for example
    /**
     * Groups consecutive elements: an element satisfying `p` joins the current
     * group; one that fails `p` starts a new group.
     */
    def groupAsLongAs(p: T => Boolean): List[List[T]] =
      elts.foldRight(List.empty[List[T]]) { (elem, acc) =>
        acc match {
          case Nil => List(elem) :: Nil
          case group :: older =>
            if (p(elem)) (elem :: group) :: older
            else List(elem) :: acc
        }
      }

    /** Splits the sequence, starting a new chunk whenever `p` holds for an element. */
    def splitWhen(p: T => Boolean): List[List[T]] =
      elts.foldLeft(List.empty[List[T]]) { (acc, elem) =>
        acc match {
          case Nil => List(elem) :: Nil
          case current :: older =>
            if (p(elem)) List(elem) :: acc
            else (elem :: current) :: older
        }
      }.map(_.reverse).reverse
  }
}
| thkluge/gatling | gatling-recorder/src/main/scala/io/gatling/recorder/util/collection.scala | Scala | apache-2.0 | 1,300 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.dataretention
import java.text.SimpleDateFormat
import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentStatusManager}
import org.apache.carbondata.core.locks.{CarbonLockFactory, ICarbonLock, LockUsage}
import org.apache.commons.lang3.time.DateUtils
import org.apache.spark.sql.Row
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.util.path.CarbonTablePath
import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier, CarbonMetadata, CarbonTableIdentifier}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.spark.sql.test.util.QueryTest
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
/**
* This class contains data retention feature test cases
*/
class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll {
  // Table identifiers and lock handles, captured in beforeAll.
  var absoluteTableIdentifierForLock: AbsoluteTableIdentifier = null
  var absoluteTableIdentifierForRetention: AbsoluteTableIdentifier = null
  var carbonTablePath : String = null
  // Formats used when parsing/printing segment load-start timestamps.
  var carbonDateFormat = new SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP)
  var defaultDateFormat = new SimpleDateFormat(CarbonCommonConstants
    .CARBON_TIMESTAMP_DEFAULT_FORMAT)
  // Locks exercised by the lock-contention test (RetentionTest_Locks).
  var carbonTableStatusLock: ICarbonLock = null
  var carbonDeleteSegmentLock: ICarbonLock = null
  var carbonCleanFilesLock: ICarbonLock = null
  var carbonMetadataLock: ICarbonLock = null
  /**
   * Creates the two test tables, captures their identifiers and lock handles,
   * and loads the initial retention CSV fixtures (two loads into
   * DataRetentionTable, one into retentionlock).
   */
  override def beforeAll {
    sql("drop table if exists DataRetentionTable")
    sql("drop table if exists retentionlock")
    // Shorten lock timeout and max query execution time so lock tests fail fast.
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.MAX_TIMEOUT_FOR_CARBON_LOCK, "1")
    CarbonProperties.getInstance.addProperty(CarbonCommonConstants.MAX_QUERY_EXECUTION_TIME, "1")
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
    sql(
      "CREATE table DataRetentionTable (ID int, date String, country String, name " +
      "String," +
      "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format'"
    )
    sql(
      "CREATE table retentionlock (ID int, date String, country String, name " +
      "String," +
      "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format'"
    )
    val carbonTable = CarbonMetadata.getInstance().getCarbonTable(
      CarbonCommonConstants.DATABASE_DEFAULT_NAME,
      "retentionlock"
    )
    absoluteTableIdentifierForLock = carbonTable.getAbsoluteTableIdentifier
    val carbonTable2 = CarbonMetadata.getInstance().getCarbonTable(
      CarbonCommonConstants.DATABASE_DEFAULT_NAME,
      "dataRetentionTable"
    )
    absoluteTableIdentifierForRetention = carbonTable2.getAbsoluteTableIdentifier
    carbonTablePath = CarbonTablePath
      .getMetadataPath(absoluteTableIdentifierForRetention.getTablePath)
    carbonTableStatusLock = CarbonLockFactory
      .getCarbonLockObj(absoluteTableIdentifierForLock, LockUsage.TABLE_STATUS_LOCK)
    carbonDeleteSegmentLock = CarbonLockFactory
      .getCarbonLockObj(absoluteTableIdentifierForLock, LockUsage.DELETE_SEGMENT_LOCK)
    carbonCleanFilesLock = CarbonLockFactory
      .getCarbonLockObj(absoluteTableIdentifierForLock, LockUsage.CLEAN_FILES_LOCK)
    carbonMetadataLock = CarbonLockFactory
      .getCarbonLockObj(absoluteTableIdentifierForLock, LockUsage.METADATA_LOCK)
    sql(
      s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention1.csv' INTO TABLE retentionlock " +
      "OPTIONS('DELIMITER' = ',')")
    sql(
      s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention1.csv' INTO TABLE DataRetentionTable " +
      "OPTIONS('DELIMITER' = ',')")
    sql(
      s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention2.csv' INTO TABLE DataRetentionTable " +
      "OPTIONS('DELIMITER' = ',')")
  }
  /** Drops the test tables and resets the timestamp format property. */
  override def afterAll {
    sql("drop table if exists DataRetentionTable")
    sql("drop table if exists retentionlock")
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
  }
  /**
   * Formats the load-start time of segment `segmentId`, shifted one minute
   * forward so a "delete ... before" command issued with it covers that segment.
   */
  private def getSegmentStartTime(segments: Array[LoadMetadataDetails],
      segmentId: Integer): String = {
    val segmentLoadTimeString = segments(segmentId).getLoadStartTime()
    var loadTime = carbonDateFormat.parse(carbonDateFormat.format(segmentLoadTimeString))
    // add one min to execute delete before load start time command
    loadTime = DateUtils.addMinutes(loadTime, 1)
    defaultDateFormat.format(loadTime)
  }
  // Baseline: both initial loads are visible before any deletion.
  test("RetentionTest_withoutDelete") {
    checkAnswer(
      sql("SELECT country, count(salary) AS amount FROM DataRetentionTable WHERE country" +
        " IN ('china','ind','aus','eng') GROUP BY country"
      ),
      Seq(Row("aus", 9), Row("ind", 9))
    )
  }

  // Deletes all segments loaded before segment 1's (shifted) start time, then loads a third CSV.
  test("RetentionTest_DeleteSegmentsByLoadTime") {
    val segments: Array[LoadMetadataDetails] =
      SegmentStatusManager.readLoadMetadata(carbonTablePath)
    // check segment length, it should be 3 (loads)
    // NOTE(review): comment above says 3, but the check expects exactly 2 loads.
    if (segments.length != 2) {
      assert(false)
    }
    val actualValue: String = getSegmentStartTime(segments, 1)
    // delete segments (0,1) which contains ind, aus
    sql(
      "delete from table DataRetentionTable where segment.starttime before '" + actualValue + "'")
    // load segment 2 which contains eng
    sql(
      s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention3.csv' INTO TABLE DataRetentionTable " +
      "OPTIONS('DELIMITER' = ',')")
    checkAnswer(
      sql("SELECT country, count(salary) AS amount FROM DataRetentionTable WHERE country" +
        " IN ('china','ind','aus','eng') GROUP BY country"
      ),
      Seq(Row("eng", 9))
    )
  }

  // Deletes a single segment by id, then reloads; housekeeping commands must not error.
  test("RetentionTest3_DeleteByLoadId") {
    // delete segment 2 and load ind segment
    sql("delete from table DataRetentionTable where segment.id in (2)")
    sql(
      s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention1.csv' INTO TABLE DataRetentionTable " +
      "OPTIONS('DELIMITER' = ',')")
    checkAnswer(
      sql("SELECT country, count(salary) AS amount FROM DataRetentionTable WHERE country" +
        " IN ('china','ind','aus','eng') GROUP BY country"
      ),
      Seq(Row("ind", 9))
    )
    // these queries should execute without any error.
    sql("show segments for table DataRetentionTable")
    sql("clean files for table DataRetentionTable")
  }

  // Deleting with an empty id list must be rejected with a parse error.
  test("RetentionTest4_DeleteByInvalidLoadId") {
    val e = intercept[MalformedCarbonCommandException] {
      // delete segment with no id
      sql("delete from table DataRetentionTable where segment.id in ()")
    }
    assert(e.getMessage.contains("should not be empty"))
  }
  // Delete-by-date must work even when the table name's case differs from the DDL.
  test("test delete segments by load date with case-insensitive table name") {
    sql(
      """
      CREATE TABLE IF NOT EXISTS carbon_table_1
      (ID Int, date Timestamp, country String,
      name String, phonetype String, serialname String, salary Int)
      STORED BY 'org.apache.carbondata.format'
      TBLPROPERTIES('DICTIONARY_EXCLUDE'='country,phonetype,serialname',
      'DICTIONARY_INCLUDE'='ID')
      """)
    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/emptyDimensionData.csv' into table carbon_table_1")
    checkAnswer(
      sql("select count(*) from carbon_table_1"), Seq(Row(20)))
    // A far-future cutoff removes every loaded segment.
    sql("delete from table carbon_table_1 where segment.starttime " +
      " before '2099-07-28 11:00:00'")
    checkAnswer(
      sql("select count(*) from carbon_table_1"), Seq(Row(0)))
    sql("DROP TABLE carbon_table_1")
  }

  // Malformed timestamps must be rejected; a valid future date then clears the table.
  test("RetentionTest_DeleteSegmentsByLoadTimeValiadtion") {
    val e = intercept[MalformedCarbonCommandException] {
      sql(
        "delete from table DataRetentionTable where segment.starttime before" +
        " 'abcd-01-01 00:00:00'")
    }
    assert(e.getMessage.contains("Invalid load start time format"))
    val ex = intercept[MalformedCarbonCommandException] {
      sql(
        "delete from table DataRetentionTable where segment.starttime before" +
        " '2099:01:01 00:00:00'")
    }
    assert(ex.getMessage.contains("Invalid load start time format"))
    // Data is untouched by the rejected commands above.
    checkAnswer(
      sql("SELECT country, count(salary) AS amount FROM DataRetentionTable WHERE country" +
        " IN ('china','ind','aus','eng') GROUP BY country"
      ),
      Seq(Row("ind", 9))
    )
    sql("delete from table DataRetentionTable where segment.starttime before '2099-01-01'")
    checkAnswer(
      sql("SELECT country, count(salary) AS amount FROM DataRetentionTable WHERE country" +
        " IN ('china','ind','aus','eng') GROUP BY country"), Seq())
  }

  // Legacy/invalid DELETE syntax variants must all fail to parse.
  test("RetentionTest_InvalidDeleteCommands") {
    // All these queries should fail.
    intercept[Exception] {
      sql("DELETE LOADS FROM TABLE DataRetentionTable where STARTTIME before '2099-01-01'")
    }
    intercept[Exception] {
      sql("DELETE LOAD 2 FROM TABLE DataRetentionTable")
    }
    intercept[Exception] {
      sql("show loads for table DataRetentionTable")
    }
  }
  // While the status/segment/clean-files locks are held, the corresponding commands
  // must fail; after release they succeed. Delete may proceed under the metadata lock.
  test("RetentionTest_Locks") {
    sql(
      s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention1.csv' INTO TABLE retentionlock " +
      "OPTIONS('DELIMITER' = ',')")
    carbonDeleteSegmentLock.lockWithRetries()
    carbonTableStatusLock.lockWithRetries()
    carbonCleanFilesLock.lockWithRetries()
    // delete segment 0 it should fail
    intercept[Exception] {
      sql("delete from table retentionlock where segment.id in (0)")
    }
    // it should fail
    intercept[Exception] {
      sql("delete from table retentionlock where segment.starttime before " +
        "'2099-01-01 00:00:00.0'")
    }
    // it should fail
    intercept[Exception] {
      sql("clean files for table retentionlock")
    }
    sql("SHOW SEGMENTS FOR TABLE retentionlock").show
    carbonTableStatusLock.unlock()
    carbonCleanFilesLock.unlock()
    carbonDeleteSegmentLock.unlock()
    sql("delete from table retentionlock where segment.id in (0)")
    //load and delete should execute parallely
    carbonMetadataLock.lockWithRetries()
    sql("delete from table retentionlock where segment.id in (1)")
    carbonMetadataLock.unlock()
  }
}
| ravipesala/incubator-carbondata | integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala | Scala | apache-2.0 | 11,033 |
package shredzzz.kirkwood.cumath.tensor
import breeze.storage.DefaultArrayValue
import jcuda.Pointer
import shredzzz.kirkwood.cumath.tensor.modules.Modules
import shredzzz.kirkwood.driver.{CuContext, CuPointed}
import scala.reflect.ClassTag
import shredzzz.kirkwood.cumath.tensor.operations._
object CuTensor extends TensorArithmeticImplicits with TensorBinaryImplicits with TensorBitwiseImplicits with TensorComparisonImplicits with ConversionImplicits
/**
 * Base class for tensors backed by a jcuda [[Pointer]].
 *
 * @param pointer raw pointer wrapped by [[CuPointed]]
 * @param stride  element stride, defaults to 1 (exact layout semantics defined by subclasses)
 * @param offset  element offset into the buffer, defaults to 0
 * @tparam V   element type
 * @tparam Dim dimensionality descriptor type
 * @tparam TT  concrete tensor type (F-bounded so operations can return the concrete subclass)
 */
abstract class CuTensor[V, Dim, TT[X] <: CuTensor[X, Dim, TT]](pointer: Pointer, val stride: Int = 1, val offset: Int = 0) extends CuPointed(pointer)
{
  def ctx: CuContext
  def size: Int
  def dim: Dim

  /** Creates a tensor of the same shape with (possibly different) element type `T`. */
  def cloneShape[T]()(implicit tag: ClassTag[T], d: DefaultArrayValue[T], mm: Modules[T]): TT[T]

  /** Retrieves the tensor's contents as a host array (presumably copied from device memory). */
  def fetch(): Array[V]

  protected[tensor] def requireSameShape(tensor: TT[_]) {
    // BUG FIX: the message previously formatted `dim` twice, so a shape mismatch
    // always reported two identical shapes; it now shows both sides.
    require(dim == tensor.dim, "Different shape: %s and %s".format(dim, tensor.dim))
  }

  protected[tensor] def requireSameShape(tensors: TT[_]*) {
    tensors.foreach(requireSameShape)
  }
}
| shredzzz/kirkwood | src/main/scala/shredzzz/kirkwood/cumath/tensor/CuTensor.scala | Scala | apache-2.0 | 1,033 |
package me.reminisce.analysis.clustering.cluster
import org.apache.commons.math3.ml.clustering.{Clusterable, Clusterer}
import scala.collection.JavaConverters._
/**
 * Common trait for any class intending to implement clustering using the Apache Commons Math library.
 * @tparam T the type of object that will be clustered; must extend the Clusterable interface
 */
trait ClustererWrapper[T <: Clusterable] {

  // The underlying Apache Commons Math clusterer, supplied by the implementing class.
  val clusterer: Clusterer[T]

  /**
   * Clusters the data using the defined clusterer.
   * @param data list of objects to be clustered
   * @return the clusters, each one as the list of its member points
   */
  def cluster(data: List[T]): List[List[T]] = {
    // The commons-math API works on Java collections, so convert on the way in and out.
    val datAsJava = data.asJavaCollection
    val clustersList = clusterer.cluster(datAsJava).asScala.toList
    clustersList.map {
      cluster =>
        cluster.getPoints.asScala.toList
    }
  }
}
| reminisceme/game-creator | src/main/scala/me/reminisce/analysis/clustering/cluster/ClustererWrapper.scala | Scala | apache-2.0 | 826 |
package robotichoover.service
import robotichoover.domain.Instruction.Instruction
import robotichoover.domain._
object HooverService {

  /** Cleans the tile under the hoover when it is dirty; otherwise returns the room unchanged. */
  def clean(roomStatus: RoomStatus): RoomStatus =
    if (roomStatus.isHooverPositionDirty()) roomStatus.cleanHooverPosition()
    else roomStatus

  /** Moves the hoover one step per `instruction`, ignoring moves that leave the room. */
  def move(roomStatus: RoomStatus, instruction: Instruction): RoomStatus = {
    val candidate = nextPosition(roomStatus.hooverPosition, toVector(instruction))
    if (roomStatus.isValid(candidate)) roomStatus.copy(hooverPosition = candidate)
    else roomStatus
  }

  /** Adds a movement delta to a position, component-wise. */
  private def nextPosition(current: Position, delta: Vector): Position =
    (current._1 + delta._1, current._2 + delta._2)

  /** Translates a compass instruction into a unit movement vector. */
  private def toVector(instruction: Instruction): Vector = instruction match {
    case Instruction.N => ( 0, 1)
    case Instruction.S => ( 0, -1)
    case Instruction.E => ( 1, 0)
    case Instruction.W => (-1, 0)
  }
}
| xlbilbao/robotic-hoover-tray-io | src/main/scala/robotichoover/service/HooverService.scala | Scala | apache-2.0 | 959 |
package models
import java.net.URL
import play.api.db.slick.Config.driver.simple._
case class Show(name: String, episodesUrl: String) {

  /**
   * Returns this show's episodes, crawling `episodesUrl` when the cache has
   * expired and serving from [[EpisodeCache]] otherwise.
   */
  def episodes:List[Episode] = {
    val crtTimeInMinutes = System.currentTimeMillis() / 60000
    // cache expired; crawl from scratch
    if(! EpisodeCache.validCache(name)) {
      val html = scala.io.Source.fromURL(episodesUrl.toString).mkString
      //val html = scala.io.Source.fromFile("test/modern_family.html").mkString
      val es = EpisodeParser.parseEpisodes(new URL(episodesUrl.toString), html)
      EpisodeCache.add(name, es, crtTimeInMinutes)
      es
    } else {
      //println(s"Using cache for show $name")
      // NOTE(review): .get assumes a cache entry exists whenever validCache(name) is true.
      EpisodeCache.episodes(name).get
    }
  }
}
/* Table mapping
*/
/** Slick table mapping for [[Show]]: `name` is the primary key, `episodesUrl` is required. */
class ShowsTable(tag: Tag) extends Table[Show](tag, "SHOW") {
  def name = column[String]("name", O.PrimaryKey)
  def episodesUrl = column[String]("episodesUrl", O.NotNull)
  def * = (name, episodesUrl) <> (Show.tupled, Show.unapply _)
}
| MihaiSurdeanu/ourshows | app/models/Show.scala | Scala | apache-2.0 | 984 |
package com.acework.js.components.bootstrap
import com.acework.js.utils.{Mappable, Mergeable}
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import scala.scalajs.js.{UndefOr, undefined}
/**
 * Bootstrap popover component for scalajs-react: renders the popover markup
 * (arrow, optional title bar, content area) at absolute pixel offsets. This
 * component only draws the box; trigger/overlay positioning happens elsewhere.
 *
 * Created by weiyin on 10/03/15.
 */
object Popover extends BootstrapComponent {
  override type P = Popover
  override type S = Unit
  override type B = Unit
  override type N = TopNode

  // Every prop has a default, so an empty Popover is a valid default-props value.
  override def defaultProps = Popover()

  /**
   * Props for the popover.
   *
   * @param title           optional node shown in the ".popover-title" header
   * @param placement       side of the target the popover points from (default: right)
   * @param positionLeft    absolute CSS `left` in px
   * @param positionTop     absolute CSS `top` in px
   * @param arrowOffsetLeft CSS `left` of the arrow element in px
   * @param arrowOffsetTop  CSS `top` of the arrow element in px
   * @param bsClass         NOTE(review): defaults to Classes.btn, which looks carried
   *                        over from a button component — confirm intended default
   * @param bsStyle         bootstrap style variant (default: Styles.default)
   * @param bsSize          optional bootstrap size
   * @param addClasses      extra CSS class names appended to the root element
   */
  case class Popover(title: UndefOr[ReactNode] = undefined,
                     placement: Placements.Value = Placements.right,
                     positionLeft: UndefOr[Int] = undefined,
                     positionTop: UndefOr[Int] = undefined,
                     arrowOffsetLeft: UndefOr[Int] = undefined,
                     arrowOffsetTop: UndefOr[Int] = undefined,
                     bsClass: UndefOr[Classes.Value] = Classes.btn,
                     bsStyle: UndefOr[Styles.Value] = Styles.default,
                     bsSize: UndefOr[Sizes.Value] = undefined,
                     addClasses: String = "") extends BsProps with MergeableProps[Popover] {

    def merge(t: Map[String, Any]): Popover = implicitly[Mergeable[Popover]].merge(this, t)

    def asMap: Map[String, Any] = implicitly[Mappable[Popover]].toMap(this)

    def apply(children: ReactNode*) = component(this, children)

    def apply() = component(this)
  }

  override val component = ReactComponentB[Popover]("Popover")
    .render { (P, C) =>
      // Rendered only when a title prop was supplied (see the isDefined check below).
      def renderTitle() = {
        <.h3(^.className := "popover-title", P.title)
      }
      // "in" (visible) is switched on as soon as any explicit position is given.
      val classes = Map("popover" -> true,
        P.placement.toString -> true,
        "in" -> (P.positionLeft.isDefined || P.positionTop.isDefined)
      )
      <.div(^.classSet1M(P.addClasses, classes), ^.left := P.positionLeft, ^.top := P.positionTop,
        ^.display := "block",
        <.div(^.className := "arrow", ^.left := P.arrowOffsetLeft, ^.top := P.arrowOffsetTop),
        if (P.title.isDefined) renderTitle() else EmptyTag,
        <.div(^.className := "popover-content")(C)
      )
    }.build
}
| weiyinteo/scalajs-react-bootstrap | core/src/main/scala/com/acework/js/components/bootstrap/Popover.scala | Scala | mit | 2,051 |
package ch09
/*
* 6. Make a regular expression searching for quoted strings "like
* this, maybe with \\" or \\\\" in a Java or C++ program. Write a Scala
* program that prints out all such strings in a source file.
*/
//TODO: Incomplete
import scala.io.Source
/** Exercise: print every quoted string found in a source file given as args(0). */
object ex06 extends App {
  var fileName = "numbers.txt"  // default; overwritten by args(0) below
  if (args.length < 1) {
    println("No input file. Exiting.")
    sys.exit(-1)
  }
  fileName = args(0)
  val source = Source.fromFile(fileName, "UTF-8")
  // Regex meant to match double-quoted strings that may contain escaped quotes.
  // NOTE(review): the file is marked "TODO: Incomplete" by its author and this
  // pattern does not look correct for nested/escaped quotes — verify before use.
  val commentPattern = ".*\\"(.*(\\\\\\").*)*\\"".r
  val text = source.mkString
  source.close
  // Print every match found in the whole file contents.
  for(m <- commentPattern.findAllIn(text)) println(m)
}
| tuxdna/scala-for-the-impatient-exercises | src/main/scala/ch09/ex06.scala | Scala | apache-2.0 | 625 |
package net.mrkeks.clave.game
import net.mrkeks.clave.view.DrawingContext
/**
 * Mixin that owns the collection of live game objects: registration with
 * unique ids, removal, and batch cleanup of objects flagged for deletion.
 */
trait GameObjectManagement {

  val context: DrawingContext

  /** All individual objects currently in the game (movable stuff, enemies, the player..). */
  var gameObjects = List[GameObject]()
  var gameObjectIdCount = 0

  /** Registers `o`: assigns the next id and initializes it against the drawing context. */
  def add(o: GameObject): Unit = {
    gameObjectIdCount += 1
    o.id = gameObjectIdCount
    gameObjects ::= o
    o.init(context)
  }

  /** Unregisters `o` (matched by id) and releases its drawing resources. */
  def remove(o: GameObject): Unit = {
    val removedId = o.id
    gameObjects = gameObjects.filter(_.id != removedId)
    o.clear(context)
  }

  /** Drops every object flagged `markedForDeletion`, releasing each one's resources. */
  def removeAllMarkedForDeletion(): Unit = {
    val anyMarked = gameObjects.exists(_.markedForDeletion)
    if (anyMarked) { // only rebuild the list if necessary
      val (doomed, kept) = gameObjects.partition(_.markedForDeletion)
      doomed.foreach(_.clear(context))
      gameObjects = kept
    }
  }

  /** Removes every object, releasing all drawing resources. */
  def clear(): Unit = {
    for (o <- gameObjects) remove(o)
  }
}
package provingground.library
import provingground._
import HoTT._
import induction._
import implicits._
import shapeless._
import Fold._
/**
 * Inductive-type definition of `bool`: a type in `Type` with exactly two
 * constant constructors, `bool.ff` and `bool.tt`.
 */
object boolInd {
  // Constructor sequence ff :: tt :: empty, all over the type named "bool".
  lazy val value = ConstructorSeqTL(
    ConstructorSeqDom.Cons(
      "bool.ff",
      ConstructorShape.IdShape.byTyp("bool" :: Type),
      ConstructorSeqDom.Cons("bool.tt",
        ConstructorShape.IdShape.byTyp("bool" :: Type),
        ConstructorSeqDom.Empty.byTyp("bool" :: Type))
    ),
    "bool" :: Type
  )
}
| siddhartha-gadgil/ProvingGround | leanlib/src/main/scala/provingground/library/inductive-types/boolInd.scala | Scala | mit | 516 |
package sandbox
// Compiler regression test (scalac pos/t1391): mutually recursive abstract
// types declared in a template trait and bound in a concrete nested object.
// Intentionally minimal — do not "clean up" the shape of this code.
class hierarOverload {

  /*
   * Template structure - using abstract types.
   */
  trait AB {
    type TA <: A
    type TB <: B

    // A and B refer to each other through the abstract types above.
    protected trait A {
      val entities : List[TB]
    }

    protected trait B {
      var group : TA
    }
  }

  /*
   * Template instantiation in an object to ease use and globally define
   * the abstract types.
   */
  object NAnB extends AB {
    type TB = nB
    type TA = nA

    class nA extends A {
      val entities = List[nB]()
    }

    class nB extends B {
      var group = new nA
    }
  }

  def foo () {
    val t = new NAnB.nA
    println(t)
  }
}
| loskutov/intellij-scala | testdata/scalacTests/pos/t1391.scala | Scala | apache-2.0 | 631 |
package org.scalameta.adt
import scala.reflect.api.Universe
import org.scalameta.adt.{Internal => AdtInternal}
import org.scalameta.ast.{internal => AstInternal}
import scala.reflect.{classTag, ClassTag}
import scala.collection.mutable
/**
 * Reflection helpers over an ADT hierarchy marked with the scalameta
 * @root/@branch/@leaf annotations. Abstracted over a reflection [[Universe]]
 * so the same code serves both compile-time (macro) and runtime reflection.
 */
trait Reflection {
  val u: Universe
  val mirror: u.Mirror
  import u._
  import internal._
  import decorators._

  /** Role queries and view conversions for symbols participating in the ADT. */
  implicit class XtensionAdtSymbol(sym: Symbol) {
    def isAdt: Boolean = sym.isClass && (sym.asClass.toType <:< typeOf[AdtInternal.Adt])
    // Compares annotation type names as strings; forces the symbol's info first.
    private def hasAnnotation[T: ClassTag] = { sym.initialize; sym.annotations.exists(_.tree.tpe.typeSymbol.fullName == classTag[T].runtimeClass.getCanonicalName) }
    def isRoot: Boolean = hasAnnotation[AdtInternal.root]
    def isBranch: Boolean = hasAnnotation[AdtInternal.branch]
    def isLeaf: Boolean = hasAnnotation[AdtInternal.leafClass]
    // A field is either a public param accessor of a leaf class or an @astField method.
    def isField: Boolean = {
      val isMethodInLeafClass = sym.isMethod && sym.owner.isLeaf
      val isParamGetter = sym.isTerm && sym.asTerm.isParamAccessor && sym.asTerm.isGetter && sym.isPublic
      val isAstField = hasAnnotation[AstInternal.astField]
      isMethodInLeafClass && (isParamGetter || isAstField)
    }
    def isPayload: Boolean = sym.isField && !sym.isAuxiliary
    def isAuxiliary: Boolean = sym.isField && hasAnnotation[AstInternal.auxiliary]
    def asAdt: Adt = if (isRoot) sym.asRoot else if (isBranch) sym.asBranch else if (isLeaf) sym.asLeaf else sys.error("not an adt")
    def asRoot: Root = new Root(sym)
    def asBranch: Branch = new Branch(sym)
    def asLeaf: Leaf = new Leaf(sym)
    def asField: Field = new Field(sym)
  }

  // Overridable hook: knownDirectSubclasses only works for sealed classes.
  protected def figureOutDirectSubclasses(sym: ClassSymbol): List[Symbol] = {
    if (sym.isSealed) sym.knownDirectSubclasses.toList.sortBy(_.fullName)
    else sys.error(s"failed to figure out direct subclasses for ${sym.fullName}")
  }

  /** Internal traversal helpers; not exposed outside this trait. */
  private implicit class PrivateXtensionAdtSymbol(sym: Symbol) {
    // For a leaf object, prefer the module (term) symbol over its module class.
    private def ensureModule(sym: Symbol): Symbol = if (sym.isModuleClass) sym.owner.info.member(sym.name.toTermName) else sym
    def branches: List[Symbol] = { sym.initialize; figureOutDirectSubclasses(sym.asClass).toList.filter(_.isBranch) }
    def allBranches: List[Symbol] = (sym.branches ++ sym.branches.flatMap(_.allBranches)).distinct
    def leafs: List[Symbol] = { sym.initialize; figureOutDirectSubclasses(sym.asClass).toList.filter(_.isLeaf).map(ensureModule) }
    def allLeafs: List[Symbol] = (sym.leafs ++ sym.branches.flatMap(_.allLeafs)).map(ensureModule).distinct
    def root: Symbol = sym.asClass.baseClasses.reverse.find(_.isRoot).getOrElse(NoSymbol)
    def fields: List[Symbol] = allFields.filter(p => p.isPayload)
    def allFields: List[Symbol] = sym.info.decls.filter(_.isField).toList
  }

  /** Common view over a node of the ADT hierarchy (root, branch or leaf). */
  abstract class Adt(val sym: Symbol) {
    def tpe: Type = if (sym.isTerm) sym.info else sym.asType.toType
    // Fully qualified name relative to the owning package, dot-separated.
    def prefix: String = {
      def loop(sym: Symbol): String = {
        if (sym.owner.isPackageClass) sym.name.toString
        else loop(sym.owner) + "." + sym.name.toString
      }
      loop(sym)
    }
    def root = sym.root.asRoot
  }

  /** Shared API of nodes that can have descendants (roots and branches). */
  trait NonLeafApi extends Adt {
    def all: List[Adt] = List(this) ++ this.allBranches ++ this.allLeafs
    def branches: List[Branch] = sym.branches.map(_.asBranch)
    def allBranches: List[Branch] = sym.allBranches.map(_.asBranch)
    def leafs: List[Leaf] = sym.leafs.map(_.asLeaf)
    def allLeafs: List[Leaf] = sym.allLeafs.map(_.asLeaf)
  }

  class Root(sym: Symbol) extends Adt(sym) with NonLeafApi {
    require(sym.isRoot)
    override def toString = s"root $prefix"
  }

  class Branch(sym: Symbol) extends Adt(sym) with NonLeafApi {
    require(sym.isBranch)
    override def toString = s"branch $prefix"
  }

  class Leaf(sym: Symbol) extends Adt(sym) {
    require(sym.isLeaf)
    def fields: List[Field] = sym.fields.map(_.asField)
    def allFields: List[Field] = sym.allFields.map(_.asField)
    override def toString = s"leaf $prefix"
  }

  /** A (possibly auxiliary) field of a leaf. */
  class Field(val sym: Symbol) {
    require(sym.isField)
    def owner: Leaf = sym.owner.asLeaf
    // Strips the underscore prefix used by internal accessor names.
    def name: TermName = TermName(sym.name.toString.stripPrefix("_"))
    def tpe: Type = sym.info.finalResultType
    def isPayload: Boolean = sym.isPayload
    def isAuxiliary: Boolean = sym.isAuxiliary
    override def toString = s"field ${owner.prefix}.$name: $tpe" + (if (isAuxiliary) " (auxiliary)" else "")
  }
}
| mdemarne/scalameta | foundation/src/main/scala/org/scalameta/adt/Reflection.scala | Scala | bsd-3-clause | 4,354 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package statements
import base.ScPatternList
import expr.ScExpression
import base.patterns.ScBindingPattern
/**
* @author Alexander Podkhalyuzin
* Date: 22.02.2008
*/
/** PSI element for a `var` definition (`var p = expr`), as opposed to a bare declaration. */
trait ScVariableDefinition extends ScVariable {
  /** The pattern list on the left-hand side of the definition. */
  def pList: ScPatternList
  /** All binding patterns introduced by this definition. */
  def bindings: Seq[ScBindingPattern]
  def declaredElements = bindings
  /** The initializer expression, if present. */
  def expr: Option[ScExpression]
  def hasExplicitType: Boolean = typeElement.isDefined
  // "Simple" means a single binding whose patterns are all plain references.
  def isSimple: Boolean = pList.allPatternsSimple && bindings.size == 1
  override def accept(visitor: ScalaElementVisitor) {
    visitor.visitVariableDefinition(this)
  }
}
}
object ScVariableDefinition {
  /** Extractor for the initializer: `case ScVariableDefinition.expr(e) => ...`. */
  object expr {
    def unapply(definition: ScVariableDefinition) = definition.expr
  }
}
package net.chwthewke.passman
package engine
case class CharSpace(name: String, symbols: IndexedSeq[Char])
object CharSpace {

  /** Upper- and lower-case letters plus digits (62 symbols). */
  val Alphanumeric: CharSpace =
    CharSpace("alphanum", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789".to[Vector])

  /** Alphanumerics extended with common ASCII punctuation and symbols. */
  val Complex: CharSpace = CharSpace(
    "alphanum-symbols",
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789&\\"'(-_)=#{[|\\\\@]}$%*<>,?;.:/!^`".to[Vector])

  // TODO UTF-8 letters as a joke?
}
| chwthewke/passman | passman-core/src/main/scala/net/chwthewke/passman/engine/CharSpace.scala | Scala | bsd-3-clause | 477 |
package keystoneml.nodes.nlp
import edu.arizona.sista.processors.Processor
import edu.arizona.sista.processors.fastnlp.FastNLPProcessor
import org.apache.spark.rdd.RDD
import keystoneml.workflow.Transformer
/**
 * Transformer that uses CoreNLP to (in order):
 *   - Tokenize document
 *   - Lemmatize tokens
 *   - Replace entities w/ their type (e.g. "Jon" => "NAME", "Paris" => "PLACE")
 *   - Return n-grams for the above (respecting sentence boundaries)
 * Note: Much slower than just using [[Tokenizer]] followed by [[NGramsFeaturizer]]
 *
 * @param orders The sizes of the n-grams to output
 */
case class CoreNLPFeatureExtractor(orders: Seq[Int]) extends Transformer[String, Seq[String]] {
  // Built lazily and excluded from serialization: the processor is heavyweight
  // and should be constructed where apply() actually runs.
  @transient lazy val proc = new FastNLPProcessor()

  // Compiled once instead of on every normalize() call (the original re-built
  // the Regex per invocation). NOTE(review): the '+' inside the negated class
  // means literal '+' characters are *kept* — confirm that is intended.
  private val normalizeRegex = "[^a-zA-Z0-9\\\\s+]".r

  /**
   * Annotates the document, replaces entity tokens by their entity type and all
   * other tokens by their normalized lemma, then emits space-joined n-grams for
   * every order in `orders`, never crossing sentence boundaries.
   */
  override def apply(in: String): Seq[String] = {
    val doc = proc.mkDocument(in)
    proc.tagPartsOfSpeech(doc)
    proc.lemmatize(doc)
    proc.recognizeNamedEntities(doc)
    // NOTE(review): clear() is called before the annotations are read below,
    // matching the original code — presumably it only frees transient state;
    // confirm against the sista Processor documentation.
    doc.clear()
    val tokenized = doc.sentences.map { s =>
      val out = new Array[String](s.words.length)
      for (i <- 0 until s.words.length) {
        // "O" is the tag for "not a named entity".
        out(i) = if (s.entities.get(i) != "O") s.entities.get(i) else normalize(s.lemmas.get(i))
      }
      out
    }
    // For each requested order, slide a window over every sentence and join
    // each gram with spaces; flatten across sentences, then across orders.
    orders.map { n =>
      tokenized.map { s =>
        s.sliding(n).map(gram => gram.mkString(" ")).toList
      }.flatten.toList
    }.flatten.toList
  }

  /** Strips characters outside [a-zA-Z0-9\s+] and lower-cases the token. */
  def normalize(s : String): String =
    normalizeRegex.replaceAllIn(s, _ => "").toLowerCase
}
| amplab/keystone | src/main/scala/keystoneml/nodes/nlp/CoreNLPFeatureExtractor.scala | Scala | apache-2.0 | 1,501 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.trees.CurrentOrigin
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.StructType
abstract class LogicalPlan extends QueryPlan[LogicalPlan] with Logging {

  private var _analyzed: Boolean = false

  /**
   * Marks this plan as already analyzed. This should only be called by [[CheckAnalysis]].
   */
  private[catalyst] def setAnalyzed(): Unit = { _analyzed = true }

  /**
   * Returns true if this node and its children have already been gone through analysis and
   * verification. Note that this is only an optimization used to avoid analyzing trees that
   * have already been analyzed, and can be reset by transformations.
   */
  def analyzed: Boolean = _analyzed

  /** Returns true if this subtree contains any streaming data sources. */
  def isStreaming: Boolean = children.exists(_.isStreaming)

  /**
   * Returns a copy of this node where `rule` has been recursively applied first to all of its
   * children and then itself (post-order). When `rule` does not apply to a given node, it is left
   * unchanged. This function is similar to `transformUp`, but skips sub-trees that have already
   * been marked as analyzed.
   *
   * @param rule the function use to transform this nodes children
   */
  def resolveOperators(rule: PartialFunction[LogicalPlan, LogicalPlan]): LogicalPlan = {
    if (!analyzed) {
      val afterRuleOnChildren = mapChildren(_.resolveOperators(rule))
      if (this fastEquals afterRuleOnChildren) {
        // Children unchanged: apply the rule to the original node to preserve identity.
        CurrentOrigin.withOrigin(origin) {
          rule.applyOrElse(this, identity[LogicalPlan])
        }
      } else {
        CurrentOrigin.withOrigin(origin) {
          rule.applyOrElse(afterRuleOnChildren, identity[LogicalPlan])
        }
      }
    } else {
      this
    }
  }

  /**
   * Recursively transforms the expressions of a tree, skipping nodes that have already
   * been analyzed.
   */
  def resolveExpressions(r: PartialFunction[Expression, Expression]): LogicalPlan = {
    this resolveOperators  {
      case p => p.transformExpressions(r)
    }
  }

  /** A cache for the estimated statistics, such that it will only be computed once. */
  private var statsCache: Option[Statistics] = None

  /**
   * Returns the estimated statistics for the current logical plan node. Under the hood, this
   * method caches the return value, which is computed based on the configuration passed in the
   * first time. If the configuration changes, the cache can be invalidated by calling
   * [[invalidateStatsCache()]].
   */
  final def stats(conf: SQLConf): Statistics = statsCache.getOrElse {
    statsCache = Some(computeStats(conf))
    statsCache.get
  }

  /** Invalidates the stats cache. See [[stats]] for more information. */
  final def invalidateStatsCache(): Unit = {
    statsCache = None
    children.foreach(_.invalidateStatsCache())
  }

  /**
   * Computes [[Statistics]] for this plan. The default implementation assumes the output
   * cardinality is the product of all child plan's cardinality, i.e. applies in the case
   * of cartesian joins.
   *
   * [[LeafNode]]s must override this.
   */
  protected def computeStats(conf: SQLConf): Statistics = {
    if (children.isEmpty) {
      throw new UnsupportedOperationException(s"LeafNode $nodeName must implement statistics.")
    }
    Statistics(sizeInBytes = children.map(_.stats(conf).sizeInBytes).product)
  }

  override def verboseStringWithSuffix: String = {
    super.verboseString + statsCache.map(", " + _.toString).getOrElse("")
  }

  /**
   * Returns the maximum number of rows that this plan may compute.
   *
   * Any operator that a Limit can be pushed passed should override this function (e.g., Union).
   * Any operator that can push through a Limit should override this function (e.g., Project).
   */
  def maxRows: Option[Long] = None

  /**
   * Returns true if this expression and all its children have been resolved to a specific schema
   * and false if it still contains any unresolved placeholders. Implementations of LogicalPlan
   * can override this (e.g.
   * [[org.apache.spark.sql.catalyst.analysis.UnresolvedRelation UnresolvedRelation]]
   * should return `false`).
   */
  lazy val resolved: Boolean = expressions.forall(_.resolved) && childrenResolved

  // Unresolved plans are prefixed with a single quote in tree dumps.
  override protected def statePrefix = if (!resolved) "'" else super.statePrefix

  /**
   * Returns true if all its children of this query plan have been resolved.
   */
  def childrenResolved: Boolean = children.forall(_.resolved)

  /**
   * Resolves a given schema to concrete [[Attribute]] references in this query plan. This function
   * should only be called on analyzed plans since it will throw [[AnalysisException]] for
   * unresolved [[Attribute]]s.
   */
  def resolve(schema: StructType, resolver: Resolver): Seq[Attribute] = {
    schema.map { field =>
      resolve(field.name :: Nil, resolver).map {
        case a: AttributeReference => a
        case other => sys.error(s"can not handle nested schema yet... plan $this")
      }.getOrElse {
        throw new AnalysisException(
          s"Unable to resolve ${field.name} given [${output.map(_.name).mkString(", ")}]")
      }
    }
  }

  /**
   * Optionally resolves the given strings to a [[NamedExpression]] using the input from all child
   * nodes of this LogicalPlan. The attribute is expressed as
   * as string in the following form: `[scope].AttributeName.[nested].[fields]...`.
   */
  def resolveChildren(
      nameParts: Seq[String],
      resolver: Resolver): Option[NamedExpression] =
    resolve(nameParts, children.flatMap(_.output), resolver)

  /**
   * Optionally resolves the given strings to a [[NamedExpression]] based on the output of this
   * LogicalPlan. The attribute is expressed as string in the following form:
   * `[scope].AttributeName.[nested].[fields]...`.
   */
  def resolve(
      nameParts: Seq[String],
      resolver: Resolver): Option[NamedExpression] =
    resolve(nameParts, output, resolver)

  /**
   * Given an attribute name, split it to name parts by dot, but
   * don't split the name parts quoted by backticks, for example,
   * `ab.cd`.`efg` should be split into two parts "ab.cd" and "efg".
   */
  def resolveQuoted(
      name: String,
      resolver: Resolver): Option[NamedExpression] = {
    resolve(UnresolvedAttribute.parseAttributeName(name), output, resolver)
  }

  /**
   * Resolve the given `name` string against the given attribute, returning either 0 or 1 match.
   *
   * This assumes `name` has multiple parts, where the 1st part is a qualifier
   * (i.e. table name, alias, or subquery alias).
   * See the comment above `candidates` variable in resolve() for semantics the returned data.
   */
  private def resolveAsTableColumn(
      nameParts: Seq[String],
      resolver: Resolver,
      attribute: Attribute): Option[(Attribute, List[String])] = {
    assert(nameParts.length > 1)
    if (attribute.qualifier.exists(resolver(_, nameParts.head))) {
      // At least one qualifier matches. See if remaining parts match.
      val remainingParts = nameParts.tail
      resolveAsColumn(remainingParts, resolver, attribute)
    } else {
      None
    }
  }

  /**
   * Resolve the given `name` string against the given attribute, returning either 0 or 1 match.
   *
   * Different from resolveAsTableColumn, this assumes `name` does NOT start with a qualifier.
   * See the comment above `candidates` variable in resolve() for semantics the returned data.
   */
  private def resolveAsColumn(
      nameParts: Seq[String],
      resolver: Resolver,
      attribute: Attribute): Option[(Attribute, List[String])] = {
    if (!attribute.isGenerated && resolver(attribute.name, nameParts.head)) {
      Option((attribute.withName(nameParts.head), nameParts.tail.toList))
    } else {
      None
    }
  }

  /** Performs attribute resolution given a name and a sequence of possible attributes. */
  protected def resolve(
      nameParts: Seq[String],
      input: Seq[Attribute],
      resolver: Resolver): Option[NamedExpression] = {

    // A sequence of possible candidate matches.
    // Each candidate is a tuple. The first element is a resolved attribute, followed by a list
    // of parts that are to be resolved.
    // For example, consider an example where "a" is the table name, "b" is the column name,
    // and "c" is the struct field name, i.e. "a.b.c". In this case, Attribute will be "a.b",
    // and the second element will be List("c").
    var candidates: Seq[(Attribute, List[String])] = {
      // If the name has 2 or more parts, try to resolve it as `table.column` first.
      if (nameParts.length > 1) {
        input.flatMap { option =>
          resolveAsTableColumn(nameParts, resolver, option)
        }
      } else {
        Seq.empty
      }
    }

    // If none of attributes match `table.column` pattern, we try to resolve it as a column.
    if (candidates.isEmpty) {
      candidates = input.flatMap { candidate =>
        resolveAsColumn(nameParts, resolver, candidate)
      }
    }

    def name = UnresolvedAttribute(nameParts).name

    candidates.distinct match {
      // One match, no nested fields, use it.
      case Seq((a, Nil)) => Some(a)

      // One match, but we also need to extract the requested nested field.
      case Seq((a, nestedFields)) =>
        // The foldLeft adds ExtractValues for every remaining parts of the identifier,
        // and aliased it with the last part of the name.
        // For example, consider "a.b.c", where "a" is resolved to an existing attribute.
        // Then this will add ExtractValue("c", ExtractValue("b", a)), and alias the final
        // expression as "c".
        val fieldExprs = nestedFields.foldLeft(a: Expression)((expr, fieldName) =>
          ExtractValue(expr, Literal(fieldName), resolver))
        Some(Alias(fieldExprs, nestedFields.last)())

      // No matches.
      case Seq() =>
        logTrace(s"Could not find $name in ${input.mkString(", ")}")
        None

      // More than one match.
      case ambiguousReferences =>
        val referenceNames = ambiguousReferences.map(_._1).mkString(", ")
        throw new AnalysisException(
          s"Reference '$name' is ambiguous, could be: $referenceNames.")
    }
  }

  /**
   * Refreshes (or invalidates) any metadata/data cached in the plan recursively.
   */
  def refresh(): Unit = children.foreach(_.refresh())
}
/**
 * A logical plan node with no children.
 */
abstract class LeafNode extends LogicalPlan {
  override final def children: Seq[LogicalPlan] = Nil
  // A leaf produces (rather than passes through) everything it outputs.
  override def producedAttributes: AttributeSet = outputSet
}
/**
 * A logical plan node with single child.
 */
abstract class UnaryNode extends LogicalPlan {
  def child: LogicalPlan

  override final def children: Seq[LogicalPlan] = child :: Nil

  /**
   * Generates an additional set of aliased constraints by replacing the original constraint
   * expressions with the corresponding alias
   */
  protected def getAliasedConstraints(projectList: Seq[NamedExpression]): Set[Expression] = {
    var allConstraints = child.constraints.asInstanceOf[Set[Expression]]
    projectList.foreach {
      case a @ Alias(e, _) =>
        // For every alias in `projectList`, replace the reference in constraints by its attribute.
        allConstraints ++= allConstraints.map(_ transform {
          case expr: Expression if expr.semanticEquals(e) =>
            a.toAttribute
        })
        allConstraints += EqualNullSafe(e, a.toAttribute)
      case _ => // Don't change.
    }
    // Return only the constraints that the aliases introduced.
    allConstraints -- child.constraints
  }

  override protected def validConstraints: Set[Expression] = child.constraints

  override def computeStats(conf: SQLConf): Statistics = {
    // There should be some overhead in Row object, the size should not be zero when there is
    // no columns, this help to prevent divide-by-zero error.
    val childRowSize = child.output.map(_.dataType.defaultSize).sum + 8
    val outputRowSize = output.map(_.dataType.defaultSize).sum + 8
    // Assume there will be the same number of rows as child has.
    var sizeInBytes = (child.stats(conf).sizeInBytes * outputRowSize) / childRowSize
    if (sizeInBytes == 0) {
      // sizeInBytes can't be zero, or sizeInBytes of BinaryNode will also be zero
      // (product of children).
      sizeInBytes = 1
    }

    // Don't propagate rowCount and attributeStats, since they are not estimated here.
    Statistics(sizeInBytes = sizeInBytes, hints = child.stats(conf).hints)
  }
}
/**
 * A logical plan node with a left and right child.
 */
abstract class BinaryNode extends LogicalPlan {
  def left: LogicalPlan
  def right: LogicalPlan

  override final def children: Seq[LogicalPlan] = Seq(left, right)
}
| bOOm-X/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala | Scala | apache-2.0 | 13,860 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.util
import java.sql.{Date, Timestamp}
import java.text.{DateFormat, SimpleDateFormat}
import java.util.{Calendar, Locale, TimeZone}
import java.util.concurrent.ConcurrentHashMap
import java.util.function.{Function => JFunction}
import javax.xml.bind.DatatypeConverter
import scala.annotation.tailrec
import org.apache.spark.unsafe.types.UTF8String
/**
 * Helper functions for converting between internal and external date and time representations.
 * Dates are exposed externally as java.sql.Date and are represented internally as the number of
 * dates since the Unix epoch (1970-01-01). Timestamps are exposed externally as java.sql.Timestamp
 * and are stored internally as longs, which are capable of storing timestamps with microsecond
 * precision.
 */
object DateTimeUtils {

  // we use Int and Long internally to represent [[DateType]] and [[TimestampType]]
  type SQLDate = Int
  type SQLTimestamp = Long

  // see http://stackoverflow.com/questions/466321/convert-unix-timestamp-to-julian
  // it's 2440587.5, rounding up to compatible with Hive
  final val JULIAN_DAY_OF_EPOCH = 2440588

  // Unit-conversion constants; internal timestamps are microseconds since epoch.
  final val SECONDS_PER_DAY = 60 * 60 * 24L
  final val MICROS_PER_MILLIS = 1000L
  final val MICROS_PER_SECOND = MICROS_PER_MILLIS * MILLIS_PER_SECOND
  final val MILLIS_PER_SECOND = 1000L
  final val NANOS_PER_SECOND = MICROS_PER_SECOND * 1000L
  final val MICROS_PER_DAY = MICROS_PER_SECOND * SECONDS_PER_DAY
  final val MILLIS_PER_DAY = SECONDS_PER_DAY * 1000L

  // number of days in 400 years
  final val daysIn400Years: Int = 146097
  // number of days between 1.1.1970 and 1.1.2001
  final val to2001 = -11323

  // this is year -17999, calculation: 50 * daysIn400Year
  final val YearZero = -17999
  final val toYearZero = to2001 + 7304850
  final val TimeZoneGMT = TimeZone.getTimeZone("GMT")
  final val TimeZoneUTC = TimeZone.getTimeZone("UTC")
  // Months (1-based) that have 31 days.
  final val MonthOf31Days = Set(1, 3, 5, 7, 8, 10, 12)

  val TIMEZONE_OPTION = "timeZone"

  def defaultTimeZone(): TimeZone = TimeZone.getDefault()
  // Reuse the Calendar object in each thread as it is expensive to create in each method call.
  private val threadLocalGmtCalendar = new ThreadLocal[Calendar] {
    override protected def initialValue: Calendar = {
      Calendar.getInstance(TimeZoneGMT)
    }
  }

  // `SimpleDateFormat` is not thread-safe.
  private val threadLocalTimestampFormat = new ThreadLocal[DateFormat] {
    override def initialValue(): SimpleDateFormat = {
      new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
    }
  }

  // Returns this thread's timestamp formatter, re-targeted at the given time zone.
  def getThreadLocalTimestampFormat(timeZone: TimeZone): DateFormat = {
    val sdf = threadLocalTimestampFormat.get()
    sdf.setTimeZone(timeZone)
    sdf
  }

  // `SimpleDateFormat` is not thread-safe.
  private val threadLocalDateFormat = new ThreadLocal[DateFormat] {
    override def initialValue(): SimpleDateFormat = {
      new SimpleDateFormat("yyyy-MM-dd", Locale.US)
    }
  }

  // Returns this thread's date formatter, set to the JVM default time zone.
  def getThreadLocalDateFormat(): DateFormat = {
    val sdf = threadLocalDateFormat.get()
    sdf.setTimeZone(defaultTimeZone())
    sdf
  }

  // Cache of TimeZone instances keyed by zone id (TimeZone.getTimeZone is relatively costly).
  private val computedTimeZones = new ConcurrentHashMap[String, TimeZone]
  private val computeTimeZone = new JFunction[String, TimeZone] {
    override def apply(timeZoneId: String): TimeZone = TimeZone.getTimeZone(timeZoneId)
  }

  def getTimeZone(timeZoneId: String): TimeZone = {
    computedTimeZones.computeIfAbsent(timeZoneId, computeTimeZone)
  }

  def newDateFormat(formatString: String, timeZone: TimeZone): DateFormat = {
    val sdf = new SimpleDateFormat(formatString, Locale.US)
    sdf.setTimeZone(timeZone)
    // Enable strict parsing, if the input date/format is invalid, it will throw an exception.
    // e.g. to parse invalid date '2016-13-12', or '2016-01-12' with invalid format 'yyyy-aa-dd',
    // an exception will be thrown.
    sdf.setLenient(false)
    sdf
  }
  // we should use the exact day as Int, for example, (year, month, day) -> day
  def millisToDays(millisUtc: Long): SQLDate = {
    millisToDays(millisUtc, defaultTimeZone())
  }

  def millisToDays(millisUtc: Long, timeZone: TimeZone): SQLDate = {
    // SPARK-6785: use Math.floor so negative number of days (dates before 1970)
    // will correctly work as input for function toJavaDate(Int)
    val millisLocal = millisUtc + timeZone.getOffset(millisUtc)
    Math.floor(millisLocal.toDouble / MILLIS_PER_DAY).toInt
  }

  // reverse of millisToDays
  def daysToMillis(days: SQLDate): Long = {
    daysToMillis(days, defaultTimeZone())
  }

  def daysToMillis(days: SQLDate, timeZone: TimeZone): Long = {
    val millisLocal = days.toLong * MILLIS_PER_DAY
    // Subtract the zone offset valid at that *local* time (DST-aware; see helper).
    millisLocal - getOffsetFromLocalMillis(millisLocal, timeZone)
  }
  // Formats a day count as "yyyy-MM-dd" in the default time zone.
  def dateToString(days: SQLDate): String =
    getThreadLocalDateFormat.format(toJavaDate(days))

  // Converts Timestamp to string according to Hive TimestampWritable convention.
  def timestampToString(us: SQLTimestamp): String = {
    timestampToString(us, defaultTimeZone())
  }

  // Converts Timestamp to string according to Hive TimestampWritable convention.
  def timestampToString(us: SQLTimestamp, timeZone: TimeZone): String = {
    val ts = toJavaTimestamp(us)
    val timestampString = ts.toString
    val timestampFormat = getThreadLocalTimestampFormat(timeZone)
    val formatted = timestampFormat.format(ts)
    // Re-append the fractional-second suffix (beyond a plain ".0") that the
    // "yyyy-MM-dd HH:mm:ss" pattern above drops.
    if (timestampString.length > 19 && timestampString.substring(19) != ".0") {
      formatted + timestampString.substring(19)
    } else {
      formatted
    }
  }

  // Parses a date/timestamp string in one of several legacy formats into java.util.Date.
  @tailrec
  def stringToTime(s: String): java.util.Date = {
    val indexOfGMT = s.indexOf("GMT")
    if (indexOfGMT != -1) {
      // ISO8601 with a weird time zone specifier (2000-01-01T00:00GMT+01:00)
      val s0 = s.substring(0, indexOfGMT)
      val s1 = s.substring(indexOfGMT + 3)
      // Mapped to 2000-01-01T00:00+01:00
      stringToTime(s0 + s1)
    } else if (!s.contains('T')) {
      // JDBC escape string
      if (s.contains(' ')) {
        Timestamp.valueOf(s)
      } else {
        Date.valueOf(s)
      }
    } else {
      DatatypeConverter.parseDateTime(s).getTime()
    }
  }
/**
* Returns the number of days since epoch from java.sql.Date.
*/
def fromJavaDate(date: Date): SQLDate = {
millisToDays(date.getTime)
}
/**
* Returns a java.sql.Date from number of days since epoch.
*/
def toJavaDate(daysSinceEpoch: SQLDate): Date = {
new Date(daysToMillis(daysSinceEpoch))
}
/**
* Returns a java.sql.Timestamp from number of micros since epoch.
*/
def toJavaTimestamp(us: SQLTimestamp): Timestamp = {
// setNanos() will overwrite the millisecond part, so the milliseconds should be
// cut off at seconds
var seconds = us / MICROS_PER_SECOND
var micros = us % MICROS_PER_SECOND
// setNanos() can not accept negative value
if (micros < 0) {
micros += MICROS_PER_SECOND
seconds -= 1
}
val t = new Timestamp(seconds * 1000)
t.setNanos(micros.toInt * 1000)
t
}
/**
* Returns the number of micros since epoch from java.sql.Timestamp.
*/
def fromJavaTimestamp(t: Timestamp): SQLTimestamp = {
if (t != null) {
t.getTime() * 1000L + (t.getNanos().toLong / 1000) % 1000L
} else {
0L
}
}
/**
* Returns the number of microseconds since epoch from Julian day
* and nanoseconds in a day
*/
def fromJulianDay(day: Int, nanoseconds: Long): SQLTimestamp = {
// use Long to avoid rounding errors
val seconds = (day - JULIAN_DAY_OF_EPOCH).toLong * SECONDS_PER_DAY
seconds * MICROS_PER_SECOND + nanoseconds / 1000L
}
/**
* Returns Julian day and nanoseconds in a day from the number of microseconds
*
* Note: support timestamp since 4717 BC (without negative nanoseconds, compatible with Hive).
*/
def toJulianDay(us: SQLTimestamp): (Int, Long) = {
val julian_us = us + JULIAN_DAY_OF_EPOCH * MICROS_PER_DAY
val day = julian_us / MICROS_PER_DAY
val micros = julian_us % MICROS_PER_DAY
(day.toInt, micros * 1000L)
}
/*
 * Converts the timestamp to milliseconds since epoch. In Spark timestamp values have
 * microsecond precision, so this conversion is lossy.
 */
def toMillis(us: SQLTimestamp): Long = {
  // When the timestamp is negative i.e. before 1970, the millisecond portion must be
  // rounded toward negative infinity.
  // Example: 1965-01-01 10:11:12.123456 is (-157700927876544) in micro precision and
  // must become (-157700927877) in millis precision.
  // Math.floorDiv implements floor division exactly on Longs; the previous
  // Math.floor(us.toDouble / ...) lost precision for timestamps with |us| > 2^53.
  Math.floorDiv(us, MILLIS_PER_SECOND)
}
/*
 * Converts milliseconds since epoch to SQLTimestamp (microseconds since epoch).
 */
def fromMillis(millis: Long): SQLTimestamp = {
// 1000 microseconds per millisecond; lossless, overflows only far outside the
// representable timestamp range.
millis * 1000L
}
/**
 * Parses a given UTF8 date string to a corresponding [[Long]] value.
 * The return type is [[Option]] in order to distinguish between 0L and null. The following
 * formats are allowed:
 *
 * `yyyy`
 * `yyyy-[m]m`
 * `yyyy-[m]m-[d]d`
 * `yyyy-[m]m-[d]d `
 * `yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]`
 * `yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z`
 * `yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m`
 * `yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m`
 * `yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]`
 * `yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z`
 * `yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m`
 * `yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m`
 * `[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]`
 * `[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z`
 * `[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m`
 * `[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m`
 * `T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]`
 * `T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z`
 * `T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m`
 * `T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m`
 */
def stringToTimestamp(s: UTF8String): Option[SQLTimestamp] = {
// Convenience overload: parse in the session/JVM default time zone.
stringToTimestamp(s, defaultTimeZone())
}
// Hand-rolled single-pass scanner (no SimpleDateFormat) supporting optional
// fractional seconds and time-zone suffixes; see the overload above for the
// full list of accepted formats.
def stringToTimestamp(s: UTF8String, timeZone: TimeZone): Option[SQLTimestamp] = {
if (s == null) {
return None
}
// Sign byte of an explicit time-zone offset: '+' / '-' (43, i.e. '+', encodes 'Z').
var tz: Option[Byte] = None
// segments: 0=year 1=month 2=day 3=hour 4=minute 5=second 6=micros 7=tz-hours 8=tz-minutes
val segments: Array[Int] = Array[Int](1, 1, 1, 0, 0, 0, 0, 0, 0)
var i = 0
var currentSegmentValue = 0
val bytes = s.getBytes
var j = 0
// Count of fractional-second digits seen; used below to scale to exactly 6 (micros).
var digitsMilli = 0
var justTime = false
while (j < bytes.length) {
val b = bytes(j)
val parsedValue = b - '0'.toByte
if (parsedValue < 0 || parsedValue > 9) {
if (j == 0 && b == 'T') {
justTime = true
i += 3
} else if (i < 2) {
if (b == '-') {
if (i == 0 && j != 4) {
// year should have exact four digits
return None
}
segments(i) = currentSegmentValue
currentSegmentValue = 0
i += 1
} else if (i == 0 && b == ':') {
justTime = true
segments(3) = currentSegmentValue
currentSegmentValue = 0
i = 4
} else {
return None
}
} else if (i == 2) {
if (b == ' ' || b == 'T') {
segments(i) = currentSegmentValue
currentSegmentValue = 0
i += 1
} else {
return None
}
} else if (i == 3 || i == 4) {
if (b == ':') {
segments(i) = currentSegmentValue
currentSegmentValue = 0
i += 1
} else {
return None
}
} else if (i == 5 || i == 6) {
if (b == 'Z') {
segments(i) = currentSegmentValue
currentSegmentValue = 0
i += 1
// 43 == '+': 'Z' is treated as a +00:00 offset.
tz = Some(43)
} else if (b == '-' || b == '+') {
segments(i) = currentSegmentValue
currentSegmentValue = 0
i += 1
tz = Some(b)
} else if (b == '.' && i == 5) {
segments(i) = currentSegmentValue
currentSegmentValue = 0
i += 1
} else {
return None
}
// Skip over the fraction segment when the seconds had no '.' fraction.
if (i == 6 && b != '.') {
i += 1
}
} else {
if (b == ':' || b == ' ') {
segments(i) = currentSegmentValue
currentSegmentValue = 0
i += 1
} else {
return None
}
}
} else {
if (i == 6) {
digitsMilli += 1
}
currentSegmentValue = currentSegmentValue * 10 + parsedValue
}
j += 1
}
segments(i) = currentSegmentValue
if (!justTime && i == 0 && j != 4) {
// year should have exact four digits
return None
}
// Scale the fraction to exactly six digits (microseconds).
while (digitsMilli < 6) {
segments(6) *= 10
digitsMilli += 1
}
// We are truncating the nanosecond part, which results in loss of precision
while (digitsMilli > 6) {
segments(6) /= 10
digitsMilli -= 1
}
if (!justTime && isInvalidDate(segments(0), segments(1), segments(2))) {
return None
}
// Range-check time-of-day and time-zone offset segments.
if (segments(3) < 0 || segments(3) > 23 || segments(4) < 0 || segments(4) > 59 ||
segments(5) < 0 || segments(5) > 59 || segments(6) < 0 || segments(6) > 999999 ||
segments(7) < 0 || segments(7) > 23 || segments(8) < 0 || segments(8) > 59) {
return None
}
// No explicit offset -> interpret in the supplied time zone.
val c = if (tz.isEmpty) {
Calendar.getInstance(timeZone)
} else {
Calendar.getInstance(
getTimeZone(f"GMT${tz.get.toChar}${segments(7)}%02d:${segments(8)}%02d"))
}
c.set(Calendar.MILLISECOND, 0)
if (justTime) {
c.set(Calendar.HOUR_OF_DAY, segments(3))
c.set(Calendar.MINUTE, segments(4))
c.set(Calendar.SECOND, segments(5))
} else {
c.set(segments(0), segments(1) - 1, segments(2), segments(3), segments(4), segments(5))
}
Some(c.getTimeInMillis * 1000 + segments(6))
}
/**
 * Parses a given UTF8 date string to a corresponding [[Int]] value.
 * The return type is [[Option]] in order to distinguish between 0 and null. The following
 * formats are allowed:
 *
 * `yyyy`
 * `yyyy-[m]m`
 * `yyyy-[m]m-[d]d`
 * `yyyy-[m]m-[d]d `
 * `yyyy-[m]m-[d]d *`
 * `yyyy-[m]m-[d]dT*`
 */
def stringToDate(s: UTF8String): Option[SQLDate] = {
if (s == null) {
return None
}
// segments: 0=year 1=month 2=day, each defaulting to 1.
val segments: Array[Int] = Array[Int](1, 1, 1)
var i = 0
var currentSegmentValue = 0
val bytes = s.getBytes
var j = 0
// Stop at the first ' ' or 'T': anything after it (a time part) is ignored.
while (j < bytes.length && (i < 3 && !(bytes(j) == ' ' || bytes(j) == 'T'))) {
val b = bytes(j)
if (i < 2 && b == '-') {
if (i == 0 && j != 4) {
// year should have exact four digits
return None
}
segments(i) = currentSegmentValue
currentSegmentValue = 0
i += 1
} else {
val parsedValue = b - '0'.toByte
if (parsedValue < 0 || parsedValue > 9) {
return None
} else {
currentSegmentValue = currentSegmentValue * 10 + parsedValue
}
}
j += 1
}
if (i == 0 && j != 4) {
// year should have exact four digits
return None
}
segments(i) = currentSegmentValue
if (isInvalidDate(segments(0), segments(1), segments(2))) {
return None
}
// Dates are always interpreted in GMT; convert the midnight instant to days.
val c = threadLocalGmtCalendar.get()
c.clear()
c.set(segments(0), segments(1) - 1, segments(2), 0, 0, 0)
c.set(Calendar.MILLISECOND, 0)
Some((c.getTimeInMillis / MILLIS_PER_DAY).toInt)
}
/**
 * Return true if the date is invalid.
 */
private def isInvalidDate(year: Int, month: Int, day: Int): Boolean = {
  val withinBounds =
    year >= 0 && year <= 9999 && month >= 1 && month <= 12 && day >= 1 && day <= 31
  if (!withinBounds) {
    true
  } else {
    // Largest valid day-of-month for this particular (year, month).
    val lastDayOfMonth =
      if (month == 2) {
        if (isLeapYear(year)) 29 else 28
      } else if (MonthOf31Days.contains(month)) {
        31
      } else {
        30
      }
    day > lastDayOfMonth
  }
}
/**
 * Returns the microseconds since year zero (-17999) from microseconds since epoch.
 */
private def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
// Shift the epoch back by `toYearZero` days so all supported values are positive.
microsec + toYearZero * MICROS_PER_DAY
}
// Converts an epoch-based timestamp into "local" micros using the default time zone.
private def localTimestamp(microsec: SQLTimestamp): SQLTimestamp = {
localTimestamp(microsec, defaultTimeZone())
}
// Converts an epoch-based timestamp into "local" micros: shift to year zero, then
// add the zone's UTC offset (queried at millisecond precision) in micros.
private def localTimestamp(microsec: SQLTimestamp, timeZone: TimeZone): SQLTimestamp = {
absoluteMicroSecond(microsec) + timeZone.getOffset(microsec / 1000) * 1000L
}
/**
 * Returns the hour value of a given timestamp value. The timestamp is expressed in microseconds.
 */
def getHours(microsec: SQLTimestamp): Int = {
// local seconds -> hours, wrapped into a 24-hour day.
((localTimestamp(microsec) / MICROS_PER_SECOND / 3600) % 24).toInt
}
/**
 * Returns the hour value of a given timestamp value. The timestamp is expressed in microseconds.
 */
def getHours(microsec: SQLTimestamp, timeZone: TimeZone): Int = {
// Same as the overload above, but in an explicit time zone.
((localTimestamp(microsec, timeZone) / MICROS_PER_SECOND / 3600) % 24).toInt
}
/**
 * Returns the minute value of a given timestamp value. The timestamp is expressed in
 * microseconds.
 */
def getMinutes(microsec: SQLTimestamp): Int = {
// local seconds -> minutes, wrapped into the hour.
((localTimestamp(microsec) / MICROS_PER_SECOND / 60) % 60).toInt
}
/**
 * Returns the minute value of a given timestamp value. The timestamp is expressed in
 * microseconds.
 */
def getMinutes(microsec: SQLTimestamp, timeZone: TimeZone): Int = {
// Same as the overload above, but in an explicit time zone.
((localTimestamp(microsec, timeZone) / MICROS_PER_SECOND / 60) % 60).toInt
}
/**
 * Returns the second value of a given timestamp value. The timestamp is expressed in
 * microseconds.
 */
def getSeconds(microsec: SQLTimestamp): Int = {
// local seconds wrapped into the minute.
((localTimestamp(microsec) / MICROS_PER_SECOND) % 60).toInt
}
/**
 * Returns the second value of a given timestamp value. The timestamp is expressed in
 * microseconds.
 */
def getSeconds(microsec: SQLTimestamp, timeZone: TimeZone): Int = {
// Same as the overload above, but in an explicit time zone.
((localTimestamp(microsec, timeZone) / MICROS_PER_SECOND) % 60).toInt
}
/** Gregorian leap-year rule: every 4th year, except centuries not divisible by 400. */
private[this] def isLeapYear(year: Int): Boolean =
  if (year % 100 == 0) year % 400 == 0 else year % 4 == 0
/**
 * Return the number of days since the start of 400 year period.
 * The second year of a 400 year period (year 1) starts on day 365.
 */
private[this] def yearBoundary(year: Int): Int = {
  // One leap day per 4 years, minus the century exceptions, plus the 400-year ones.
  val leapDays = year / 4 - year / 100 + year / 400
  365 * year + leapDays
}
/**
 * Calculates the number of years for the given number of days. This depends
 * on a 400 year period.
 * @param days days since the beginning of the 400 year period
 * @return (number of year, days in year)
 */
private[this] def numYears(days: Int): (Int, Int) = {
  // days / 365 can overshoot by at most one year because of leap days.
  val estimate = days / 365
  if (days > yearBoundary(estimate)) {
    (estimate, days - yearBoundary(estimate))
  } else {
    (estimate - 1, days - yearBoundary(estimate - 1))
  }
}
/**
 * Calculates the year and the number of the day in the year for the given
 * number of days. The given days is the number of days since 1.1.1970.
 *
 * The calculation uses the fact that the period 1.1.2001 until 31.12.2400 is
 * equals to the period 1.1.1601 until 31.12.2000.
 */
private[this] def getYearAndDayInYear(daysSince1970: SQLDate): (Int, Int) = {
// add the difference (in days) between 1.1.1970 and the artificial year 0 (-17999)
var daysSince1970Tmp = daysSince1970
// Since Julian calendar was replaced with the Gregorian calendar,
// the 10 days after Oct. 4 were skipped.
// (1582-10-04) -141428 days since 1970-01-01
if (daysSince1970 <= -141428) {
daysSince1970Tmp -= 10
}
val daysNormalized = daysSince1970Tmp + toYearZero
val numOfQuarterCenturies = daysNormalized / daysIn400Years
val daysInThis400 = daysNormalized % daysIn400Years + 1
val (years, dayInYear) = numYears(daysInThis400)
// 2001 - 20000 re-bases the artificial year numbering onto calendar years.
val year: Int = (2001 - 20000) + 400 * numOfQuarterCenturies + years
(year, dayInYear)
}
/**
 * Returns the 'day in year' value for the given date. The date is expressed in days
 * since 1.1.1970.
 */
def getDayInYear(date: SQLDate): Int = {
getYearAndDayInYear(date)._2
}
/**
 * Returns the year value for the given date. The date is expressed in days
 * since 1.1.1970.
 */
def getYear(date: SQLDate): Int = {
getYearAndDayInYear(date)._1
}
/**
 * Returns the quarter for the given date. The date is expressed in days
 * since 1.1.1970.
 */
def getQuarter(date: SQLDate): Int = {
  val (year, rawDayInYear) = getYearAndDayInYear(date)
  // Remove the leap day so non-leap quarter boundaries apply uniformly.
  val dayInYear = if (isLeapYear(year)) rawDayInYear - 1 else rawDayInYear
  if (dayInYear <= 90) 1
  else if (dayInYear <= 181) 2
  else if (dayInYear <= 273) 3
  else 4
}
/**
 * Split date (expressed in days since 1.1.1970) into four fields:
 * year, month (Jan is Month 1), dayInMonth, daysToMonthEnd (0 if it's last day of month).
 */
def splitDate(date: SQLDate): (Int, Int, Int, Int) = {
var (year, dayInYear) = getYearAndDayInYear(date)
val isLeap = isLeapYear(year)
// Feb 29 is handled explicitly; all later days are shifted back by one so the
// non-leap month boundaries below apply.
if (isLeap && dayInYear == 60) {
(year, 2, 29, 0)
} else {
if (isLeap && dayInYear > 60) dayInYear -= 1
if (dayInYear <= 181) {
if (dayInYear <= 31) {
(year, 1, dayInYear, 31 - dayInYear)
} else if (dayInYear <= 59) {
(year, 2, dayInYear - 31, if (isLeap) 60 - dayInYear else 59 - dayInYear)
} else if (dayInYear <= 90) {
(year, 3, dayInYear - 59, 90 - dayInYear)
} else if (dayInYear <= 120) {
(year, 4, dayInYear - 90, 120 - dayInYear)
} else if (dayInYear <= 151) {
(year, 5, dayInYear - 120, 151 - dayInYear)
} else {
(year, 6, dayInYear - 151, 181 - dayInYear)
}
} else {
if (dayInYear <= 212) {
(year, 7, dayInYear - 181, 212 - dayInYear)
} else if (dayInYear <= 243) {
(year, 8, dayInYear - 212, 243 - dayInYear)
} else if (dayInYear <= 273) {
(year, 9, dayInYear - 243, 273 - dayInYear)
} else if (dayInYear <= 304) {
(year, 10, dayInYear - 273, 304 - dayInYear)
} else if (dayInYear <= 334) {
(year, 11, dayInYear - 304, 334 - dayInYear)
} else {
(year, 12, dayInYear - 334, 365 - dayInYear)
}
}
}
}
/**
 * Returns the month value for the given date. The date is expressed in days
 * since 1.1.1970. January is month 1.
 */
def getMonth(date: SQLDate): Int = {
  val (year, rawDayInYear) = getYearAndDayInYear(date)
  if (isLeapYear(year) && rawDayInYear == 60) {
    // Feb 29 of a leap year.
    2
  } else {
    // Days after Feb 29 shift back by one so non-leap boundaries apply.
    val dayInYear =
      if (isLeapYear(year) && rawDayInYear > 60) rawDayInYear - 1 else rawDayInYear
    if (dayInYear <= 31) 1
    else if (dayInYear <= 59) 2
    else if (dayInYear <= 90) 3
    else if (dayInYear <= 120) 4
    else if (dayInYear <= 151) 5
    else if (dayInYear <= 181) 6
    else if (dayInYear <= 212) 7
    else if (dayInYear <= 243) 8
    else if (dayInYear <= 273) 9
    else if (dayInYear <= 304) 10
    else if (dayInYear <= 334) 11
    else 12
  }
}
/**
 * Returns the 'day of month' value for the given date. The date is expressed in days
 * since 1.1.1970.
 */
def getDayOfMonth(date: SQLDate): Int = {
  val (year, rawDayInYear) = getYearAndDayInYear(date)
  if (isLeapYear(year) && rawDayInYear == 60) {
    // Feb 29 of a leap year.
    29
  } else {
    // Days after Feb 29 shift back by one so non-leap boundaries apply.
    val dayInYear =
      if (isLeapYear(year) && rawDayInYear > 60) rawDayInYear - 1 else rawDayInYear
    if (dayInYear <= 31) dayInYear
    else if (dayInYear <= 59) dayInYear - 31
    else if (dayInYear <= 90) dayInYear - 59
    else if (dayInYear <= 120) dayInYear - 90
    else if (dayInYear <= 151) dayInYear - 120
    else if (dayInYear <= 181) dayInYear - 151
    else if (dayInYear <= 212) dayInYear - 181
    else if (dayInYear <= 243) dayInYear - 212
    else if (dayInYear <= 273) dayInYear - 243
    else if (dayInYear <= 304) dayInYear - 273
    else if (dayInYear <= 334) dayInYear - 304
    else dayInYear - 334
  }
}
/**
 * The number of days for each month (not leap year), January first.
 */
private val monthDays = Array(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
/**
 * Returns the date value for the first day of the given month.
 * The month is expressed in months since year zero (17999 BC), starting from 0.
 */
private def firstDayOfMonth(absoluteMonth: Int): SQLDate = {
  val absoluteYear = absoluteMonth / 12
  val monthInYear = absoluteMonth - absoluteYear * 12
  // Feb 29 shifts every month from March onwards by one day in leap years.
  val leapAdjustment =
    if (monthInYear >= 2 && isLeapYear(absoluteYear + YearZero)) 1 else 0
  // Sum of the lengths of all months preceding `monthInYear`.
  getDateFromYear(absoluteYear) + leapAdjustment + monthDays.take(monthInYear).sum
}
/**
 * Returns the date value for January 1 of the given year.
 * The year is expressed in years since year zero (17999 BC), starting from 0.
 */
private def getDateFromYear(absoluteYear: Int): SQLDate = {
// 365 days per year plus accumulated Gregorian leap days.
val absoluteDays = (absoluteYear * 365 + absoluteYear / 400 - absoluteYear / 100
+ absoluteYear / 4)
absoluteDays - toYearZero
}
/**
 * Add date and year-month interval.
 * Returns a date value, expressed in days since 1.1.1970.
 */
def dateAddMonths(days: SQLDate, months: Int): SQLDate = {
val (year, monthInYear, dayOfMonth, daysToMonthEnd) = splitDate(days)
val absoluteMonth = (year - YearZero) * 12 + monthInYear - 1 + months
// Clamp at month 0 (January of year zero) for underflowing intervals.
val nonNegativeMonth = if (absoluteMonth >= 0) absoluteMonth else 0
val currentMonthInYear = nonNegativeMonth % 12
val currentYear = nonNegativeMonth / 12
val leapDay = if (currentMonthInYear == 1 && isLeapYear(currentYear + YearZero)) 1 else 0
val lastDayOfMonth = monthDays(currentMonthInYear) + leapDay
// Keep "last day of month" semantics and clamp days that do not exist in the
// target month (e.g. Jan 31 + 1 month -> Feb 28/29).
val currentDayInMonth = if (daysToMonthEnd == 0 || dayOfMonth >= lastDayOfMonth) {
// last day of the month
lastDayOfMonth
} else {
dayOfMonth
}
firstDayOfMonth(nonNegativeMonth) + currentDayInMonth - 1
}
/**
 * Add timestamp and full interval.
 * Returns a timestamp value, expressed in microseconds since 1.1.1970 00:00:00.
 */
def timestampAddInterval(start: SQLTimestamp, months: Int, microseconds: Long): SQLTimestamp = {
// Convenience overload using the session/JVM default time zone.
timestampAddInterval(start, months, microseconds, defaultTimeZone())
}
/**
 * Add timestamp and full interval.
 * Returns a timestamp value, expressed in microseconds since 1.1.1970 00:00:00.
 */
def timestampAddInterval(
start: SQLTimestamp,
months: Int,
microseconds: Long,
timeZone: TimeZone): SQLTimestamp = {
val days = millisToDays(start / 1000L, timeZone)
val newDays = dateAddMonths(days, months)
// Apply the month shift as a day-difference (preserving time-of-day in the
// given zone), then add the sub-day part of the interval.
start +
daysToMillis(newDays, timeZone) * 1000L - daysToMillis(days, timeZone) * 1000L +
microseconds
}
/**
 * Returns number of months between time1 and time2. time1 and time2 are expressed in
 * microseconds since 1.1.1970.
 *
 * If time1 and time2 having the same day of month, or both are the last day of month,
 * it returns an integer (time under a day will be ignored).
 *
 * Otherwise, the difference is calculated based on 31 days per month.
 * If `roundOff` is set to true, the result is rounded to 8 decimal places.
 */
def monthsBetween(
time1: SQLTimestamp,
time2: SQLTimestamp,
roundOff: Boolean,
timeZone: TimeZone): Double = {
val millis1 = time1 / 1000L
val millis2 = time2 / 1000L
val date1 = millisToDays(millis1, timeZone)
val date2 = millisToDays(millis2, timeZone)
val (year1, monthInYear1, dayInMonth1, daysToMonthEnd1) = splitDate(date1)
val (year2, monthInYear2, dayInMonth2, daysToMonthEnd2) = splitDate(date2)
val months1 = year1 * 12 + monthInYear1
val months2 = year2 * 12 + monthInYear2
// Whole-month difference when days align (or both are month ends).
if (dayInMonth1 == dayInMonth2 || ((daysToMonthEnd1 == 0) && (daysToMonthEnd2 == 0))) {
return (months1 - months2).toDouble
}
// milliseconds is enough for 8 digits precision on the right side
val timeInDay1 = millis1 - daysToMillis(date1, timeZone)
val timeInDay2 = millis2 - daysToMillis(date2, timeZone)
val timesBetween = (timeInDay1 - timeInDay2).toDouble / MILLIS_PER_DAY
val diff = (months1 - months2).toDouble + (dayInMonth1 - dayInMonth2 + timesBetween) / 31.0
if (roundOff) {
// rounding to 8 digits
math.round(diff * 1e8) / 1e8
} else {
diff
}
}
// Thursday = 0 since 1970/Jan/01 => Thursday.
// These encode days of the week relative to the epoch's weekday, so that
// `daysSinceEpoch % 7` arithmetic lines up with them.
private val SUNDAY = 3
private val MONDAY = 4
private val TUESDAY = 5
private val WEDNESDAY = 6
private val THURSDAY = 0
private val FRIDAY = 1
private val SATURDAY = 2
/*
 * Returns day of week from String. Starting from Thursday, marked as 0.
 * (Because 1970-01-01 is Thursday). Unknown names yield -1.
 */
def getDayOfWeekFromString(string: UTF8String): Int = {
  // Accepted aliases: 2-letter, 3-letter and full English day names.
  val aliases = Map(
    "SU" -> SUNDAY, "SUN" -> SUNDAY, "SUNDAY" -> SUNDAY,
    "MO" -> MONDAY, "MON" -> MONDAY, "MONDAY" -> MONDAY,
    "TU" -> TUESDAY, "TUE" -> TUESDAY, "TUESDAY" -> TUESDAY,
    "WE" -> WEDNESDAY, "WED" -> WEDNESDAY, "WEDNESDAY" -> WEDNESDAY,
    "TH" -> THURSDAY, "THU" -> THURSDAY, "THURSDAY" -> THURSDAY,
    "FR" -> FRIDAY, "FRI" -> FRIDAY, "FRIDAY" -> FRIDAY,
    "SA" -> SATURDAY, "SAT" -> SATURDAY, "SATURDAY" -> SATURDAY)
  aliases.getOrElse(string.toString.toUpperCase(Locale.ROOT), -1)
}
/**
 * Returns the first date which is later than startDate and is of the given dayOfWeek.
 * dayOfWeek is an integer ranges in [0, 6], and 0 is Thu, 1 is Fri, etc,.
 */
def getNextDateForDayOfWeek(startDate: SQLDate, dayOfWeek: Int): SQLDate = {
// "+7) % 7" keeps the modulus non-negative for dates before the epoch;
// the result is always within (startDate, startDate + 7].
startDate + 1 + ((dayOfWeek - 1 - startDate) % 7 + 7) % 7
}
/**
 * Returns last day of the month for the given date. The date is expressed in days
 * since 1.1.1970.
 */
def getLastDayOfMonth(date: SQLDate): SQLDate =
  // splitDate's fourth component is daysToMonthEnd (0 when already the last day).
  date + splitDate(date)._4
// Visible for testing.
// Truncation levels produced by parseTruncLevel() and consumed by
// truncDate() / truncTimestamp().
private[sql] val TRUNC_TO_YEAR = 1
private[sql] val TRUNC_TO_MONTH = 2
private[sql] val TRUNC_TO_QUARTER = 3
private[sql] val TRUNC_TO_WEEK = 4
private[sql] val TRUNC_TO_DAY = 5
private[sql] val TRUNC_TO_HOUR = 6
private[sql] val TRUNC_TO_MINUTE = 7
private[sql] val TRUNC_TO_SECOND = 8
private[sql] val TRUNC_INVALID = -1
/**
 * Returns the trunc date from original date and trunc level.
 * Trunc level should be generated using `parseTruncLevel()`, should only be 1 or 2.
 */
def truncDate(d: SQLDate, level: Int): SQLDate = level match {
  case TRUNC_TO_YEAR => d - DateTimeUtils.getDayInYear(d) + 1
  case TRUNC_TO_MONTH => d - DateTimeUtils.getDayOfMonth(d) + 1
  case _ =>
    // caller make sure that this should never be reached
    sys.error(s"Invalid trunc level: $level")
}
/**
 * Returns the trunc date time from original date time and trunc level.
 * Trunc level should be generated using `parseTruncLevel()`, should be between 1 and 8
 */
def truncTimestamp(t: SQLTimestamp, level: Int, timeZone: TimeZone): SQLTimestamp = {
// All truncation is performed at millisecond precision, then scaled back to micros.
var millis = t / MICROS_PER_MILLIS
val truncated = level match {
case TRUNC_TO_YEAR =>
val dDays = millisToDays(millis, timeZone)
daysToMillis(truncDate(dDays, level), timeZone)
case TRUNC_TO_MONTH =>
val dDays = millisToDays(millis, timeZone)
daysToMillis(truncDate(dDays, level), timeZone)
case TRUNC_TO_DAY =>
// Shift into local time, truncate, shift back.
val offset = timeZone.getOffset(millis)
millis += offset
millis - millis % (MILLIS_PER_SECOND * SECONDS_PER_DAY) - offset
case TRUNC_TO_HOUR =>
val offset = timeZone.getOffset(millis)
millis += offset
millis - millis % (60 * 60 * MILLIS_PER_SECOND) - offset
case TRUNC_TO_MINUTE =>
millis - millis % (60 * MILLIS_PER_SECOND)
case TRUNC_TO_SECOND =>
millis - millis % MILLIS_PER_SECOND
case TRUNC_TO_WEEK =>
val dDays = millisToDays(millis, timeZone)
val prevMonday = getNextDateForDayOfWeek(dDays - 7, MONDAY)
daysToMillis(prevMonday, timeZone)
case TRUNC_TO_QUARTER =>
val dDays = millisToDays(millis, timeZone)
millis = daysToMillis(truncDate(dDays, TRUNC_TO_MONTH), timeZone)
// NOTE(review): Calendar.getInstance() uses the JVM default time zone rather
// than the `timeZone` argument used everywhere else in this method — confirm
// this asymmetry is intended.
val cal = Calendar.getInstance()
cal.setTimeInMillis(millis)
val quarter = getQuarter(dDays)
val month = quarter match {
case 1 => Calendar.JANUARY
case 2 => Calendar.APRIL
case 3 => Calendar.JULY
case 4 => Calendar.OCTOBER
}
cal.set(Calendar.MONTH, month)
cal.getTimeInMillis()
case _ =>
// caller make sure that this should never be reached
sys.error(s"Invalid trunc level: $level")
}
truncated * MICROS_PER_MILLIS
}
// Convenience overload using the session/JVM default time zone.
def truncTimestamp(d: SQLTimestamp, level: Int): SQLTimestamp = {
truncTimestamp(d, level, defaultTimeZone())
}
/**
 * Returns the truncate level, could be TRUNC_YEAR, TRUNC_MONTH, TRUNC_TO_DAY, TRUNC_TO_HOUR,
 * TRUNC_TO_MINUTE, TRUNC_TO_SECOND, TRUNC_TO_WEEK, TRUNC_TO_QUARTER or TRUNC_INVALID,
 * TRUNC_INVALID means unsupported truncate level.
 */
def parseTruncLevel(format: UTF8String): Int =
  // Option(null) == None, so a null format falls through to TRUNC_INVALID.
  Option(format)
    .map(_.toString.toUpperCase(Locale.ROOT))
    .collect {
      case "YEAR" | "YYYY" | "YY" => TRUNC_TO_YEAR
      case "MON" | "MONTH" | "MM" => TRUNC_TO_MONTH
      case "DAY" | "DD" => TRUNC_TO_DAY
      case "HOUR" => TRUNC_TO_HOUR
      case "MINUTE" => TRUNC_TO_MINUTE
      case "SECOND" => TRUNC_TO_SECOND
      case "WEEK" => TRUNC_TO_WEEK
      case "QUARTER" => TRUNC_TO_QUARTER
    }
    .getOrElse(TRUNC_INVALID)
/**
 * Lookup the offset for given millis seconds since 1970-01-01 00:00:00 in given timezone.
 * TODO: Improve handling of normalization differences.
 * TODO: Replace with JSR-310 or similar system - see SPARK-16788
 */
private[sql] def getOffsetFromLocalMillis(millisLocal: Long, tz: TimeZone): Long = {
var guess = tz.getRawOffset
// the actual offset should be calculated based on milliseconds in UTC
val offset = tz.getOffset(millisLocal - guess)
if (offset != guess) {
guess = tz.getOffset(millisLocal - offset)
if (guess != offset) {
// fallback to do the reverse lookup using java.sql.Timestamp
// this should only happen near the start or end of DST
val days = Math.floor(millisLocal.toDouble / MILLIS_PER_DAY).toInt
val year = getYear(days)
val month = getMonth(days)
val day = getDayOfMonth(days)
var millisOfDay = (millisLocal % MILLIS_PER_DAY).toInt
if (millisOfDay < 0) {
millisOfDay += MILLIS_PER_DAY.toInt
}
val seconds = (millisOfDay / 1000L).toInt
val hh = seconds / 3600
val mm = seconds / 60 % 60
val ss = seconds % 60
val ms = millisOfDay % 1000
// Build the local wall-clock time in `tz` and derive the offset from the
// difference to the instant the Calendar resolves it to.
val calendar = Calendar.getInstance(tz)
calendar.set(year, month - 1, day, hh, mm, ss)
calendar.set(Calendar.MILLISECOND, ms)
guess = (millisLocal - calendar.getTimeInMillis()).toInt
}
}
guess
}
/**
 * Convert the timestamp `ts` from one timezone to another.
 *
 * TODO: Because of DST, the conversion between UTC and human time is not exactly one-to-one
 * mapping, the conversion here may return wrong result, we should make the timestamp
 * timezone-aware.
 */
def convertTz(ts: SQLTimestamp, fromZone: TimeZone, toZone: TimeZone): SQLTimestamp = {
// We always use local timezone to parse or format a timestamp
val localZone = defaultTimeZone()
val utcTs = if (fromZone.getID == localZone.getID) {
ts
} else {
// get the human time using local time zone, that actually is in fromZone.
val localTs = ts + localZone.getOffset(ts / 1000L) * 1000L // in fromZone
localTs - getOffsetFromLocalMillis(localTs / 1000L, fromZone) * 1000L
}
if (toZone.getID == localZone.getID) {
utcTs
} else {
val localTs = utcTs + toZone.getOffset(utcTs / 1000L) * 1000L // in toZone
// treat it as local timezone, convert to UTC (we could get the expected human time back)
localTs - getOffsetFromLocalMillis(localTs / 1000L, localZone) * 1000L
}
}
/**
 * Returns a timestamp of given timezone from utc timestamp, with the same string
 * representation in their timezone.
 */
def fromUTCTime(time: SQLTimestamp, timeZone: String): SQLTimestamp = {
convertTz(time, TimeZoneGMT, getTimeZone(timeZone))
}
/**
 * Returns a utc timestamp from a given timestamp from a given timezone, with the same
 * string representation in their timezone.
 */
def toUTCTime(time: SQLTimestamp, timeZone: String): SQLTimestamp = {
convertTz(time, getTimeZone(timeZone), TimeZoneGMT)
}
/**
 * Re-initialize the current thread's thread locals. Exposed for testing.
 */
private[util] def resetThreadLocals(): Unit = {
// remove() forces lazy re-initialization on the next access.
threadLocalGmtCalendar.remove()
threadLocalTimestampFormat.remove()
threadLocalDateFormat.remove()
}
}
| ddna1021/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala | Scala | apache-2.0 | 38,230 |
package com.tuplejump.inventory.models
// Adapted (with local changes) from the NCounter implementation in akka.crdt.
// A state-based PN-Counter CRDT: a grow-only counter for increments paired with
// one for decrements; the observable value is their difference.
case class PNCounter(
increments: PCounter,
decrements: NCounter) {
val dataType: String = PNCounter.dataType
// Current value: total increments minus total decrements.
def value = increments.value - decrements.value
/**
 * Increment the PNCounter with the delta specified. If the value is
 * negative then it will decrement instead of increment.
 */
def +(node: String, delta: Int = 1): PNCounter = {
if (delta < 0) this -(node, delta)
// NOTE(review): `increments + delta` does not pass `node`, while the decrement
// path below passes it — confirm PCounter's `+` is meant to be node-agnostic.
else new PNCounter(increments + delta, decrements)
}
/**
 * Decrements the PNCounter with the delta specified. Agnostic to
 * sign (does Math.abs(delta)).
 */
def -(node: String, delta: Int = 1): PNCounter =
new PNCounter(increments, decrements +(node, Math.abs(delta)))
// Merge is commutative: both sub-counters merge pairwise.
def merge(that: PNCounter): PNCounter =
new PNCounter(
that.increments.merge(this.increments),
that.decrements.merge(this.decrements))
}
object PNCounter {
// Wire/type identifier for this CRDT.
val dataType: String = "pn-counter"
}
| shrutig/inventory | src/main/scala/com/tuplejump/inventory/models/PNCounter.scala | Scala | apache-2.0 | 1,040 |
package com.campudus.tableaux.testtools
import com.campudus.tableaux.database.DatabaseConnection
import com.campudus.tableaux.database.domain.DomainObject
import com.campudus.tableaux.database.model.SystemModel
import com.campudus.tableaux.database.model.TableauxModel.{ColumnId, RowId, TableId}
import com.campudus.tableaux.helper.FileUtils
import com.campudus.tableaux.router.auth.permission.RoleModel
import com.campudus.tableaux.testtools.RequestCreation.ColumnType
import com.campudus.tableaux.{CustomException, RequestContext, Starter, TableauxConfig}
import com.typesafe.scalalogging.LazyLogging
import io.vertx.core.buffer.Buffer
import io.vertx.core.http.HttpMethod
import io.vertx.ext.unit.TestContext
import io.vertx.ext.unit.junit.VertxUnitRunner
import io.vertx.lang.scala.{ScalaVerticle, VertxExecutionContext}
import io.vertx.scala.FutureHelper._
import io.vertx.scala.SQLConnection
import io.vertx.scala.core.file.{AsyncFile, OpenOptions}
import io.vertx.scala.core.http._
import io.vertx.scala.core.streams.Pump
import io.vertx.scala.core.{DeploymentOptions, Vertx}
import org.junit.runner.RunWith
import org.junit.{After, Before}
import org.vertx.scala.core.json.{JsonObject, _}
import scala.collection.JavaConverters._
import scala.concurrent.{Future, Promise}
import scala.util.{Failure, Success, Try}
// Exception raised by the HTTP test helpers when a request returns a non-200
// status; carries the response body, the status message as `id`, and the code.
case class TestCustomException(message: String, id: String, statusCode: Int) extends Throwable {
override def toString: String = s"TestCustomException(status=$statusCode,id=$id,message=$message)"
}
@RunWith(classOf[VertxUnitRunner])
trait TableauxTestBase
extends TestConfig
with LazyLogging
with TestAssertionHelper
with JsonCompatible
with TestVertxAccess {
// Mutable test fixtures; all (re)initialized in before() for every test run.
override var vertx: Vertx = _
override implicit var executionContext: VertxExecutionContext = _
override var databaseConfig: JsonObject = _
override var authConfig: JsonObject = _
override var host: String = _
override var port: Int = _
override var tableauxConfig: TableauxConfig = _
// default access token used for all integration tests if no explicit token is provided
var wildcardAccessToken: String = _
implicit val requestContext: RequestContext = RequestContext()
// Per-test setup: starts Vert.x, deploys the Starter verticle on a free port,
// reinstalls the database schema, and prepares a wildcard auth token.
@Before
def before(context: TestContext): Unit = {
vertx = Vertx.vertx()
executionContext = VertxExecutionContext(
io.vertx.scala.core.Context(vertx.asJava.asInstanceOf[io.vertx.core.Vertx].getOrCreateContext())
)
val config = Json
.fromObjectString(fileConfig.encode())
.put("host", fileConfig.getString("host", "127.0.0.1"))
.put("port", getFreePort)
databaseConfig = config.getJsonObject("database", Json.obj())
authConfig = config.getJsonObject("auth", Json.obj())
val rolePermissionsPath = config.getString("rolePermissionsPath")
val rolePermissions = FileUtils(this.vertxAccess()).readJsonFile(rolePermissionsPath, Json.emptyObj())
host = config.getString("host")
port = config.getInteger("port").intValue()
tableauxConfig = new TableauxConfig(
vertx,
authConfig,
databaseConfig,
config.getString("workingDirectory"),
config.getString("uploadsDirectory"),
rolePermissions
)
val async = context.async()
val options = DeploymentOptions()
.setConfig(config)
// After successful deployment: wipe and reinstall the schema so every test
// starts from a clean database.
val completionHandler = {
case Success(id) =>
logger.info(s"Verticle deployed with ID $id")
val sqlConnection = SQLConnection(this.vertxAccess(), databaseConfig)
val dbConnection = DatabaseConnection(this.vertxAccess(), sqlConnection)
val system = SystemModel(dbConnection)
for {
_ <- system.uninstall()
_ <- system.install()
} yield async.complete()
case Failure(e) =>
logger.error("Verticle couldn't be deployed.", e)
context.fail(e)
async.complete()
}: Try[String] => Unit
vertx
.deployVerticleFuture(ScalaVerticle.nameForVerticle[Starter], options)
.onComplete(completionHandler)
val tokenHelper = TokenHelper(this.vertxAccess())
// Token with the "dev" role, accepted by the test role-permission config.
wildcardAccessToken = tokenHelper.generateToken(
Json.obj("aud" -> "grud-backend",
"iss" -> "campudus-test",
"preferred_username" -> "Test",
"realm_access" -> Json.obj("roles" -> Json.arr("dev"))))
requestContext.resetPrincipal()
}
// Per-test teardown: shuts down the Vert.x instance started in before().
@After
def after(context: TestContext): Unit = vertx.close(context.asyncAssertSuccess())
/**
 * Initializes the RoleModel with the given config and also sets up the requestsContext with all provided roles
 */
def initRoleModel(roleConfig: String): RoleModel = {
val roleModel: RoleModel = RoleModel(Json.fromObjectString(roleConfig.stripMargin))
setRequestRoles(roleModel.role2permissions.keySet.toSeq: _*)
roleModel
}
// Overwrites the principal's realm roles used for authorization in tests.
protected def setRequestRoles(roles: String*): Unit = {
requestContext.principal = Json.obj("realm_access" -> Json.obj("roles" -> roles))
}
/**
 * Helper method to set up Tests without running into permission problem (UnauthorizedException)
 *
 * 1. Sets a userRole for requestContext
 * 2. Invokes a function block with this userRole
 * 3. Resets the original userRoles defined by the test
 *
 * For this purpose there is a dummy test role "dev" in `role-permissions-test.json`.
 */
protected def asDevUser[A](function: => Future[A]): Future[A] = {
  val userRolesFromTest: Seq[String] = requestContext.getUserRoles
  setRequestRoles("dev")
  try {
    // `function` typically only *creates* the Future; as before, the roles are
    // restored as soon as it returns, not when the Future completes.
    function
  } finally {
    // Previously a synchronous exception thrown by `function` skipped the reset
    // and leaked the "dev" role into subsequent assertions; finally closes that gap.
    setRequestRoles(userRolesFromTest: _*)
  }
}
// Runs an async test body: completes the TestContext on success, fails it on
// any exception (including ones thrown synchronously while building `f`).
def okTest(f: => Future[_])(implicit context: TestContext): Unit = {
val async = context.async()
(try {
f
} catch {
// Normalize synchronous throws into a failed Future so both paths share
// the onComplete handler below.
case ex: Throwable => Future.failed(ex)
}) onComplete {
case Success(_) => async.complete()
case Failure(ex) =>
logger.error("failed test", ex)
context.fail(ex)
async.complete()
}
}
// Runs an async test body that is *expected* to fail with a (Test)CustomException
// whose id matches `id`; succeeding, or failing differently, fails the test.
def exceptionTest(id: String)(f: => Future[_])(implicit context: TestContext): Unit = {
val async = context.async()
f onComplete {
case Success(_) =>
val msg = s"Test with id $id should fail but got no exception."
logger.error(msg)
context.fail(msg)
async.complete()
case Failure(ex: TestCustomException) =>
context.assertEquals(id, ex.id)
async.complete()
case Failure(ex: CustomException) =>
context.assertEquals(id, ex.id)
async.complete()
case Failure(ex) =>
val msg = s"Test with id $id failed but got wrong exception (${ex.getClass.toString}: ${ex.getMessage})."
logger.error(msg)
context.fail(msg)
async.complete()
}
}
// Sends a body-less request with the default (wildcard) access token; response parsed as JSON.
def sendRequest(method: String, path: String): Future[JsonObject] = {
sendRequest(method, path, None)
}
// Sends a body-less request, optionally with an explicit bearer token; response parsed as JSON.
def sendRequest(method: String, path: String, tokenOpt: Option[String]): Future[JsonObject] = {
val p = Promise[JsonObject]()
// end() fires the request with an empty body; the promise is completed by the response handler.
httpJsonRequest(method, path, p, tokenOpt).end()
p.future
}
// Sends a JSON body with the default access token.
def sendRequest(method: String, path: String, jsonObj: JsonObject): Future[JsonObject] = {
sendRequest(method, path, jsonObj, None)
}
// Sends an encoded JSON body, optionally with an explicit bearer token; response parsed as JSON.
def sendRequest(method: String, path: String, jsonObj: JsonObject, tokenOpt: Option[String]): Future[JsonObject] = {
val p = Promise[JsonObject]()
httpJsonRequest(method, path, p, tokenOpt).end(jsonObj.encode())
p.future
}
// Sends a raw string body with the default access token.
def sendRequest(method: String, path: String, body: String): Future[JsonObject] = {
sendRequest(method, path, body, None)
}
// Sends a raw string body, optionally with an explicit bearer token; response parsed as JSON.
def sendRequest(method: String, path: String, body: String, tokenOpt: Option[String]): Future[JsonObject] = {
val p = Promise[JsonObject]()
httpJsonRequest(method, path, p, tokenOpt).end(body)
p.future
}
// Convenience overload: serializes a DomainObject to JSON and sends it.
def sendRequest(method: String, path: String, domainObject: DomainObject): Future[JsonObject] = {
sendRequest(method, path, domainObject.getJson)
}
// Sends a body-less request and yields the raw response body as a String (no JSON parsing).
def sendStringRequest(method: String, path: String): Future[String] = {
val p = Promise[String]()
httpStringRequest(method, path, p, None).end()
p.future
}
// Sends a JSON body and yields the raw response body as a String (no JSON parsing).
def sendStringRequest(method: String, path: String, jsonObj: JsonObject): Future[String] = {
val p = Promise[String]()
httpStringRequest(method, path, p, None).end(jsonObj.encode())
p.future
}
/**
 * Builds a Vert.x response handler that completes `p` with `function(body)`.
 * Only HTTP status 200 counts as success; any other status fails the promise
 * with a TestCustomException carrying body, status message and status code.
 * The HttpClient is closed as soon as the full body has been received.
 */
private def createResponseHandler[A](
p: Promise[A],
function: String => A
): (HttpClient, HttpClientResponse) => Unit = { (client: HttpClient, resp: HttpClientResponse) =>
def bodyHandler(buf: Buffer): Unit = {
val body = buf.toString()
client.close()
if (resp.statusCode() != 200) {
p.failure(TestCustomException(body, resp.statusMessage(), resp.statusCode()))
} else {
try {
// `function` may throw (e.g. on unparsable JSON); surface that as a failed promise.
p.success(function(body))
} catch {
case ex: Exception => p.failure(ex)
}
}
}
resp.bodyHandler(bodyHandler(_: Buffer))
}
// Fails the promise with the transport-level error and closes the client.
private def createExceptionHandler[A](p: Promise[A]): (HttpClient, Throwable) => Unit = {
(client: HttpClient, x: Throwable) =>
client.close()
p.failure(x)
}
// Prepares an (un-sent) request whose response body is delivered to `p` as a raw String.
private def httpStringRequest(method: String,
path: String,
p: Promise[String],
tokenOpt: Option[String]): HttpClientRequest = {
httpRequest(method, path, createResponseHandler[String](p, _.toString), createExceptionHandler[String](p), tokenOpt)
}
// Prepares an (un-sent) request whose response body is parsed into a JsonObject for `p`.
private def httpJsonRequest(method: String,
path: String,
p: Promise[JsonObject],
tokenOpt: Option[String]): HttpClientRequest = {
httpRequest(method,
path,
createResponseHandler[JsonObject](p, Json.fromObjectString),
createExceptionHandler[JsonObject](p),
tokenOpt)
}
/**
 * Creates an (un-sent) HttpClientRequest against the test server; the caller
 * must invoke end(...) to actually fire it. A bearer token is always attached:
 * either `tokenOpt` or the wildcard access token. keepAlive is disabled so each
 * request uses its own connection; the client is closed by the supplied
 * response/exception handlers.
 */
def httpRequest(
method: String,
path: String,
responseHandler: (HttpClient, HttpClientResponse) => Unit,
exceptionHandler: (HttpClient, Throwable) => Unit,
tokenOpt: Option[String]
): HttpClientRequest = {
// Method string is case-insensitive for callers; normalize for HttpMethod lookup.
val _method = HttpMethod.valueOf(method.toUpperCase)
val options = HttpClientOptions()
.setKeepAlive(false)
val client = vertx.createHttpClient(options)
val token = tokenOpt.getOrElse(wildcardAccessToken)
client
.request(_method, port, host, path)
.putHeader("Authorization", s"Bearer $token")
.handler(responseHandler(client, _: HttpClientResponse))
.exceptionHandler(exceptionHandler(client, _: Throwable))
}
/**
 * Uploads a classpath resource as a multipart/form-data request and returns the
 * parsed JSON response. The multipart envelope (header/footer around a fixed
 * boundary) is written by hand; the file content itself is streamed into the
 * request via a Pump so large files are never held in memory.
 */
protected def uploadFile(method: String, url: String, file: String, mimeType: String): Future[JsonObject] = {
val filePath = getClass.getResource(file).toURI.getPath
val fileName = file.substring(file.lastIndexOf("/") + 1)
val boundary = "dLV9Wyq26L_-JQxk6ferf-RT153LhOO"
val header =
"--" + boundary + "\r\n" +
"Content-Disposition: form-data; name=\"file\"; filename=\"" + fileName + "\"\r\n" +
"Content-Type: " + mimeType + "\r\n\r\n"
val footer = "\r\n--" + boundary + "--\r\n"
// Content-Length must cover file size plus the hand-written multipart envelope.
val contentLength =
String.valueOf(vertx.fileSystem.propsBlocking(filePath).size() + header.length + footer.length)
futurify({ p: Promise[JsonObject] =>
def requestHandler(req: HttpClientRequest): Unit = {
req.putHeader("Content-length", contentLength)
req.putHeader("Content-type", s"multipart/form-data; boundary=$boundary")
logger.info(s"Loading file '$filePath' from disc, content-length=$contentLength")
req.write(header)
val asyncFile: Future[AsyncFile] =
vertx.fileSystem().openFuture(filePath, OpenOptions())
asyncFile.map({ file =>
// Stream the file into the request; stop pumping and fail on any read error.
val pump = Pump.pump(file, req)
file.exceptionHandler({ e: Throwable =>
pump.stop()
req.end("")
p.failure(e)
})
// When the file is exhausted, close it, then terminate the request with the footer.
file.endHandler({ _ =>
file
.closeFuture()
.onComplete({
case Success(_) =>
logger.info(s"File loaded, ending request, ${pump.numberPumped()} bytes pumped.")
req.end(footer)
case Failure(e) =>
req.end("")
p.failure(e)
})
})
pump.start()
})
}
requestHandler(httpJsonRequest(method, url, p, None))
})
}
/**
 * Adds the two standard test columns (an identifier text column and a numeric
 * column) to the given table and yields their column ids, in creation order.
 */
protected def createDefaultColumns(tableId: TableId): Future[(ColumnId, ColumnId)] = {
val createStringColumnJson =
Json.obj("columns" -> Json.arr(Json.obj("kind" -> "text", "name" -> "Test Column 1", "identifier" -> true)))
val createNumberColumnJson =
Json.obj("columns" -> Json.arr(Json.obj("kind" -> "numeric", "name" -> "Test Column 2")))
for {
column1 <- sendRequest("POST", s"/tables/$tableId/columns", createStringColumnJson)
columnId1 = column1.getJsonArray("columns").getJsonObject(0).getLong("id").toLong
column2 <- sendRequest("POST", s"/tables/$tableId/columns", createNumberColumnJson)
columnId2 = column2.getJsonArray("columns").getJsonObject(0).getLong("id").toLong
} yield (columnId1, columnId2)
}
/**
 * Adds the four column kinds used by the status tests (shorttext, richtext,
 * numeric, boolean) to the given table.
 *
 * The creation responses are not needed by any caller, so they are discarded
 * (the previous `column1..column4` bindings were never read).
 */
protected def createStatusTestColumns(tableId: TableId): Future[Unit] = {
  val createShortTextColumnJson =
    Json.obj("columns" -> Json.arr(Json.obj("kind" -> "shorttext", "name" -> "Test Column 1")))
  val createRichTextColumnJson =
    Json.obj("columns" -> Json.arr(Json.obj("kind" -> "richtext", "name" -> "Test Column 2")))
  val createNumberColumnJson =
    Json.obj("columns" -> Json.arr(Json.obj("kind" -> "numeric", "name" -> "Test Column 3")))
  val createBooleanColumnJson =
    Json.obj("columns" -> Json.arr(Json.obj("kind" -> "boolean", "name" -> "Test Column 4")))
  for {
    // Columns are created sequentially; only completion matters, not the payloads.
    _ <- sendRequest("POST", s"/tables/$tableId/columns", createShortTextColumnJson)
    _ <- sendRequest("POST", s"/tables/$tableId/columns", createRichTextColumnJson)
    _ <- sendRequest("POST", s"/tables/$tableId/columns", createNumberColumnJson)
    _ <- sendRequest("POST", s"/tables/$tableId/columns", createBooleanColumnJson)
  } yield ()
}
/**
 * Creates a table named "status test table" with the four status-test columns
 * and a single row whose four cells are all filled, yielding the table's id.
 */
protected def createFullStatusTestTable(): Future[TableId] = {
  val postTable = Json.obj("name" -> "status test table")
  val shortTextValue = Json.obj("value" -> "short_text_value")
  val richTextValue = Json.obj("value" -> "rich_text_value")
  val numberValue = Json.obj("value" -> 42)
  val booleanValue = Json.obj("value" -> true)
  for {
    tableId <- sendRequest("POST", "/tables", postTable).map(_.getLong("id"))
    _ <- createStatusTestColumns(tableId)
    _ <- sendRequest("POST", s"/tables/$tableId/rows")
    _ <- sendRequest("POST", s"/tables/$tableId/columns/1/rows/1", shortTextValue)
    _ <- sendRequest("POST", s"/tables/$tableId/columns/2/rows/1", richTextValue)
    _ <- sendRequest("POST", s"/tables/$tableId/columns/3/rows/1", numberValue)
    _ <- sendRequest("POST", s"/tables/$tableId/columns/4/rows/1", booleanValue)
  } yield tableId
}
/**
 * Creates a table (default name "Test Table 1") with the two default columns
 * but no rows, yielding the new table's id. `displayName` and `description`,
 * when present, are merged into the table-creation payload. `tableNum` is not
 * referenced in this body; createDefaultTable uses it for its cell values.
 */
protected def createEmptyDefaultTable(
    name: String = "Test Table 1",
    tableNum: Int = 1,
    displayName: Option[JsonObject] = None,
    description: Option[JsonObject] = None
): Future[TableId] = {
  val postTable = Json.obj("name" -> name)
  displayName.foreach(obj => postTable.mergeIn(Json.obj("displayName" -> obj)))
  description.foreach(obj => postTable.mergeIn(Json.obj("description" -> obj)))
  for {
    tableId <- sendRequest("POST", "/tables", postTable).map(_.getLong("id"))
    _ <- createDefaultColumns(tableId)
  } yield tableId
}
/**
 * Creates a table with the two default columns and two filled rows
 * ("table{tableNum}rowN" in the text column, 1/2 in the numeric column),
 * yielding the new table's id.
 */
protected def createDefaultTable(
name: String = "Test Table 1",
tableNum: Int = 1,
displayName: Option[JsonObject] = None,
description: Option[JsonObject] = None
): Future[TableId] = {
val fillStringCellJson = Json.obj("value" -> s"table${tableNum}row1")
val fillStringCellJson2 = Json.obj("value" -> s"table${tableNum}row2")
val fillNumberCellJson = Json.obj("value" -> 1)
val fillNumberCellJson2 = Json.obj("value" -> 2)
for {
tableId <- createEmptyDefaultTable(name, tableNum, displayName, description)
_ <- sendRequest("POST", s"/tables/$tableId/rows")
_ <- sendRequest("POST", s"/tables/$tableId/rows")
_ <- sendRequest("POST", s"/tables/$tableId/columns/1/rows/1", fillStringCellJson)
_ <- sendRequest("POST", s"/tables/$tableId/columns/1/rows/2", fillStringCellJson2)
_ <- sendRequest("POST", s"/tables/$tableId/columns/2/rows/1", fillNumberCellJson)
_ <- sendRequest("POST", s"/tables/$tableId/columns/2/rows/2", fillNumberCellJson2)
} yield tableId
}
/**
 * Creates a table with the seven multilanguage columns and two pre-filled rows
 * (values per column kind: text, boolean, numeric, richtext, shorttext, date,
 * datetime), yielding (tableId, column ids, row ids).
 */
protected def createFullTableWithMultilanguageColumns(
tableName: String
): Future[(TableId, Seq[ColumnId], Seq[RowId])] = {
// Batch-row payload: one entry per column id, plus two rows of localized values.
def valuesRow(columnIds: Seq[Long]) = {
Json.obj(
"columns" -> Json.arr(
Json.obj("id" -> columnIds.head),
Json.obj("id" -> columnIds(1)),
Json.obj("id" -> columnIds(2)),
Json.obj("id" -> columnIds(3)),
Json.obj("id" -> columnIds(4)),
Json.obj("id" -> columnIds(5)),
Json.obj("id" -> columnIds(6))
),
"rows" -> Json.arr(
Json.obj(
"values" ->
Json.arr(
Json.obj(
"de-DE" -> s"Hallo, $tableName Welt!",
"en-GB" -> s"Hello, $tableName World!"
),
Json.obj("de-DE" -> true),
Json.obj("de-DE" -> 3.1415926),
Json.obj("en-GB" -> s"Hello, $tableName Col 1 Row 1!"),
Json.obj("en-GB" -> s"Hello, $tableName Col 2 Row 1!"),
Json.obj("de-DE" -> "2015-01-01"),
Json.obj("de-DE" -> "2015-01-01T14:37:47.110+01")
)),
Json.obj(
"values" ->
Json.arr(
Json.obj(
"de-DE" -> s"Hallo, $tableName Welt2!",
"en-GB" -> s"Hello, $tableName World2!"
),
Json.obj("de-DE" -> false),
Json.obj("de-DE" -> 2.1415926),
Json.obj("en-GB" -> s"Hello, $tableName Col 1 Row 2!"),
Json.obj("en-GB" -> s"Hello, $tableName Col 2 Row 2!"),
Json.obj("de-DE" -> "2015-01-02"),
Json.obj("de-DE" -> "2015-01-02T14:37:47.110+01")
))
)
)
}
for {
(tableId, columnIds) <- createTableWithMultilanguageColumns(tableName)
rows <- sendRequest("POST", s"/tables/$tableId/rows", valuesRow(columnIds))
_ = logger.info(s"Row is $rows")
rowIds = rows.getJsonArray("rows").asScala.map(_.asInstanceOf[JsonObject].getLong("id").toLong).toSeq
} yield (tableId, columnIds, rowIds)
}
/**
 * Creates a table with a single multilanguage text column and yields
 * (tableId, columnId). No rows are created.
 */
protected def createSimpleTableWithMultilanguageColumn(
tableName: String,
columnName: String
): Future[(TableId, ColumnId)] = {
for {
table <- sendRequest("POST", "/tables", Json.obj("name" -> tableName))
tableId = table.getLong("id").toLong
columns <- sendRequest("POST",
s"/tables/$tableId/columns",
Json.obj(
"columns" -> Json.arr(
Json.obj("kind" -> "text", "name" -> columnName, "languageType" -> "language")
)))
columnId = columns.getJsonArray("columns").getJsonObject(0).getLong("id").toLong
} yield {
(tableId, columnId)
}
}
/**
 * Creates a table with seven multilanguage columns (text identifier, boolean,
 * numeric, richtext, shorttext, date, datetime) and yields
 * (tableId, column ids in creation order). No rows are created.
 */
protected def createTableWithMultilanguageColumns(tableName: String): Future[(TableId, Seq[ColumnId])] = {
val createMultilanguageColumn = Json.obj(
"columns" ->
Json.arr(
Json.obj("kind" -> "text", "name" -> "Test Column 1", "languageType" -> "language", "identifier" -> true),
Json.obj("kind" -> "boolean", "name" -> "Test Column 2", "languageType" -> "language"),
Json.obj("kind" -> "numeric", "name" -> "Test Column 3", "languageType" -> "language"),
Json.obj("kind" -> "richtext", "name" -> "Test Column 4", "languageType" -> "language"),
Json.obj("kind" -> "shorttext", "name" -> "Test Column 5", "languageType" -> "language"),
Json.obj("kind" -> "date", "name" -> "Test Column 6", "languageType" -> "language"),
Json.obj("kind" -> "datetime", "name" -> "Test Column 7", "languageType" -> "language")
)
)
for {
tableId <- sendRequest("POST", "/tables", Json.obj("name" -> tableName)) map (_.getLong("id"))
columns <- sendRequest("POST", s"/tables/$tableId/columns", createMultilanguageColumn)
columnIds = columns.getJsonArray("columns").asScala.map(_.asInstanceOf[JsonObject].getLong("id").toLong).toSeq
} yield {
(tableId.toLong, columnIds)
}
}
/**
 * Creates a table with nine columns covering most kinds (text, numeric,
 * richtext, date — each plain and multilanguage — plus attachment) and one
 * link column pointing at table `linkTo`. Yields
 * (tableId, ids of the nine columns, id of the link column).
 */
protected def createTableWithComplexColumns(
tableName: String,
linkTo: TableId
): Future[(TableId, Seq[ColumnId], ColumnId)] = {
val createColumns = Json.obj(
"columns" -> Json.arr(
Json.obj("kind" -> "text", "name" -> "column 1 (text)", "identifier" -> true),
Json.obj("kind" -> "text", "name" -> "column 2 (text multilanguage)", "languageType" -> "language"),
Json.obj("kind" -> "numeric", "name" -> "column 3 (numeric)"),
Json.obj("kind" -> "numeric", "name" -> "column 4 (numeric multilanguage)", "languageType" -> "language"),
Json.obj("kind" -> "richtext", "name" -> "column 5 (richtext)"),
Json.obj("kind" -> "richtext", "name" -> "column 6 (richtext multilanguage)", "languageType" -> "language"),
Json.obj("kind" -> "date", "name" -> "column 7 (date)"),
Json.obj("kind" -> "date", "name" -> "column 8 (date multilanguage)", "languageType" -> "language"),
Json.obj("kind" -> "attachment", "name" -> "column 9 (attachment)")
)
)
// NOTE(review): `fromColumnId` is accepted but never used in the payload below —
// confirm whether it was meant to be part of the link definition or can be dropped.
def createLinkColumn(fromColumnId: ColumnId, linkTo: TableId) = {
Json.obj(
"columns" -> Json.arr(
Json.obj(
"kind" -> "link",
"name" -> "column 10 (link)",
"toTable" -> linkTo
)))
}
for {
table <- sendRequest("POST", "/tables", Json.obj("name" -> tableName))
tableId = table.getLong("id").toLong
columns <- sendRequest("POST", s"/tables/$tableId/columns", createColumns)
columnIds = columns.getJsonArray("columns").asScala.map(_.asInstanceOf[JsonObject].getLong("id").toLong).toList
linkColumn <- sendRequest("POST", s"/tables/$tableId/columns", createLinkColumn(columnIds.head, linkTo))
linkColumnId = linkColumn.getJsonArray("columns").getJsonObject(0).getLong("id").toLong
} yield (tableId, columnIds, linkColumnId)
}
/**
 * Creates a table with one column of the given type and one empty row,
 * yielding (tableId, columnId, rowId).
 */
protected def createSimpleTableWithCell(
tableName: String,
columnType: ColumnType
): Future[(TableId, ColumnId, RowId)] = {
for {
table <- sendRequest("POST", "/tables", Json.obj("name" -> tableName))
tableId = table.getLong("id").toLong
column <- sendRequest("POST", s"/tables/$tableId/columns", Json.obj("columns" -> Json.arr(columnType.getJson)))
columnId = column.getJsonArray("columns").getJsonObject(0).getLong("id").toLong
rowPost <- sendRequest("POST", s"/tables/$tableId/rows")
rowId = rowPost.getLong("id").toLong
} yield (tableId, columnId, rowId)
}
/**
 * Creates a table with the given columns and then all rows in a single batch
 * call, yielding (tableId, created column ids, created row ids).
 *
 * @param tableName   name of the new table
 * @param columnTypes column definitions, in order
 * @param rows        one Seq of cell values per row, in column order
 */
protected def createSimpleTableWithValues(
    tableName: String,
    columnTypes: Seq[ColumnType],
    rows: Seq[Seq[Any]]
): Future[(TableId, Seq[ColumnId], Seq[RowId])] = {
  for {
    table <- sendRequest("POST", "/tables", Json.obj("name" -> tableName))
    tableId = table.getLong("id").toLong
    column <- sendRequest("POST",
      s"/tables/$tableId/columns",
      Json.obj("columns" -> Json.arr(columnTypes.map(_.getJson): _*)))
    // Strict Vector instead of a lazy Stream: the ids are small and consumed immediately.
    columnIds = column.getJsonArray("columns").asScala.toVector.map(_.asInstanceOf[JsonObject].getLong("id").toLong)
    columnsPost = Json.arr(columnIds.map(id => Json.obj("id" -> id)): _*)
    rowsPost = Json.arr(rows.map(values => Json.obj("values" -> Json.arr(values: _*))): _*)
    rowPost <- sendRequest("POST", s"/tables/$tableId/rows", Json.obj("columns" -> columnsPost, "rows" -> rowsPost))
    rowIds = rowPost.getJsonArray("rows").asScala.toVector.map(_.asInstanceOf[JsonObject].getLong("id").toLong)
  } yield (tableId, columnIds, rowIds)
}
}
| campudus/tableaux | src/test/scala/com/campudus/tableaux/testtools/TableauxTestBase.scala | Scala | apache-2.0 | 24,779 |
package fpgatidbits.streams
import Chisel._
import fpgatidbits.axi._
// Resizes an AXI stream from inWidth to outWidth bits. Equal widths are wired
// straight through; otherwise the larger width must be an integer multiple of
// the smaller one, and the work is delegated to StreamUpsizer/StreamDownsizer.
class StreamResizer(inWidth: Int, outWidth: Int) extends Module {
val io = new Bundle {
val in = new AXIStreamSlaveIF(UInt(width = inWidth))
val out = new AXIStreamMasterIF(UInt(width = outWidth))
}
if(inWidth == outWidth) {
// no need for any resizing, directly connect in/out
io.out.valid := io.in.valid
io.out.bits := io.in.bits
io.in.ready := io.out.ready
} else if(inWidth < outWidth) {
// widening: output width must be a whole multiple of the input width
Predef.assert(outWidth % inWidth == 0)
StreamUpsizer(io.in, outWidth) <> io.out
} else if(inWidth > outWidth) {
// narrowing: input width must be a whole multiple of the output width
Predef.assert(inWidth % outWidth == 0)
StreamDownsizer(io.in, outWidth) <> io.out
}
}
}
| maltanar/fpga-tidbits | src/main/scala/fpgatidbits/streams/StreamResizer.scala | Scala | bsd-2-clause | 713 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Equality
import org.scalactic.Uniformity
import org.scalactic.Entry
import org.scalactic.StringNormalizations._
import SharedHelpers._
import FailureMessages.decorateToStringValue
import Matchers._
import exceptions.TestFailedException
class ListShouldContainTheSameElementsAsSpec extends Spec {
// Recursively upper-cases string content inside the given value: Strings, Chars,
// lists thereof, (String, String) pairs and map entries with String key/value.
// Any other value is returned unchanged.
private def upperCase(value: Any): Any =
value match {
case l: List[_] => l.map(upperCase(_))
case s: String => s.toUpperCase
case c: Char => c.toString.toUpperCase.charAt(0)
case (s1: String, s2: String) => (s1.toUpperCase, s2.toUpperCase)
case e: java.util.Map.Entry[_, _] =>
(e.getKey, e.getValue) match {
case (k: String, v: String) => Entry(k.toUpperCase, v.toUpperCase)
case _ => value
}
case _ => value
}
// Case-insensitive String equality used to exercise the implicit-Equality and
// `decided by` variants of the matchers in the tests below.
val upperCaseStringEquality =
new Equality[String] {
def areEqual(a: String, b: Any): Boolean = upperCase(a) == upperCase(b)
}
//ADDITIONAL//
// Exercises the `contain theSameElementsAs` matcher on a single List receiver, in
// all six syntactic forms (positive, negated via `not` and `shouldNot`, each with
// and without outer parentheses), checking both behavior and failure messages.
object `a List` {
val fumList: List[String] = List("fum", "foe", "fie", "fee")
val toList: List[String] = List("you", "to", "birthday", "happy")
// positive form without parentheses
object `when used with contain theSameElementsAs (..)` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
fumList should contain theSameElementsAs Set("fee", "fie", "foe", "fum")
val e1 = intercept[TestFailedException] {
fumList should contain theSameElementsAs Set("happy", "birthday", "to", "you")
}
e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (Resources.didNotContainSameElements(decorateToStringValue(fumList), decorateToStringValue(Set("happy", "birthday", "to", "you"))))
}
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
fumList should contain theSameElementsAs Set("FEE", "FIE", "FOE", "FUM")
intercept[TestFailedException] {
fumList should contain theSameElementsAs Set("fee", "fie", "foe")
}
}
def `should use an explicitly provided Equality` {
(fumList should contain theSameElementsAs Set("FEE", "FIE", "FOE", "FUM")) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(fumList should contain theSameElementsAs Set("fee", "fie", "foe")) (decided by upperCaseStringEquality)
}
intercept[TestFailedException] {
fumList should contain theSameElementsAs Set(" FEE ", " FIE ", " FOE ", " FUM ")
}
(fumList should contain theSameElementsAs Set(" FEE ", " FIE ", " FOE ", " FUM ")) (after being lowerCased and trimmed)
}
}
// positive form with parentheses
object `when used with (contain theSameElementsAs (..))` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
fumList should (contain theSameElementsAs Set("fee", "fie", "foe", "fum"))
val e1 = intercept[TestFailedException] {
fumList should (contain theSameElementsAs Set("happy", "birthday", "to", "you"))
}
e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (Resources.didNotContainSameElements(decorateToStringValue(fumList), decorateToStringValue(Set("happy", "birthday", "to", "you"))))
}
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
fumList should (contain theSameElementsAs Set("FEE", "FIE", "FOE", "FUM"))
intercept[TestFailedException] {
fumList should (contain theSameElementsAs Set("fee", "fie", "foe"))
}
}
def `should use an explicitly provided Equality` {
(fumList should (contain theSameElementsAs Set("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(fumList should (contain theSameElementsAs Set("fee", "fie", "foe"))) (decided by upperCaseStringEquality)
}
intercept[TestFailedException] {
fumList should (contain theSameElementsAs Set(" FEE ", " FIE ", " FOE ", " FUM "))
}
(fumList should (contain theSameElementsAs Set(" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed)
}
}
// negated with `not`, without parentheses
object `when used with not contain theSameElementsAs (..)` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
toList should not contain theSameElementsAs (Set("fee", "fie", "foe", "fum"))
val e1 = intercept[TestFailedException] {
toList should not contain theSameElementsAs (Set("happy", "birthday", "to", "you"))
}
e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (Resources.containedSameElements(decorateToStringValue(toList), decorateToStringValue(Set("happy", "birthday", "to", "you"))))
}
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
toList should not contain theSameElementsAs (Set("happy", "birthday", "to"))
intercept[TestFailedException] {
toList should not contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU"))
}
}
def `should use an explicitly provided Equality` {
(toList should not contain theSameElementsAs (Set("happy", "birthday", "to"))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(toList should not contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU"))) (decided by upperCaseStringEquality)
}
toList should not contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))
intercept[TestFailedException] {
(toList should not contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))) (after being lowerCased and trimmed)
}
}
}
// negated with `not`, with parentheses
object `when used with (not contain theSameElementsAs (..))` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
toList should (not contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))
val e1 = intercept[TestFailedException] {
toList should (not contain theSameElementsAs (Set("happy", "birthday", "to", "you")))
}
e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (Resources.containedSameElements(decorateToStringValue(toList), decorateToStringValue(Set("happy", "birthday", "to", "you"))))
}
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
toList should (not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))
intercept[TestFailedException] {
toList should (not contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))
}
}
def `should use an explicitly provided Equality` {
(toList should (not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(toList should (not contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))) (decided by upperCaseStringEquality)
}
toList should (not contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))
intercept[TestFailedException] {
(toList should (not contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))) (after being lowerCased and trimmed)
}
}
}
// negated with `shouldNot`, without parentheses
object `when used with shouldNot contain theSameElementsAs (..)` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
toList shouldNot contain theSameElementsAs (Set("fee", "fie", "foe", "fum"))
val e1 = intercept[TestFailedException] {
toList shouldNot contain theSameElementsAs (Set("happy", "birthday", "to", "you"))
}
e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (Resources.containedSameElements(decorateToStringValue(toList), decorateToStringValue(Set("happy", "birthday", "to", "you"))))
}
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
toList shouldNot contain theSameElementsAs (Set("happy", "birthday", "to"))
intercept[TestFailedException] {
toList shouldNot contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU"))
}
}
def `should use an explicitly provided Equality` {
(toList shouldNot contain theSameElementsAs (Set("happy", "birthday", "to"))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(toList shouldNot contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU"))) (decided by upperCaseStringEquality)
}
toList shouldNot contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))
intercept[TestFailedException] {
(toList shouldNot contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))) (after being lowerCased and trimmed)
}
}
}
// negated with `shouldNot`, with parentheses
object `when used with shouldNot (contain theSameElementsAs (..))` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
toList shouldNot (contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))
val e1 = intercept[TestFailedException] {
toList shouldNot (contain theSameElementsAs (Set("happy", "birthday", "to", "you")))
}
e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (Resources.containedSameElements(decorateToStringValue(toList), decorateToStringValue(Set("happy", "birthday", "to", "you"))))
}
def `should use the implicit Equality in scope` {
implicit val ise = upperCaseStringEquality
toList shouldNot (contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))
intercept[TestFailedException] {
toList shouldNot (contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))
}
}
def `should use an explicitly provided Equality` {
(toList shouldNot (contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(toList shouldNot (contain theSameElementsAs (Set("HAPPY", "BIRTHDAY", "TO", "YOU")))) (decided by upperCaseStringEquality)
}
toList shouldNot (contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))
intercept[TestFailedException] {
(toList shouldNot (contain theSameElementsAs (Set(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))) (after being lowerCased and trimmed)
}
}
}
}
object `a col of Lists` {
val list1s: Vector[List[Int]] = Vector(List(3, 2, 1), List(3, 2, 1), List(3, 2, 1))
val lists: Vector[List[Int]] = Vector(List(3, 2, 1), List(3, 2, 1), List(4, 3, 2))
val listsNil: Vector[List[Int]] = Vector(List(3, 2, 1), List(3, 2, 1), Nil)
val hiLists: Vector[List[String]] = Vector(List("hi", "he"), List("hi", "he"), List("hi", "he"))
val toLists: Vector[List[String]] = Vector(List("to", "you"), List("to", "you"), List("to", "you"))
object `when used with contain theSameElementsAs (..)` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
all (list1s) should contain theSameElementsAs Set(1, 2, 3)
atLeast (2, lists) should contain theSameElementsAs Set(1, 2, 3)
atMost (2, lists) should contain theSameElementsAs Set(1, 2, 3)
no (lists) should contain theSameElementsAs Set(3, 4, 5)
val e1 = intercept[TestFailedException] {
all (lists) should contain theSameElementsAs Set(1, 2, 3)
}
e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, " + decorateToStringValue(List(4, 3, 2)) + " did not contain the same elements as " + decorateToStringValue(Set(1, 2, 3)) + " (ListShouldContainTheSameElementsAsSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
"in " + decorateToStringValue(lists)))
val e3 = intercept[TestFailedException] {
all (listsNil) should contain theSameElementsAs Set(1, 2, 3)
}
e3.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
e3.failedCodeLineNumber.get should be (thisLineNumber - 3)
e3.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, " + decorateToStringValue(Nil) + " did not contain the same elements as " + decorateToStringValue(Set(1, 2, 3)) + " (ListShouldContainTheSameElementsAsSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
"in " + decorateToStringValue(listsNil)))
}
def `should use the implicit Equality in scope` {
all (hiLists) should contain theSameElementsAs Set("he", "hi")
intercept[TestFailedException] {
all (hiLists) should contain theSameElementsAs Set("ho", "hi")
}
implicit val ise = upperCaseStringEquality
all (hiLists) should contain theSameElementsAs Set("HE", "HI")
intercept[TestFailedException] {
all (hiLists) should contain theSameElementsAs Set("HO", "HI")
}
}
def `should use an explicitly provided Equality` {
(all (hiLists) should contain theSameElementsAs Set("HE", "HI")) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(all (hiLists) should contain theSameElementsAs Set("HO", "HI")) (decided by upperCaseStringEquality)
}
implicit val ise = upperCaseStringEquality
(all (hiLists) should contain theSameElementsAs Set("he", "hi")) (decided by defaultEquality[String])
intercept[TestFailedException] {
(all (hiLists) should contain theSameElementsAs Set("ho", "hi")) (decided by defaultEquality[String])
}
}
}
object `when used with (contain theSameElementsAs (..))` {

  // FIX: the expected-message literals below previously used "\\n" (a literal
  // backslash followed by 'n'), which can never match the real inspection
  // message, whose lines are separated by actual newline characters.
  // The relative thisLineNumber offsets (-3, -5) depend on the exact line
  // layout of each intercept/assertion group; do not insert lines inside them.
  def `should do nothing if valid, else throw a TFE with an appropriate error message` {
    all (list1s) should (contain theSameElementsAs Set(1, 2, 3))
    atLeast (2, lists) should (contain theSameElementsAs Set(1, 2, 3))
    atMost (2, lists) should (contain theSameElementsAs Set(1, 2, 3))
    no (lists) should (contain theSameElementsAs Set(3, 4, 5))
    no (listsNil) should (contain theSameElementsAs Set(3, 4, 5))
    val e1 = intercept[TestFailedException] {
      all (lists) should (contain theSameElementsAs Set(1, 2, 3))
    }
    e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
    e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e1.message should be (Some("'all' inspection failed, because: \n" +
      " at index 2, " + decorateToStringValue(List(4, 3, 2)) + " did not contain the same elements as " + decorateToStringValue(Set(1, 2, 3)) + " (ListShouldContainTheSameElementsAsSpec.scala:" + (thisLineNumber - 5) + ") \n" +
      "in " + decorateToStringValue(lists)))
    val e4 = intercept[TestFailedException] {
      all (listsNil) should (contain theSameElementsAs Set(1, 2, 3))
    }
    e4.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
    e4.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e4.message should be (Some("'all' inspection failed, because: \n" +
      " at index 2, " + decorateToStringValue(Nil) + " did not contain the same elements as " + decorateToStringValue(Set(1, 2, 3)) + " (ListShouldContainTheSameElementsAsSpec.scala:" + (thisLineNumber - 5) + ") \n" +
      "in " + decorateToStringValue(listsNil)))
  }

  def `should use the implicit Equality in scope` {
    all (hiLists) should (contain theSameElementsAs Set("he", "hi"))
    intercept[TestFailedException] {
      all (hiLists) should (contain theSameElementsAs Set("ho", "hi"))
    }
    implicit val ise = upperCaseStringEquality
    all (hiLists) should (contain theSameElementsAs Set("HE", "HI"))
    intercept[TestFailedException] {
      all (hiLists) should (contain theSameElementsAs Set("HO", "HI"))
    }
  }

  def `should use an explicitly provided Equality` {
    (all (hiLists) should (contain theSameElementsAs Set("HE", "HI"))) (decided by upperCaseStringEquality)
    intercept[TestFailedException] {
      (all (hiLists) should (contain theSameElementsAs Set("HO", "HI"))) (decided by upperCaseStringEquality)
    }
    implicit val ise = upperCaseStringEquality
    (all (hiLists) should (contain theSameElementsAs Set("he", "hi"))) (decided by defaultEquality[String])
    intercept[TestFailedException] {
      (all (hiLists) should (contain theSameElementsAs Set("ho", "hi"))) (decided by defaultEquality[String])
    }
  }
}
object `when used with not contain theSameElementsAs (..)` {

  // FIX: "\\n" -> "\n" so the expected text matches the real multi-line message.
  def `should do nothing if valid, else throw a TFE with an appropriate error message` {
    all (toLists) should not contain theSameElementsAs (Set("fee", "fie", "foe", "fum"))
    val e1 = intercept[TestFailedException] {
      all (toLists) should not contain theSameElementsAs (Set("you", "to"))
    }
    e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
    e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e1.message should be (Some("'all' inspection failed, because: \n" +
      " at index 0, " + decorateToStringValue(List("to", "you")) + " contained the same elements as " + decorateToStringValue(Set("you", "to")) + " (ListShouldContainTheSameElementsAsSpec.scala:" + (thisLineNumber - 5) + ") \n" +
      "in " + decorateToStringValue(toLists)))
  }

  def `should use the implicit Equality in scope` {
    implicit val ise = upperCaseStringEquality
    all (toLists) should not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU"))
    intercept[TestFailedException] {
      all (toLists) should not contain theSameElementsAs (Set("YOU", "TO"))
    }
  }

  def `should use an explicitly provided Equality` {
    (all (toLists) should not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU"))) (decided by upperCaseStringEquality)
    intercept[TestFailedException] {
      (all (toLists) should not contain theSameElementsAs (Set("YOU", "TO"))) (decided by upperCaseStringEquality)
    }
    all (toLists) should not contain theSameElementsAs (Set(" YOU ", " TO "))
    intercept[TestFailedException] {
      (all (toLists) should not contain theSameElementsAs (Set(" YOU ", " TO "))) (after being lowerCased and trimmed)
    }
  }
}
object `when used with (not contain theSameElementsAs (..))` {

  // FIX: "\\n" -> "\n" so the expected text matches the real multi-line message.
  def `should do nothing if valid, else throw a TFE with an appropriate error message` {
    all (toLists) should (not contain theSameElementsAs (Set("fee", "fie", "foe", "fum")))
    val e1 = intercept[TestFailedException] {
      all (toLists) should (not contain theSameElementsAs (Set("you", "to")))
    }
    e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
    e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e1.message should be (Some("'all' inspection failed, because: \n" +
      " at index 0, " + decorateToStringValue(List("to", "you")) + " contained the same elements as " + decorateToStringValue(Set("you", "to")) + " (ListShouldContainTheSameElementsAsSpec.scala:" + (thisLineNumber - 5) + ") \n" +
      "in " + decorateToStringValue(toLists)))
  }

  def `should use the implicit Equality in scope` {
    implicit val ise = upperCaseStringEquality
    all (toLists) should (not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))
    intercept[TestFailedException] {
      all (toLists) should (not contain theSameElementsAs (Set("YOU", "TO")))
    }
  }

  def `should use an explicitly provided Equality` {
    (all (toLists) should (not contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))) (decided by upperCaseStringEquality)
    intercept[TestFailedException] {
      (all (toLists) should (not contain theSameElementsAs (Set("YOU", "TO")))) (decided by upperCaseStringEquality)
    }
    all (toLists) should (not contain theSameElementsAs (Set(" YOU ", " TO ")))
    intercept[TestFailedException] {
      (all (toLists) should (not contain theSameElementsAs (Set(" YOU ", " TO ")))) (after being lowerCased and trimmed)
    }
  }
}
object `when used with shouldNot contain theSameElementsAs (..)` {

  // FIX: "\\n" -> "\n" so the expected text matches the real multi-line message.
  def `should do nothing if valid, else throw a TFE with an appropriate error message` {
    all (toLists) shouldNot contain theSameElementsAs (Set("fee", "fie", "foe", "fum"))
    val e1 = intercept[TestFailedException] {
      all (toLists) shouldNot contain theSameElementsAs (Set("you", "to"))
    }
    e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
    e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e1.message should be (Some("'all' inspection failed, because: \n" +
      " at index 0, " + decorateToStringValue(List("to", "you")) + " contained the same elements as " + decorateToStringValue(Set("you", "to")) + " (ListShouldContainTheSameElementsAsSpec.scala:" + (thisLineNumber - 5) + ") \n" +
      "in " + decorateToStringValue(toLists)))
  }

  def `should use the implicit Equality in scope` {
    implicit val ise = upperCaseStringEquality
    all (toLists) shouldNot contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU"))
    intercept[TestFailedException] {
      all (toLists) shouldNot contain theSameElementsAs (Set("YOU", "TO"))
    }
  }

  def `should use an explicitly provided Equality` {
    (all (toLists) shouldNot contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU"))) (decided by upperCaseStringEquality)
    intercept[TestFailedException] {
      (all (toLists) shouldNot contain theSameElementsAs (Set("YOU", "TO"))) (decided by upperCaseStringEquality)
    }
    all (toLists) shouldNot contain theSameElementsAs (Set(" YOU ", " TO "))
    intercept[TestFailedException] {
      (all (toLists) shouldNot contain theSameElementsAs (Set(" YOU ", " TO "))) (after being lowerCased and trimmed)
    }
  }
}
object `when used with shouldNot (contain theSameElementsAs (..))` {

  // FIX: "\\n" -> "\n" so the expected text matches the real multi-line message.
  def `should do nothing if valid, else throw a TFE with an appropriate error message` {
    all (toLists) shouldNot (contain theSameElementsAs (Set("fee", "fie", "foe", "fum")))
    val e1 = intercept[TestFailedException] {
      all (toLists) shouldNot (contain theSameElementsAs (Set("you", "to")))
    }
    e1.failedCodeFileName.get should be ("ListShouldContainTheSameElementsAsSpec.scala")
    e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e1.message should be (Some("'all' inspection failed, because: \n" +
      " at index 0, " + decorateToStringValue(List("to", "you")) + " contained the same elements as " + decorateToStringValue(Set("you", "to")) + " (ListShouldContainTheSameElementsAsSpec.scala:" + (thisLineNumber - 5) + ") \n" +
      "in " + decorateToStringValue(toLists)))
  }

  def `should use the implicit Equality in scope` {
    implicit val ise = upperCaseStringEquality
    all (toLists) shouldNot (contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))
    intercept[TestFailedException] {
      all (toLists) shouldNot (contain theSameElementsAs (Set("YOU", "TO")))
    }
  }

  def `should use an explicitly provided Equality` {
    (all (toLists) shouldNot (contain theSameElementsAs (Set("NICE", "TO", "MEET", "YOU")))) (decided by upperCaseStringEquality)
    intercept[TestFailedException] {
      (all (toLists) shouldNot (contain theSameElementsAs (Set("YOU", "TO")))) (decided by upperCaseStringEquality)
    }
    all (toLists) shouldNot (contain theSameElementsAs (Set(" YOU ", " TO ")))
    intercept[TestFailedException] {
      (all (toLists) shouldNot (contain theSameElementsAs (Set(" YOU ", " TO ")))) (after being lowerCased and trimmed)
    }
  }
}
}
}
| rahulkavale/scalatest | scalatest-test/src/test/scala/org/scalatest/ListShouldContainTheSameElementsAsSpec.scala | Scala | apache-2.0 | 26,573 |
/**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
/**
* Author: Omer van Kloeten (https://github.com/omervk)
* Jörn Franke <zuinnote@gmail.com>
**/
package org.zuinnote.spark.bitcoin
import org.zuinnote.hadoop.bitcoin.format.common._
import scala.collection.JavaConverters._
package object model {
/** Maps a hadoopcryptoledger transaction-input bean onto the Scala [[Input]] model. */
private def toInput(input: BitcoinTransactionInput): Input =
  Input(
    input.getPrevTransactionHash,
    input.getPreviousTxOutIndex,
    input.getTxInScriptLength,
    input.getTxInScript,
    input.getSeqNo
  )
/** Maps a hadoopcryptoledger transaction-output bean onto the Scala [[Output]] model. */
private def toOutput(output: BitcoinTransactionOutput): Output =
  Output(
    output.getValue,
    output.getTxOutScriptLength,
    output.getTxOutScript
  )
/** Enriches the Java [[BitcoinTransaction]] bean with conversions into the Scala model. */
implicit class FromJavaTransaction(val transaction: BitcoinTransaction) extends AnyVal {

  private def toScriptWitnessItem(item: BitcoinScriptWitnessItem): ScriptWitnessItem = {
    ScriptWitnessItem(item.getStackItemCounter, item.getScriptWitnessList.asScala.map(toScriptWitness))
  }

  // Renamed from a misleading overload of `toScriptWitnessItem`: this converts a
  // single witness entry (BitcoinScriptWitness -> ScriptWitness), not a witness item.
  private def toScriptWitness(sw: BitcoinScriptWitness): ScriptWitness = {
    ScriptWitness(sw.getWitnessScriptLength, sw.getWitnessScript)
  }

  /** Plain Scala representation of the transaction. */
  def asScala: Transaction = {
    Transaction(
      transaction.getVersion,
      transaction.getMarker,
      transaction.getFlag,
      transaction.getInCounter,
      transaction.getOutCounter,
      transaction.getListOfInputs.asScala.map(toInput),
      transaction.getListOfOutputs.asScala.map(toOutput),
      transaction.getBitcoinScriptWitness.asScala.map(toScriptWitnessItem),
      transaction.getLockTime
    )
  }

  /** Same as [[asScala]], additionally computing and attaching the transaction hash. */
  def asScalaEnriched: EnrichedTransaction = {
    EnrichedTransaction(
      transaction.getVersion,
      transaction.getMarker,
      transaction.getFlag,
      transaction.getInCounter,
      transaction.getOutCounter,
      transaction.getListOfInputs.asScala.map(toInput),
      transaction.getListOfOutputs.asScala.map(toOutput),
      transaction.getBitcoinScriptWitness.asScala.map(toScriptWitnessItem),
      transaction.getLockTime,
      BitcoinUtil.getTransactionHash(transaction)
    )
  }

  /** Scala representation keyed by an externally supplied transaction hash. */
  def asScalaSingle(transactionHash: Array[Byte]): SingleTransaction = {
    SingleTransaction(
      transactionHash,
      transaction.getVersion,
      transaction.getMarker,
      transaction.getFlag,
      transaction.getInCounter,
      transaction.getOutCounter,
      transaction.getListOfInputs.asScala.map(toInput),
      transaction.getListOfOutputs.asScala.map(toOutput),
      transaction.getBitcoinScriptWitness.asScala.map(toScriptWitnessItem),
      transaction.getLockTime
    )
  }
}
/** Enriches the Java [[BitcoinAuxPOW]] bean with a conversion into the Scala model. */
implicit class FromJavaAuxPOW(val auxPOW: BitcoinAuxPOW) extends AnyVal {
  def asScala: AuxPOW = {
    val coinbaseTransaction = CoinbaseTransaction(
      auxPOW.getCoinbaseTransaction.getVersion,
      auxPOW.getCoinbaseTransaction.getInCounter,
      auxPOW.getCoinbaseTransaction.getOutCounter,
      auxPOW.getCoinbaseTransaction.getListOfInputs.asScala.map(toInput),
      auxPOW.getCoinbaseTransaction.getListOfOutputs.asScala.map(toOutput),
      auxPOW.getCoinbaseTransaction.getLockTime
    )
    val coinbaseBranch = CoinbaseBranch(
      auxPOW.getCoinbaseBranch.getNumberOfLinks,
      auxPOW.getCoinbaseBranch.getLinks.asScala,
      auxPOW.getCoinbaseBranch.getBranchSideBitmask
    )
    val auxBlockChainBranch = AuxBlockChainBranch(
      auxPOW.getAuxBlockChainBranch.getNumberOfLinks,
      auxPOW.getAuxBlockChainBranch.getLinks.asScala,
      // FIX: previously read the bitmask from getCoinbaseBranch (copy-paste bug);
      // all three fields must come from the aux blockchain branch.
      auxPOW.getAuxBlockChainBranch.getBranchSideBitmask
    )
    val parentBlockHeader = ParentBlockHeader(
      auxPOW.getParentBlockHeader.getVersion,
      auxPOW.getParentBlockHeader.getPreviousBlockHash,
      auxPOW.getParentBlockHeader.getMerkleRoot,
      auxPOW.getParentBlockHeader.getTime,
      auxPOW.getParentBlockHeader.getBits,
      auxPOW.getParentBlockHeader.getNonce
    )
    AuxPOW(
      auxPOW.getVersion,
      coinbaseTransaction,
      auxPOW.getParentBlockHeaderHash,
      coinbaseBranch,
      auxBlockChainBranch,
      parentBlockHeader
    )
  }
}
}
| ZuInnoTe/spark-hadoopcryptoledger-ds | src/main/scala/org/zuinnote/spark/bitcoin/model/package.scala | Scala | apache-2.0 | 4,766 |
package offGridOrcs
import scala.scalajs.js
import org.scalajs.dom
import org.scalajs.dom.html
final case class SimpleCanvas(element: html.Canvas, context: dom.CanvasRenderingContext2D, imageData: dom.ImageData) {

  /** Copies an RGB float buffer (three [0, 1] components per pixel) into the canvas,
    * expanding each pixel to opaque RGBA, then blits the image data at the origin. */
  def drawPixels(colorBuffer: js.typedarray.Float64Array): Unit = {
    element.style.cursor = "none"
    element.style.backgroundImage = "none"
    val output = imageData.data
    val pixelCount = colorBuffer.length / 3
    var pixel = 0
    while (pixel < pixelCount) {
      val src = pixel * 3
      val dst = pixel * 4
      output.update(dst + 0, (colorBuffer(src + 0) * 255).toInt)
      output.update(dst + 1, (colorBuffer(src + 1) * 255).toInt)
      output.update(dst + 2, (colorBuffer(src + 2) * 255).toInt)
      output.update(dst + 3, 255) // alpha: fully opaque
      pixel += 1
    }
    context.putImageData(imageData, 0, 0)
  }

  /** Clears the canvas and shows the title-screen bitmap as a CSS background. */
  def drawTitle(): Unit = {
    element.style.cursor = "pointer"
    context.clearRect(0, 0, Dimensions.LowRez, Dimensions.LowRez)
    element.style.backgroundImage = s"url(${BitmapLibrary.TitleScreen})"
  }
}
object SimpleCanvas {
  /** Creates the low-resolution game canvas: a square canvas of Dimensions.LowRez
    * logical pixels, CSS-scaled up with nearest-neighbor filtering, appended to
    * the document body. Returns the canvas together with its 2D context and a
    * reusable ImageData buffer covering the whole surface. */
  def createLowRez(): SimpleCanvas = {
    val canvasSize = Dimensions.LowRez.toInt
    // Each logical pixel is displayed as a 6x6 CSS-pixel block (64 * 6 px total).
    val elementSize = s"${64 * 6}px"
    val document = dom.document
    val element = document.createElement("canvas").asInstanceOf[html.Canvas]
    element.width = canvasSize
    element.height = canvasSize
    element.style.width = elementSize
    element.style.height = elementSize
    element.style.backgroundSize = s"$elementSize $elementSize"
    // https://builtvisible.com/image-scaling-in-css/
    // Vendor-prefixed variants so upscaling stays blocky (no smoothing) across browsers.
    val pixelatedVariants = Seq(
      ("-ms-interpolation-mode", "nearest-neighbor"), // IE 7+ (non-standard property)
      ("image-rendering", "-webkit-optimize-contrast"), // Safari 6, UC Browser 9.9
      ("image-rendering", "-webkit-crisp-edges"), // Safari 7+
      ("image-rendering", "-moz-crisp-edges"), // Firefox 3.6+
      ("image-rendering", "-o-crisp-edges"), // Opera 12
      ("image-rendering", "pixelated") // Chrome 41+ and Opera 26+
    )
    pixelatedVariants.foreach(
      Function.tupled(element.style.setProperty(_, _)))
    document.body.appendChild(element)
    val context = element.getContext("2d").asInstanceOf[dom.CanvasRenderingContext2D]
    val imageData = context.getImageData(0, 0, Dimensions.LowRez, Dimensions.LowRez)
    SimpleCanvas(element, context, imageData)
  }
}
| dcecile/off-grid-orcs | src/SimpleCanvas.scala | Scala | mit | 2,327 |
package com.bigchange.log
import org.apache.log4j.{Logger, PropertyConfigurator}
/**
* Created by C.J.YOU on 2016/1/15.
* 打log日志的类需要继承此trait
* spark 有实现对应的log -> org.apache.spark.internal.Logging
*/
trait CLogger extends Serializable {
  // PropertyConfigurator.configure("/home/telecom/conf/log4j.properties")

  // Logger name: the fully-qualified class name of the mixing-in class.
  val loggerName = this.getClass.getName

  // Lazy so the logger is created on first use, after `loggerName` is initialized.
  lazy val logger = Logger.getLogger(loggerName)

  /** Loads log4j configuration from the given properties-file path. */
  def logConfigure(path: String) = PropertyConfigurator.configure(path)

  def debug(msg: String) = logger.debug(msg)

  def info(msg: String) = logger.info(msg)

  def warn(msg: String) = logger.warn(msg + "<<<<==============")

  def error(msg: String) = logger.error(msg)

  /** Logs an exception with its full stack trace.
    * FIX: previously passed `e.getStackTrace` (an array), which log4j renders as a
    * useless array reference; passing the throwable itself logs the real trace. */
  def exception(e: Exception) = logger.error(e.getMessage, e)

  /**
   * Logs a warning in the custom "file[line]:message" format.
   * FIX: log4j 1.x `Logger.warn` has no "{}" parameterized-message overload, so the
   * message is now built explicitly via string interpolation.
   * @param msg the message
   */
  def warnLog(msg: String) {
    val info = logFileInfo
    logger.warn(s"${info._1}[${info._2}]:$msg")
  }

  /** Logs an error in the custom "file[line]:message" format (see [[warnLog]]). */
  def errorLog(msg: String) {
    val info = logFileInfo
    logger.error(s"${info._1}[${info._2}]:$msg")
  }

  /**
   * File name and line number of this method's direct caller.
   * NOTE(review): stack index 2 resolves to the immediate caller; when invoked
   * through warnLog/errorLog this yields their location inside CLogger rather
   * than the user call site — confirm the intended usage before changing.
   * @return (file name, line number)
   */
  def logFileInfo: (String, String) = (Thread.currentThread.getStackTrace()(2).getFileName, Thread.currentThread.getStackTrace()(2).getLineNumber.toString)
}
| bigchange/AI | src/main/scala/com/bigchange/log/CLogger.scala | Scala | apache-2.0 | 1,343 |
package io.vamp.common.akka
import akka.actor.{Actor, ActorLogging}
import io.vamp.common.notification.NotificationProvider
/** Convenience stack of mixins for Vamp actors: Akka logging, actor support,
  * future/notification helpers, the actor's dispatcher as execution context,
  * and notification publishing. Mix in instead of repeating the traits. */
trait CommonSupportForActors
  extends Actor
  with ActorLogging
  with ActorSupportForActors
  with FutureSupportNotification
  with ActorExecutionContextProvider
  with NotificationProvider
| BanzaiMan/vamp-common | src/main/scala/io/vamp/common/akka/CommonSupportForActors.scala | Scala | apache-2.0 | 319 |
package model.dao
import javax.inject.Inject
import model.Product
import play.api.db.slick.{DatabaseConfigProvider, HasDatabaseConfigProvider}
import slick.driver.JdbcProfile
import slick.lifted.{ProvenShape, TableQuery}
//import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{ExecutionContext, Future}
/**
* Created by lukasz on 05.08.16.
*/
/** Slick-backed data-access object for the `products` table. */
class ProductDAO @Inject()(protected val dbConfigProvider: DatabaseConfigProvider, implicit val executionContext: ExecutionContext)
  extends HasDatabaseConfigProvider[JdbcProfile] {

  import driver.api._

  /** Row mapping for the `products` table. */
  class ProductTable(tag: Tag) extends Table[Product](tag, "products") {
    def ean = column[Long]("ean", O.PrimaryKey, O.AutoInc)
    def name = column[String]("name")
    def desc = column[String]("descr")
    def active = column[Boolean]("active_prod")
    def * : ProvenShape[Product] = (ean, name, desc, active) <> (Product.tupled, Product.unapply)
  }

  lazy val Products = TableQuery[ProductTable]

  // Discards a statement's result, keeping only its completion.
  private def discardResult(f: Future[_]): Future[Unit] = f.map(_ => ())

  /** Products with the given EAN (primary key, so at most one row). */
  def findByEan(ean: Long): Future[Seq[Product]] = {
    val byEan = Products.filter(_.ean === ean)
    db.run(byEan.result)
  }

  /** Products with exactly the given name. */
  def findByName(name: String): Future[Seq[Product]] = {
    val byName = Products.filter(_.name === name)
    db.run(byName.result)
  }

  /** All products, sorted by name ascending. */
  def findAll(): Future[Seq[Product]] = {
    val alphabetical = Products.sortBy(_.name.asc)
    db.run(alphabetical.result)
  }

  /** Inserts a product row. */
  def insert(product: Product): Future[Unit] = discardResult(db.run(Products += product))

  /** Deletes any product rows with the given EAN. */
  def delete(ean: Long): Future[Unit] = discardResult(db.run(Products.filter(_.ean === ean).delete))
}
| lszku/ProductDatabase | app/model/dao/ProductDAO.scala | Scala | bsd-3-clause | 1,491 |
package services
import drt.shared.CrunchApi.MillisSinceEpoch
import org.slf4j.Logger
/** Times a named action and logs the duration: WARN above `threshold` milliseconds,
  * DEBUG otherwise. */
case class TimeLogger(actionName: String, threshold: MillisSinceEpoch, logger: Logger) {
  /**
   * Evaluates `action`, logs how long it took, and returns its result.
   * FIX: uses the monotonic `System.nanoTime` clock for the elapsed-time
   * measurement; `System.currentTimeMillis` can jump under NTP/wall-clock
   * adjustments and yield negative or wildly wrong durations.
   */
  def time[R](action: => R): R = {
    val startNanos = System.nanoTime()
    val result = action
    val timeTaken = (System.nanoTime() - startNanos) / 1000000L
    val message = s"$actionName took ${timeTaken}ms"
    if (timeTaken > threshold)
      logger.warn(message)
    else
      logger.debug(message)
    result
  }
}
| UKHomeOffice/drt-scalajs-spa-exploration | server/src/main/scala/services/TimeLogger.scala | Scala | apache-2.0 | 510 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.netty
import scala.util.Random
import org.mockito.Mockito.mock
import org.scalatest._
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.network.BlockDataManager
/** Tests port binding behaviour of NettyBlockTransferService: random ports,
  * specific ports, and the retry-with-increment behaviour when a port is taken. */
class NettyBlockTransferServiceSuite
  extends SparkFunSuite
  with BeforeAndAfterEach
  with ShouldMatchers {

  // Services under test; created per test case and torn down in afterEach.
  private var service0: NettyBlockTransferService = _
  private var service1: NettyBlockTransferService = _

  // Close whatever services the test created so their ports are released.
  override def afterEach() {
    try {
      if (service0 != null) {
        service0.close()
        service0 = null
      }
      if (service1 != null) {
        service1.close()
        service1 = null
      }
    } finally {
      super.afterEach()
    }
  }

  test("can bind to a random port") {
    service0 = createService(port = 0)
    service0.port should not be 0
  }

  test("can bind to two random ports") {
    service0 = createService(port = 0)
    service1 = createService(port = 0)
    service0.port should not be service1.port
  }

  test("can bind to a specific port") {
    // Randomize the requested port to reduce collisions across concurrent test runs.
    val port = 17634 + Random.nextInt(10000)
    logInfo("random port for test: " + port)
    service0 = createService(port)
    verifyServicePort(expectedPort = port, actualPort = service0.port)
  }

  test("can bind to a specific port twice and the second increments") {
    val port = 17634 + Random.nextInt(10000)
    logInfo("random port for test: " + port)
    service0 = createService(port)
    verifyServicePort(expectedPort = port, actualPort = service0.port)
    service1 = createService(service0.port)
    // `service0.port` is occupied, so `service1.port` should not be `service0.port`
    verifyServicePort(expectedPort = service0.port + 1, actualPort = service1.port)
  }

  // Accepts a small range above the requested port because other processes may
  // occupy ports while tests run in parallel.
  private def verifyServicePort(expectedPort: Int, actualPort: Int): Unit = {
    actualPort should be >= expectedPort
    // avoid testing equality in case of simultaneous tests
    actualPort should be <= (expectedPort + 10)
  }

  // Builds a service bound to `port` (0 = ephemeral) with a mocked block manager.
  private def createService(port: Int): NettyBlockTransferService = {
    val conf = new SparkConf()
      .set("spark.app.id", s"test-${getClass.getName}")
      .set("spark.blockManager.port", port.toString)
    val securityManager = new SecurityManager(conf)
    val blockDataManager = mock(classOf[BlockDataManager])
    val service = new NettyBlockTransferService(conf, securityManager, "localhost", numCores = 1)
    service.init(blockDataManager)
    service
  }
}
| gioenn/xSpark | core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala | Scala | apache-2.0 | 3,245 |
package akka.rtcweb.protocol.sdp.renderer
import java.net.InetSocketAddress
import akka.parboiled2.util.Base64
import akka.rtcweb.protocol.sdp.MediaTransportProtocol._
import akka.rtcweb.protocol.sdp._
import akka.util.ByteString
/** Renders an SDP [[SessionDescription]] (RFC 4566) into wire format, one implicit
  * [[Renderer]] per field type. Line renderers emit their own prefix ("v=", "o=", …)
  * and trailing CRLF unless noted otherwise. */
trait SessionDescriptionRenderer {

  import Renderer._
  import Rendering._

  /** Absent optional string fields are rendered as the SDP placeholder "-". */
  implicit val optionRenderer = Renderer.optionRenderer[String, String]("-")

  implicit val protocolVersionRenderer = new Renderer[ProtocolVersion] {
    override def render[R <: Rendering](r: R, value: ProtocolVersion): r.type =
      r ~ s"v=${value.value}" ~ CRLF
  }

  /** Media-type token for "m=" lines (no prefix, no CRLF). */
  implicit val mediaRenderer = new Renderer[Media] {
    override def render[R <: Rendering](r: R, value: Media): r.type = r ~ (value match {
      case Media.application => "application"
      case Media.audio => "audio"
      case Media.video => "video"
      case Media.text => "text"
      case Media.message => "message"
      case CustomMedia(name) => name
    })
  }

  /** "port" or "port/range" token for "m=" lines. */
  implicit val portRangeRenderer = new Renderer[PortRange] {
    override def render[R <: Rendering](r: R, value: PortRange): r.type = r ~ (value match {
      case PortRange(port, Some(range)) => s"$port/$range"
      case PortRange(port, None) => s"$port"
    })
  }

  implicit val mtpRenderer = Renderer.stringRenderer[MediaTransportProtocol] {
    case MediaTransportProtocol.udp => "udp"
    case `RTP/AVP` => "RTP/AVP"
    case `RTP/SAVP` => "RTP/SAVP"
    case `RTP/SAVPF` => "RTP/SAVPF"
    case `UDP/TLS/RTP/SAVP` => "UDP/TLS/RTP/SAVP"
    case `UDP/TLS/RTP/SAVPF` => "UDP/TLS/RTP/SAVPF"
    case SCTP => "SCTP"
    case `SCTP/DTLS` => "SCTP/DTLS"
    case `DTLS/SCTP` => "DTLS/SCTP"
    case `UDP/DTLS/SCTP` => "UDP/DTLS/SCTP"
    case `TCP/DTLS/SCTP` => "TCP/DTLS/SCTP"
  }

  implicit val nettypeRenderer = Renderer.stringRenderer[NetworkType] {
    case NetworkType.IN => "IN"
  }

  implicit val addressTypeRenderer = Renderer.stringRenderer[AddressType] {
    case AddressType.IP4 => "IP4"
    case AddressType.IP6 => "IP6"
  }

  implicit val inetSocketAddressRenderer = Renderer.stringRenderer[InetSocketAddress](_.getHostName)

  /** Full "k=" line content including the prefix; callers append CRLF. */
  implicit val encryptionKeyRenderer = Renderer.stringRenderer[EncryptionKey] {
    case ClearEncryptionKey(key) => s"k=clear:$key"
    case Base64EncryptionKey(bytes) => "k=base64:" + Base64.rfc2045().encodeToString(bytes, false)
    case UriEncryptionKey(uri) => s"k=uri:$uri"
    case PromptEncryptionKey => "k=prompt"
  }

  implicit val connectionDataRenderer: Renderer[ConnectionData] = new Renderer[ConnectionData] {
    override def render[R <: Rendering](r: R, value: ConnectionData): r.type = value match {
      case ConnectionData(networkType, addrType, connectionAddress) =>
        r ~ "c=" ~ networkType ~ SP ~ addrType ~ SP ~ connectionAddress ~ CRLF
    }
  }

  implicit val bandwidthTypeRenderer = stringRenderer[BandwidthType] {
    case BandwidthType.AS => "AS"
    case BandwidthType.CT => "CT"
    case BandwidthType.RS => "RS"
    case BandwidthType.RR => "RR"
    case BandwidthType.Experimental(name) => name
  }

  implicit val bandwidthInformationRenderer = new Renderer[BandwidthInformation] {
    override def render[R <: Rendering](r: R, value: BandwidthInformation): r.type = value match {
      case BandwidthInformation(bwtype, bw) =>
        r ~ "b=" ~ bwtype ~ ':' ~ bw ~ CRLF
    }
  }

  implicit val originRenderer = originRendererMaker

  // FIXME: repeat-interval fields are not rendered yet; only the bare prefix is emitted.
  implicit val repeatTimesRenderer: Renderer[RepeatTimes] = new Renderer[RepeatTimes] {
    override def render[R <: Rendering](r: R, value: RepeatTimes): r.type =
      r ~ "r=" ~ /* FIXME */ CRLF
  }

  // FIX: zone adjustments use the "z=" prefix (RFC 4566 section 5.11), not "t=".
  // FIXME: the adjustment fields themselves are still not rendered.
  implicit val timeZoneAdjustmentRenderer: Renderer[TimeZoneAdjustment] = new Renderer[TimeZoneAdjustment] {
    override def render[R <: Rendering](r: R, value: TimeZoneAdjustment): r.type =
      r ~ "z=" ~ /* FIXME */ CRLF
  }

  implicit val timingRenderer = new Renderer[Timing] {
    override def render[R <: Rendering](r: R, value: Timing): r.type = value match {
      case (Timing(startTime, stopTime, repeatings, zoneAdjustments)) =>
        // Unset start/stop times default to 0 per RFC 4566 section 5.9.
        r ~ "t=" ~ startTime.getOrElse(0L) ~ SP ~ stopTime.getOrElse(0L) ~ CRLF
        if (repeatings.isDefined) r ~ repeatings.get
        zoneAdjustments.foreach(r ~ _)
        r
    }
  }

  implicit val attributeRenderer = new Renderer[Attribute] {
    override def render[R <: Rendering](r: R, renderee: Attribute): r.type = renderee match {
      case PropertyAttribute(key) => r ~ s"a=$key" ~ CRLF
      case ValueAttribute(key, value) => r ~ s"a=$key:$value" ~ CRLF
      case ea: ExtensionAttribute => r ~ "a="; renderAttributeExtensions(r, ea) ~ CRLF
    }
  }

  implicit val mediaDescriptionRenderer = makeMediaDescriptionRenderer

  implicit val sessionDescriptionRenderer = new Renderer[SessionDescription] {
    override def render[R <: Rendering](r: R, s: SessionDescription): r.type = {
      // Session-level lines in RFC 4566 order: v, o, s, i, u, e, p, c, b, t, k, a, m.
      r ~ s.protocolVersion ~ s.origin
      r ~ "s=" ~ s.sessionName ~ CRLF
      s.sessionInformation.foreach(r ~ "i=" ~ _ ~ CRLF)
      s.descriptionUri.foreach(r ~ "u=" ~ _ ~ CRLF)
      s.emailAddresses.foreach(r ~ "e=" ~ _ ~ CRLF)
      s.phoneNumbers.foreach(r ~ "p=" ~ _ ~ CRLF)
      s.connectionInformation.foreach(r ~ _)
      s.bandwidthInformation.foreach(r ~ _)
      s.timings.foreach(r ~ _)
      s.encryptionKey.foreach(r ~ _ ~ CRLF)
      s.sessionAttributes.foreach(r ~ _)
      s.mediaDescriptions.foreach(r ~ _)
      r
    }
  }

  /** Hook for rendering protocol-extension attribute content (after the "a=" prefix). */
  def renderAttributeExtensions[R <: Rendering](r: R, renderee: ExtensionAttribute): r.type

  private def originRendererMaker(implicit nettypeRenderer: Renderer[NetworkType]): Renderer[Origin] = {
    new Renderer[Origin] {
      override def render[R <: Rendering](r: R, o: Origin): r.type = {
        r ~ "o=" ~ o.username ~ SP
        r ~ o.`sess-id` ~ SP ~ o.`sess-version` ~ SP ~ o.nettype ~ SP ~ o.addrtype ~ SP ~ o.`unicast-address` ~ CRLF
      }
    }
  }

  private def makeMediaDescriptionRenderer(implicit mediaRenderer: Renderer[Media],
    portRangeRenderer: Renderer[PortRange],
    mtpRenderer: Renderer[MediaTransportProtocol],
    connectionDataRenderer: Renderer[ConnectionData],
    encryptionKeyRenderer: Renderer[EncryptionKey]): Renderer[MediaDescription] = new Renderer[MediaDescription] {
    override def render[R <: Rendering](r: R, value: MediaDescription): r.type = value match {
      case MediaDescription(media, mediaTitle, portRange, protocol, mediaAttributes, fmts, connectionInformation, bandwidthInformation, encryptionKey) =>
        r ~ "m=" ~ media ~ SP ~ portRange ~ SP ~ protocol
        fmts.foreach(r ~ SP ~ _)
        r ~ CRLF
        mediaTitle.foreach(r ~ "i=" ~ _ ~ CRLF)
        connectionInformation.foreach(r ~ _)
        bandwidthInformation.foreach(r ~ _)
        // FIX: the media-level "k=" line was destructured but never rendered
        // (RFC 4566 section 5.12 places k= after b= and before a=).
        // NOTE(review): assumes encryptionKey is an Option, mirroring the
        // session-level field — confirm against the MediaDescription model.
        encryptionKey.foreach(r ~ _ ~ CRLF)
        mediaAttributes.foreach(r ~ _)
        r
    }
  }
}
| danielwegener/akka-rtcweb | src/main/scala/akka/rtcweb/protocol/sdp/renderer/SessionDescriptionRenderer.scala | Scala | apache-2.0 | 6,752 |
package io.getquill.context.sql
import io.getquill.Spec
import io.getquill.WrappedValue
case class Id(value: Long) extends AnyVal with WrappedValue[Long]
/** Shared fixture for SQL-context product tests: the Product entity, quoted
  * queries for insert/lookup, and sample rows. Quoted blocks are quill macros;
  * they are expanded at compile time against the concrete context. */
trait ProductSpec extends Spec {

  val context: SqlContext[_, _]

  import context._

  // Test entity; `id` is expected to be database-generated (inserted as 0).
  case class Product(id: Long, description: String, sku: Long)

  val product = quote {
    query[Product]
  }

  // Insert returning the generated id.
  val productInsert = quote {
    (p: Product) => query[Product].insert(p).returning(_.id)
  }

  // Batch variant of productInsert over a query of products.
  val productInsertBatch = quote {
    (b: Query[Product]) => b.foreach(p => productInsert.apply(p))
  }

  def productById = quote {
    (id: Long) => product.filter(_.id == id)
  }

  // Sample rows; ids are placeholders replaced by the database on insert.
  val productEntries = List(
    Product(0L, "Notebook", 1001L),
    Product(0L, "Soap", 1002L),
    Product(0L, "Pencil", 1003L)
  )

  // Column-wise single insert returning the generated id.
  val productSingleInsert = quote {
    product.insert(_.id -> 0, _.description -> "Window", _.sku -> 1004L).returning(_.id)
  }
}
| jcranky/quill | quill-sql/src/test/scala/io/getquill/context/sql/ProductSpec.scala | Scala | apache-2.0 | 902 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets that meet specific criteria, giving a basic overview of the dataset's contents without deeper analysis.