code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.connector.evaluate
import slamdata.Predef.StringContext
import quasar.api.resource.ResourcePath
import monocle.macros.Lenses
import cats.{Apply, Eq, Eval, NonEmptyTraverse, Order, Show}
import cats.implicits._
import shims.{equalToCats, orderToCats, showToCats}
/** A value of type `A` tagged with the [[ResourcePath]] it was loaded from. */
@Lenses
final case class Source[A](path: ResourcePath, src: A) {
  /** Transform the carried value while leaving the path untouched. */
  def map[B](f: A => B): Source[B] =
    copy(src = f(src))
}
// Companion object: places the typeclass instances below into implicit scope for Source.
object Source extends SourceInstances
/**
 * Primary typeclass instances for [[Source]]: Order, Show, and NonEmptyTraverse.
 * Extends SourceInstances0 so that the weaker Eq instance has lower implicit priority.
 */
sealed abstract class SourceInstances extends SourceInstances0 {
  // Order lexicographically by (path, value), reusing the tuple Order from cats.
  implicit def sourceOrder[A: Order]: Order[Source[A]] =
    Order.by {
      case Source(p, a) => (p, a)
    }
  implicit def sourceShow[A: Show]: Show[Source[A]] =
    Show show {
      case Source(p, a) => s"Source(${p.show}, ${a.show})"
    }
  // Source carries exactly one value, so every fold/reduction below is trivial:
  // it just applies the supplied function to the single `src` element.
  implicit val sourceNonEmptyTraverse: NonEmptyTraverse[Source] =
    new NonEmptyTraverse[Source] {
      override def map[A, B](sa: Source[A])(f: A => B) =
        sa map f
      def nonEmptyTraverse[F[_]: Apply, A, B](fa: Source[A])(f: A => F[B]) =
        f(fa.src) map (Source(fa.path, _))
      def foldLeft[A, B](fa: Source[A], b: B)(f: (B, A) => B): B =
        f(b, fa.src)
      def foldRight[A, B](fa: Source[A], lb: Eval[B])(f: (A, Eval[B]) => Eval[B]): Eval[B] =
        f(fa.src, lb)
      def reduceLeftTo[A, B](fa: Source[A])(f: A => B)(g: (B, A) => B): B =
        f(fa.src)
      def reduceRightTo[A, B](fa: Source[A])(f: A => B)(g: (A, Eval[B]) => Eval[B]): Eval[B] =
        Eval.now(f(fa.src))
    }
}
/** Lower-priority instances: structural Eq for when only Eq[A] (not Order[A]) is available. */
sealed abstract class SourceInstances0 {
  implicit def sourceEq[A: Eq]: Eq[Source[A]] =
    Eq by {
      case Source(p, a) => (p, a)
    }
}
| djspiewak/quasar | connector/src/main/scala/quasar/connector/evaluate/Source.scala | Scala | apache-2.0 | 2,234 |
package misc.utils
import scala.annotation.tailrec
/**
 * Small collection of numeric helpers: parity tests, decimal-digit extraction,
 * polynomial evaluation, and a palindrome check over arbitrary sequences.
 *
 * Several helpers come in three flavours: an `Int`-only version, a generic
 * `Integral` version (`*T` suffix), and a `@specialized` generic version that
 * avoids boxing for primitive element types.
 */
trait NumericUtils {

  /** Parity test specialised to `Int`. */
  def isEvenInt(x: Int): Boolean = (x & 1) == 0

  /** Parity test for any Integral type (non-specialized; boxes primitives). */
  def isEvenT[T](x: T)(implicit int: Integral[T]): Boolean =
    int.equiv(int.rem(x, int.fromInt(2)), int.zero)

  /** Parity test for any Integral type; `@specialized` avoids boxing of primitives. */
  def isEven[@specialized T](x: T)(implicit int: Integral[T]): Boolean =
    int.equiv(int.rem(x, int.fromInt(2)), int.zero)

  def isOddT[T: Integral](x: T) = !isEvenT(x)

  def isOdd[@specialized T: Integral](x: T) = !isEven(x)

  /**
   * Decimal digits of a non-negative Int via its string form.
   * NOTE: throws NumberFormatException for negative input (the '-' character).
   */
  def digitsNaive(x: Int): List[Int] =
    x.toString.toList.map(_.toString.toInt)

  /** Decimal digits of an Int, most significant first; `digitsInt(0) == List(0)`. */
  def digitsInt(x: Int): List[Int] = {
    @tailrec def go(n: Int, acc: List[Int]): List[Int] =
      if (n != 0) go(n / 10, (n % 10).abs :: acc)
      else if (acc.isEmpty) n :: acc // input was 0: yield List(0)
      else acc
    go(x, Nil)
  }

  /** Generic-digit extraction over any Integral type (non-specialized). */
  def digitsT[T](x: T)(implicit int: Integral[T]): List[Int] = {
    import int._
    val base = fromInt(10) // hoisted divisor; no measurable effect in this variant
    @tailrec def go(n: T, acc: List[Int]): List[Int] =
      if (!(n equiv zero)) go(n / base, (n % base).abs.toInt :: acc)
      else if (acc.isEmpty) n.toInt :: acc
      else acc
    go(x, Nil)
  }

  /** Specialized digit extraction; hoisting `base` here roughly doubles throughput. */
  def digits[@specialized T](x: T)(implicit int: Integral[T]): List[Int] = {
    import int._
    val base = fromInt(10)
    @tailrec def go(n: T, acc: List[Int]): List[Int] =
      if (!(n equiv zero)) go(n / base, (n % base).abs.toInt :: acc)
      else if (acc.isEmpty) n.toInt :: acc
      else acc
    go(x, Nil)
  }

  /**
   * Evaluate the polynomial whose coefficient for x^i is `coeffs`' i-th element.
   * With the default base of 10 this reassembles a number from least-significant-first digits.
   */
  def poly(coeffs: Iterable[Int], x: Int = 10): Double = {
    val terms = coeffs.zipWithIndex
    terms.map { case (digit, index) => digit * Math.pow(x, index) }.sum
  }

  /** True when the sequence reads the same forwards and backwards (middle element of odd-length input ignored). */
  def isPalindrome[A](xs: Seq[A]): Boolean = {
    val n = xs.size
    val (front, back) = xs.splitAt(n / 2)
    val mirrored = if (isOdd(n)) back.tail else back
    front.zip(mirrored.reverse).forall { case (l, r) => l == r }
  }
}
// Module instance exposing every NumericUtils helper without requiring a mixin.
object NumericUtils extends NumericUtils
| 4e6/sandbox | scala/core/src/main/scala/misc/utils/NumericUtils.scala | Scala | mit | 1,894 |
package predict4s
package sgp
package ref
import org.scalatest.FunSuite
import org.scalactic.TolerantNumerics
import org.scalactic.Equality
import predict4s.coord.SGP72Constants
import predict4s.sgp._
import predict4s.coord.SGPElems
import predict4s.coord.SGPElemsConversions
/* Near earth examples from Vallado's */
//trait NearTLEs extends TLE00005 with TLE06251 with TLE28057 {
// def tles = List(tle00005, tle06251, tle28057)
// def lines = List(lines00005,lines06251,lines28057)
//}
/**
 * Shared fixture for near-earth SGP4 regression tests: pairs the three
 * reference TLEs with their propagators and propagation epochs. Concrete
 * suites supply the propagator list and an implementation label.
 */
trait ValladoNearTLEsTest extends TLE00005 with TLE06251 with TLE28057 with ValladoNearTLEsCheck[Double] with ValladoNearTLEsPVCheck[Double] { self : FunSuite =>
  implicit val wgs = SGP72Constants.tleDoubleConstants
  // Propagators for all TLEs
  def propags : List[SGP4Vallado[Double]]
  // Human-readable label of the SGP4 implementation under test, used in test names.
  def sgpImpl : String
  val tles = List(tle00005,tle06251,tle28057)
  // Zip each propagator with its TLE and epochs, then propagate at every epoch.
  def results = (propags,tles, tlesTimes).zipped.toList map { p => // tuple of propagator, tle and the propagation times
    val sgp4 = p._1 ; val tle = p._2; val times = p._3
    val res = for (t <- times) yield (sgp4.propagate(t), t)
    (sgp4, res, tle)
  }
}
/**
 * Regression suite: propagates three near-earth TLEs with the Vallado SGP4
 * implementation and compares both the intermediate quantities and the final
 * position/velocity vectors against Vallado's C++ reference output, using a
 * tolerant Double equality of 1e-9.
 */
class HardcodedValladoCheck extends FunSuite with TLE00005 with TLE06251 with TLE28057 with ValladoNearTLEsCheck[Double] with ValladoNearTLEsPVCheck[Double] {
  val wgs = SGP72Constants.tleDoubleConstants
  // Absolute tolerance used for all floating-point comparisons below.
  val toMinus9 : Equality[Double]= TolerantNumerics.tolerantDoubleEquality(1E-9)
  import spire.std.any.DoubleAlgebra
  val tles = List(tle00005,tle06251,tle28057)
  def propags : List[SGP4Vallado[Double]] = tles map {tle =>
    import spire.std.any.DoubleAlgebra
    SGP4Vallado.build[Double](tle, wgs).get
  }
  def sgpImpl : String = "Vallado SGP4"
  val sgps = propags
  def sgp00005 = sgps(0)
  def sgp06251 = sgps(1)
  def sgp28057 = sgps(2)
  // Propagate each TLE at its reference epochs; `.get` assumes propagation never fails here.
  val results00005 = for (t <- times00005) yield Sgp4ValladoResult(sgps(0), sgp00005.propagate(t).get, tle00005, t)
  val results06251 = for (t <- times06251) yield Sgp4ValladoResult(sgps(1), sgp06251.propagate(t).get, tle06251, t)
  val results28057 = for (t <- times28057) yield Sgp4ValladoResult(sgps(2), sgp28057.propagate(t).get, tle28057, t)
  test(s"${sgpImpl}: compare Intermediate Propagation Results with Vallado's cpp implementation for near TLEs") {
    // call the checks for the corresponding result
    check00005 zip results00005 foreach { p => p._1(p._2)(toMinus9) }
    check06251 zip results06251 foreach { p => p._1(p._2)(toMinus9) }
    check28057 zip results28057 foreach { p => p._1(p._2)(toMinus9) }
  }
  test(s"${sgpImpl}: compare Position/Velocity Propagation Results with Vallado's cpp implementation for near TLEs") {
    // call the checks for the corresponding result
    pvCheck00005 zip results00005 foreach { p => p._1(p._2)(toMinus9) }
    pvCheck06251 zip results06251 foreach { p => p._1(p._2)(toMinus9) }
    pvCheck28057 zip results28057 foreach { p => p._1(p._2)(toMinus9) }
  }
}
| pleira/SGP4Extensions | tests/src/test/scala/predict4s/sgp/ref/ValladoTest.scala | Scala | apache-2.0 | 2,945 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.util.concurrent.{ConcurrentHashMap, ConcurrentLinkedQueue}
import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.concurrent.duration._
import org.scalatest.BeforeAndAfterEach
import org.scalatest.concurrent.Eventually._
import org.apache.spark._
import org.apache.spark.deploy.DeployMessages.{DecommissionWorkers, MasterStateResponse, RequestMasterState}
import org.apache.spark.deploy.master.{ApplicationInfo, Master, WorkerInfo}
import org.apache.spark.deploy.worker.Worker
import org.apache.spark.internal.{config, Logging}
import org.apache.spark.network.TransportContext
import org.apache.spark.network.netty.SparkTransportConf
import org.apache.spark.network.shuffle.ExternalBlockHandler
import org.apache.spark.rpc.{RpcAddress, RpcEnv}
import org.apache.spark.scheduler._
import org.apache.spark.shuffle.FetchFailedException
import org.apache.spark.storage.BlockManagerId
import org.apache.spark.util.Utils
/**
 * End-to-end suite for standalone-mode worker decommissioning. Each test spins
 * up a real Master and several Workers on local RpcEnvs, runs a Spark job, and
 * decommissions (then kills) workers mid-job, asserting that:
 *  - task failures caused by decommissioned workers do not fail the job,
 *  - shuffle output lost with a decommissioned worker is regenerated, and
 *  - fetch failures after decommissioning trigger stage re-runs.
 *
 * NOTE(review): all workers run on "localhost", so host-level operations
 * (e.g. unregistering outputs on a host) affect every worker in the test.
 */
class DecommissionWorkerSuite
  extends SparkFunSuite
  with Logging
  with LocalSparkContext
  with BeforeAndAfterEach {

  private var masterAndWorkerConf: SparkConf = null
  private var masterAndWorkerSecurityManager: SecurityManager = null
  private var masterRpcEnv: RpcEnv = null
  private var master: Master = null
  // Maps worker id -> its RpcEnv so a worker can be killed by shutting down its env.
  private var workerIdToRpcEnvs: mutable.HashMap[String, RpcEnv] = null
  private var workers: mutable.ArrayBuffer[Worker] = null

  override def beforeEach(): Unit = {
    super.beforeEach()
    masterAndWorkerConf = new SparkConf()
      .set(config.DECOMMISSION_ENABLED, true)
    masterAndWorkerSecurityManager = new SecurityManager(masterAndWorkerConf)
    masterRpcEnv = RpcEnv.create(
      Master.SYSTEM_NAME,
      "localhost",
      0,
      masterAndWorkerConf,
      masterAndWorkerSecurityManager)
    master = makeMaster()
    workerIdToRpcEnvs = mutable.HashMap.empty
    workers = mutable.ArrayBuffer.empty
  }

  override def afterEach(): Unit = {
    try {
      masterRpcEnv.shutdown()
      workerIdToRpcEnvs.values.foreach(_.shutdown())
      workerIdToRpcEnvs.clear()
      master.stop()
      workers.foreach(_.stop())
      workers.clear()
      masterRpcEnv = null
    } finally {
      super.afterEach()
    }
  }

  // Unlike TestUtils.withListener, it also waits for the job to be done
  def withListener(sc: SparkContext, listener: RootStageAwareListener)
      (body: SparkListener => Unit): Unit = {
    sc.addSparkListener(listener)
    try {
      body(listener)
      sc.listenerBus.waitUntilEmpty()
      listener.waitForJobDone()
    } finally {
      sc.listenerBus.removeListener(listener)
    }
  }

  test("decommission workers should not result in job failure") {
    val maxTaskFailures = 2
    val numTimesToKillWorkers = maxTaskFailures + 1
    val numWorkers = numTimesToKillWorkers + 1
    createWorkers(numWorkers)
    // Here we will have a single task job and we will keep decommissioning (and killing) the
    // worker running that task K times. Where K is more than the maxTaskFailures. Since the worker
    // is notified of the decommissioning, the task failures can be ignored and not fail
    // the job.
    sc = createSparkContext(config.TASK_MAX_FAILURES.key -> maxTaskFailures.toString)
    val executorIdToWorkerInfo = getExecutorToWorkerAssignments
    val taskIdsKilled = new ConcurrentHashMap[Long, Boolean]
    val listener = new RootStageAwareListener {
      override def handleRootTaskStart(taskStart: SparkListenerTaskStart): Unit = {
        val taskInfo = taskStart.taskInfo
        if (taskIdsKilled.size() < numTimesToKillWorkers) {
          val workerInfo = executorIdToWorkerInfo(taskInfo.executorId)
          decommissionWorkerOnMaster(workerInfo, "partition 0 must die")
          killWorkerAfterTimeout(workerInfo, 1)
          taskIdsKilled.put(taskInfo.taskId, true)
        }
      }
    }
    withListener(sc, listener) { _ =>
      val jobResult = sc.parallelize(1 to 1, 1).map { _ =>
        Thread.sleep(5 * 1000L); 1
      }.count()
      assert(jobResult === 1)
    }
    // single task job that gets to run numTimesToKillWorkers + 1 times.
    assert(listener.getTasksFinished().size === numTimesToKillWorkers + 1)
    listener.rootTasksStarted.asScala.foreach { taskInfo =>
      assert(taskInfo.index == 0, s"Unknown task index ${taskInfo.index}")
    }
    listener.rootTasksEnded.asScala.foreach { taskInfo =>
      assert(taskInfo.index === 0, s"Expected task index ${taskInfo.index} to be 0")
      // If a task has been killed then it shouldn't be successful
      val taskSuccessExpected = !taskIdsKilled.getOrDefault(taskInfo.taskId, false)
      val taskSuccessActual = taskInfo.successful
      assert(taskSuccessActual === taskSuccessExpected,
        s"Expected task success $taskSuccessActual == $taskSuccessExpected")
    }
  }

  test("decommission workers ensure that shuffle output is regenerated even with shuffle service") {
    createWorkers(2)
    val ss = new ExternalShuffleServiceHolder()
    sc = createSparkContext(
      config.Tests.TEST_NO_STAGE_RETRY.key -> "true",
      config.SHUFFLE_MANAGER.key -> "sort",
      config.SHUFFLE_SERVICE_ENABLED.key -> "true",
      config.SHUFFLE_SERVICE_PORT.key -> ss.getPort.toString
    )
    TestUtils.waitUntilExecutorsUp(sc, 2, 60000)
    // Here we will create a 2 stage job: The first stage will have two tasks and the second stage
    // will have one task. The two tasks in the first stage will be long and short. We decommission
    // and kill the worker after the short task is done. Eventually the driver should get the
    // executor lost signal for the short task executor. This should trigger regenerating
    // the shuffle output since we cleanly decommissioned the executor, despite running with an
    // external shuffle service.
    try {
      val executorIdToWorkerInfo = getExecutorToWorkerAssignments
      val workerForTask0Decommissioned = new AtomicBoolean(false)
      // single task job
      val listener = new RootStageAwareListener {
        override def handleRootTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
          val taskInfo = taskEnd.taskInfo
          if (taskInfo.index == 0) {
            // compareAndSet guards against decommissioning twice if task 0 is retried.
            if (workerForTask0Decommissioned.compareAndSet(false, true)) {
              val workerInfo = executorIdToWorkerInfo(taskInfo.executorId)
              decommissionWorkerOnMaster(workerInfo, "Kill early done map worker")
              killWorkerAfterTimeout(workerInfo, 0)
              logInfo(s"Killed the node ${workerInfo.hostPort} that was running the early task")
            }
          }
        }
      }
      withListener(sc, listener) { _ =>
        val jobResult = sc.parallelize(1 to 2, 2).mapPartitionsWithIndex((pid, _) => {
          val sleepTimeSeconds = if (pid == 0) 1 else 10
          Thread.sleep(sleepTimeSeconds * 1000L)
          List(1).iterator
        }, preservesPartitioning = true).repartition(1).sum()
        assert(jobResult === 2)
      }
      val tasksSeen = listener.getTasksFinished()
      // 4 tasks: 2 from first stage, one retry due to decom, one more from the second stage.
      assert(tasksSeen.size === 4, s"Expected 4 tasks but got $tasksSeen")
      listener.rootTasksStarted.asScala.foreach { taskInfo =>
        assert(taskInfo.index <= 1, s"Expected ${taskInfo.index} <= 1")
        assert(taskInfo.successful, s"Task ${taskInfo.index} should be successful")
      }
      val tasksEnded = listener.rootTasksEnded.asScala
      tasksEnded.filter(_.index != 0).foreach { taskInfo =>
        assert(taskInfo.attemptNumber === 0, "2nd task should succeed on 1st attempt")
      }
      val firstTaskAttempts = tasksEnded.filter(_.index == 0)
      assert(firstTaskAttempts.size > 1, s"Task 0 should have multiple attempts")
    } finally {
      ss.close()
    }
  }

  // Shared body for the two fetch-failure tests below; `initialSleepMillis` controls
  // how eagerly the executor decommissioning kicks in.
  def testFetchFailures(initialSleepMillis: Int): Unit = {
    createWorkers(2)
    sc = createSparkContext(
      config.Tests.TEST_NO_STAGE_RETRY.key -> "false",
      "spark.test.executor.decommission.initial.sleep.millis" -> initialSleepMillis.toString,
      config.UNREGISTER_OUTPUT_ON_HOST_ON_FETCH_FAILURE.key -> "true")
    TestUtils.waitUntilExecutorsUp(sc, 2, 60000)
    val executorIdToWorkerInfo = getExecutorToWorkerAssignments
    val executorToDecom = executorIdToWorkerInfo.keysIterator.next
    // The task code below cannot call executorIdToWorkerInfo, so we need to pre-compute
    // the worker to decom to force it to be serialized into the task.
    val workerToDecom = executorIdToWorkerInfo(executorToDecom)
    // The setup of this job is similar to the one above: 2 stage job with first stage having
    // long and short tasks. Except that we want the shuffle output to be regenerated on a
    // fetch failure instead of an executor lost. Since it is hard to "trigger a fetch failure",
    // we manually raise the FetchFailed exception when the 2nd stage's task runs and require that
    // fetch failure to trigger a recomputation.
    logInfo(s"Will try to decommission the task running on executor $executorToDecom")
    val listener = new RootStageAwareListener {
      override def handleRootTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
        val taskInfo = taskEnd.taskInfo
        if (taskInfo.executorId == executorToDecom && taskInfo.attemptNumber == 0 &&
            taskEnd.stageAttemptId == 0 && taskEnd.stageId == 0) {
          decommissionWorkerOnMaster(workerToDecom,
            "decommission worker after task on it is done")
        }
      }
    }
    withListener(sc, listener) { _ =>
      val jobResult = sc.parallelize(1 to 2, 2).mapPartitionsWithIndex((_, _) => {
        val executorId = SparkEnv.get.executorId
        val context = TaskContext.get()
        // Only sleep in the first attempt to create the required window for decommissioning.
        // Subsequent attempts don't need to be delayed to speed up the test.
        if (context.attemptNumber() == 0 && context.stageAttemptNumber() == 0) {
          val sleepTimeSeconds = if (executorId == executorToDecom) 10 else 1
          Thread.sleep(sleepTimeSeconds * 1000L)
        }
        List(1).iterator
      }, preservesPartitioning = true)
        .repartition(1).mapPartitions(iter => {
          val context = TaskContext.get()
          if (context.attemptNumber == 0 && context.stageAttemptNumber() == 0) {
            // Wait a bit for the decommissioning to be triggered in the listener
            Thread.sleep(5000)
            // MapIndex is explicitly -1 to force the entire host to be decommissioned
            // However, this will cause both the tasks in the preceding stage since the host here is
            // "localhost" (shortcoming of this single-machine unit test in that all the workers
            // are actually on the same host)
            throw new FetchFailedException(BlockManagerId(executorToDecom,
              workerToDecom.host, workerToDecom.port), 0, 0, -1, 0, "Forcing fetch failure")
          }
          val sumVal: List[Int] = List(iter.sum)
          sumVal.iterator
        }, preservesPartitioning = true)
        .sum()
      assert(jobResult === 2)
    }
    // 6 tasks: 2 from first stage, 2 rerun again from first stage, 2nd stage attempt 1 and 2.
    val tasksSeen = listener.getTasksFinished()
    assert(tasksSeen.size === 6, s"Expected 6 tasks but got $tasksSeen")
  }

  test("decommission stalled workers ensure that fetch failures lead to rerun") {
    testFetchFailures(3600 * 1000)
  }

  test("decommission eager workers ensure that fetch failures lead to rerun") {
    testFetchFailures(0)
  }

  /**
   * Listener that records the root (parent-less) stage of the job and exposes
   * hooks for its task start/end events, plus a way to wait for job completion.
   */
  private abstract class RootStageAwareListener extends SparkListener {
    private var rootStageId: Option[Int] = None
    private val tasksFinished = new ConcurrentLinkedQueue[String]()
    private val jobDone = new AtomicBoolean(false)
    val rootTasksStarted = new ConcurrentLinkedQueue[TaskInfo]()
    val rootTasksEnded = new ConcurrentLinkedQueue[TaskInfo]()

    protected def isRootStageId(stageId: Int): Boolean =
      (rootStageId.isDefined && rootStageId.get == stageId)

    override def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted): Unit = {
      // The first stage submitted with no parents is treated as the root stage.
      if (stageSubmitted.stageInfo.parentIds.isEmpty && rootStageId.isEmpty) {
        rootStageId = Some(stageSubmitted.stageInfo.stageId)
      }
    }

    override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = {
      jobEnd.jobResult match {
        case JobSucceeded => jobDone.set(true)
        case JobFailed(exception) => logError(s"Job failed", exception)
      }
    }

    // Subclasses override these to react to root-stage task lifecycle events.
    protected def handleRootTaskEnd(end: SparkListenerTaskEnd) = {}
    protected def handleRootTaskStart(start: SparkListenerTaskStart) = {}

    private def getSignature(taskInfo: TaskInfo, stageId: Int, stageAttemptId: Int):
        String = {
      s"${stageId}:${stageAttemptId}:" +
        s"${taskInfo.index}:${taskInfo.attemptNumber}-${taskInfo.status}"
    }

    override def onTaskStart(taskStart: SparkListenerTaskStart): Unit = {
      val signature = getSignature(taskStart.taskInfo, taskStart.stageId, taskStart.stageAttemptId)
      logInfo(s"Task started: $signature")
      if (isRootStageId(taskStart.stageId)) {
        rootTasksStarted.add(taskStart.taskInfo)
        handleRootTaskStart(taskStart)
      }
    }

    override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
      val taskSignature = getSignature(taskEnd.taskInfo, taskEnd.stageId, taskEnd.stageAttemptId)
      logInfo(s"Task End $taskSignature")
      tasksFinished.add(taskSignature)
      if (isRootStageId(taskEnd.stageId)) {
        rootTasksEnded.add(taskEnd.taskInfo)
        handleRootTaskEnd(taskEnd)
      }
    }

    def getTasksFinished(): Seq[String] = {
      tasksFinished.asScala.toList
    }

    def waitForJobDone(): Unit = {
      eventually(timeout(10.seconds), interval(100.milliseconds)) {
        assert(jobDone.get(), "Job isn't successfully done yet")
      }
    }
  }

  // Builds a map from executor id to the WorkerInfo hosting it, asserting the
  // one-executor-per-worker invariant the tests rely on.
  private def getExecutorToWorkerAssignments: Map[String, WorkerInfo] = {
    val executorIdToWorkerInfo = mutable.HashMap[String, WorkerInfo]()
    master.workers.foreach { wi =>
      assert(wi.executors.size <= 1, "There should be at most one executor per worker")
      // Cast the executorId to string since the TaskInfo.executorId is a string
      wi.executors.values.foreach { e =>
        val executorIdString = e.id.toString
        val oldWorkerInfo = executorIdToWorkerInfo.put(executorIdString, wi)
        assert(oldWorkerInfo.isEmpty,
          s"Executor $executorIdString already present on another worker ${oldWorkerInfo}")
      }
    }
    executorIdToWorkerInfo.toMap
  }

  private def makeMaster(): Master = {
    val master = new Master(
      masterRpcEnv,
      masterRpcEnv.address,
      0,
      masterAndWorkerSecurityManager,
      masterAndWorkerConf)
    masterRpcEnv.setupEndpoint(Master.ENDPOINT_NAME, master)
    master
  }

  // Starts `numWorkers` local workers and blocks until all have registered with the master.
  private def createWorkers(numWorkers: Int, cores: Int = 1, memory: Int = 1024): Unit = {
    val workerRpcEnvs = (0 until numWorkers).map { i =>
      RpcEnv.create(
        Worker.SYSTEM_NAME + i,
        "localhost",
        0,
        masterAndWorkerConf,
        masterAndWorkerSecurityManager)
    }
    workers.clear()
    val rpcAddressToRpcEnv: mutable.HashMap[RpcAddress, RpcEnv] = mutable.HashMap.empty
    workerRpcEnvs.foreach { rpcEnv =>
      val workDir = Utils.createTempDir(namePrefix = this.getClass.getSimpleName()).toString
      val worker = new Worker(rpcEnv, 0, cores, memory, Array(masterRpcEnv.address),
        Worker.ENDPOINT_NAME, workDir, masterAndWorkerConf, masterAndWorkerSecurityManager)
      rpcEnv.setupEndpoint(Worker.ENDPOINT_NAME, worker)
      workers.append(worker)
      val oldRpcEnv = rpcAddressToRpcEnv.put(rpcEnv.address, rpcEnv)
      logInfo(s"Created a worker at ${rpcEnv.address} with workdir $workDir")
      assert(oldRpcEnv.isEmpty, s"Detected duplicate rpcEnv ${oldRpcEnv} for ${rpcEnv.address}")
    }
    workerIdToRpcEnvs.clear()
    // Wait until all workers register with master successfully
    eventually(timeout(1.minute), interval(1.seconds)) {
      val workersOnMaster = getMasterState.workers
      val numWorkersCurrently = workersOnMaster.length
      logInfo(s"Waiting for $numWorkers workers to come up: So far $numWorkersCurrently")
      assert(numWorkersCurrently === numWorkers)
      workersOnMaster.foreach { workerInfo =>
        val rpcAddress = RpcAddress(workerInfo.host, workerInfo.port)
        val rpcEnv = rpcAddressToRpcEnv(rpcAddress)
        assert(rpcEnv != null, s"Cannot find the worker for $rpcAddress")
        val oldRpcEnv = workerIdToRpcEnvs.put(workerInfo.id, rpcEnv)
        assert(oldRpcEnv.isEmpty, s"Detected duplicate rpcEnv ${oldRpcEnv} for worker " +
          s"${workerInfo.id}")
      }
    }
    logInfo(s"Created ${workers.size} workers")
  }

  private def getMasterState: MasterStateResponse = {
    master.self.askSync[MasterStateResponse](RequestMasterState)
  }

  private def getApplications(): Seq[ApplicationInfo] = {
    getMasterState.activeApps
  }

  def decommissionWorkerOnMaster(workerInfo: WorkerInfo, reason: String): Unit = {
    logInfo(s"Trying to decommission worker ${workerInfo.id} for reason `$reason`")
    master.self.send(DecommissionWorkers(Seq(workerInfo.id)))
  }

  // "Kills" a worker by shutting down its RpcEnv after the given delay.
  def killWorkerAfterTimeout(workerInfo: WorkerInfo, secondsToWait: Int): Unit = {
    val env = workerIdToRpcEnvs(workerInfo.id)
    Thread.sleep(secondsToWait * 1000L)
    env.shutdown()
    env.awaitTermination()
  }

  // Creates a SparkContext against the local master and waits for the app to register.
  def createSparkContext(extraConfs: (String, String)*): SparkContext = {
    val conf = new SparkConf()
      .setMaster(masterRpcEnv.address.toSparkURL)
      .setAppName("test")
      .setAll(extraConfs)
    sc = new SparkContext(conf)
    val appId = sc.applicationId
    eventually(timeout(1.minute), interval(1.seconds)) {
      val apps = getApplications()
      assert(apps.size === 1)
      assert(apps.head.id === appId)
      assert(apps.head.getExecutorLimit === Int.MaxValue)
    }
    sc
  }

  private class ExternalShuffleServiceHolder() {
    // The external shuffle service can start with default configs and not get polluted by the
    // other configs used in this test.
    private val transportConf = SparkTransportConf.fromSparkConf(new SparkConf(),
      "shuffle", numUsableCores = 2)
    private val rpcHandler = new ExternalBlockHandler(transportConf, null)
    private val transportContext = new TransportContext(transportConf, rpcHandler)
    private val server = transportContext.createServer()

    def getPort: Int = server.getPort

    def close(): Unit = {
      // Close each resource independently so one failure doesn't leak the others.
      Utils.tryLogNonFatalError {
        server.close()
      }
      Utils.tryLogNonFatalError {
        rpcHandler.close()
      }
      Utils.tryLogNonFatalError {
        transportContext.close()
      }
    }
  }
}
| maropu/spark | core/src/test/scala/org/apache/spark/deploy/DecommissionWorkerSuite.scala | Scala | apache-2.0 | 19,714 |
package org.http4s.build
import sbt._, Keys._
import com.typesafe.tools.mima.plugin.MimaPlugin, MimaPlugin.autoImport._
import sbtrelease._
import sbtrelease.ReleasePlugin.autoImport._
import scala.util.Properties.envOrNone
import verizon.build.RigPlugin, RigPlugin._
/**
 * sbt AutoPlugin carrying the http4s build conventions: release versioning,
 * Scala/Typelevel compiler selection, MiMa binary-compatibility targets, and
 * the catalogue of dependency module IDs shared across subprojects.
 */
object Http4sPlugin extends AutoPlugin {
  object autoImport {
    val http4sMimaVersion = settingKey[Option[String]]("Version to target for MiMa compatibility")
  }
  import autoImport._

  // Activate automatically on every project that also has Rig and MiMa.
  override def trigger = allRequirements
  override def requires = RigPlugin && MimaPlugin

  override lazy val projectSettings: Seq[Setting[_]] = Seq(
    // Override rig's default of the Travis build number being the bugfix number
    releaseVersion := { ver =>
      Version(ver).map(_.withoutQualifier.string).getOrElse(versionFormatError)
    },
    // CI environment variables win over the default Typelevel Scala version.
    scalaVersion := (sys.env.get("TRAVIS_SCALA_VERSION") orElse sys.env.get("SCALA_VERSION") getOrElse "2.12.2-bin-typelevel-4"),
    scalaOrganization := {
      CrossVersion.partialVersion(scalaVersion.value) match {
        case Some((2, n)) if n >= 11 => "org.typelevel"
        case _ => "org.scala-lang"
      }
    },
    scalacOptions in Compile ++= Seq(
      "-Yno-adapted-args", // Curiously missing from RigPlugin
      "-Ypartial-unification" // Needed on 2.11 for Either, good idea in general
    ) ++ {
      // https://issues.scala-lang.org/browse/SI-8340
      CrossVersion.partialVersion(scalaVersion.value) match {
        case Some((2, n)) if n >= 11 => Seq("-Ywarn-numeric-widen")
        case _ => Seq.empty
      }
    },
    // Only patch releases (x.y.z with z > 0) are checked against x.y.0 for binary compatibility.
    http4sMimaVersion := {
      version.value match {
        case VersionNumber(Seq(major, minor, patch), _, _) if patch.toInt > 0 =>
          Some(s"${major}.${minor}.0")
        case _ =>
          None
      }
    },
    mimaFailOnProblem := http4sMimaVersion.value.isDefined,
    mimaPreviousArtifacts := (http4sMimaVersion.value map {
      organization.value % s"${moduleName.value}_${scalaBinaryVersion.value}" % _
    }).toSet
  )

  // Extracts (major, minor) from a version string; throws MatchError on malformed input.
  def extractApiVersion(version: String) = {
    val VersionExtractor = """(\\d+)\\.(\\d+)\\..*""".r
    version match {
      case VersionExtractor(major, minor) => (major.toInt, minor.toInt)
    }
  }

  /**
   * @return the version we want to document, for example in tuts,
   * given the version being built.
   *
   * For snapshots after a stable release, return the previous stable
   * release. For snapshots of 0.16.0 and 0.17.0, return the latest
   * milestone. Otherwise, just return the current version. Favors
   * scalaz-7.2 "a" versions for 0.15.x and 0.16.x.
   */
  def docExampleVersion(currentVersion: String) = {
    val MilestoneVersionExtractor = """(0).(16|17).(0)a?-SNAPSHOT""".r
    val latestMilestone = "M1"
    val VersionExtractor = """(\\d+)\\.(\\d+)\\.(\\d+).*""".r
    currentVersion match {
      case MilestoneVersionExtractor(major, minor, patch) if minor.toInt == 16 =>
        s"${major.toInt}.${minor.toInt}.${patch.toInt}a-$latestMilestone" // scalaz-7.2 for 0.16.x
      case MilestoneVersionExtractor(major, minor, patch) =>
        s"${major.toInt}.${minor.toInt}.${patch.toInt}-$latestMilestone"
      case VersionExtractor(major, minor, patch) if minor.toInt == 15 =>
        s"${major.toInt}.${minor.toInt}.${patch.toInt - 1}a" // scalaz-7.2 for 0.15.x
      case VersionExtractor(major, minor, patch) if patch.toInt > 0 =>
        s"${major.toInt}.${minor.toInt}.${patch.toInt - 1}"
      case _ =>
        currentVersion
    }
  }

  val macroParadiseSetting =
    libraryDependencies += compilerPlugin("org.scalamacros" % "paradise" % "2.1.0" cross CrossVersion.patch)

  // Shared dependency catalogue. Related artifacts reuse `.revision` of a primary
  // module so version bumps only need to touch one place.
  lazy val alpnBoot = "org.mortbay.jetty.alpn" % "alpn-boot" % "8.1.11.v20170118"
  lazy val argonaut = "io.argonaut" %% "argonaut" % "6.2"
  lazy val asyncHttpClient = "org.asynchttpclient" % "async-http-client" % "2.0.33"
  lazy val blaze = "org.http4s" %% "blaze-http" % "0.12.6"
  lazy val catsKernelLaws = "org.typelevel" %% "cats-kernel-laws" % catsLaws.revision
  lazy val catsLaws = "org.typelevel" %% "cats-laws" % "0.9.0"
  lazy val circeGeneric = "io.circe" %% "circe-generic" % circeJawn.revision
  lazy val circeJawn = "io.circe" %% "circe-jawn" % "0.8.0"
  lazy val circeLiteral = "io.circe" %% "circe-literal" % circeJawn.revision
  lazy val circeParser = "io.circe" %% "circe-parser" % circeJawn.revision
  lazy val cryptobits = "org.reactormonk" %% "cryptobits" % "1.1"
  lazy val discipline = "org.typelevel" %% "discipline" % "0.7.3"
  lazy val fs2Cats = "co.fs2" %% "fs2-cats" % "0.3.0"
  lazy val fs2Io = "co.fs2" %% "fs2-io" % "0.9.5"
  lazy val fs2ReactiveStreams = "com.github.zainab-ali" %% "fs2-reactive-streams" % "0.1.0"
  lazy val gatlingTest = "io.gatling" % "gatling-test-framework" % "2.2.5"
  lazy val gatlingHighCharts = "io.gatling.highcharts" % "gatling-charts-highcharts" % gatlingTest.revision
  lazy val http4sWebsocket = "org.http4s" %% "http4s-websocket" % "0.2.0"
  lazy val javaxServletApi = "javax.servlet" % "javax.servlet-api" % "3.1.0"
  lazy val jawnJson4s = "org.spire-math" %% "jawn-json4s" % "0.10.4"
  lazy val jawnFs2 = "org.http4s" %% "jawn-fs2" % "0.10.1"
  lazy val jettyServer = "org.eclipse.jetty" % "jetty-server" % "9.4.6.v20170531"
  lazy val jettyServlet = "org.eclipse.jetty" % "jetty-servlet" % jettyServer.revision
  lazy val json4sCore = "org.json4s" %% "json4s-core" % "3.5.3"
  lazy val json4sJackson = "org.json4s" %% "json4s-jackson" % json4sCore.revision
  lazy val json4sNative = "org.json4s" %% "json4s-native" % json4sCore.revision
  lazy val jspApi = "javax.servlet.jsp" % "javax.servlet.jsp-api" % "2.3.1" // YourKit hack
  lazy val log4s = "org.log4s" %% "log4s" % "1.3.6"
  lazy val logbackClassic = "ch.qos.logback" % "logback-classic" % "1.2.3"
  lazy val macroCompat = "org.typelevel" %% "macro-compat" % "1.1.1"
  lazy val metricsCore = "io.dropwizard.metrics" % "metrics-core" % "3.2.3"
  lazy val metricsJson = "io.dropwizard.metrics" % "metrics-json" % metricsCore.revision
  lazy val quasiquotes = "org.scalamacros" %% "quasiquotes" % "2.1.0"
  lazy val scalacheck = "org.scalacheck" %% "scalacheck" % "1.13.5"
  def scalaCompiler(so: String, sv: String) = so % "scala-compiler" % sv
  def scalaReflect(so: String, sv: String) = so % "scala-reflect" % sv
  lazy val scalaXml = "org.scala-lang.modules" %% "scala-xml" % "1.0.6"
  lazy val scodecBits = "org.scodec" %% "scodec-bits" % "1.1.4"
  lazy val specs2Core = "org.specs2" %% "specs2-core" % "3.8.6"
  lazy val specs2MatcherExtra = "org.specs2" %% "specs2-matcher-extra" % specs2Core.revision
  lazy val specs2Scalacheck = "org.specs2" %% "specs2-scalacheck" % specs2Core.revision
  lazy val tomcatCatalina = "org.apache.tomcat" % "tomcat-catalina" % "8.5.19"
  lazy val tomcatCoyote = "org.apache.tomcat" % "tomcat-coyote" % tomcatCatalina.revision
  lazy val twirlApi = "com.typesafe.play" %% "twirl-api" % "1.3.3"
}
| ZizhengTai/http4s | project/Http4sPlugin.scala | Scala | apache-2.0 | 8,732 |
package com.varunvats.practice.linkedlist
import com.varunvats.practice.sorting.UnitSpec
/**
 * Unit tests for PalindromeCheckerReverse.isPalindrome over singly-linked lists.
 * Each case builds a small list inline (shape shown in the adjacent comment)
 * and asserts the expected palindrome verdict, including the null-list edge case.
 */
class PalindromeCheckerReverseSpec extends UnitSpec {
  "Palindrome checker (reverse)" should {
    "return false when passed a null" in {
      PalindromeCheckerReverse.isPalindrome(null) shouldBe false
    }
    "return true when passed a linked-list with one element" in {
      // 5
      val list = new Node(5)
      PalindromeCheckerReverse.isPalindrome(list) shouldBe true
    }
    "return true when passed a linked list containing two elements that have the same value" in {
      // 5 -> 5
      val list = new Node(5, new Node(5))
      PalindromeCheckerReverse.isPalindrome(list) shouldBe true
    }
    "return false when passed a linked list containing two different elements" in {
      // 5 -> 6
      val list = new Node(5, new Node(6))
      PalindromeCheckerReverse.isPalindrome(list) shouldBe false
    }
    "return true when passed in a palindrome list containing three elements" in {
      // 5 -> 6 -> 5
      val list = new Node(5, new Node(6, new Node(5)))
      PalindromeCheckerReverse.isPalindrome(list) shouldBe true
    }
    "return false when passed a non-palindrome list containing three elements" in {
      // 5 -> 6 -> 7
      val list = new Node(5, new Node(6, new Node(7)))
      PalindromeCheckerReverse.isPalindrome(list) shouldBe false
    }
    "return true when passed a palindrome list containing four elements" in {
      // 5 -> 6 -> 6 -> 5
      val list = new Node(5, new Node(6, new Node(6, new Node(5))))
      PalindromeCheckerReverse.isPalindrome(list) shouldBe true
    }
    "return false when passed a non-palindrome list containing four elements" in {
      // 5 -> 6 -> 5 -> 6
      val list = new Node(5, new Node(6, new Node(5, new Node(6))))
      PalindromeCheckerReverse.isPalindrome(list) shouldBe false
    }
  }
}
| varunvats/practice | jvm/src/test/scala/com/varunvats/practice/linkedlist/PalindromeCheckerReverseSpec.scala | Scala | mit | 1,880 |
package com.yoshitaka.hirai
/**
 * Entry point for the multi-armed-bandit demo casino.
 *
 * Plays `numPlay` rounds against a [[BanditServer]] holding `numSlotMachine`
 * slot machines, either interactively ("human") or through a [[BanditClient]]
 * strategy ("program"), printing per-round and total rewards.
 */
object Main extends App {
  println("Welcome, hryshtk casino!") // fixed typo: "Welcom" -> "Welcome"

  // Switch between interactive play and the automated client.
  val playerType = "program"
  val numSlotMachine = 10
  val banditServer = new BanditServer(numSlotMachine)
  var totalReward = 0
  val numPlay = 10

  if (playerType == "human") {
    for (_ <- 0 until numPlay) {
      print("Input slot machine id:")
      val slotMachineId = io.StdIn.readLine()
      // NOTE(review): toInt throws on non-numeric input — confirm whether
      // input validation is desired here.
      val reward = banditServer.play(slotMachineId.toInt)
      println("reward:" + reward.toString)
      totalReward += reward
    }
    println("total reward:" + totalReward)
  }
  else if (playerType == "program") {
    val banditClient = new BanditClient(numSlotMachine)
    for (_ <- 0 until numPlay) {
      print("Input slot machine id:")
      val slotMachineId = banditClient.choice()
      println(slotMachineId)
      val reward = banditServer.play(slotMachineId)
      println("reward:" + reward.toString)
      totalReward += reward
      // Feed the observed reward back so the client can update its estimates.
      banditClient.teach(reward)
    }
    println("total reward:" + totalReward)
  }
}
| hryshtk/bandit | bandit-scala/src/main/scala/com/yoshitaka/hirai/Main.scala | Scala | mit | 1,024 |
/**
* Copyright (c) 2013 Saddle Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.saddle.index
import org.saddle._
/**
* A Splitter operates on an input index whose elements have arity N, and yields the following
* pair of output indexes: the left has elements whose arity is N-1, where each element has the
* first N-1 constituents of the original tuple; and the right is an index whose elements were
* those in the Nth position of the original tuple.
*
* For example,
*
* {{{
* Index[(Char, Int)](('a', 1), ('a', 2), ('b', 1), ('b', 2)).split
* }}}
*
* yields
*
* {{{
* (Index[Char]('a', 'a', 'b', 'b'), Index[Int](1, 2, 1, 2))
* }}}
*
* @tparam I Input index whose elements have arity > 1
* @tparam OL Left output index whose elements have arity >= 1
* @tparam OR Right output index whose elements have arity 1
*/
trait Splitter[I, OL, OR] {
  /** Splits `i` into (index of the first N-1 tuple components, index of the last component). */
  def apply(i: Index[I]): (Index[OL], Index[OR])
}
/**
* Companion object houses implicit instances of Splitter
*/
object Splitter {
implicit def split2nd[T1: ST: ORD, T2: ST: ORD] =
new Splitter[(T1, T2), T1, T2] {
def apply(i: Index[(T1, T2)]) = (i.map(_._1), i.map(_._2))
}
implicit def split3rd[T1: ST: ORD, T2: ST: ORD, T3: ST: ORD] =
new Splitter[(T1, T2, T3), (T1, T2), T3] {
def apply(i: Index[(T1, T2, T3)]) = (i.map(t => (t._1, t._2)), i.map(_._3))
}
implicit def split4th[T1: ST: ORD, T2: ST: ORD, T3: ST: ORD, T4: ST: ORD] =
new Splitter[(T1, T2, T3, T4), (T1, T2, T3), T4] {
def apply(i: Index[(T1, T2, T3, T4)]) = (i.map(t => (t._1, t._2, t._3)), i.map(_._4))
}
implicit def split5th[T1: ST: ORD, T2: ST: ORD, T3: ST: ORD, T4: ST: ORD, T5: ST: ORD] =
new Splitter[(T1, T2, T3, T4, T5), (T1, T2, T3, T4), T5] {
def apply(i: Index[(T1, T2, T3, T4, T5)]) = (i.map(t => (t._1, t._2, t._3, t._4)), i.map(_._5))
}
implicit def split6th[T1: ST: ORD, T2: ST: ORD, T3: ST: ORD, T4: ST: ORD, T5: ST: ORD, T6: ST: ORD] =
new Splitter[(T1, T2, T3, T4, T5, T6), (T1, T2, T3, T4, T5), T6] {
def apply(i: Index[(T1, T2, T3, T4, T5, T6)]) = (i.map(t => (t._1, t._2, t._3, t._4, t._5)), i.map(_._6))
}
implicit def split7th[T1: ST: ORD, T2: ST: ORD, T3: ST: ORD, T4: ST: ORD, T5: ST: ORD, T6: ST: ORD, T7: ST: ORD] =
new Splitter[(T1, T2, T3, T4, T5, T6, T7), (T1, T2, T3, T4, T5, T6), T7] {
def apply(i: Index[(T1, T2, T3, T4, T5, T6, T7)]) = (i.map(t => (t._1, t._2, t._3, t._4, t._5, t._6)), i.map(_._7))
}
implicit def split8th[T1: ST: ORD, T2: ST: ORD, T3: ST: ORD, T4: ST: ORD, T5: ST: ORD, T6: ST: ORD, T7: ST: ORD, T8: ST: ORD] =
new Splitter[(T1, T2, T3, T4, T5, T6, T7, T8), (T1, T2, T3, T4, T5, T6, T7), T8] {
def apply(i: Index[(T1, T2, T3, T4, T5, T6, T7, T8)]) = (i.map(t => (t._1, t._2, t._3, t._4, t._5, t._6, t._7)), i.map(_._8))
}
implicit def split9th[T1: ST: ORD, T2: ST: ORD, T3: ST: ORD, T4: ST: ORD, T5: ST: ORD, T6: ST: ORD, T7: ST: ORD, T8: ST: ORD, T9: ST: ORD] =
new Splitter[(T1, T2, T3, T4, T5, T6, T7, T8, T9), (T1, T2, T3, T4, T5, T6, T7, T8), T9] {
def apply(i: Index[(T1, T2, T3, T4, T5, T6, T7, T8, T9)]) = (i.map(t => (t._1, t._2, t._3, t._4, t._5, t._6, t._7, t._8)), i.map(_._9))
}
} | saddle/saddle | saddle-core/src/main/scala/org/saddle/index/Splitter.scala | Scala | apache-2.0 | 3,771 |
package edu.cmu.cs.oak.nodes
import edu.cmu.cs.oak.value.SymbolValue
/** Leaf [[DNode]] wrapping a single symbolic value. */
case class SymbolNode(sv: SymbolValue) extends DNode {

  // NOTE(review): returns null rather than an empty Seq; callers may depend
  // on this sentinel — confirm before changing to Seq.empty.
  def getChildren(): Seq[DNode] = null

  // XML form carries the symbolic expression's text.
  override def toXml = {
    <Symbolic Text={sv.e.toString()} />
  }

  override def ifdefy(): List[String] = List(sv.toString())

  // A node with no wrapped value is considered empty.
  override def isEmpty() = (sv == null)
} | smba/oak | edu.cmu.cs.oak/src/main/scala/edu/cmu/cs/oak/nodes/SymbolNode.scala | Scala | lgpl-3.0 | 341 |
package edu.umd.mith.banana.io
package object sesame {
  // Default RDFJsonWriter instance, provided via an anonymous subclass.
  implicit def RDFJsonWriter = new RDFJsonWriter {}
}
| umd-mith/banana-utils | io-sesame/src/main/scala/io/sesame/package.scala | Scala | apache-2.0 | 111 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package docs.home.actor
import com.lightbend.lagom.docs.ServiceSupport
import scala.concurrent.duration._
import akka.actor.ActorSystem
import akka.testkit.ImplicitSender
import akka.testkit.TestKit
import com.typesafe.config.ConfigFactory
import org.scalactic.TypeCheckedTripleEquals
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Matchers
import org.scalatest.WordSpecLike
import akka.cluster.Cluster
import java.util.concurrent.TimeUnit
object ActorServiceSpec {
  // Cluster-enabled actor provider; Artery binds to loopback on a random (0) port.
  def config = ConfigFactory.parseString("""
    akka.actor.provider = cluster
    akka.remote.artery.canonical.port = 0
    akka.remote.artery.canonical.hostname = 127.0.0.1
    """)
}
class ActorServiceSpec
    extends TestKit(ActorSystem("ActorServiceSpec", ActorServiceSpec.config))
    with ServiceSupport
    with BeforeAndAfterAll
    with TypeCheckedTripleEquals
    with ImplicitSender {

  // Two extra "worker-node" systems join the TestKit system to form a 3-node cluster.
  val workerRoleConfig = ConfigFactory.parseString("akka.cluster.roles = [worker-node]")
  val node2 = ActorSystem("ActorServiceSpec", workerRoleConfig.withFallback(system.settings.config))
  val node3 = ActorSystem("ActorServiceSpec", workerRoleConfig.withFallback(system.settings.config))

  override def beforeAll {
    // The first node joins itself to bootstrap the cluster; the workers then join it.
    Cluster(system).join(Cluster(system).selfAddress)
    Cluster(node2).join(Cluster(system).selfAddress)
    Cluster(node3).join(Cluster(system).selfAddress)

    node2.actorOf(Worker.props(), "worker");
    node3.actorOf(Worker.props(), "worker");

    // Block until all three members are visible before any test runs.
    within(15.seconds) {
      awaitAssert {
        Cluster(system).state.members.size should ===(3)
      }
    }
  }

  override def afterAll {
    shutdown()
    shutdown(node2)
    shutdown(node3)
  }

  "Integration with actors" must {
    "work with for example clustered consistent hashing" in withServiceInstance[WorkerService](
      new WorkerServiceImpl(system)
    ).apply { app => client =>
      {
        val job = Job.of("123", "compute", "abc")
        // might take a while until cluster is formed and router knows about the nodes
        within(15.seconds) {
          awaitAssert {
            client.doWork().invoke(job).toCompletableFuture.get(3, TimeUnit.SECONDS) should ===(JobAccepted.of("123"))
          }
        }
      }
    }
  }
}
| rcavalcanti/lagom | docs/src/test/scala/docs/home/actor/ActorServiceSpec.scala | Scala | apache-2.0 | 2,316 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.storage.RDDInfo
/**
* :: DeveloperApi ::
* Stores information about a stage to pass from the scheduler to SparkListeners.
*/
@DeveloperApi
class StageInfo(val stageId: Int, val name: String, val numTasks: Int, val rddInfos: Seq[RDDInfo]) {
  /** When this stage was submitted from the DAGScheduler to a TaskScheduler. */
  var submissionTime: Option[Long] = None
  /** Time when all tasks in the stage completed or when the stage was cancelled. */
  var completionTime: Option[Long] = None
  /** If the stage failed, the reason why. */
  var failureReason: Option[String] = None
  // NOTE(review): flag is only declared here; presumably set by the scheduler
  // when an oversized task triggers a warning — confirm against callers.
  var emittedTaskSizeWarning = false

  /** Marks this stage as failed with `reason` and records the completion time. */
  def stageFailed(reason: String) {
    failureReason = Some(reason)
    completionTime = Some(System.currentTimeMillis)
  }
}
private[spark] object StageInfo {
  /**
   * Construct a StageInfo from a Stage.
   *
   * Each Stage is associated with one or many RDDs, with the boundary of a Stage marked by
   * shuffle dependencies. Therefore, all ancestor RDDs related to this Stage's RDD through a
   * sequence of narrow dependencies should also be associated with this Stage.
   */
  def fromStage(stage: Stage): StageInfo = {
    // The stage's own RDD comes first, followed by all of its narrow ancestors.
    val rddInfos = RDDInfo.fromRdd(stage.rdd) +: stage.rdd.getNarrowAncestors.map(RDDInfo.fromRdd)
    new StageInfo(stage.id, stage.name, stage.numTasks, rddInfos)
  }
}
| yelshater/hadoop-2.3.0 | spark-core_2.10-1.0.0-cdh5.1.0/src/main/scala/org/apache/spark/scheduler/StageInfo.scala | Scala | apache-2.0 | 2,283 |
import org.scalacheck._
object A extends Properties("A") {
  // Passes only if Counter.i already equals B.value — i.e. B's properties ran before A's.
  property("Ran second") = Prop.secure(Counter.i == B.value)
} | pdalpra/sbt | sbt/src/sbt-test/tests/order/src/test/scala/A.scala | Scala | bsd-3-clause | 121 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.repl
import java.io._
import java.net.URLClassLoader
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.tools.nsc.interpreter.SparkILoop
import com.google.common.io.Files
import org.scalatest.FunSuite
import org.apache.commons.lang3.StringEscapeUtils
import org.apache.spark.SparkContext
import org.apache.spark.util.Utils
class ReplSuite extends FunSuite {
def runInterpreter(master: String, input: String): String = {
val CONF_EXECUTOR_CLASSPATH = "spark.executor.extraClassPath"
val in = new BufferedReader(new StringReader(input + "\\n"))
val out = new StringWriter()
val cl = getClass.getClassLoader
var paths = new ArrayBuffer[String]
if (cl.isInstanceOf[URLClassLoader]) {
val urlLoader = cl.asInstanceOf[URLClassLoader]
for (url <- urlLoader.getURLs) {
if (url.getProtocol == "file") {
paths += url.getFile
}
}
}
val classpath = paths.mkString(File.pathSeparator)
val oldExecutorClasspath = System.getProperty(CONF_EXECUTOR_CLASSPATH)
System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
System.setProperty("spark.master", master)
val interp = {
new SparkILoop(in, new PrintWriter(out))
}
org.apache.spark.repl.Main.interp = interp
Main.s.processArguments(List("-classpath", classpath), true)
Main.main(Array()) // call main
org.apache.spark.repl.Main.interp = null
if (oldExecutorClasspath != null) {
System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)
} else {
System.clearProperty(CONF_EXECUTOR_CLASSPATH)
}
return out.toString
}
def assertContains(message: String, output: String) {
val isContain = output.contains(message)
assert(isContain,
"Interpreter output did not contain '" + message + "':\\n" + output)
}
def assertDoesNotContain(message: String, output: String) {
val isContain = output.contains(message)
assert(!isContain,
"Interpreter output contained '" + message + "':\\n" + output)
}
  // Builds an in-process interpreter (no subprocess) to verify that a local
  // property set on the calling thread is visible inside interpreted code.
  test("propagation of local properties") {
    // A mock ILoop that doesn't install the SIGINT handler.
    class ILoop(out: PrintWriter) extends SparkILoop(None, out) {
      settings = new scala.tools.nsc.Settings
      settings.usejavacp.value = true
      org.apache.spark.repl.Main.interp = this
      override def createInterpreter() {
        intp = new SparkILoopInterpreter
        intp.setContextClassLoader()
      }
    }

    val out = new StringWriter()
    Main.interp = new ILoop(new PrintWriter(out))
    Main.sparkContext = new SparkContext("local", "repl-test")
    Main.interp.createInterpreter()

    Main.sparkContext.setLocalProperty("someKey", "someValue")

    // Make sure the value we set in the caller to interpret is propagated in the thread that
    // interprets the command.
    Main.interp.interpret("org.apache.spark.repl.Main.sparkContext.getLocalProperty(\\"someKey\\")")
    assert(out.toString.contains("someValue"))

    Main.sparkContext.stop()
    System.clearProperty("spark.driver.port")
  }
  // The snippets below are fed to a fresh REPL via runInterpreter; assertions
  // check the captured transcript for expected results and absence of errors.
  test("simple foreach with accumulator") {
    val output = runInterpreter("local",
      """
        |val accum = sc.accumulator(0)
        |sc.parallelize(1 to 10).foreach(x => accum += x)
        |accum.value
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
    assertContains("res1: Int = 55", output)
  }

  test("external vars") {
    val output = runInterpreter("local",
      """
        |var v = 7
        |sc.parallelize(1 to 10).map(x => v).collect.reduceLeft(_+_)
        |v = 10
        |sc.parallelize(1 to 10).map(x => v).collect.reduceLeft(_+_)
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
    assertContains("res0: Int = 70", output)
    assertContains("res1: Int = 100", output)
  }
  // REPL-defined classes and functions must serialize correctly into task closures.
  test("external classes") {
    val output = runInterpreter("local",
      """
        |class C {
        |def foo = 5
        |}
        |sc.parallelize(1 to 10).map(x => (new C).foo).collect.reduceLeft(_+_)
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
    assertContains("res0: Int = 50", output)
  }

  test("external functions") {
    val output = runInterpreter("local",
      """
        |def double(x: Int) = x + x
        |sc.parallelize(1 to 10).map(x => double(x)).collect.reduceLeft(_+_)
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
    assertContains("res0: Int = 110", output)
  }

  test("external functions that access vars") {
    val output = runInterpreter("local",
      """
        |var v = 7
        |def getV() = v
        |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
        |v = 10
        |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
    assertContains("res0: Int = 70", output)
    assertContains("res1: Int = 100", output)
  }
  // Broadcast capture semantics and reading cached text files from the REPL.
  test("broadcast vars") {
    // Test that the value that a broadcast var had when it was created is used,
    // even if that variable is then modified in the driver program
    // TODO: This doesn't actually work for arrays when we run in local mode!
    val output = runInterpreter("local",
      """
        |var array = new Array[Int](5)
        |val broadcastArray = sc.broadcast(array)
        |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
        |array(0) = 5
        |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
    assertContains("res0: Array[Int] = Array(0, 0, 0, 0, 0)", output)
    assertContains("res2: Array[Int] = Array(5, 0, 0, 0, 0)", output)
  }

  test("interacting with files") {
    val tempDir = Files.createTempDir()
    tempDir.deleteOnExit()
    val out = new FileWriter(tempDir + "/input")
    out.write("Hello world!\\n")
    out.write("What's up?\\n")
    out.write("Goodbye\\n")
    out.close()
    val output = runInterpreter("local",
      """
        |var file = sc.textFile("%s").cache()
        |file.count()
        |file.count()
        |file.count()
      """.stripMargin.format(StringEscapeUtils.escapeJava(
        tempDir.getAbsolutePath + File.separator + "input")))
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
    assertContains("res0: Long = 3", output)
    assertContains("res1: Long = 3", output)
    assertContains("res2: Long = 3", output)
    Utils.deleteRecursively(tempDir)
  }
  // local-cluster mode launches real executor JVMs, exercising class shipping.
  test("local-cluster mode") {
    val output = runInterpreter("local-cluster[1,1,512]",
      """
        |var v = 7
        |def getV() = v
        |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
        |v = 10
        |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
        |var array = new Array[Int](5)
        |val broadcastArray = sc.broadcast(array)
        |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
        |array(0) = 5
        |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
    assertContains("res0: Int = 70", output)
    assertContains("res1: Int = 100", output)
    assertContains("res2: Array[Int] = Array(0, 0, 0, 0, 0)", output)
    assertContains("res4: Array[Int] = Array(0, 0, 0, 0, 0)", output)
  }

  // Regression test: two instances of a REPL-defined case class must type check.
  test("SPARK-1199 two instances of same class don't type check.") {
    val output = runInterpreter("local-cluster[1,1,512]",
      """
        |case class Sum(exp: String, exp2: String)
        |val a = Sum("A", "B")
        |def b(a: Sum): String = a match { case Sum(_, _) => "Found Sum" }
        |b(a)
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
  }

  test("SPARK-2452 compound statements.") {
    val output = runInterpreter("local",
      """
        |val x = 4 ; def f() = x
        |f()
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
  }
  // Regression tests around imports inside the REPL.
  test("SPARK-2576 importing SQLContext.createSchemaRDD.") {
    // We need to use local-cluster to test this case.
    val output = runInterpreter("local-cluster[1,1,512]",
      """
        |val sqlContext = new org.apache.spark.sql.SQLContext(sc)
        |import sqlContext.createSchemaRDD
        |case class TestCaseClass(value: Int)
        |sc.parallelize(1 to 10).map(x => TestCaseClass(x)).toSchemaRDD.collect
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
  }

  test("SPARK-2632 importing a method from non serializable class and not using it.") {
    val output = runInterpreter("local",
      """
        |class TestClass() { def testMethod = 3 }
        |val t = new TestClass
        |import t.testMethod
        |case class TestCaseClass(value: Int)
        |sc.parallelize(1 to 10).map(x => TestCaseClass(x)).collect
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
  }
  // Only registered when a Mesos native library is available in the environment.
  if (System.getenv("MESOS_NATIVE_LIBRARY") != null) {
    test("running on Mesos") {
      val output = runInterpreter("localquiet",
        """
          |var v = 7
          |def getV() = v
          |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
          |v = 10
          |sc.parallelize(1 to 10).map(x => getV()).collect.reduceLeft(_+_)
          |var array = new Array[Int](5)
          |val broadcastArray = sc.broadcast(array)
          |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
          |array(0) = 5
          |sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).collect
        """.stripMargin)
      assertDoesNotContain("error:", output)
      assertDoesNotContain("Exception", output)
      assertContains("res0: Int = 70", output)
      assertContains("res1: Int = 100", output)
      assertContains("res2: Array[Int] = Array(0, 0, 0, 0, 0)", output)
      assertContains("res4: Array[Int] = Array(0, 0, 0, 0, 0)", output)
    }
  }
  // Objects of a REPL-defined case class must deserialize correctly on collect.
  test("collecting objects of class defined in repl") {
    val output = runInterpreter("local[2]",
      """
        |case class Foo(i: Int)
        |val ret = sc.parallelize((1 to 100).map(Foo), 10).collect
      """.stripMargin)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
    assertContains("ret: Array[Foo] = Array(Foo(1),", output)
  }
}
| hengyicai/OnlineAggregationUCAS | repl/scala-2.11/src/test/scala/org/apache/spark/repl/ReplSuite.scala | Scala | apache-2.0 | 11,647 |
import sbt._
import sbt.Keys._
object ApplicationBuild extends Build {

  // Basic project coordinates.
  val AppName = "CountryReconciliator"
  val AppOrg = "es.weso"
  val AppVersion = "0.3.0-SNAPSHOT"
  val ScalaV = "2.10.2"

  /**
   * Dependency Versions
   */
  val ConfigV = "1.9"
  val CucumberV = "1.1.4"
  val JunitV = "4.11"
  val SeleniumV = "2.35.0"
  val ScalatestV = "2.0.M8"
  val LogbackV = "1.0.13"
  // LuceneV is shared by both lucene-core and solr-core below.
  val LuceneV = "4.0.0"
  val TypeConfigV = "1.0.1"

  lazy val countryReconciliator = Project(
    id = AppName.toLowerCase,
    base = file("."),
    settings = Project.defaultSettings ++ Seq(
      name := AppName,
      organization := AppOrg,
      version := AppVersion,
      scalaVersion := ScalaV,
      /*Test Dependencies*/
      libraryDependencies += "junit" % "junit" % JunitV,
      libraryDependencies += "info.cukes" % "cucumber-jvm" % CucumberV,
      libraryDependencies += "info.cukes" % "cucumber-core" % CucumberV,
      libraryDependencies += "info.cukes" % "cucumber-junit" % CucumberV,
      libraryDependencies += "org.scalatest" %% "scalatest" % ScalatestV,
      libraryDependencies += "info.cukes" %% "cucumber-scala" % CucumberV,
      /*Java Dependencies*/
      libraryDependencies += "org.seleniumhq.selenium" % "selenium-java" % SeleniumV,
      libraryDependencies += "commons-configuration" % "commons-configuration" % ConfigV,
      libraryDependencies += "com.typesafe" % "config" % TypeConfigV,
      libraryDependencies += "org.apache.lucene" % "lucene-core" % LuceneV,
      libraryDependencies += "org.apache.solr" % "solr-core" % LuceneV,
      libraryDependencies += "ch.qos.logback" % "logback-classic" % LogbackV,
      /*External Repositories*/
      resolvers += "Sonatype snapshots" at "http://oss.sonatype.org/content/repositories/snapshots",
      resolvers += "Templemore Repository" at "http://templemore.co.uk/repo/",
      /*Local Repositories*/
      resolvers += Resolver.url("Local Ivy Repository", url("file://" + Path.userHome.absolutePath + "/.ivy2/local/"))(Resolver.ivyStylePatterns),
      resolvers += "Local Maven Repository" at "file://" + Path.userHome.absolutePath + "/.m2/repository"))
}
| weso/CountryReconciliator | project/Build.scala | Scala | apache-2.0 | 2,170 |
package io.rampant.vulgar.security
import java.io.ByteArrayInputStream
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.iteratee.{Enumeratee, Enumerator, Iteratee}
import play.api.libs.json.Json
import play.api.mvc.{Filter, RequestHeader, Result}
import scala.concurrent.Future
/**
 * Play filter that wraps every `application/json` response body in a
 * `{"payload": ...}` envelope; non-JSON responses pass through untouched.
 */
object JsonFilter extends Filter {
  override def apply(next: (RequestHeader) => Future[Result])(rh: RequestHeader): Future[Result] = {
    next(rh).flatMap { result =>
      if (result.header.headers.getOrElse("Content-Type", "").startsWith("application/json")) {
        // Drain the streamed body into one String.
        // NOTE(review): new String(bytes) uses the platform default charset —
        // confirm the body is always UTF-8 compatible.
        val bytesToString: Enumeratee[Array[Byte], String] = Enumeratee.map[Array[Byte]] { bytes => new String(bytes)}
        val resultBody: Future[String] = result.body |>>> bytesToString &>> Iteratee.consume[String]()
        resultBody.map { body =>
          val wrappedBody = Json.obj(
            "payload" -> Json.parse(body)
          ).toString()
          // Re-stream the wrapped body in place of the original.
          // NOTE(review): any pre-set Content-Length header is now stale — verify
          // Play recomputes it for enumerated bodies.
          val enumeratedBody: Enumerator[Array[Byte]] = {
            Enumerator.fromStream(new ByteArrayInputStream(wrappedBody.getBytes))
          }
          result.copy(body = enumeratedBody)
        }
      }
      else {
        Future.successful(result)
      }
    }
  }
}
| duaiwe/vulgar | app/io/rampant/vulgar/security/JsonFilter.scala | Scala | mit | 1,153 |
package org.broadinstitute.clio.integrationtest
import akka.http.scaladsl.model.Uri
import better.files.File
import org.broadinstitute.clio.integrationtest.tests._
/**
* An integration spec that runs entirely against a Clio instance
* and elasticsearch cluster deployed into one of our environments.
*
* @param env the environment to test against, either "dev", "staging", or "prod"
*/
abstract class EnvIntegrationSpec(env: String)
    extends BaseIntegrationSpec(s"Clio in $env") {

  // Deployed Clio instances are reached over HTTPS on the standard port.
  override val clioHostname = s"clio.gotc-$env.broadinstitute.org"

  override val clioPort = 443
  override val useHttps = true

  override val elasticsearchUri: Uri = Uri(
    s"http://elasticsearch1.gotc-$env.broadinstitute.org:9200"
  )

  // Opened read-only: tests must not mutate the environment's bucket.
  override lazy val rootPersistenceDir: File =
    rootPathForBucketInEnv(env, readOnly = true)
}
/** The integration specs that run against Clio in dev. */
abstract class DevIntegrationSpec extends EnvIntegrationSpec("dev")

// One concrete spec class per test trait.
class DevEnvBasicSpec extends DevIntegrationSpec with BasicTests
class DevEnvUbamSpec extends DevIntegrationSpec with UbamTests
class DevEnvBamSpec extends DevIntegrationSpec with BamTests
class DevEnvCramSpec extends DevIntegrationSpec with CramTests
class DevEnvGvcfSpec extends DevIntegrationSpec with GvcfTests
class DevEnvArraysSpec extends DevIntegrationSpec with ArraysTests
/** The integration specs that run against Clio in staging. */
abstract class StagingIntegrationSpec extends EnvIntegrationSpec("staging")

// One concrete spec class per test trait.
class StagingEnvBasicSpec extends StagingIntegrationSpec with BasicTests
class StagingEnvUbamSpec extends StagingIntegrationSpec with UbamTests
class StagingEnvBamSpec extends StagingIntegrationSpec with BamTests
class StagingEnvCramSpec extends StagingIntegrationSpec with CramTests
class StagingEnvGvcfSpec extends StagingIntegrationSpec with GvcfTests
class StagingEnvArraysSpec extends StagingIntegrationSpec with ArraysTests
| broadinstitute/clio | clio-integration-test/src/it/scala/org/broadinstitute/clio/integrationtest/EnvIntegrationSpec.scala | Scala | bsd-3-clause | 1,916 |
/*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.flaminem.flamy.parsing.model
import com.flaminem.flamy.conf.FlamyContext
import com.flaminem.flamy.graph.TableGraph
import com.flaminem.flamy.model.columns.ColumnValue
import com.flaminem.flamy.model.core.CompleteModelFactory
import com.flaminem.flamy.model.files.TableFile
import com.flaminem.flamy.model.{DummyTableFile, Variables}
import org.scalatest.FreeSpec
import scala.language.implicitConversions
class TableDependencyTest extends FreeSpec {
/**
* We allow implicit casting of text into DummyTableFiles
* @param text
* @return
*/
implicit def stringToTableFile(text: String): TableFile = {
DummyTableFile(text)
}
implicit def stringToOption(s: String): Option[String] = Option(s)
  // Context pointing at an empty test model directory; missing tables are
  // tolerated so queries can reference tables declared only in the tests.
  implicit val flamyContext = new FlamyContext("flamy.model.dir.paths" -> "src/test/resources/empty_test")

  val tableGraph = TableGraph(flamyContext, Nil, checkNoMissingTable = false)
  /**
   * Analyzes the given CREATE and VIEW statements, then the POPULATE statements,
   * and checks the resulting dependencies against `expected`.
   * When `expected` is None the dependencies are only printed (manual inspection).
   */
  def testSuccess(
    creates: Seq[String] = Nil,
    views: Seq[String] = Nil,
    populates: Seq[String] = Nil,
    variables: Variables = new Variables(),
    expected: Option[String] = None
  ): Unit = {
    val modelFactory: CompleteModelFactory = new CompleteModelFactory(flamyContext, tableGraph)
    creates.foreach{modelFactory.analyzeCreate(_)}
    views.foreach{modelFactory.analyzeView(_)}
    val tableDeps: Seq[TableDependency] =
      populates.flatMap{modelFactory.analyzePopulateDependencies(_, isView = false, variables)}
    expected match {
      case Some(e) =>
        // Only the last populate's dependency is compared to the expectation.
        val td: TableDependency = tableDeps.last
        assert(td.toString === e)
      case None =>
        tableDeps.foreach{println}
    }
  }
/**
  * Analyzes the given CREATE statements, then asserts that analyzing the
  * views and/or populates throws an Exception.
  * Fix: replaced deprecated procedure syntax (`def f(...) { }`) with an
  * explicit `: Unit =` result type.
  *
  * @param creates   CREATE TABLE statements analyzed first (must succeed)
  * @param views     CREATE VIEW statements expected to participate in the failure
  * @param populates POPULATE statements expected to participate in the failure
  * @param variables variable substitutions applied when analyzing populates
  */
def testFailure(
    creates: Seq[String],
    views: Seq[String],
    populates: Seq[String],
    variables: Variables = new Variables()
): Unit = {
  val modelFactory: CompleteModelFactory = new CompleteModelFactory(flamyContext, tableGraph)
  creates.foreach{modelFactory.analyzeCreate(_)}
  // The exception may come from either the view or the populate analysis.
  intercept[Exception] {
    views.foreach{modelFactory.analyzeView(_)}
    populates.foreach{modelFactory.analyzePopulate(_, variables)}
  }
}
"a simple query should succeed" in {
  // A plain INSERT ... SELECT reading a single known table; the dependency
  // must resolve t1.col1 back to db2.source.col1.
  val create = "CREATE TABLE db2.source (col1 INT)"
  val populate = "INSERT OVERWRITE TABLE db1.dest SELECT t1.col1 FROM db2.Source T1"
  testSuccess(
    creates = Seq(create),
    populates = Seq(populate),
    expected = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source], colDeps[db2.source.col1])"
  )
}
// NOTE(review): `expected` is computed below but the call passes `expected = None`,
// so this test only checks that analysis does not throw (and prints the result).
// TODO confirm whether the assertion was intentionally disabled.
"a simple query with a partition variable should succeed" in {
  /* The bug here was that the Analyzer tries to resolve the ColumnValue part1 in a context where db2.source is not visible */
  val createQuery = "CREATE TABLE db2.source (col1 INT) PARTITIONED BY (part1 STRING)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT ${partition:part1} as col1
      |FROM (
      |  SELECT
      |  col1
      |  FROM db2.source
      |) T1 """.stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source], colDeps[db2.source.col1])"
  val vars = new Variables()
  vars += "partition:part1" -> "'${partition:part1}'"
  testSuccess(
    creates = createQuery :: Nil,
    populates = populateQuery :: Nil,
    variables = vars,
    expected = None
  )
}
// The next four tests cover column aliasing through subqueries (with and
// without star expansion): the alias must become the output column name while
// the dependency still resolves to the original db2.source.col1.
"a query with an aliased column in a subquery with a star should succeed" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT T.col1 as col2 FROM (
      |  SELECT * FROM db2.source
      |) T
      |""".stripMargin
  val modelFactory: CompleteModelFactory = new CompleteModelFactory(flamyContext, tableGraph)
  modelFactory.analyzeCreate(createQuery)
  val tableDeps: Seq[TableDependency] = modelFactory.analyzePopulateDependencies(populateQuery, isView = false, new Variables())
  assert(tableDeps.head.columns.head.columnName === "col2")
  assert(tableDeps.head.columns.head.value === new ColumnDependency("col1", "db2", "source"))
}
"a query with an aliased column in a subquery with a prefixed star should succeed" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT T.col1 as col2 FROM (
      |  SELECT T.* FROM db2.source T
      |) T
      |""".stripMargin
  val modelFactory: CompleteModelFactory = new CompleteModelFactory(flamyContext, tableGraph)
  modelFactory.analyzeCreate(createQuery)
  val tableDeps: Seq[TableDependency] = modelFactory.analyzePopulateDependencies(populateQuery, isView = false, new Variables())
  assert(tableDeps.head.columns.head.columnName === "col2")
  assert(tableDeps.head.columns.head.value === new ColumnDependency("col1", "db2", "source"))
}
"a query with an aliased column in two nested subqueries with a start should succeed" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT T.col1 as col2
      |FROM (
      |  SELECT * FROM (
      |    SELECT col1 FROM db2.source
      |  ) T
      |) T
      |""".stripMargin
  val modelFactory: CompleteModelFactory = new CompleteModelFactory(flamyContext, tableGraph)
  modelFactory.analyzeCreate(createQuery)
  val tableDeps: Seq[TableDependency] = modelFactory.analyzePopulateDependencies(populateQuery, isView = false, new Variables())
  assert(tableDeps.head.columns.head.columnName === "col2")
  assert(tableDeps.head.columns.head.value === new ColumnDependency("col1", "db2", "source"))
}
"a query with an aliased column in a subquery should succeed" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT T.col1 as col2
      |FROM (
      |  SELECT col1 FROM db2.source
      |) T
      |""".stripMargin
  val modelFactory: CompleteModelFactory = new CompleteModelFactory(flamyContext, tableGraph)
  modelFactory.analyzeCreate(createQuery)
  val tableDeps: Seq[TableDependency] = modelFactory.analyzePopulateDependencies(populateQuery, isView = false, new Variables())
  assert(tableDeps.head.columns.head.columnName === "col2")
  assert(tableDeps.head.columns.head.value === new ColumnDependency("col1", "db2", "source"))
}
// Aliasing through a UNION, through constants, and through one or two VIEWs:
// the dependency must always trace back to the underlying source column (or
// to the constant value when the column is a literal).
"a query with an aliased column in a subquery with a union should succeed" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT T.col1 as col2
      |FROM (
      |  SELECT * FROM db2.source
      |) T
      |UNION ALL
      |SELECT col1 as col2 FROM db2.source
      |""".stripMargin
  val modelFactory: CompleteModelFactory = new CompleteModelFactory(flamyContext, tableGraph)
  modelFactory.analyzeCreate(createQuery)
  val tableDeps: Seq[TableDependency] = modelFactory.analyzePopulateDependencies(populateQuery, isView = false, new Variables())
  assert(tableDeps.head.columns.head.columnName === "col2")
  assert(tableDeps.head.columns.head.value === new ColumnDependency("col1", "db2", "source"))
}
"a query with a constant in a subquery should succeed" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT T.col1 as col2
      |FROM (
      |  SELECT 1 as col1 FROM db2.source
      |) T
      |""".stripMargin
  val modelFactory: CompleteModelFactory = new CompleteModelFactory(flamyContext, tableGraph)
  modelFactory.analyzeCreate(createQuery)
  val tableDeps: Seq[TableDependency] = modelFactory.analyzePopulateDependencies(populateQuery, isView = false, new Variables())
  assert(tableDeps.head.columns.head.columnName === "col2")
  // The column value is the literal constant, not a column dependency.
  assert(tableDeps.head.columns.head.value === ColumnValue("1"))
}
"a query with an aliased column in a view should succeed" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val viewQuery =
    """CREATE VIEW db2.view AS
      |SELECT col1 as col2 FROM db2.source
    """.stripMargin
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT col2 as col3
      |FROM db2.view
      |""".stripMargin
  val modelFactory: CompleteModelFactory = new CompleteModelFactory(flamyContext, tableGraph)
  modelFactory.analyzeCreate(createQuery)
  modelFactory.analyzeView(viewQuery)
  val tableDeps: Seq[TableDependency] = modelFactory.analyzePopulateDependencies(populateQuery, isView = false, new Variables())
  assert(tableDeps.head.columns.head.columnName === "col3")
  assert(tableDeps.head.columns.head.value === new ColumnDependency("col1", "db2", "source"))
}
"a query with an aliased column in two views should succeed" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val viewQuery1 = "CREATE VIEW db2.view1 AS SELECT col1 as col2 FROM db2.source"
  val viewQuery2 = "CREATE VIEW db2.view2 AS SELECT col2 as col3 FROM db2.view1"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT col3 as col4
      |FROM db2.view2
      |""".stripMargin
  val modelFactory: CompleteModelFactory = new CompleteModelFactory(flamyContext, tableGraph)
  modelFactory.analyzeCreate(createQuery)
  modelFactory.analyzeView(viewQuery1)
  modelFactory.analyzeView(viewQuery2)
  val tableDeps: Seq[TableDependency] = modelFactory.analyzePopulateDependencies(populateQuery, isView = false, new Variables())
  assert(tableDeps.head.columns.head.columnName === "col4")
  assert(tableDeps.head.columns.head.value === new ColumnDependency("col1", "db2", "source"))
}
// Partitioned destination, missing source tables, and basic failure cases.
"a simple query on a partitioned table should succeed" in {
  val createQuery = "CREATE TABLE db1.dest (col1 INT) PARTITIONED BY (part1 INT)"
  val populateQuery = "INSERT OVERWRITE TABLE db1.dest PARTITION(part1) SELECT col1, part1 FROM db2.source"
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1, part1], partitions[part1], tableDeps[db2.source], colDeps[col1, part1])"
  testSuccess(
    creates = createQuery :: Nil,
    populates = populateQuery :: Nil,
    expected = expected
  )
}
// A populate referencing a table with no CREATE: columns cannot be qualified
// but analysis must still succeed.
"a simple query with missing table should succeed" in {
  val populateQuery = "INSERT OVERWRITE TABLE db1.dest SELECT col1 FROM db2.source"
  val expected = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source], colDeps[col1])"
  testSuccess(
    populates = populateQuery::Nil,
    expected = expected
  )
}
"an undefined column with no missing table should fail" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery = "INSERT OVERWRITE TABLE db1.dest SELECT t1.WRONG_COLUMN FROM db2.Source T1"
  testFailure(createQuery::Nil, Nil, populateQuery::Nil)
}
"a wrong table reference should fail" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT, E STRUCT<col:INT>)"
  val populateQuery = "INSERT OVERWRITE TABLE db1.dest SELECT MISSING.col1 FROM db2.source T1 "
  testFailure(createQuery::Nil, Nil, populateQuery::Nil)
}
"subquery" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery = "INSERT OVERWRITE TABLE db1.dest SELECT t1.col1 FROM (SELECT col1 FROM db2.Source) T1"
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source], colDeps[db2.source.col1])"
  testSuccess(
    creates = createQuery::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
"column defined from subquery" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery = "INSERT OVERWRITE TABLE db1.dest SELECT t1.c FROM (SELECT source.col1 as c FROM db2.Source) T1"
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[c=db2.source.col1], tableDeps[db2.source], colDeps[db2.source.col1])"
  testSuccess(
    creates = createQuery::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
"undefined column in subquery" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery = "INSERT OVERWRITE TABLE db1.dest SELECT t1.col1 FROM (SELECT c as col1 FROM db2.Source) T1"
  testFailure(createQuery::Nil, Nil, populateQuery::Nil)
}
// Behavior when some referenced tables are unknown: strict checking applies
// only when every referenced table definition is available.
"a missing table should be OK" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery =
    """FROM
      |(
      |  SELECT *
      |  FROM db2.source T1
      |  JOIN db2.MISSING_TABLE T2
      |  on T1.col1=T2.col1
      |) T
      |INSERT OVERWRITE TABLE db1.dest
      |SELECT col2 """.stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col2], tableDeps[db2.missing_table, db2.source], colDeps[*, col2, db2.missing_table.col1, db2.source.col1])"
  testSuccess(
    creates = createQuery::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
"An unknown table reference in a WHERE clause with all table definitions should fail" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery = "INSERT OVERWRITE TABLE db1.dest SELECT col1 FROM db2.source T1 WHERE E.col IS NOT NULL"
  testFailure(createQuery::Nil, Nil, populateQuery::Nil)
}
"An unknown table reference in a WHERE clause with some missing table definitions known should succeed" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery = "INSERT OVERWRITE TABLE db1.dest SELECT col1 FROM db2.missing T1 WHERE E.col IS NOT NULL"
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.missing], colDeps[col1, E.col])"
  testSuccess(
    creates = createQuery::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
// E.col here is a struct-field access on a known column, not a table reference.
"A struct col in a WHERE clause should succeed" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT, E STRUCT<col:INT>)"
  val populateQuery = "INSERT OVERWRITE TABLE db1.dest SELECT col1 FROM db2.source T1 WHERE E.col IS NOT NULL"
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source], colDeps[db2.source.col1, db2.source.E.col])"
  testSuccess(
    creates = createQuery::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
// MAP/REDUCE transform syntax and LATERAL VIEW handling.
"A MAP REDUCE query should succeed" in {
  val createQuery = "CREATE TABLE db2.source (col1 INT)"
  val populateQuery =
    """ INSERT OVERWRITE TABLE db1.dest
      | REDUCE * using ''
      | AS colA, colB, colC
      | FROM (SELECT * FROM db2.source) mapped ;""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[colA, colB, colC], tableDeps[db2.source], colDeps[db2.source.*])"
  testSuccess(
    creates = createQuery::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
"a query with a lateral view should succeed" in {
  val createQuery = "CREATE TABLE db2.source (label_map MAP<STRING,STRING>)"
  val populateQuery =
    """ INSERT OVERWRITE TABLE db1.dest
      | SELECT label_map, label_key, label_value
      | FROM db2.source
      | LATERAL VIEW explode(label_map) T1 as label_key, label_value""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[label_map, label_key, label_value], tableDeps[db2.source], colDeps[db2.source.label_map])"
  testSuccess(
    creates = createQuery::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
"a query with a lateral view and a missing column should fail" in {
  val createQuery = "CREATE TABLE db2.source (label_map MAP<STRING,STRING>)"
  val populateQuery =
    """ INSERT OVERWRITE TABLE db1.dest
      | SELECT label_map, label_key, WRONG_COLUMN
      | FROM db2.source
      | LATERAL VIEW explode(label_map) T1 as label_key, label_value""".stripMargin
  testFailure(createQuery::Nil, Nil, populateQuery::Nil)
}
"a query with a lateral view and a select * should succeed" in {
  val createQuery = "CREATE TABLE db2.source (label_map MAP<STRING,STRING>)"
  val populateQuery =
    """FROM
      |(
      |  SELECT *
      |  FROM db2.source
      |  LATERAL VIEW explode(label_map) T AS label_key, label_value
      |) T2
      |INSERT OVERWRITE TABLE db1.dest
      |SELECT label_map, label_key, label_value""".stripMargin
  val expected = "TableDependency(type=REF, name=dest, schema=db1, columns[label_map, label_key, label_value], tableDeps[db2.source], colDeps[db2.source.label_map])"
  testSuccess(
    creates = createQuery::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
"a create VIEW with multiple columns having the same name should fail" in {
  val createQuery = "CREATE TABLE db2.source (id STRING, number INT)"
  val viewQuery = "CREATE VIEW db2.view AS SELECT id, *, T1.*, T2.* FROM db2.source T1 JOIN db2.source T2 "
  testFailure(createQuery::Nil, Nil, viewQuery::Nil)
}
// NOTE(review): stale disabled test kept for reference; its testFailure call
// only passes two argument lists and would not compile as-is — confirm before re-enabling.
// "a multiple CREATE VIEW should succeed" in {
//   val createQuery = "CREATE TABLE db2.source (id STRING, number INT)"
//   val viewQuery1: String = "CREATE VIEW db2.view1 AS SELECT id, number WHERE "
//   val viewQuery: String = "CREATE VIEW db2.view AS SELECT * FROM db2.source T1 JOIN db2.source T2 "
//   testFailure(createQuery::Nil, viewQuery::Nil)
// }
// UNION ALL: dependencies from both branches must be merged.
"a query with UNION ALL should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
  val createQuery2 = "CREATE TABLE db2.source2 (col1 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT col1 FROM
      |(
      |  SELECT col1, col2 FROM db2.source1
      |  UNION ALL
      |  SELECT col1, NULL as col2 FROM db2.source2
      |) T""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source1, db2.source2], colDeps[db2.source1.col1, db2.source1.col2, db2.source2.col1])"
  testSuccess(
    creates = createQuery1::createQuery2::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
"a query with UNION ALL and stars should be OK bis" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
  val createQuery2 = "CREATE TABLE db2.source2 (col1 INT, col2 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT * FROM
      |(
      |  SELECT * FROM db2.source1
      |  UNION ALL
      |  SELECT * FROM db2.source2
      |) T
      |""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1, col2], tableDeps[db2.source1, db2.source2], colDeps[db2.source1.*, db2.source2.*])"
  testSuccess(
    creates = createQuery1::createQuery2::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
// WHERE ... IN (subquery): the subquery's tables and columns must be tracked.
"a correct query with WHERE ... IN (SUB_QUERY) clauses should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val createQuery2 = "CREATE TABLE db2.source2 (col2 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT col1 FROM db2.source1
      |WHERE col1 IN (SELECT col2 FROM db2.source2)
      |""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source1, db2.source2], colDeps[db2.source1.col1, db2.source2.col2])"
  testSuccess(
    creates = createQuery1::createQuery2::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
// NOTE(review): `expected` is declared but the call passes `expected = None`,
// so this test only checks that analysis does not throw — TODO confirm intended.
"a correct query with WHERE ... IN (SUB_QUERY with external column ref) should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val createQuery2 = "CREATE TABLE db2.source2 (col2 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT col1 FROM db2.source1 T
      |WHERE T.col1 IN (SELECT col2 FROM db2.source2 WHERE col2 = T.col1)
      |""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source1, db2.source2], colDeps[db2.source1.col1, db2.source2.col2])"
  testSuccess(
    creates = createQuery1::createQuery2::Nil,
    populates = populateQuery::Nil,
    expected = None
  )
}
"a correct query with WHERE ... IN (SUB_QUERY) clauses should be OK bis" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val createQuery2 = "CREATE TABLE db2.source2 (col1 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT col1 FROM db2.source1 A
      |WHERE A.col1 IN (SELECT col1 FROM db2.source2)
      |""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source1, db2.source2], colDeps[db2.source1.col1, db2.source2.col1])"
  testSuccess(
    creates = createQuery1::createQuery2::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
// Marked `ignore`: detecting this invalid case is not implemented yet.
"an incorrect query with WHERE ... IN (SUB_QUERY) clauses should NOT be OK" ignore {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val createQuery2 = "CREATE TABLE db2.source2 (col1 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT col1 FROM db2.source1
      |WHERE col1 IN (SELECT col1 FROM db2.source2)
      |""".stripMargin
  testFailure(createQuery1::createQuery2::Nil, Nil, populateQuery::Nil)
}
// WHERE EXISTS (subquery): same tracking rules as WHERE ... IN.
"a correct query with WHERE EXISTS (SUB_QUERY) clauses should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val createQuery2 = "CREATE TABLE db2.source2 (col2 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT col1 FROM db2.source1
      |WHERE EXISTS (SELECT col2 FROM db2.source2)
      |""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source1, db2.source2], colDeps[db2.source1.col1, db2.source2.col2])"
  testSuccess(
    creates = createQuery1::createQuery2::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
// db2.source2 has no col1, so the subquery must be rejected.
"an incorrect query with WHERE EXISTS (SUB_QUERY) clauses should NOT be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val createQuery2 = "CREATE TABLE db2.source2 (col2 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT col1 FROM db2.source1
      |WHERE EXISTS (SELECT col1 FROM db2.source2)
      |""".stripMargin
  testFailure(createQuery1::createQuery2::Nil, Nil, populateQuery::Nil)
}
// Common Table Expressions (WITH ... AS): alias scoping and shadowing rules.
"a correct query with a CTE and a LATERAL VIEW should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val populateQuery =
    """WITH T AS (SELECT col1 FROM db2.source1)
      |INSERT OVERWRITE TABLE db1.dest
      |SELECT col2
      |FROM T
      |LATERAL VIEW explode(col1) LV as col2
      |""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col2], tableDeps[db2.source1], colDeps[db2.source1.col1])"
  testSuccess(
    creates = createQuery1::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
"a correct query with the same alias used in a CTE and a subquery should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val populateQuery =
    """WITH T AS (SELECT col1 FROM db2.source1)
      |INSERT OVERWRITE TABLE db1.dest
      |SELECT col1 FROM (SELECT col1 FROM T) T
      |""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source1], colDeps[db2.source1.col1])"
  testSuccess(
    creates = createQuery1::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
// Inside the subquery the CTE alias T is not in scope, so T.col1 stays unresolved
// but the fully-qualified unknown table keeps the analysis permissive.
"a correct query with a CTE and an unknown table with a complete name should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val populateQuery =
    """WITH T AS (SELECT col1 FROM db2.source1)
      |INSERT OVERWRITE TABLE db1.dest
      |SELECT col1 FROM (SELECT T.col1 FROM db2.unknown_table) T
      |""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.unknown_table], colDeps[T.col1])"
  testSuccess(
    creates = createQuery1::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
"a correct query with a CTE and an unknown table with a short name should not be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val populateQuery =
    """WITH T AS (SELECT col1 FROM db2.source1)
      |INSERT OVERWRITE TABLE db1.dest
      |SELECT col1 FROM (SELECT T.col1 FROM T2) T
      |""".stripMargin
  testFailure(createQuery1::Nil, Nil, populateQuery::Nil)
}
"a correct query with the same alias used in a CTE and nested subqueries should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val populateQuery =
    """WITH T AS (SELECT col1 FROM db2.source1 T)
      |INSERT OVERWRITE TABLE db1.dest
      |SELECT col1
      |FROM (
      |  SELECT col1 FROM T
      |  UNION ALL
      |  SELECT col1 FROM T
      |) T2
      |""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source1], colDeps[db2.source1.col1])"
  testSuccess(
    creates = createQuery1::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
// Alias reuse across a table and a subquery, and DISTRIBUTE BY post-SELECT columns.
"a correct query with the same alias used in a table and a subquery should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val populateQuery =
    """INSERT OVERWRITE TABLE db1.dest
      |SELECT S.col1
      |FROM db2.source1 S
      |JOIN (SELECT S.col1 FROM db2.source1 S) T
      |ON S.col1 = T.col1
      |""".stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source1], colDeps[db2.source1.col1])"
  testSuccess(
    creates = createQuery1::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
// DISTRIBUTE BY refers to the output alias col2, recorded as a postColDep.
"a correct query with a DISTRIBUTE BY should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val populateQuery =
    """
      | INSERT OVERWRITE TABLE db1.dest
      | SELECT col1 as col2
      | FROM db2.source1
      | DISTRIBUTE BY col2
      | """.stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col2=db2.source1.col1], tableDeps[db2.source1], colDeps[db2.source1.col1], postColDeps[col2])"
  testSuccess(
    creates = createQuery1::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
// col1 is renamed to col2 in the SELECT, so DISTRIBUTE BY col1 must fail.
"an incorrect query with a DISTRIBUTE BY should NOT be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val populateQuery =
    """
      | INSERT OVERWRITE TABLE user_model.daily_users
      | SELECT
      |   col1 as col2
      | FROM db2.source1
      | DISTRIBUTE BY col1
      | """.stripMargin
  testFailure(createQuery1::Nil, Nil, populateQuery::Nil)
}
// NOTE(review): `expected` is declared but the call passes `expected = None`,
// so this test only checks that analysis does not throw — TODO confirm intended
// (the declared string also mentions col2 while the query keeps col1).
"a correct query with a DISTRIBUTE BY and a prefixed column should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val populateQuery =
    """
      | INSERT OVERWRITE TABLE db1.dest
      | SELECT S1.col1
      | FROM db2.source1 S1
      | DISTRIBUTE BY S1.col1
      | """.stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col2], tableDeps[db2.source1], colDeps[db2.source1.col1], postColDeps[col2])"
  testSuccess(
    creates = createQuery1::Nil,
    populates = populateQuery::Nil,
    expected = None
  )
}
/* In this context, S1.col1 should not be recognized because it is not in the SELECT clause... */
// Marked `ignore`: detecting this invalid case is not implemented yet.
"an incorrect query with a DISTRIBUTE BY and a one-time-prefixed column should NOT be OK" ignore {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val populateQuery =
    """
      | INSERT OVERWRITE TABLE db1.dest
      | SELECT col1
      | FROM db2.source1 S1
      | DISTRIBUTE BY S1.col1
      | """.stripMargin
  testFailure(createQuery1::Nil, Nil, populateQuery::Nil)
}
// ORDER BY on an output alias, and variable substitution in partition columns.
"a correct query with a ORDER BY should be OK" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
  val populateQuery =
    """
      | INSERT OVERWRITE TABLE db1.dest
      | SELECT
      |   col1 as col2
      | FROM db2.source1
      | ORDER BY col2
      | """.stripMargin
  val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col2=db2.source1.col1], tableDeps[db2.source1], colDeps[db2.source1.col1], postColDeps[col2])"
  testSuccess(
    creates = createQuery1::Nil,
    populates = populateQuery::Nil,
    expected = expected
  )
}
// Mixed partition values: substituted variables (part1, part2) and literals (part3, part4).
"a correct query with partition variables should succeed" in {
  val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT) PARTITIONED BY (part1 INT, part2 STRING, part3 INT, part4 STRING)"
  val createQuery2 = "CREATE TABLE db1.dest (col1 INT, col2 INT) PARTITIONED BY (part1 INT, part2 STRING, part3 INT, part4 STRING)"
  val populateQuery =
    """ INSERT OVERWRITE TABLE db1.dest PARTITION(part1, part2, part3, part4)
      | SELECT
      |  col1,
      |  col2,
      |  ${partition:part1} as part1,
      |  ${partition:part2} as part2,
      |  0 as part3,
      |  "0" as part4
      | FROM db2.source1 S1
      |""".stripMargin
  val expected = "TableDependency(type=REF, name=dest, schema=db1, columns[col1, col2], partitions[part1, part2, part3=0, part4=0], tableDeps[db2.source1], colDeps[db2.source1.col1, db2.source1.col2])"
  testSuccess(
    creates = createQuery1 :: createQuery2 :: Nil,
    populates = populateQuery :: Nil,
    variables = new Variables("partition:part1" -> "${partition:part1}", "partition:part2" -> "${partition:part2}"),
    expected = Option(expected)
  )
}
// Nested suite: validation of column names between CREATE, VIEW and POPULATE.
"column name checking: " - {
  "a table with correct partition specification should be ok" in {
    val createQuery = "CREATE TABLE db1.dest (col1 INT) PARTITIONED BY (partA STRING)"
    val populateQuery = "INSERT OVERWRITE TABLE db1.dest PARTITION(partA) SELECT col1, 'A' as partA FROM db2.source"
    // Positional call: creates, views, populates.
    testSuccess(createQuery::Nil, Nil, populateQuery::Nil)
  }
  "a table with partition specification that differs in the CREATE and the POPULATE should fail" in {
    val createQuery = "CREATE TABLE db1.dest (col1 INT) PARTITIONED BY (partA STRING)"
    val populateQuery = "INSERT OVERWRITE TABLE db1.dest PARTITION(wrongPartition) SELECT col1, 'A' as wrongPartition FROM db2.source"
    testFailure(createQuery::Nil, Nil, populateQuery::Nil)
  }
  "a missing column from a VIEW should fail" in {
    val createQuery = "CREATE TABLE db2.source (col1 INT)"
    val viewQuery = "CREATE VIEW db2.view AS SELECT col1 as col2 FROM db2.source"
    val populateQuery = "INSERT OVERWRITE TABLE db1.dest SELECT col1 FROM db2.view"
    testFailure(createQuery::Nil, viewQuery::Nil, populateQuery::Nil)
  }
  "a query with JOIN with tables with same column names should be OK" in {
    val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
    val createQuery2 = "CREATE TABLE db2.source2 (col1 INT)"
    val populateQuery =
      """INSERT OVERWRITE TABLE db1.dest
        |SELECT T1.col1 FROM
        |(
        |  SELECT col1 FROM db2.source1
        |) T1
        |JOIN
        |(
        |  SELECT col1 FROM db2.source1
        |) T2
        |ON T1.col1 = T2.col1
        |""".stripMargin
    val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source1], colDeps[db2.source1.col1])"
    testSuccess(
      creates = createQuery1::createQuery2::Nil,
      populates = populateQuery::Nil,
      expected = expected
    )
  }
  "a column existing in a table used in a subquery but not kept in that subquery should fail" in {
    val createQuery = "CREATE TABLE db2.source (label_map MAP<STRING,STRING>)"
    val populateQuery =
      """FROM
        |(
        |  SELECT 1 as label
        |  FROM db2.source
        |) T
        |INSERT OVERWRITE TABLE db1.dest
        |SELECT label_map""".stripMargin
    testFailure(createQuery::Nil, Nil, populateQuery::Nil)
  }
  "a column used in a WHERE clause in a subquery but not kept should fail" in {
    val createQuery = "CREATE TABLE db2.source (col1 INT)"
    val populateQuery =
      """FROM (
        |  SELECT 1 as col2
        |  FROM db2.source
        |  WHERE col1 >= 1
        |) T
        |INSERT OVERWRITE TABLE db1.dest
        |SELECT col1""".stripMargin
    testFailure(createQuery::Nil, Nil, populateQuery::Nil)
  }
  "a column used in a WHERE clause in a CTE but not kept should fail" in {
    val createQuery = "CREATE TABLE db2.source (col1 INT)"
    val populateQuery =
      """WITH T AS (
        |  SELECT 1 as col2
        |  FROM db2.source
        |  WHERE col1 >= 1
        |)
        |INSERT OVERWRITE TABLE db1.dest
        |SELECT col1 FROM T""".stripMargin
    testFailure(createQuery::Nil, Nil, populateQuery::Nil)
  }
  "a * column FROM two sub-query should be correct" in {
    val populateQuery =
      """INSERT OVERWRITE TABLE db1.dest
        |SELECT *
        |FROM (SELECT 1 as col1) T1
        |JOIN (SELECT 2 as col2) T2
        |""".stripMargin
    val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1=1, col2=2])"
    testSuccess(
      populates = populateQuery::Nil,
      expected = expected
    )
  }
  // Ambiguity detection: an unqualified column resolvable in two scopes must fail,
  // except in DISTRIBUTE BY / SORT BY / ORDER BY where the output alias wins.
  "an ambiguous column available in two known tables should fail" in {
    val createQuery1 = "CREATE TABLE db2.source1 (id STRING)"
    val createQuery2 = "CREATE TABLE db2.source2 (id STRING)"
    val populateQuery =
      """
        | INSERT OVERWRITE TABLE user_model.daily_users
        | SELECT
        |   id
        | FROM db2.source1 S1
        | JOIN db2.source2 S2
        | ON S1.id = S2.id
        | """.stripMargin
    testFailure(createQuery1::createQuery2::Nil, Nil, populateQuery::Nil)
  }
  "an ambiguous column available in a known tables and a subquery should fail" in {
    val createQuery1 = "CREATE TABLE db2.source1 (id STRING)"
    val createQuery2 = "CREATE TABLE db2.source2 (id STRING)"
    val populateQuery =
      """
        | INSERT OVERWRITE TABLE user_model.daily_users
        | SELECT
        |   id
        | FROM db2.source1 S1
        | JOIN (SELECT id FROM db2.source2) S2
        | ON S1.id = S2.id
        | """.stripMargin
    testFailure(createQuery1::createQuery2::Nil, Nil, populateQuery::Nil)
  }
  // NOTE(review): `expected` is declared but unused — this test calls testFailure,
  // so the string is dead code kept from an earlier version; consider removing it.
  "an ambiguous column available in a known table and a LATERAL VIEW should fail" in {
    val createQuery = "CREATE TABLE db2.source (id STRING, number INT)"
    val populateQuery =
      """
        | INSERT OVERWRITE TABLE user_model.daily_users
        | SELECT
        |   number, id
        | FROM db2.source S
        | LATERAL VIEW OUTER explode(array(1)) LV as number
        | """.stripMargin
    val expected: String = "TableDependency(type=REF, name=daily_users, schema=user_model, columns[id, number2], tableDeps[db2.source], colDeps[db2.source.id, db2.source.number], bothColDeps[db2.source.number])"
    testFailure(createQuery::Nil, Nil, populateQuery::Nil)
  }
  "a correct query with a DISTRIBUTE BY and a one-time-prefixed column should be OK" in {
    val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
    val populateQuery =
      """
        | INSERT OVERWRITE TABLE db1.dest
        | SELECT S1.col1
        | FROM db2.source1 S1
        | DISTRIBUTE BY col1
        | """.stripMargin
    val expected = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source1], colDeps[db2.source1.col1], postColDeps[col1])"
    testSuccess(
      creates = createQuery1::Nil,
      populates = populateQuery::Nil,
      expected = expected
    )
  }
  "an ambiguous column in a DISTRIBUTE BY or SORT BY clause should be OK" in {
    val createQuery1 = "CREATE TABLE db2.source1 (col1 STRING)"
    val createQuery2 = "CREATE TABLE db2.source2 (col1 STRING)"
    val populateQuery =
      """ INSERT OVERWRITE TABLE user_model.daily_users
        | SELECT
        |   S1.col1
        | FROM db2.source1 S1
        | JOIN db2.source2 S2
        | ON S1.col1 = S2.col1
        | DISTRIBUTE BY col1
        | SORT BY col1
        | """.stripMargin
    // val expected = "TableDependency(type=REF, name=daily_users, schema=user_model, columns[id], tableDeps[db2.source1, db2.source2], colDeps[db2.source1.id, db2.source2.id], postColDeps[id])"
    // Positional call: creates, views, populates (no expected value asserted).
    testSuccess(createQuery1::createQuery2::Nil, Nil, populateQuery::Nil)
  }
"an ambiguous column in a ORDER BY clause and present in the SELECT should be OK" in {
val createQuery1 = "CREATE TABLE db2.source1 (id STRING)"
val createQuery2 = "CREATE TABLE db2.source2 (id STRING)"
val populateQuery =
""" INSERT OVERWRITE TABLE user_model.daily_users
| SELECT
| S1.id
| FROM db2.source1 S1
| JOIN db2.source2 S2
| ON S1.id = S2.id
| ORDER BY id
| """.stripMargin
val expected = "TableDependency(type=REF, name=daily_users, schema=user_model, columns[id], tableDeps[db2.source1, db2.source2], colDeps[db2.source1.id, db2.source2.id], postColDeps[id])"
testSuccess(
creates = createQuery1::createQuery2::Nil,
populates = populateQuery::Nil,
expected = expected
)
}
"an column in a ORDER BY but absent from the SELECT should fail" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
val populateQuery =
"""
| INSERT OVERWRITE TABLE user_model.daily_users
| SELECT
| col1 as col2
| FROM db2.source1
| ORDER BY col1
| """.stripMargin
testFailure(createQuery1::Nil, Nil, populateQuery::Nil)
}
"an ambiguous column in a ORDER BY clause and absent from the SELECT should fail" in {
val createQuery1 = "CREATE TABLE db2.source1 (id STRING)"
val createQuery2 = "CREATE TABLE db2.source2 (id STRING)"
val populateQuery =
""" INSERT OVERWRITE TABLE user_model.daily_users
| SELECT
| S1.id as toto
| FROM db2.source1 S1
| JOIN db2.source2 S2
| ON S1.id = S2.id
| ORDER BY id
| """.stripMargin
val expected = "TableDependency(type=REF, name=daily_users, schema=user_model, columns[id], tableDeps[db2.source1, db2.source2], colDeps[db2.source1.id, db2.source2.id], postColDeps[id])"
testFailure(createQuery1::createQuery2::Nil, Nil, populateQuery::Nil)
}
}
"column number checking : a table" - {
"with an incorrect number of columns should fail" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
val createQuery2 = "CREATE TABLE db1.dest (col1 INT, col2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest
| SELECT col1
| FROM db2.source1 S1
|""".stripMargin
testFailure(createQuery1:: createQuery2::Nil, Nil, populateQuery::Nil)
}
}
"partition number checking: " - {
"a table with dynamic partitioning" - {
"with an correct number of partition in the select should succeed" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
val createQuery2 = "CREATE TABLE db1.dest (col1 INT, col2 INT) PARTITIONED BY (part1 INT, part2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest PARTITION(part1, part2)
| SELECT
| col1,
| col2,
| "1" as part1,
| "2" as part2
| FROM db2.source1 S1
|""".stripMargin
val expected = "TableDependency(type=REF, name=dest, schema=db1, columns[col1, col2], partitions[part1=1, part2=2], tableDeps[db2.source1], colDeps[db2.source1.col1, db2.source1.col2])"
testSuccess(
creates = createQuery1::createQuery2 :: Nil,
populates = populateQuery::Nil,
expected = expected
)
}
"with an incorrect number of partition in the select should fail" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
val createQuery2 = "CREATE TABLE db1.dest (col1 INT, col2 INT) PARTITIONED BY (part1 INT, part2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest PARTITION(part1, part2)
| SELECT col1, col2, col1 as part1
| FROM db2.source1 S1
|""".stripMargin
testFailure(createQuery1::createQuery2 :: Nil, Nil, populateQuery::Nil)
}
"with an incorrect number of partition and a SELECT * should fail" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
val createQuery2 = "CREATE TABLE db1.dest (col1 INT, col2 INT) PARTITIONED BY (part1 INT, part2 INT)"
val populateQuery = """" INSERT OVERWRITE TABLE db1.dest PARTITION(part1, part2)
| SELECT *
| FROM db2.source1 S1
|""".stripMargin
testFailure(createQuery1:: createQuery2::Nil, Nil, populateQuery::Nil)
}
"with an incorrect number of declared partition should fail" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
val createQuery2 = "CREATE TABLE db1.dest (col1 INT, col2 INT) PARTITIONED BY (part1 INT, part2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest PARTITION(part1)
| SELECT col1, col2, col1 as part1, col2 as part2
| FROM db2.source1 S1
|""".stripMargin
testFailure(createQuery1::createQuery2:: Nil, Nil, populateQuery ::Nil)
}
"with an incorrect number of partition everywhere should fail" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
val createQuery2 = "CREATE TABLE db1.dest (col1 INT, col2 INT) PARTITIONED BY (part1 INT, part2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest PARTITION(part1)
| SELECT col1, col2, col1 as part1
| FROM db2.source1 S1
|""".stripMargin
testFailure(createQuery1::createQuery2:: Nil, Nil, populateQuery ::Nil)
}
}
"a table with no dynamic partitioning" - {
"with an correct number of partitions in the select should succeed" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
val createQuery2 = "CREATE TABLE db1.dest (col1 INT, col2 INT) PARTITIONED BY (part1 INT, part2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest PARTITION(part1=1, part2=2)
| SELECT col1, col2
| FROM db2.source1 S1
|""".stripMargin
val expected = "TableDependency(type=REF, name=dest, schema=db1, columns[col1, col2], partitions[part1=1, part2=2], tableDeps[db2.source1], colDeps[db2.source1.col1, db2.source1.col2])"
testSuccess(
creates = createQuery1::createQuery2 :: Nil,
populates = populateQuery::Nil,
expected = expected
)
}
"with an incorrect number of partitions in the select should fail" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
val createQuery2 = "CREATE TABLE db1.dest (col1 INT, col2 INT) PARTITIONED BY (part1 INT, part2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest PARTITION(part1=1, part2=2)
| SELECT col1, col2, col1 as part1
| FROM db2.source1 S1
|""".stripMargin
testFailure(createQuery1::createQuery2 :: Nil, Nil, populateQuery::Nil)
}
"with an incorrect number of declared partitions should fail" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
val createQuery2 = "CREATE TABLE db1.dest (col1 INT, col2 INT) PARTITIONED BY (part1 INT, part2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest PARTITION(part1=1)
| SELECT col1, col2
| FROM db2.source1 S1
|""".stripMargin
testFailure(createQuery1::createQuery2:: Nil, Nil, populateQuery ::Nil)
}
"with an incorrect number of partitions everywhere should fail" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT, col2 INT)"
val createQuery2 = "CREATE TABLE db1.dest (col1 INT, col2 INT) PARTITIONED BY (part1 INT, part2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest PARTITION(part1=1)
| SELECT col1, col2, col2 as part2
| FROM db2.source1 S1
|""".stripMargin
testFailure(createQuery1::createQuery2:: Nil, Nil, populateQuery ::Nil)
}
}
}
"a correct CREATE VIEW query with WHERE ... IN should be OK" in {
val populateQuery =
"""CREATE VIEW db1.view
|AS
|SELECT * FROM (SELECT 1 as client_id) A
|WHERE A.client_id NOT IN (SELECT 1 as client_id)
|""".stripMargin
testSuccess(
populates = populateQuery::Nil,
expected = None
)
}
"a correct query with subquery and distribute by should be OK" in {
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest
| SELECT T0.*
| FROM (
| SELECT D.day
| FROM db2.source D
| ) T0
| DISTRIBUTE BY day -- use this when generating many partitions
|""".stripMargin
testSuccess(
populates = populateQuery::Nil,
expected = None
)
}
"a correct query with LATERAL VIEW on a subquery should be OK" in {
val createQuery1 = "CREATE TABLE db2.source1 (arr ARRAY<INT>)"
val populateQuery =
"""
|INSERT OVERWRITE TABLE db1.dest
|SELECT a
| FROM (
| SELECT arr FROM db2.source1
| ) T
|LATERAL VIEW explode(T.arr) LV as a
|""".stripMargin
testSuccess(createQuery1::Nil, Nil, populateQuery::Nil)
}
"Creating a view with wrong column names should fail" in {
val createQuery = "CREATE TABLE db2.source (id STRING, number INT)"
val viewQuery = "CREATE VIEW db2.view AS SELECT id, *, T1.*, T2.* FROM db2.source T1 JOIN db2.source T2 "
testFailure(createQuery::Nil, viewQuery::Nil, Nil)
}
"a correct query with HAVING should succeed" in {
val createQuery = "CREATE TABLE db2.source (id STRING, number INT)"
val populateQuery =
"""
| INSERT OVERWRITE TABLE user_model.daily_users
| SELECT
| id,
| COUNT(1) as nb
| FROM db2.source
| GROUP BY id
| HAVING nb > 10
| """.stripMargin
val expected: String = "TableDependency(type=REF, name=daily_users, schema=user_model, columns[id, nb], tableDeps[db2.source], colDeps[db2.source.id], bothColDeps[nb])"
testSuccess(
creates = createQuery::Nil,
populates = populateQuery::Nil,
expected = expected
)
}
"a correct query with HAVING on aggregation should succeed" in {
val createQuery = "CREATE TABLE db2.source (id STRING, number INT)"
val populateQuery =
"""
| INSERT OVERWRITE TABLE user_model.daily_users
| SELECT
| id,
| COUNT(1) as nb
| FROM db2.source
| GROUP BY id
| HAVING SUM(number) > 10
| """.stripMargin
val expected: String = "TableDependency(type=REF, name=daily_users, schema=user_model, columns[id, nb], tableDeps[db2.source], colDeps[db2.source.id], bothColDeps[db2.source.number])"
testSuccess(
creates = createQuery::Nil,
populates = populateQuery::Nil,
expected = expected
)
}
"a correct query with HAVING on aggregation with same name should succeed" in {
val createQuery = "CREATE TABLE db2.source (id INT, number INT)"
val populateQuery =
"""INSERT OVERWRITE TABLE db1.dest
|SELECT
| T.id
|FROM ( SELECT * FROM db2.source ) T
|GROUP BY id
|HAVING T.number = 1
|""".stripMargin
val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[id], tableDeps[db2.source], colDeps[db2.source.*, db2.source.id])"
testSuccess(
creates = createQuery::Nil,
populates = populateQuery::Nil,
expected = expected
)
}
"wrong column name in HAVING" in {
val createQuery = "CREATE TABLE db2.source (col1 INT)"
val populateQuery = "INSERT OVERWRITE TABLE db1.dest SELECT COUNT(1) as num FROM db2.source HAVING unknown_column > 0"
testFailure(createQuery::Nil, Nil, populateQuery::Nil)
}
"a query with a lateral view should not be bugged" in {
val createQuery = "CREATE TABLE db2.source (label_map MAP<STRING,STRING>)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest
| SELECT label_map, label_key, label_value, TOK_FUNCTION, TOK_TABALIAS
| FROM db2.source
| LATERAL VIEW explode(label_map) T1 as label_key, label_value""".stripMargin
// val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[label_map, label_key, label_value], tableDeps[db2.source], colDeps[db2.source.label_map])"
testFailure(createQuery::Nil, Nil, populateQuery::Nil)
}
"a correct query with LEFT SEMI JOIN should be OK" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
val createQuery2 = "CREATE TABLE db2.source2 (col1 INT, col2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest
| SELECT col1
| FROM db2.source1 S1
| LEFT SEMI JOIN db2.source2 S2
| ON S1.col1 = S2.col1
|""".stripMargin
val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col1], tableDeps[db2.source1, db2.source2], colDeps[db2.source1.col1, db2.source2.col1])"
testSuccess(
creates = createQuery1::createQuery2::Nil,
populates = populateQuery::Nil,
expected = expected
)
}
"an incorrect query with LEFT SEMI JOIN should NOT be OK" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
val createQuery2 = "CREATE TABLE db2.source2 (col1 INT, col2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest
| SELECT col2
| FROM db2.source1 S1
| LEFT SEMI JOIN db2.source2 S2
| ON S1.col1 = S2.col1
|""".stripMargin
testFailure(createQuery1::createQuery2::Nil, Nil, populateQuery::Nil)
}
"an incorrect query with LEFT SEMI JOIN and same alias used twice should NOT be OK" in {
val createQuery1 = "CREATE TABLE db2.source1 (col1 INT)"
val createQuery2 = "CREATE TABLE db2.source2 (col1 INT, col2 INT)"
val populateQuery =
""" INSERT OVERWRITE TABLE db1.dest
| SELECT col1
| FROM db2.source1 S1
| LEFT SEMI JOIN db2.source2 S1
| ON S1.col1 = S2.col1
|""".stripMargin
testFailure(createQuery1::createQuery2::Nil, Nil, populateQuery::Nil)
}
"a query with joins should be correctly parsed" in {
val query =
""" INSERT OVERWRITE TABLE db1.Toto
| SELECT colA
| FROM db2.tutu AS T2
| JOIN db3.tutu AS T3
| ON T3.id=T2.id
| LEFT JOIN db4.tutu AS T4
| ON T4.id=T2.id
| RIGHT JOIN db5.tutu AS T5
| ON T5.id=T2.id
| FULL JOIN db6.tutu AS T6
| ON T6.id=T2.id
| LEFT OUTER JOIN db7.tutu AS T7
| ON T7.id=T2.id
| RIGHT OUTER JOIN db8.tutu AS T8
| ON T8.id=T2.id
| FULL OUTER JOIN db9.tutu AS T9
| ON T9.id=T2.id
| LEFT SEMI JOIN db10.tutu AS T10
| ON (T10.id=T2.id)
| CROSS JOIN db11.tutu AS T11
| ON T11.id=T2.id
| """.stripMargin
val expected: String = "TableDependency(type=REF, name=toto, schema=db1, columns[colA], tableDeps[db10.tutu, db11.tutu, db2.tutu, db3.tutu, db4.tutu, db5.tutu, db6.tutu, db7.tutu, db8.tutu, db9.tutu], colDeps[colA, db10.tutu.id, db11.tutu.id, db2.tutu.id, db3.tutu.id, db4.tutu.id, db5.tutu.id, db6.tutu.id, db7.tutu.id, db8.tutu.id, db9.tutu.id])"
testSuccess(
populates = query :: Nil,
expected = expected
)
}
"a correct query with a multi-insert should succeed" in {
val createQuery1 = "CREATE TABLE db2.source (col1 INT, col2 INT, col3 INT, col4 INT)"
val populateQuery =
"""FROM (SELECT col1, col2, col3, col4 FROM db2.source) T
|INSERT OVERWRITE TABLE db1.dest SELECT col1 WHERE col2 > 0
|INSERT OVERWRITE TABLE db1.dest SELECT col3 WHERE col4 < 0
|""".stripMargin
val expected: String = "TableDependency(type=REF, name=dest, schema=db1, columns[col3], tableDeps[db2.source], colDeps[db2.source.col1, db2.source.col2, db2.source.col3, db2.source.col4])"
testSuccess(
creates = createQuery1::Nil,
populates = populateQuery::Nil,
expected = expected
)
}
"a correct query with a table alias in a lateral view should succeed" in {
val createQuery = "CREATE TABLE db2.source (id STRING, number INT)"
val populateQuery =
"""
| INSERT OVERWRITE TABLE user_model.daily_users
| SELECT
| S.id, LV.number
| FROM db2.source S
| LATERAL VIEW OUTER explode(array(1)) LV as number
| """.stripMargin
val expected: String = "TableDependency(type=REF, name=daily_users, schema=user_model, columns[id, number], tableDeps[db2.source], colDeps[db2.source.id])"
testSuccess(
creates = createQuery::Nil,
populates = populateQuery::Nil,
expected = expected
)
}
}
| flaminem/flamy | src/test/scala/com/flaminem/flamy/parsing/model/TableDependencyTest.scala | Scala | apache-2.0 | 55,440 |
// Fixture class exposing a single parameterless method returning a constant.
// NOTE(review): this file looks like IDE resolve test data — comment placement
// may be significant to the harness; confirm before reformatting.
class A {
  def foo: Int = 45
}
object Test {
new A {
/* */foo
}
} | ilinum/intellij-scala | testdata/resolve2/bug/AnonymousClassMethods.scala | Scala | apache-2.0 | 74 |
import scala.annotation.{StaticAnnotation, compileTimeOnly}
import scala.language.experimental.macros
package object samurai {
  // Macro annotation: expanding `@sam` rewrites the annotated definitions via
  // SamuraiMacros.instImpl. Requires the macro-paradise compiler plugin;
  // @compileTimeOnly guarantees the annotation never survives to runtime.
  @compileTimeOnly("enable macro paradise to expand macro annotations")
  class sam extends StaticAnnotation {
    def macroTransform(annottees: Any*): Any = macro SamuraiMacros.instImpl
  }
}
| scalalandio/samurai | samurai/src/main/scala/samurai/package.scala | Scala | apache-2.0 | 323 |
/*
* Copyright (c) 2014, Brook 'redattack34' Heisler
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the ModularRayguns team nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.castlebravostudios.rayguns.items.misc
import com.castlebravostudios.rayguns.api.LensGrinderRecipeRegistry
import com.castlebravostudios.rayguns.mod.Config
import com.castlebravostudios.rayguns.mod.ModularRayguns
import net.minecraft.block.Block
import net.minecraft.item.Item
import net.minecraft.item.ItemStack
// Tier-2 gain medium crafting component. A singleton Minecraft item that only
// sets its creative tab, unlocalized name, and texture; recipe registration is
// presumably handled elsewhere (LensGrinderRecipeRegistry is imported but
// unused here — TODO confirm).
object Tier2GainMedium extends Item {
  setCreativeTab(ModularRayguns.raygunsTab)
  setUnlocalizedName("rayguns.Tier2GainMedium")
  setTextureName("rayguns:gain_medium_t2")
}
package models.hms.util
import models.hms.Transcode
import play.api.libs.json.Json
/**
* author: cvandrei
* since: 2016-02-17
*/
object TranscodeHelper {

  // Default field values shared by the model builders and the JSON fixtures
  // below; keep both representations in sync when changing any of them.
  val DEFAULT_SOURCE_TYPE = "Media"
  val DEFAULT_SOURCES = SourceHelper.defaultList
  val DEFAULT_COLLAPSED = true
  val DEFAULT_COLLAPSED_NAME = "collapsedName"
  val DEFAULT_DOWNLOAD_PROVISION = "HTTP"
  val DEFAULT_PUSH_FINISHED_NOTIFICATION = true
  val DEFAULT_PUSH_ERROR_NOTIFICATION = false
  val DEFAULT_PUSH_STATUS_NOTIFICATION = true
  val DEFAULT_PUSH_NOTIFICATION_CALLBACK = "http://server/path"

  // Fully-populated Transcode with all optional fields set.
  def default: Transcode = Transcode(DEFAULT_SOURCE_TYPE,
    DEFAULT_SOURCES,
    Some(DEFAULT_COLLAPSED),
    Some(DEFAULT_COLLAPSED_NAME),
    DEFAULT_DOWNLOAD_PROVISION,
    DEFAULT_PUSH_FINISHED_NOTIFICATION,
    DEFAULT_PUSH_ERROR_NOTIFICATION,
    DEFAULT_PUSH_STATUS_NOTIFICATION,
    DEFAULT_PUSH_NOTIFICATION_CALLBACK)

  // Minimal Transcode: the two optional fields (Collapsed, CollapsedName) are
  // left empty.
  def defaultMinimum: Transcode = Transcode(DEFAULT_SOURCE_TYPE,
    DEFAULT_SOURCES,
    None,
    None,
    DEFAULT_DOWNLOAD_PROVISION,
    DEFAULT_PUSH_FINISHED_NOTIFICATION,
    DEFAULT_PUSH_ERROR_NOTIFICATION,
    DEFAULT_PUSH_STATUS_NOTIFICATION,
    DEFAULT_PUSH_NOTIFICATION_CALLBACK)

  // JSON counterpart of `default`.
  // NOTE(review): booleans are rendered as quoted strings ("true"/"false") —
  // presumably this mirrors the HMS wire format; confirm before changing.
  def defaultJson: String = {
    val sources = Json.toJson(SourceHelper.defaultList)
    s"""{
       | "SourceType": "$DEFAULT_SOURCE_TYPE",
       | "Sources": $sources,
       | "Collapsed": "$DEFAULT_COLLAPSED",
       | "CollapsedName": "$DEFAULT_COLLAPSED_NAME",
       | "DownloadProvision": "$DEFAULT_DOWNLOAD_PROVISION",
       | "PushFinishedNotification": "$DEFAULT_PUSH_FINISHED_NOTIFICATION",
       | "PushErrorNotification": "$DEFAULT_PUSH_ERROR_NOTIFICATION",
       | "PushStatusNotification": "$DEFAULT_PUSH_STATUS_NOTIFICATION",
       | "PushNotificationCallback": "$DEFAULT_PUSH_NOTIFICATION_CALLBACK"
       |}"""
      .stripMargin
  }

  // JSON counterpart of `defaultMinimum` (no Collapsed / CollapsedName keys).
  def defaultJsonMinimum: String = {
    val sources = Json.toJson(SourceHelper.defaultList)
    s"""{
       | "SourceType": "$DEFAULT_SOURCE_TYPE",
       | "Sources": $sources,
       | "DownloadProvision": "$DEFAULT_DOWNLOAD_PROVISION",
       | "PushFinishedNotification": "$DEFAULT_PUSH_FINISHED_NOTIFICATION",
       | "PushErrorNotification": "$DEFAULT_PUSH_ERROR_NOTIFICATION",
       | "PushStatusNotification": "$DEFAULT_PUSH_STATUS_NOTIFICATION",
       | "PushNotificationCallback": "$DEFAULT_PUSH_NOTIFICATION_CALLBACK"
       |}"""
      .stripMargin
  }
}
| indarium/hbbTVPlugin | test/models/hms/util/TranscodeHelper.scala | Scala | agpl-3.0 | 2,471 |
package org.globalnames.parser
// Mix-in providing a ScientificNameParser whose version is pinned to a fixed
// string, so test expectations do not depend on the real build version.
trait TestParserInstance {
  val scientificNameParser: ScientificNameParser = new ScientificNameParser {
    override val version: String = "test_version"
  }
}
| GlobalNamesArchitecture/gnparser | parser-render/src/test/scala/org/globalnames/parser/TestParserInstance.scala | Scala | mit | 195 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.impl
import org.apache.spark.ml.linalg.BLAS
private[spark] object Utils {

  /**
   * Machine epsilon for doubles: computed once (lazily) by halving until
   * adding half of the candidate to 1.0 no longer changes it.
   */
  lazy val EPSILON = {
    var candidate = 1.0
    while ((1.0 + (candidate / 2.0)) != 1.0) {
      candidate /= 2.0
    }
    candidate
  }

  /**
   * Expand a packed upper-triangular matrix into a full symmetric n x n
   * matrix, both in column-major order.
   *
   * @param n The order of the n by n matrix.
   * @param triangularValues The upper triangular part of the matrix packed in
   *                         an array (column major).
   * @return A dense array representing the symmetric matrix in column major.
   */
  def unpackUpperTriangular(
      n: Int,
      triangularValues: Array[Double]): Array[Double] = {
    val full = new Array[Double](n * n)
    var src = 0
    var col = 0
    while (col < n) {
      var row = 0
      while (row <= col) {
        // Mirror each packed value into both (row, col) and (col, row).
        full(col * n + row) = triangularValues(src)
        full(row * n + col) = triangularValues(src)
        src += 1
        row += 1
      }
      col += 1
    }
    full
  }

  /**
   * Index into the packed upper-triangular representation for entry (i, j) of
   * the corresponding full symmetric matrix, i.e.
   * `unpackUpperTriangular(n, packed)(j * n + i) == packed(indexUpperTriangular(n, i, j))`.
   *
   * @param n The order of the n by n matrix.
   */
  def indexUpperTriangular(
      n: Int,
      i: Int,
      j: Int): Int = {
    require(i >= 0 && i < n, s"Expected 0 <= i < $n, got i = $i.")
    require(j >= 0 && j < n, s"Expected 0 <= j < $n, got j = $j.")
    // Normalize the coordinate pair so that lo <= hi, then index the packed
    // column-major triangle.
    val (lo, hi) = if (i <= j) (i, j) else (j, i)
    hi * (hi + 1) / 2 + lo
  }

  /**
   * Numerically stable `math.log(1 + math.exp(x))`: for large positive `x` the
   * naive form overflows (x > ~709.78), so it is rewritten as
   * `x + log1p(exp(-x))` on the positive branch.
   *
   * @param x a floating-point value as input.
   * @return the result of `math.log(1 + math.exp(x))`.
   */
  def log1pExp(x: Double): Double =
    if (x > 0) x + math.log1p(math.exp(-x)) else math.log1p(math.exp(x))

  /**
   * In-place softmax. If any entry is +infinity, that entry receives all the
   * probability mass (the first such entry wins) and everything else becomes 0.
   * Otherwise the maximum is subtracted before exponentiation for stability.
   */
  def softmax(values: Array[Double]): Unit = {
    val n = values.length
    var max = Double.MinValue
    var k = 0
    while (k < n) {
      val v = values(k)
      if (v.isPosInfinity) {
        // An infinite logit dominates: put all mass on it and stop.
        java.util.Arrays.fill(values, 0)
        values(k) = 1.0
        return
      } else if (v > max) {
        max = v
      }
      k += 1
    }

    var total = 0.0
    k = 0
    while (k < n) {
      val e = math.exp(values(k) - max)
      values(k) = e
      total += e
      k += 1
    }
    BLAS.javaBLAS.dscal(n, 1.0 / total, values, 1)
  }
}
| maropu/spark | mllib-local/src/main/scala/org/apache/spark/ml/impl/Utils.scala | Scala | apache-2.0 | 3,897 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.load
import java.util
import com.univocity.parsers.common.TextParsingException
import org.apache.hadoop.conf.Configuration
import org.apache.spark.TaskContext
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.{CarbonToSparkAdapter, Row}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
import org.apache.spark.util.LongAccumulator
import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException
import org.apache.carbondata.core.datastore.row.CarbonRow
import org.apache.carbondata.core.util.{CarbonProperties, ThreadLocalSessionInfo}
import org.apache.carbondata.processing.loading._
import org.apache.carbondata.processing.loading.converter.impl.RowConverterImpl
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException
import org.apache.carbondata.processing.loading.model.CarbonLoadModel
import org.apache.carbondata.processing.loading.parser.RowParser
import org.apache.carbondata.processing.loading.parser.impl.{RangeColumnParserImpl, RowParserImpl}
import org.apache.carbondata.processing.loading.row.CarbonRowBatch
import org.apache.carbondata.processing.loading.sort.SortStepRowHandler
import org.apache.carbondata.processing.loading.steps.{DataWriterProcessorStepImpl, SortProcessorStepImpl}
import org.apache.carbondata.processing.sort.sortdata.SortParameters
import org.apache.carbondata.processing.store.{CarbonFactHandler, CarbonFactHandlerFactory}
import org.apache.carbondata.processing.util.{CarbonBadRecordUtil, CarbonDataProcessorUtil}
import org.apache.carbondata.spark.rdd.{NewRddIterator, StringArrayRow}
import org.apache.carbondata.spark.util.CommonUtil
object DataLoadProcessorStepOnSpark {
private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
def toStringArrayRow(row: InternalRow, columnCount: Int): StringArrayRow = {
val outRow = new StringArrayRow(new Array[String](columnCount))
outRow.setValues(row.asInstanceOf[GenericInternalRow].values.asInstanceOf[Array[String]])
}
  // Adapts a Spark Row iterator to an Array[AnyRef] iterator by delegating to
  // NewRddIterator, which is built from the broadcast load model and the
  // current task context.
  def toRDDIterator(
      rows: Iterator[Row],
      modelBroadcast: Broadcast[CarbonLoadModel]): Iterator[Array[AnyRef]] = {
    new Iterator[Array[AnyRef]] {
      val iter = new NewRddIterator(rows, modelBroadcast.value, TaskContext.get())
      override def hasNext: Boolean = iter.hasNext
      override def next(): Array[AnyRef] = iter.next
    }
  }
def inputFunc(
rows: Iterator[Array[AnyRef]],
index: Int,
modelBroadcast: Broadcast[CarbonLoadModel],
rowCounter: LongAccumulator): Iterator[CarbonRow] = {
val model: CarbonLoadModel = modelBroadcast.value.getCopyWithTaskNo(index.toString)
val conf = DataLoadProcessBuilder.createConfiguration(model)
val rowParser = new RowParserImpl(conf.getDataFields, conf)
val isRawDataRequired = CarbonDataProcessorUtil.isRawDataRequired(conf)
TaskContext.get().addTaskFailureListener { (t: TaskContext, e: Throwable) =>
wrapException(e, model)
}
new Iterator[CarbonRow] {
override def hasNext: Boolean = rows.hasNext
override def next(): CarbonRow = {
var row : CarbonRow = null
if(isRawDataRequired) {
val rawRow = rows.next()
row = new CarbonRow(rowParser.parseRow(rawRow), rawRow)
} else {
row = new CarbonRow(rowParser.parseRow(rows.next()))
}
rowCounter.add(1)
row
}
}
}
  // Input step for CSV-shaped rows (StringArrayRow): parses each row's backing
  // String array into a CarbonRow, optionally keeping the raw values when a
  // later step needs them. `rowCounter` counts consumed rows.
  def inputFuncForCsvRows(
      rows: Iterator[StringArrayRow],
      index: Int,
      modelBroadcast: Broadcast[CarbonLoadModel],
      rowCounter: LongAccumulator): Iterator[CarbonRow] = {
    val model: CarbonLoadModel = modelBroadcast.value.getCopyWithTaskNo(index.toString)
    val conf = DataLoadProcessBuilder.createConfiguration(model)
    val rowParser = new RowParserImpl(conf.getDataFields, conf)
    val isRawDataRequired = CarbonDataProcessorUtil.isRawDataRequired(conf)
    // Surface task failures as data-loading exceptions.
    TaskContext.get().addTaskFailureListener { (t: TaskContext, e: Throwable) =>
      wrapException(e, model)
    }
    new Iterator[CarbonRow] {
      override def hasNext: Boolean = rows.hasNext
      override def next(): CarbonRow = {
        val rawRow = rows.next().values.asInstanceOf[Array[Object]]
        val row = if (isRawDataRequired) {
          new CarbonRow(rowParser.parseRow(rawRow), rawRow)
        } else {
          new CarbonRow(rowParser.parseRow(rawRow))
        }
        rowCounter.add(1)
        row
      }
    }
  }
def internalInputFunc(
rows: Iterator[InternalRow],
index: Int,
modelBroadcast: Broadcast[CarbonLoadModel],
rowCounter: Option[LongAccumulator],
rangeField: Option[DataField]): Iterator[CarbonRow] = {
val model: CarbonLoadModel = modelBroadcast.value.getCopyWithTaskNo(index.toString)
val conf = DataLoadProcessBuilder.createConfiguration(model)
val rowParser: RowParser = if (rangeField.isEmpty) {
new RowParserImpl(conf.getDataFields, conf)
} else {
new RangeColumnParserImpl(rangeField.get, conf)
}
val isRawDataRequired = CarbonDataProcessorUtil.isRawDataRequired(conf)
TaskContext.get().addTaskFailureListener { (t: TaskContext, e: Throwable) =>
wrapException(e, model)
}
new Iterator[CarbonRow] {
override def hasNext: Boolean = rows.hasNext
override def next(): CarbonRow = {
var row: CarbonRow = null
val rawRow =
rows.next().asInstanceOf[GenericInternalRow].values.asInstanceOf[Array[Object]]
if (isRawDataRequired) {
row = new CarbonRow(rowParser.parseRow(rawRow), rawRow)
} else {
row = new CarbonRow(rowParser.parseRow(rawRow))
}
if (rowCounter.isDefined) {
rowCounter.get.add(1)
}
row
}
}
}
  // Fused input + convert step: parses raw rows and immediately runs them
  // through the RowConverter (dictionary/bad-record handling). The converter
  // and bad-record logger are closed on BOTH task completion and task failure;
  // GlobalSortHelper.badRecordsLogger decides, via partialSuccessAccum, whether
  // bad records make the load a partial success.
  def inputAndConvertFunc(
      rows: Iterator[Array[AnyRef]],
      index: Int,
      modelBroadcast: Broadcast[CarbonLoadModel],
      partialSuccessAccum: LongAccumulator,
      rowCounter: LongAccumulator,
      keepActualData: Boolean = false): Iterator[CarbonRow] = {
    val model: CarbonLoadModel = modelBroadcast.value.getCopyWithTaskNo(index.toString)
    val conf = DataLoadProcessBuilder.createConfiguration(model)
    val rowParser = new RowParserImpl(conf.getDataFields, conf)
    val isRawDataRequired = CarbonDataProcessorUtil.isRawDataRequired(conf)
    val badRecordLogger = BadRecordsLoggerProvider.createBadRecordLogger(conf)
    // keepActualData makes the converter carry original values through (used
    // when a later step must see the pre-conversion data).
    if (keepActualData) {
      conf.getDataFields.foreach(_.setUseActualData(keepActualData))
    }
    val rowConverter = new RowConverterImpl(conf.getDataFields, conf, badRecordLogger)
    rowConverter.initialize()
    CarbonToSparkAdapter.addTaskCompletionListener {
      val hasBadRecord: Boolean = CarbonBadRecordUtil.hasBadRecord(model)
      close(conf, badRecordLogger, rowConverter)
      GlobalSortHelper.badRecordsLogger(model, partialSuccessAccum, hasBadRecord)
    }
    TaskContext.get().addTaskFailureListener { (t: TaskContext, e: Throwable) =>
      val hasBadRecord : Boolean = CarbonBadRecordUtil.hasBadRecord(model)
      close(conf, badRecordLogger, rowConverter)
      GlobalSortHelper.badRecordsLogger(model, partialSuccessAccum, hasBadRecord)
      // Re-wrap so the driver reports a data-loading failure.
      wrapException(e, model)
    }
    new Iterator[CarbonRow] {
      override def hasNext: Boolean = rows.hasNext
      override def next(): CarbonRow = {
        var row : CarbonRow = null
        if(isRawDataRequired) {
          val rawRow = rows.next()
          row = new CarbonRow(rowParser.parseRow(rawRow), rawRow)
        } else {
          row = new CarbonRow(rowParser.parseRow(rows.next()))
        }
        row = rowConverter.convert(row)
        rowCounter.add(1)
        row
      }
    }
  }
  /**
   * Convert step: converts already-parsed [[CarbonRow]]s using the row converter,
   * registering task listeners for resource cleanup and bad-record reporting.
   *
   * @param rows parsed rows for this partition
   * @param index task index, used to derive a per-task copy of the load model
   * @param modelBroadcast broadcast load model shared by all executors
   * @param partialSuccessAccum accumulator used to flag partial success when bad records exist
   * @param rowCounter accumulator incremented once per row emitted
   * @param keepActualData when true, data fields also retain the original (un-converted) values
   * @param isCompactionFlow whether this conversion runs as part of compaction (affects
   *                         bad-record logger creation)
   * @return lazy iterator of converted rows
   */
  def convertFunc(
      rows: Iterator[CarbonRow],
      index: Int,
      modelBroadcast: Broadcast[CarbonLoadModel],
      partialSuccessAccum: LongAccumulator,
      rowCounter: LongAccumulator,
      keepActualData: Boolean = false,
      isCompactionFlow: Boolean = false): Iterator[CarbonRow] = {
    val model: CarbonLoadModel = modelBroadcast.value.getCopyWithTaskNo(index.toString)
    val conf = DataLoadProcessBuilder.createConfiguration(model)
    val badRecordLogger = BadRecordsLoggerProvider.createBadRecordLogger(conf, isCompactionFlow)
    if (keepActualData) {
      conf.getDataFields.foreach(_.setUseActualData(keepActualData))
    }
    val rowConverter = new RowConverterImpl(conf.getDataFields, conf, badRecordLogger)
    rowConverter.initialize()
    // On normal task completion: close resources and publish bad-record status.
    CarbonToSparkAdapter.addTaskCompletionListener {
      val hasBadRecord: Boolean = CarbonBadRecordUtil.hasBadRecord(model)
      close(conf, badRecordLogger, rowConverter)
      GlobalSortHelper.badRecordsLogger(model, partialSuccessAccum, hasBadRecord)
    }
    // On task failure: same cleanup, then rethrow wrapped as a loading exception.
    TaskContext.get().addTaskFailureListener { (t: TaskContext, e: Throwable) =>
      val hasBadRecord : Boolean = CarbonBadRecordUtil.hasBadRecord(model)
      close(conf, badRecordLogger, rowConverter)
      GlobalSortHelper.badRecordsLogger(model, partialSuccessAccum, hasBadRecord)
      wrapException(e, model)
    }
    new Iterator[CarbonRow] {
      override def hasNext: Boolean = rows.hasNext
      override def next(): CarbonRow = {
        rowCounter.add(1)
        rowConverter.convert(rows.next())
      }
    }
  }
  /**
   * Sampling-time convert step: converts rows using only the single range field,
   * presumably to compute range boundaries for range sort.
   *
   * NOTE(review): unlike [[inputAndConvertFunc]]/[[convertFunc]] this registers no
   * task-completion listener, so `rowConverter`/`badRecordLogger` are never closed
   * here — confirm this is intentional for the sampling flow.
   *
   * @param rows rows for this partition
   * @param rangeField the single field used for range evaluation
   * @param index task index, used to derive a per-task copy of the load model
   * @param modelBroadcast broadcast load model shared by all executors
   * @return lazy iterator of rows converted on the range field only
   */
  def sampleConvertFunc(
      rows: Iterator[CarbonRow],
      rangeField: DataField,
      index: Int,
      modelBroadcast: Broadcast[CarbonLoadModel]
  ): Iterator[CarbonRow] = {
    val model: CarbonLoadModel = modelBroadcast.value.getCopyWithTaskNo(index.toString)
    val conf = DataLoadProcessBuilder.createConfiguration(model)
    val badRecordLogger = BadRecordsLoggerProvider.createBadRecordLogger(conf)
    // Converter operates on the range field alone, not the full schema.
    val rowConverter = new RowConverterImpl(Array(rangeField), conf, badRecordLogger)
    rowConverter.initialize()
    TaskContext.get().addTaskFailureListener { (t: TaskContext, e: Throwable) =>
      wrapException(e, model)
    }
    new Iterator[CarbonRow] {
      override def hasNext: Boolean = rows.hasNext
      override def next(): CarbonRow = {
        rowConverter.convert(rows.next())
      }
    }
  }
def close(conf: CarbonDataLoadConfiguration,
badRecordLogger: BadRecordsLogger,
rowConverter: RowConverterImpl): Unit = {
if (badRecordLogger != null) {
badRecordLogger.closeStreams()
CarbonBadRecordUtil.renameBadRecord(conf)
}
if (rowConverter != null) {
rowConverter.finish()
}
}
def convertTo3Parts(
rows: Iterator[CarbonRow],
index: Int,
modelBroadcast: Broadcast[CarbonLoadModel],
rowCounter: LongAccumulator): Iterator[CarbonRow] = {
val model: CarbonLoadModel = modelBroadcast.value.getCopyWithTaskNo(index.toString)
val conf = DataLoadProcessBuilder.createConfiguration(model)
val sortParameters = SortParameters.createSortParameters(conf)
val sortStepRowHandler = new SortStepRowHandler(sortParameters)
TaskContext.get().addTaskFailureListener { (t: TaskContext, e: Throwable) =>
wrapException(e, model)
}
new Iterator[CarbonRow] {
override def hasNext: Boolean = rows.hasNext
override def next(): CarbonRow = {
val row =
new CarbonRow(sortStepRowHandler.convertRawRowTo3Parts(rows.next().getData))
rowCounter.add(1)
row
}
}
}
def convertTo3PartsFromObjectArray(
rows: Iterator[Array[AnyRef]],
index: Int,
model: CarbonLoadModel,
rowCounter: LongAccumulator): Iterator[CarbonRow] = {
val conf = DataLoadProcessBuilder.createConfiguration(model)
val sortParameters = SortParameters.createSortParameters(conf)
val sortStepRowHandler = new SortStepRowHandler(sortParameters)
TaskContext.get().addTaskFailureListener { (t: TaskContext, e: Throwable) =>
wrapException(e, model)
}
new Iterator[CarbonRow] {
override def hasNext: Boolean = rows.hasNext
override def next(): CarbonRow = {
val row =
new CarbonRow(sortStepRowHandler.convertRawRowTo3Parts(rows.next()))
rowCounter.add(1)
row
}
}
}
def writeFunc(
rows: Iterator[CarbonRow],
index: Int,
model: CarbonLoadModel,
rowCounter: LongAccumulator,
conf: Configuration): Unit = {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME,
conf.get(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME))
ThreadLocalSessionInfo.setConfigurationToCurrentThread(conf)
var tableName: String = null
var dataWriter: DataWriterProcessorStepImpl = null
try {
val storeLocation = CommonUtil.getTempStoreLocations(index.toString)
val conf = DataLoadProcessBuilder.createConfiguration(model, storeLocation)
tableName = model.getTableName
dataWriter = new DataWriterProcessorStepImpl(conf)
val dataHandlerModel = dataWriter.getDataHandlerModel
var dataHandler: CarbonFactHandler = null
var rowsNotExist = true
while (rows.hasNext) {
if (rowsNotExist) {
rowsNotExist = false
dataHandler = CarbonFactHandlerFactory.createCarbonFactHandler(dataHandlerModel)
dataHandler.initialise()
}
val row = dataWriter.processRow(rows.next(), dataHandler)
rowCounter.add(1)
row
}
if (!rowsNotExist) {
dataWriter.finish(dataHandler)
}
} catch {
case e: CarbonDataWriterException =>
LOGGER.error("Failed for table: " + tableName + " in Data Writer Step", e)
throw new CarbonDataLoadingException("Error while initializing data handler : " +
e.getMessage)
case e: Exception =>
LOGGER.error("Failed for table: " + tableName + " in Data Writer Step", e)
throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage, e)
} finally {
// close the dataWriter once the write in done success or fail. if not closed then thread to
// to prints the rows processed in each step for every 10 seconds will never exit.
if (null != dataWriter) {
dataWriter.close()
}
// clean up the folders and files created locally for data load operation
TableProcessingOperations.deleteLocalDataLoadFolderLocation(model, false, false)
}
}
private def wrapException(e: Throwable, model: CarbonLoadModel): Unit = {
e match {
case e: CarbonDataLoadingException => throw e
case e: TextParsingException =>
LOGGER.error("Data Loading failed for table " + model.getTableName, e)
throw new CarbonDataLoadingException("Data Loading failed for table " + model.getTableName,
e)
case e: Exception =>
LOGGER.error("Data Loading failed for table " + model.getTableName, e)
throw new CarbonDataLoadingException("Data Loading failed for table " + model.getTableName,
e)
}
}
  /**
   * Combined sort + write step: wires input -> sort -> write processor steps and
   * runs the writer, which pulls rows through the whole chain.
   *
   * @param rows rows to sort and write for this partition
   * @param index task index, used for the per-task model copy and temp store locations
   * @param modelBroadcast broadcast load model shared by all executors
   * @param rowCounter accumulator for processed rows (not incremented here; the
   *                   processor steps track progress themselves)
   * @param conf Hadoop configuration propagated to the current thread
   */
  def sortAndWriteFunc(
      rows: Iterator[CarbonRow],
      index: Int,
      modelBroadcast: Broadcast[CarbonLoadModel],
      rowCounter: LongAccumulator,
      conf: Configuration) {
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME,
        conf.get(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME))
    ThreadLocalSessionInfo.setConfigurationToCurrentThread(conf)
    var model: CarbonLoadModel = null
    var tableName: String = null
    var inputProcessor: NewInputProcessorStepImpl = null
    var sortProcessor: SortProcessorStepImpl = null
    var dataWriter: DataWriterProcessorStepImpl = null
    try {
      model = modelBroadcast.value.getCopyWithTaskNo(index.toString)
      val storeLocation = CommonUtil.getTempStoreLocations(index.toString)
      // NOTE: this local `conf` (load configuration) shadows the Hadoop `conf` parameter.
      val conf = DataLoadProcessBuilder.createConfiguration(model, storeLocation)
      tableName = model.getTableName
      // Chain: input step feeds the sort step, which feeds the writer.
      inputProcessor = new NewInputProcessorStepImpl(conf, rows)
      sortProcessor = new SortProcessorStepImpl(conf, inputProcessor)
      dataWriter = new DataWriterProcessorStepImpl(conf, sortProcessor)
      dataWriter.initialize()
      dataWriter.execute()
    } catch {
      case e: CarbonDataWriterException =>
        LOGGER.error("Failed for table: " + tableName + " in Data Writer Step", e)
        throw new CarbonDataLoadingException("Error while initializing data handler : " +
          e.getMessage)
      case e: Exception =>
        LOGGER.error("Failed for table: " + tableName + " in Data Writer Step", e)
        throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage, e)
    } finally {
      // Close the dataWriter whether the write succeeded or failed; otherwise the
      // thread that prints per-step row counts every 10 seconds never exits.
      if (null != dataWriter) {
        dataWriter.close()
      }
      // Clean up the folders and files created locally for the data load operation.
      TableProcessingOperations.deleteLocalDataLoadFolderLocation(model, false, false)
    }
  }
}
/**
 * Input processor step that adapts a plain `Iterator[CarbonRow]` into the batched
 * iterator interface expected by the data-load pipeline (used for range sort).
 *
 * The second constructor argument of the parent step (the child step) is null
 * because this is the head of the chain.
 */
class NewInputProcessorStepImpl(configuration: CarbonDataLoadConfiguration,
    rows: Iterator[CarbonRow]) extends AbstractDataLoadProcessorStep(configuration, null) {
  /**
   * Transform the data as per the implementation.
   *
   * @return Array of Iterator with data. It can be processed parallel if implementation class wants
   * @throws CarbonDataLoadingException
   */
  override def execute(): Array[util.Iterator[CarbonRowBatch]] = {
    val batchSize = CarbonProperties.getInstance.getBatchSize
    // Single partition: one batch iterator over the whole row iterator.
    val iteratorArray = new Array[util.Iterator[CarbonRowBatch]](1)
    iteratorArray(0) = new util.Iterator[CarbonRowBatch] {
      // NOTE: one mutable batch instance is shared across all calls to next();
      // each outer next() just resets its counter, so the consumer must fully
      // drain a batch before requesting the next one.
      val rowBatch = new CarbonRowBatch(batchSize) {
        // Rows handed out of the current batch; capped at batchSize.
        var count = 0
        override def next(): CarbonRow = {
          count = count + 1
          rows.next()
        }
        override def hasNext: Boolean = rows.hasNext && count < batchSize
        // Start a fresh batch window over the underlying iterator.
        def reset(): Unit = {
          count = 0
        }
      }
      override def next(): CarbonRowBatch = {
        rowBatch.reset()
        rowBatch
      }
      override def hasNext: Boolean = {
        rows.hasNext
      }
    }
    iteratorArray
  }
  /**
   * Get the step name for logging purpose.
   *
   * @return Step name
   */
  override protected def getStepName: String = {
    "Input Processor for RANGE_SORT"
  }
}
| zzcclp/carbondata | integration/spark/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala | Scala | apache-2.0 | 19,417 |
/* Copyright 2017-19, Emmanouil Antonios Platanios. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.platanios.tensorflow.api.tensors.ops
import org.platanios.tensorflow.api.core.Shape
import org.platanios.tensorflow.api.core.exception.InvalidShapeException
import org.platanios.tensorflow.api.core.types._
import org.platanios.tensorflow.api.implicits.Implicits._
import org.platanios.tensorflow.api.tensors._
import org.platanios.tensorflow.api.utilities.DefaultsTo.IntDefault
import org.platanios.tensorflow.jni.generated.tensors.{Math => NativeTensorOpsMath}
/** Contains functions for executing general math-related ops.
*
* @author Emmanouil Antonios Platanios
*/
trait Math {
/** $OpDocMathSelect
*
* @group MathOps
* @param condition Condition tensor.
* @param x Tensor which may have the same shape as `condition`. If `condition` has rank `1`, then `t` may
* have a higher rank, but its first dimension must match the size of `condition`.
* @param y Tensor with the same data type and shape as `t`.
* @return Result as a new tensor.
*/
def select[T: TF](
condition: Tensor[Boolean],
x: Tensor[T],
y: Tensor[T]
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.select(
executionContext.value.nativeHandle, condition.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathRange
*
* @group MathOps
* @param start Rank 0 (i.e., scalar) tensor that contains the starting value of the number sequence.
* @param limit Rank 0 (i.e., scalar) tensor that contains the ending value (exclusive) of the number sequence.
* @param delta Rank 0 (i.e., scalar) tensor that contains the difference between consecutive numbers in the
* sequence.
* @return Result as a new tensor.
*/
def range[T: TF : IsNumeric](
start: Tensor[T],
limit: Tensor[T],
delta: Tensor[T] = null
): Tensor[T] = {
val deltaWithDefault = if (delta == null) Tensor.ones(start.dataType, Shape()) else delta
Tensor.fromNativeHandle[T](NativeTensorOpsMath.range(
executionContext.value.nativeHandle, start.nativeHandle, limit.nativeHandle,
deltaWithDefault.nativeHandle))
}
/** $OpDocMathLinspace
*
* @group MathOps
* @param start Rank 0 (i.e., scalar) tensor that contains the starting value of the number sequence.
* @param stop Rank 0 (i.e., scalar) tensor that contains the ending value (inclusive) of the number
* sequence.
* @param numberOfValues Rank 0 (i.e., scalar) tensor that contains the number of values in the number sequence.
* @return Result as a new tensor.
*/
def linspace[T: TF : IsTruncatedHalfOrFloatOrDouble, I: TF : IsIntOrLong](
start: Tensor[T],
stop: Tensor[T],
numberOfValues: Tensor[I]
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.linSpace(
executionContext.value.nativeHandle, start.nativeHandle, stop.nativeHandle, numberOfValues.nativeHandle))
}
/** $OpDocMathAddN
*
* @group MathOps
* @param inputs Input tensors.
* @return Result as a new tensor.
*/
def addN[T: TF : IsNumeric](inputs: Seq[Tensor[T]]): Tensor[T] = {
if (inputs.length == 1)
inputs.head
else
Tensor.fromNativeHandle[T](NativeTensorOpsMath.addN(
executionContext.value.nativeHandle, inputs.map(_.nativeHandle).toArray))
}
// TODO: [OPS] accumulateN
//region Unary Ops
/** $OpDocMathAbs
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def abs[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
if (x.dataType.isComplex) {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.complexAbs(
executionContext.value.nativeHandle, t.nativeHandle, x.dataType.cValue))
})
} else {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.abs(executionContext.value.nativeHandle, t.nativeHandle))
})
}
}
/** $OpDocMathNegate
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def negate[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.neg(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathReciprocal
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def reciprocal[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.reciprocal(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathSquare
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def square[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.square(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathSqrt
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def sqrt[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sqrt(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathRsqrt
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def rsqrt[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.rsqrt(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathExp
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def exp[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.exp(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathExpm1
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def expm1[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.expm1(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathLog
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def log[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.log(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathLog1p
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def log1p[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.log1p(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathSin
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def sin[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sin(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathCos
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def cos[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.cos(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathTan
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def tan[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.tan(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathAsin
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def asin[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.asin(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathAcos
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def acos[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.acos(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathAtan
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def atan[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.atan(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathSinh
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def sinh[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sinh(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathCosh
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def cosh[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.cosh(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathTanh
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def tanh[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.tanh(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathAsinh
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def asinh[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.asinh(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathAcosh
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def acosh[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.acosh(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathAtanh
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def atanh[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.atanh(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathLogGamma
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def logGamma[T: TF : IsFloatOrDouble, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.lgamma(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathDigamma
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def digamma[T: TF : IsFloatOrDouble, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.digamma(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathErf
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def erf[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.erf(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathErfc
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def erfc[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.erfc(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathSigmoid
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def sigmoid[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sigmoid(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathLogSigmoid
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def logSigmoid[T: TF : IsReal, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
negate(NN.softplus(negate(x)))
}
/** $OpDocMathSign
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def sign[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sign(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathRound
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def round[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](x: TL[T])(implicit
ev: TensorOps.Aux[TL, T]
): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.round(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathRoundInt
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def roundInt[T: TF : IsHalfOrFloatOrDouble, TL[A] <: TensorLike[A]](
x: TL[T]
)(implicit ev: TensorOps.Aux[TL, T]): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.rint(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathFloor
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def floor[T: TF : IsHalfOrFloatOrDouble, TL[A] <: TensorLike[A]](
x: TL[T]
)(implicit ev: TensorOps.Aux[TL, T]): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.floor(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathCeil
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def ceil[T: TF : IsHalfOrFloatOrDouble, TL[A] <: TensorLike[A]](
x: TL[T]
)(implicit ev: TensorOps.Aux[TL, T]): TL[T] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.ceil(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathIsNaN
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def isNaN[T: TF : IsHalfOrFloatOrDouble, TL[A] <: TensorLike[A]](
x: TL[T]
)(implicit ev: TensorOps.Aux[TL, T]): TL[Boolean] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.isNan(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathIsInf
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def isInf[T: TF : IsHalfOrFloatOrDouble, TL[A] <: TensorLike[A]](
x: TL[T]
)(implicit ev: TensorOps.Aux[TL, T]): TL[Boolean] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.isInf(executionContext.value.nativeHandle, t.nativeHandle))
})
}
/** $OpDocMathIsFinite
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def isFinite[T: TF : IsHalfOrFloatOrDouble, TL[A] <: TensorLike[A]](
x: TL[T]
)(implicit ev: TensorOps.Aux[TL, T]): TL[Boolean] = {
ev.applyUnary(x, t => {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.isFinite(
executionContext.value.nativeHandle, t.nativeHandle))
})
}
//endregion Unary Ops
//region Binary Ops
/** $OpDocMathAdd
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def add[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.add(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathSubtract
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def subtract[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sub(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathMultiply
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def multiply[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.mul(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathDivide
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def divide[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.div(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathFloorDivide
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
@deprecated("Use `truncateDivide` instead.", "0.1")
def floorDivide[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.floorDiv(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathTruncateDivide
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def truncateDivide[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.truncateDiv(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathRealDivide
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def realDivide[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.realDiv(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathSquaredDifference
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def squaredDifference[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.squaredDifference(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathMod
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def mod[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.mod(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathFloorMod
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def floorMod[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.floorMod(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathTruncateMod
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def truncateMod[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.truncateMod(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathPow
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def pow[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.pow(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
// TODO: !!! [TENSORS] Fix this.
/** $OpDocMathIgammac
*
* @group MathOps
* @param a First input tensor.
* @param x Second input tensor.
* @return Result as a new tensor.
*/
def igammac[T: TF : IsFloatOrDouble](a: Tensor[T], x: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.igammac(
executionContext.value.nativeHandle, a.nativeHandle, x.nativeHandle))
}
/** $OpDocMathIgamma
*
* @group MathOps
* @param a First input tensor.
* @param x Second input tensor.
* @return Result as a new tensor.
*/
def igamma[T: TF : IsFloatOrDouble](a: Tensor[T], x: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.igamma(
executionContext.value.nativeHandle, a.nativeHandle, x.nativeHandle))
}
/** $OpDocMathZeta
*
* @group MathOps
* @param x First input tensor.
* @param q Second input tensor.
* @return Result as a new tensor.
*/
def zeta[T: TF : IsFloatOrDouble](x: Tensor[T], q: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.zeta(
executionContext.value.nativeHandle, x.nativeHandle, q.nativeHandle))
}
/** $OpDocMathPolygamma
*
* @group MathOps
* @param n First input tensor.
* @param x Second input tensor.
* @return Result as a new tensor.
*/
def polygamma[T: TF : IsFloatOrDouble](n: Tensor[T], x: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.polygamma(
executionContext.value.nativeHandle, n.nativeHandle, x.nativeHandle))
}
/** $OpDocMathAtan2
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def atan2[T: TF : IsFloatOrDouble](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.atan2(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathMinimum
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def minimum[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.minimum(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathMaximum
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def maximum[T: TF : IsNotQuantized](x: Tensor[T], y: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.maximum(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
//endregion Binary Ops
/** $OpDocMathIncompleteBeta
*
* @group MathOps
* @param a First input tensor.
* @param b Second input tensor.
* @param x Third input tensor.
* @return Result as a new tensor.
*/
def incompleteBeta[T: TF : IsFloatOrDouble](a: Tensor[T], b: Tensor[T], x: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.betainc(
executionContext.value.nativeHandle, a.nativeHandle, b.nativeHandle, x.nativeHandle))
}
//region Logical Ops
/** $OpDocMathLogicalNot
*
* @group MathOps
* @param x Input tensor.
* @return Result as a new tensor.
*/
def logicalNot(x: Tensor[Boolean]): Tensor[Boolean] = {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.logicalNot(
executionContext.value.nativeHandle, x.nativeHandle))
}
/** $OpDocMathLogicalAnd
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def logicalAnd(x: Tensor[Boolean], y: Tensor[Boolean]): Tensor[Boolean] = {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.logicalAnd(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathLogicalOr
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def logicalOr(x: Tensor[Boolean], y: Tensor[Boolean]): Tensor[Boolean] = {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.logicalOr(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathLogicalXOr
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def logicalXOr(x: Tensor[Boolean], y: Tensor[Boolean]): Tensor[Boolean] = {
logicalAnd(logicalOr(x, y), logicalNot(logicalAnd(x, y)))
}
//endregion Logical Ops
//region Comparison Ops
/** $OpDocMathEqual
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def equal[T: TF](x: Tensor[T], y: Tensor[T]): Tensor[Boolean] = {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.equal(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathNotEqual
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def notEqual[T: TF](x: Tensor[T], y: Tensor[T]): Tensor[Boolean] = {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.notEqual(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
/** $OpDocMathApproximatelyEqual
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @param tolerance Comparison tolerance value.
* @return Result as a new tensor.
*/
def approximatelyEqual[T: TF : IsNumeric](
x: Tensor[T],
y: Tensor[T],
tolerance: Float = 0.00001f
): Tensor[Boolean] = {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.approximateEqual(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle, tolerance))
}
  /** $OpDocMathLess
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def less[T: TF : IsNumeric](x: Tensor[T], y: Tensor[T]): Tensor[Boolean] = {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.less(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
  /** $OpDocMathLessEqual
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def lessEqual[T: TF : IsNumeric](x: Tensor[T], y: Tensor[T]): Tensor[Boolean] = {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.lessEqual(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
  /** $OpDocMathGreater
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def greater[T: TF : IsNumeric](x: Tensor[T], y: Tensor[T]): Tensor[Boolean] = {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.greater(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
  /** $OpDocMathGreaterEqual
*
* @group MathOps
* @param x First input tensor.
* @param y Second input tensor.
* @return Result as a new tensor.
*/
def greaterEqual[T: TF : IsNumeric](x: Tensor[T], y: Tensor[T]): Tensor[Boolean] = {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.greaterEqual(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle))
}
//endregion Comparison Ops
//region Reduction Ops
  /** Returns the axes to reduce over when a reduction op is called with `axes = null`
    * (i.e., all axes of `tensorLike`), avoiding run-time rank ops when the rank is
    * statically known. */
  private def defaultReductionAxes[T: TF, TL[A] <: TensorLike[A]](
      tensorLike: TL[T]
  ): Tensor[Int] = {
    // Fast path: Avoid creating range and rank ops if the rank is known statically.
    // NOTE(review): the generic argument in these type patterns is erased at run time;
    // the match discriminates only on the container type (dense vs. indexed slices vs. sparse).
    val reductionAxes = tensorLike match {
      case t: Tensor[T] if t.rank > -1 => (0 until t.rank).toTensor
      case t: TensorIndexedSlices[T] if t.denseShape.shape.isFullyDefined =>
        // For indexed slices, the rank is the first entry of the dense shape's shape.
        (0 until t.denseShape.shape(0)).toTensor
      case t: SparseTensor[T] if t.denseShape.shape.isFullyDefined =>
        (0 until t.denseShape.shape(0)).toTensor
      case _ => // Otherwise, we rely on range and rank to do the right thing at run-time.
        range(0, Basic.rank(tensorLike))
    }
    reductionAxes
  }
/** $OpDocMathSum
*
* @group MathOps
* @param input Input tensor to reduce.
* @param axes Tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def sum[T: TF : IsNumeric, I: IntDefault : TF : IsIntOrLong](
input: Tensor[T],
axes: Tensor[I] = null,
keepDims: Boolean = false
): Tensor[T] = {
if (input.rank == 0) {
input
} else if (axes == null) {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sum(
executionContext.value.nativeHandle, input.nativeHandle, defaultReductionAxes(input).nativeHandle, keepDims))
} else {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sum(
executionContext.value.nativeHandle, input.nativeHandle, axes.nativeHandle, keepDims))
}
}
/** $OpDocMathMean
*
* @group MathOps
* @param input Input tensor to reduce.
* @param axes Tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def mean[T: TF : IsNumeric, I: IntDefault : TF : IsIntOrLong](
input: Tensor[T],
axes: Tensor[I] = null,
keepDims: Boolean = false
): Tensor[T] = {
if (input.rank == 0) {
input
} else if (axes == null) {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.mean(
executionContext.value.nativeHandle, input.nativeHandle, defaultReductionAxes(input).nativeHandle, keepDims))
} else {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.mean(
executionContext.value.nativeHandle, input.nativeHandle, axes.nativeHandle, keepDims))
}
}
/** $OpDocMathProd
*
* @group MathOps
* @param input Input tensor to reduce.
* @param axes Tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def prod[T: TF : IsNotQuantized, I: IntDefault : TF : IsIntOrLong](
input: Tensor[T],
axes: Tensor[I] = null,
keepDims: Boolean = false
): Tensor[T] = {
if (input.rank == 0) {
input
} else if (axes == null) {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.prod(
executionContext.value.nativeHandle, input.nativeHandle, defaultReductionAxes(input).nativeHandle, keepDims))
} else {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.prod(
executionContext.value.nativeHandle, input.nativeHandle, axes.nativeHandle, keepDims))
}
}
/** $OpDocMathMin
*
* @group MathOps
* @param input Input tensor to reduce.
* @param axes Tensor containing the axes to reduce.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def min[T: TF : IsNotQuantized, I: IntDefault : TF : IsIntOrLong](
input: Tensor[T],
axes: Tensor[I] = null,
keepDims: Boolean = false
): Tensor[T] = {
if (input.rank == 0) {
input
} else if (axes == null) {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.min(
executionContext.value.nativeHandle, input.nativeHandle, defaultReductionAxes(input).nativeHandle, keepDims))
} else {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.min(
executionContext.value.nativeHandle, input.nativeHandle, axes.nativeHandle, keepDims))
}
}
/** $OpDocMathMax
*
* @group MathOps
* @param input Input tensor to reduce.
* @param axes Tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def max[T: TF : IsNotQuantized, I: IntDefault : TF : IsIntOrLong](
input: Tensor[T],
axes: Tensor[I] = null,
keepDims: Boolean = false
): Tensor[T] = {
if (input.rank == 0) {
input
} else if (axes == null) {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.max(
executionContext.value.nativeHandle, input.nativeHandle, defaultReductionAxes(input).nativeHandle, keepDims))
} else {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.max(
executionContext.value.nativeHandle, input.nativeHandle, axes.nativeHandle, keepDims))
}
}
/** $OpDocMathAll
*
* @group MathOps
* @param input Input tensor to reduce.
* @param axes Tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def all[I: IntDefault : TF : IsIntOrLong](
input: Tensor[Boolean],
axes: Tensor[I] = null,
keepDims: Boolean = false
): Tensor[Boolean] = {
if (input.rank == 0) {
input
} else if (axes == null) {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.all(
executionContext.value.nativeHandle, input.nativeHandle, defaultReductionAxes(input).nativeHandle, keepDims))
} else {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.all(
executionContext.value.nativeHandle, input.nativeHandle, axes.nativeHandle, keepDims))
}
}
/** $OpDocMathAny
*
* @group MathOps
* @param input Input tensor to reduce.
* @param axes Tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def any[I: IntDefault : TF : IsIntOrLong](
input: Tensor[Boolean],
axes: Tensor[I] = null,
keepDims: Boolean = false
): Tensor[Boolean] = {
if (input.rank == 0) {
input
} else if (axes == null) {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.any(
executionContext.value.nativeHandle, input.nativeHandle, defaultReductionAxes(input).nativeHandle, keepDims))
} else {
Tensor.fromNativeHandle[Boolean](NativeTensorOpsMath.any(
executionContext.value.nativeHandle, input.nativeHandle, axes.nativeHandle, keepDims))
}
}
/** $OpDocMathLogSumExp
*
* @group MathOps
* @param input Input tensor to reduce.
* @param axes Integer sequence containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
  def logSumExp[T: TF : IsNotQuantized](
      input: Tensor[T],
      axes: Seq[Int] = null,
      keepDims: Boolean = false
  ): Tensor[T] = {
    // Scalars are already fully reduced.
    if (input.rank == 0) {
      input
    } else {
      // Implicit conversion from Seq[Int] to Tensor[Int].
      // NOTE(review): unlike the other reduction ops in this file, a `null` `axes`
      // is not special-cased before this conversion — confirm the implicit
      // conversion tolerates `null` as the scaladoc above promises.
      val axesTensor: Tensor[Int] = axes
      // Numerically stable log-sum-exp: subtract the (gradient-stopped) max
      // before exponentiating, then add it back after taking the logarithm.
      val maxValue = Basic.stopGradient(max(input, axesTensor, keepDims = true))
      val result = add(log(sum(exp(subtract(input, maxValue)), axesTensor, keepDims = true)), maxValue)
      // The inner reductions keep dims so that broadcasting against `input` works;
      // squeeze the reduced axes away afterwards unless the caller wants them kept.
      if (keepDims)
        result
      else
        Basic.squeeze(result, axes)
    }
  }
/** $OpDocMathCountNonZero
*
* @group MathOps
* @param input Input tensor to reduce.
* @param axes Tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def countNonZero[T: TF : IsNumeric, I: IntDefault : TF : IsIntOrLong](
input: Tensor[T],
axes: Tensor[I] = null,
keepDims: Boolean = false
): Tensor[Long] = {
sum(notEqual(input, Tensor.zeros(input.dataType, Shape())).castTo[Long], axes, keepDims)
}
/** $OpDocMathArgmin
*
* @group MathOps
* @param input Input tensor.
* @param axes Integer tensor containing the axes to reduce.
* @return Result as a new tensor.
*/
def argmin[T: TF : IsNotQuantized, I: TF : IsIntOrLong](
input: Tensor[T],
axes: Tensor[I]
): Tensor[Long] = {
argmin(input, axes, INT64)
}
/** $OpDocMathArgmin
*
* @group MathOps
* @param input Input tensor.
* @param axes Integer tensor containing the axes to reduce.
* @param outputDataType Data type for the output tensor.
* @return Result as a new tensor.
*/
def argmin[T: TF : IsNotQuantized, I: TF : IsIntOrLong, IR: TF : IsIntOrLong](
input: Tensor[T],
axes: Tensor[I],
outputDataType: DataType[IR]
): Tensor[IR] = {
Tensor.fromNativeHandle[IR](NativeTensorOpsMath.argMin(
executionContext.value.nativeHandle, input.nativeHandle, axes.nativeHandle, outputDataType.cValue))
}
/** $OpDocMathArgmax
*
* @group MathOps
* @param input Input tensor.
* @param axes Integer tensor containing the axes to reduce.
* @return Result as a new tensor.
*/
def argmax[T: TF : IsNotQuantized, I: TF : IsIntOrLong](
input: Tensor[T],
axes: Tensor[I]
): Tensor[Long] = {
argmax(input, axes, INT64)
}
/** $OpDocMathArgmax
*
* @group MathOps
* @param input Input tensor.
* @param axes Integer tensor containing the axes to reduce.
* @param outputDataType Data type for the output tensor.
* @return Result as a new tensor.
*/
def argmax[T: TF : IsNotQuantized, I: TF : IsIntOrLong, IR: TF : IsIntOrLong](
input: Tensor[T],
axes: Tensor[I],
outputDataType: DataType[IR]
): Tensor[IR] = {
Tensor.fromNativeHandle[IR](NativeTensorOpsMath.argMax(
executionContext.value.nativeHandle, input.nativeHandle, axes.nativeHandle, outputDataType.cValue))
}
/** $OpDocMathCumsum
*
* @group MathOps
* @param input Input tensor.
* @param axis Tensor containing the axis along which to perform the cumulative sum.
* @param exclusive Boolean value indicating whether to perform an exclusive cumulative sum.
* @param reverse Boolean value indicating whether to perform a reverse cumulative sum.
* @return Result as a new tensor.
*/
def cumsum[T: TF : IsNotQuantized, I: TF : IsIntOrLong](
input: Tensor[T],
axis: Tensor[I],
exclusive: Boolean = false,
reverse: Boolean = false
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.cumsum(
executionContext.value.nativeHandle, input.nativeHandle, axis.nativeHandle, exclusive, reverse))
}
/** $OpDocMathCumprod
*
* @group MathOps
* @param input Input tensor.
* @param axis Tensor containing the axis along which to perform the cumulative product.
* @param exclusive Boolean value indicating whether to perform an exclusive cumulative product.
* @param reverse Boolean value indicating whether to perform a reverse cumulative product.
* @return Result as a new tensor.
*/
def cumprod[T: TF : IsNotQuantized, I: TF : IsIntOrLong](
input: Tensor[T],
axis: Tensor[I],
exclusive: Boolean = false,
reverse: Boolean = false
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.cumprod(
executionContext.value.nativeHandle, input.nativeHandle, axis.nativeHandle, exclusive, reverse))
}
//endregion Reduction Ops
/** $OpDocMathBinCount
*
* @group MathOps
* @param input Tensor containing non-negative values.
* @param dataType If `weights` is `null`, this determines the data type used for the output tensor (i.e., the
* tensor containing the bin counts).
* @param weights If not `null`, this tensor must have the same shape as `input`. For each value in `input`, the
* corresponding bin count will be incremented by the corresponding weight instead of `1`.
* @param minLength If not `null`, this ensures the output has length at least `minLength`, padding with zeros at
* the end, if necessary.
* @param maxLength If not `null`, this skips values in `input` that are equal or greater than `maxLength`,
* ensuring that the output has length at most `maxLength`.
* @return Result as a new tensor.
*/
  def binCount[T: TF : IsIntOrLongOrFloatOrDouble](
      input: Tensor[Int],
      dataType: DataType[T],
      weights: Tensor[T] = null,
      minLength: Tensor[Int] = null,
      maxLength: Tensor[Int] = null
  ): Tensor[T] = {
    // Output length is `max(input) + 1` for non-empty input; the boolean
    // `inputNonEmpty` is cast to 0/1 and used as a factor so that empty input
    // yields a zero-length output.
    val inputNonEmpty = greater(prod(Basic.shape(input).toInt), 0)
    var outputSize = inputNonEmpty.toInt * add(max(input), Tensor.ones[Int](Shape()))
    // Clamp the output length into [minLength, maxLength] when bounds are given.
    if (minLength != null)
      outputSize = maximum(minLength, outputSize)
    if (maxLength != null)
      outputSize = minimum(maxLength, outputSize)
    val effectiveWeights = {
      if (weights != null) {
        weights
      } else if (dataType == null) {
        // No weights and no requested data type: pass scalar Int zeros.
        // NOTE(review): presumably the native kernel treats a scalar zeros tensor
        // as "no weights" (count 1 per occurrence) — confirm against the op spec.
        Tensor.zeros[Int](Shape.scalar())
      } else {
        // No weights: scalar zeros of the requested output data type.
        Tensor.zeros(dataType, Shape.scalar())
      }
    }
    Tensor.fromNativeHandle[T](NativeTensorOpsMath.bincount(
      executionContext.value.nativeHandle, input.nativeHandle, outputSize.nativeHandle, effectiveWeights.nativeHandle))
  }
//region Segment Ops
/** $OpDocMathSegmentSum
*
* @group MathOps
* @param data Data (must have a numeric data type -- i.e., representing a number).
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @return Result as a new tensor.
*/
def segmentSum[T: TF : IsNumeric, I: TF : IsIntOrLong](
data: Tensor[T],
segmentIndices: Tensor[I]
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.segmentSum(
executionContext.value.nativeHandle, data.nativeHandle, segmentIndices.nativeHandle))
}
/** $OpDocMathSegmentMean
*
* @group MathOps
* @param data Data (must have a numeric data type -- i.e., representing a number).
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @return Result as a new tensor.
*/
def segmentMean[T: TF : IsNotQuantized, I: TF : IsIntOrLong](
data: Tensor[T],
segmentIndices: Tensor[I]
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.segmentMean(
executionContext.value.nativeHandle, data.nativeHandle, segmentIndices.nativeHandle))
}
/** $OpDocMathSegmentProd
*
* @group MathOps
* @param data Data (must have a numeric data type -- i.e., representing a number).
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @return Result as a new tensor.
*/
def segmentProd[T: TF : IsNumeric, I: TF : IsIntOrLong](
data: Tensor[T],
segmentIndices: Tensor[I]
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.segmentProd(
executionContext.value.nativeHandle, data.nativeHandle, segmentIndices.nativeHandle))
}
/** $OpDocMathSegmentMin
*
* @group MathOps
* @param data Data (must have a numeric data type -- i.e., representing a number).
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @return Result as a new tensor.
*/
def segmentMin[T: TF : IsReal, I: TF : IsIntOrLong](
data: Tensor[T],
segmentIndices: Tensor[I]
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.segmentMin(
executionContext.value.nativeHandle, data.nativeHandle, segmentIndices.nativeHandle))
}
/** $OpDocMathSegmentMax
*
* @group MathOps
* @param data Data (must have a numeric data type -- i.e., representing a number).
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @return Result as a new tensor.
*/
def segmentMax[T: TF : IsReal, I: TF : IsIntOrLong](
data: Tensor[T],
segmentIndices: Tensor[I]
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.segmentMax(
executionContext.value.nativeHandle, data.nativeHandle, segmentIndices.nativeHandle))
}
/** $OpDocMathUnsortedSegmentSum
*
* @group MathOps
* @param data Data tensor.
* @param segmentIndices Segment indices.
* @param segmentsNumber Number of segments.
* @return Result as a new tensor.
*/
def unsortedSegmentSum[T: TF : IsNumeric, I1: TF : IsIntOrLong, I2: TF : IsIntOrLong](
data: Tensor[T],
segmentIndices: Tensor[I1],
segmentsNumber: Tensor[I2]
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.unsortedSegmentSum(
executionContext.value.nativeHandle, data.nativeHandle, segmentIndices.nativeHandle, segmentsNumber.nativeHandle))
}
// TODO: [TENSORS] Missing 'unsortedSegmentMean'.
// TODO: [TENSORS] Missing 'unsortedSegmentProd'.
// TODO: [TENSORS] Missing 'unsortedSegmentMin'.
/** $OpDocMathUnsortedSegmentMax
*
* @group MathOps
* @param data Data tensor.
* @param segmentIndices Segment indices.
* @param segmentsNumber Number of segments.
* @return Result as a new tensor.
*/
def unsortedSegmentMax[T: TF : IsReal, I1: TF : IsIntOrLong, I2: TF : IsIntOrLong](
data: Tensor[T],
segmentIndices: Tensor[I1],
segmentsNumber: Tensor[I2]
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.unsortedSegmentMax(
executionContext.value.nativeHandle, data.nativeHandle, segmentIndices.nativeHandle, segmentsNumber.nativeHandle))
}
// TODO: [TENSORS] Missing 'unsortedSegmentSqrtN'.
/** $OpDocMathSparseSegmentSum
*
* @group MathOps
* @param data Data tensor.
* @param indices One-dimensional tensor with rank equal to that of `segmentIndices`.
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @param numSegments Optional scalar indicating the size of the output tensor.
* @return Result as a new tensor.
*/
def sparseSegmentSum[T: TF : IsReal, I1: TF : IsIntOrLong, I2: IntDefault : TF : IsIntOrLong](
data: Tensor[T],
indices: Tensor[I1],
segmentIndices: Tensor[Int],
numSegments: Tensor[I2] = null
): Tensor[T] = {
if (numSegments == null) {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sparseSegmentSum(
executionContext.value.nativeHandle, data.nativeHandle, indices.nativeHandle, segmentIndices.nativeHandle))
} else {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sparseSegmentSumWithNumSegments(
executionContext.value.nativeHandle, data.nativeHandle, indices.nativeHandle, segmentIndices.nativeHandle,
numSegments.nativeHandle))
}
}
/** $OpDocMathSparseSegmentMean
*
* @group MathOps
* @param data Data tensor.
* @param indices One-dimensional tensor with rank equal to that of `segmentIndices`.
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @param numSegments Optional scalar indicating the size of the output tensor.
* @return Result as a new tensor.
*/
def sparseSegmentMean[T: TF : IsReal, I1: TF : IsIntOrLong, I2: IntDefault : TF : IsIntOrLong](
data: Tensor[T],
indices: Tensor[I1],
segmentIndices: Tensor[Int],
numSegments: Tensor[I2] = null
): Tensor[T] = {
if (numSegments == null) {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sparseSegmentMean(
executionContext.value.nativeHandle, data.nativeHandle, indices.nativeHandle, segmentIndices.nativeHandle))
} else {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sparseSegmentMeanWithNumSegments(
executionContext.value.nativeHandle, data.nativeHandle, indices.nativeHandle, segmentIndices.nativeHandle,
numSegments.nativeHandle))
}
}
/** $OpDocMathSparseSegmentSumSqrtN
*
* @group MathOps
* @param data Data tensor.
* @param indices One-dimensional tensor with rank equal to that of `segmentIndices`.
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @param numSegments Optional scalar indicating the size of the output tensor.
* @return Result as a new tensor.
*/
def sparseSegmentSumSqrtN[T: TF : IsReal, I1: TF : IsIntOrLong, I2: IntDefault : TF : IsIntOrLong](
data: Tensor[T],
indices: Tensor[I1],
segmentIndices: Tensor[Int],
numSegments: Tensor[I2] = null
): Tensor[T] = {
if (numSegments == null) {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sparseSegmentSqrtN(
executionContext.value.nativeHandle, data.nativeHandle, indices.nativeHandle, segmentIndices.nativeHandle))
} else {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.sparseSegmentSqrtNWithNumSegments(
executionContext.value.nativeHandle, data.nativeHandle, indices.nativeHandle, segmentIndices.nativeHandle,
numSegments.nativeHandle))
}
}
//endregion Segment Ops
//region Matrix Ops
/** $OpDocMathDiag
*
* @group MathOps
* @param diagonal Diagonal values, represented as a rank-`K` tensor, where `K` can be at most `3`.
* @return Result as a new tensor.
*/
def diag[T: TF : IsNotQuantized](diagonal: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.diag(
executionContext.value.nativeHandle, diagonal.nativeHandle))
}
/** $OpDocMathDiagPart
*
* @group MathOps
* @param input Rank-`K` input tensor, where `K` is either `2`, `4`, or `6`.
* @return Result as a new tensor.
*/
def diagPart[T: TF : IsNotQuantized](input: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.diagPart(
executionContext.value.nativeHandle, input.nativeHandle))
}
/** $OpDocMathMatrixDiag
*
* @group MathOps
* @param diagonal Rank-`K` input tensor, where `K >= 1`.
* @return Result as a new tensor with rank equal to `K + 1` and shape equal to the shape of `diagonal`, with its
* last dimension duplicated.
*/
def matrixDiag[T: TF](diagonal: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.matrixDiag(
executionContext.value.nativeHandle, diagonal.nativeHandle))
}
/** $OpDocMathMatrixSetDiag
*
* @group MathOps
* @param input Rank-`K+1` tensor, where `K >= 2`.
* @param diagonal Rank-`K` tensor, where `K >= 1`.
* @return Result as a new tensor with rank equal to `K + 1` and shape equal to the shape of `input`.
*/
def matrixSetDiag[T: TF](input: Tensor[T], diagonal: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle(NativeTensorOpsMath.matrixSetDiag(
executionContext.value.nativeHandle, input.nativeHandle, diagonal.nativeHandle))
}
/** $OpDocMathMatrixDiagPart
*
* @group MathOps
* @param input Rank-`K` tensor, where `K >= 2`.
* @return Result as a new tensor containing the diagonal(s) and having shape equal to
* `input.shape[:-2] + [min(input.shape[-2:])]`.
*/
def matrixDiagPart[T: TF](input: Tensor[T]): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.matrixDiagPart(
executionContext.value.nativeHandle, input.nativeHandle))
}
/** $OpDocMathMatrixBandPart
*
* @group MathOps
* @param input Input tensor.
* @param numSubDiagonals Scalar tensor that contains the number of sub-diagonals to keep. If negative,
* the entire lower triangle is kept.
* @param numSuperDiagonals Scalar tensor that contains the number of super-diagonals to keep. If negative,
* the entire upper triangle is kept.
* @return Result as a new tensor containing the expected banded tensor and has rank `K` and same shape as `input`.
*/
def matrixBandPart[T: TF, I: TF : IsIntOrLong](
input: Tensor[T],
numSubDiagonals: Tensor[I],
numSuperDiagonals: Tensor[I]
): Tensor[T] = {
Tensor.fromNativeHandle[T](NativeTensorOpsMath.matrixBandPart(
executionContext.value.nativeHandle, input.nativeHandle, numSubDiagonals.nativeHandle,
numSuperDiagonals.nativeHandle))
}
/** $OpDocMathTrace
*
* @group MathOps
* @param input Input tensor.
* @return Result as a new tensor.
*/
def trace[T: TF : IsNumeric](input: Tensor[T]): Tensor[T] = {
sum(matrixDiagPart(input), axes = -1)
}
/** $OpDocMathScalarMul
*
* @group MathOps
* @param scalar Scalar tensor.
* @param tensor Tensor to multiply the scalar tensor with.
* @return Result as a new tensor.
*/
def scalarMul[T: TF : IsNotQuantized, TL[A] <: TensorLike[A]](
scalar: Tensor[T],
tensor: TL[T]
)(implicit ev: TensorOps.Aux[TL, T]): TL[T] = {
ev.applyUnary(tensor, t => multiply(scalar, t))
}
// TODO: [TENSORS] The following type constraints are wrong.
/** $OpDocMathMatmul
*
* @group MathOps
* @param a First input tensor.
* @param b Second input tensor.
* @param transposeA If `true`, `a` is transposed before the multiplication.
* @param transposeB If `true`, `b` is transposed before the multiplication.
* @param conjugateA If `true`, `a` is conjugated before the multiplication.
* @param conjugateB If `true`, `b` is conjugated before the multiplication.
* @param aIsSparse If `true`, `a` is treated as a sparse matrix (i.e., it is assumed it contains many zeros).
* @param bIsSparse If `true`, `b` is treated as a sparse matrix (i.e., it is assumed it contains many zeros).
* @return Result as a new tensor.
*/
def matmul[T: TF : IsNotQuantized](
a: Tensor[T],
b: Tensor[T],
transposeA: Boolean = false,
transposeB: Boolean = false,
conjugateA: Boolean = false,
conjugateB: Boolean = false,
aIsSparse: Boolean = false,
bIsSparse: Boolean = false
): Tensor[T] = {
val sparseMatMulDataTypes = Set[DataType[_]](BFLOAT16, FLOAT32)
if (!aIsSparse && !bIsSparse && (a.rank == -1 || a.rank > 2) && (b.rank == -1 || b.rank > 2)) {
// "BatchMatMul" does not support transpose, so we conjugate the matrix and use adjoint instead.
// The "conj" op is a no-op for real matrices.
val (x, adjointX) = transposeConjugateToAdjoint(a, transposeA, conjugateA)
val (y, adjointY) = transposeConjugateToAdjoint(b, transposeB, conjugateB)
Tensor.fromNativeHandle(NativeTensorOpsMath.batchMatMul(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle, adjointX, adjointY))
} else if ((aIsSparse || bIsSparse) &&
sparseMatMulDataTypes.contains(a.dataType) &&
sparseMatMulDataTypes.contains(b.dataType)) {
val (x, transposeX) = transposeConjugateToTranspose(a, transposeA, conjugateA)
val (y, transposeY) = transposeConjugateToTranspose(b, transposeB, conjugateB)
Tensor.fromNativeHandle(NativeTensorOpsMath.sparseMatMul(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle, transposeX, transposeY,
aIsSparse, bIsSparse))
} else {
val (x, transposeX) = transposeConjugateToTranspose(a, transposeA, conjugateA)
val (y, transposeY) = transposeConjugateToTranspose(b, transposeB, conjugateB)
Tensor.fromNativeHandle(NativeTensorOpsMath.matMul(
executionContext.value.nativeHandle, x.nativeHandle, y.nativeHandle, transposeX, transposeY))
}
}
  /** Rewrites a (transpose, conjugate) request into a tensor plus a single boolean
    * "adjoint" flag for kernels (e.g., "BatchMatMul") that only support adjoint.
    * Conjugation is applied eagerly for complex tensors when it cannot be folded
    * into the adjoint flag; it is a no-op for real tensors, so those pass through. */
  private[this] def transposeConjugateToAdjoint[T: TF : IsNotQuantized](
      tensor: Tensor[T],
      transpose: Boolean,
      conj: Boolean
  ): (Tensor[T], Boolean) = {
    // TODO: [TYPES] These runtime checks are not elegant.
    (transpose, conj) match {
      case (false, false) => (tensor, false)
      // Conjugate only: must be materialized, since adjoint = transpose + conjugate.
      case (false, true) if tensor.dataType == COMPLEX64 =>
        (conjugate(tensor.asInstanceOf[Tensor[ComplexFloat]]).asInstanceOf[Tensor[T]], false)
      case (false, true) if tensor.dataType == COMPLEX128 =>
        (conjugate(tensor.asInstanceOf[Tensor[ComplexDouble]]).asInstanceOf[Tensor[T]], false)
      // Real data: conjugation is the identity, so nothing to do.
      case (false, true) => (tensor, false)
      // Transpose only: adjoint would also conjugate, so pre-conjugate complex
      // tensors to cancel the conjugation performed by the adjoint.
      case (true, false) if tensor.dataType == COMPLEX64 =>
        (conjugate(tensor.asInstanceOf[Tensor[ComplexFloat]]).asInstanceOf[Tensor[T]], true)
      case (true, false) if tensor.dataType == COMPLEX128 =>
        (conjugate(tensor.asInstanceOf[Tensor[ComplexDouble]]).asInstanceOf[Tensor[T]], true)
      case (true, false) => (tensor, true)
      // Transpose + conjugate is exactly the adjoint.
      case (true, true) => (tensor, true)
    }
  }
  /** Rewrites a (transpose, conjugate) request into a tensor plus a single boolean
    * "transpose" flag for kernels (e.g., "MatMul", "SparseMatMul") that only support
    * plain transpose. Any requested conjugation is materialized eagerly for complex
    * tensors; it is a no-op for real tensors, so those pass through unchanged. */
  private[this] def transposeConjugateToTranspose[T: TF : IsNotQuantized](
      tensor: Tensor[T],
      transpose: Boolean,
      conj: Boolean
  ): (Tensor[T], Boolean) = {
    // TODO: [TYPES] These runtime checks are not elegant.
    (transpose, conj) match {
      case (false, false) => (tensor, false)
      // Conjugate only: materialize the conjugation for complex tensors.
      case (false, true) if tensor.dataType == COMPLEX64 =>
        (conjugate(tensor.asInstanceOf[Tensor[ComplexFloat]]).asInstanceOf[Tensor[T]], false)
      case (false, true) if tensor.dataType == COMPLEX128 =>
        (conjugate(tensor.asInstanceOf[Tensor[ComplexDouble]]).asInstanceOf[Tensor[T]], false)
      // Real data: conjugation is the identity.
      case (false, true) => (tensor, false)
      // Transpose only: forwarded directly to the kernel's transpose flag.
      case (true, false) => (tensor, true)
      // Transpose + conjugate: conjugate eagerly, transpose via the flag.
      case (true, true) if tensor.dataType == COMPLEX64 =>
        (conjugate(tensor.asInstanceOf[Tensor[ComplexFloat]]).asInstanceOf[Tensor[T]], true)
      case (true, true) if tensor.dataType == COMPLEX128 =>
        (conjugate(tensor.asInstanceOf[Tensor[ComplexDouble]]).asInstanceOf[Tensor[T]], true)
      case (true, true) => (tensor, true)
    }
  }
/** $OpDocMathCross
  *
  * @group MathOps
  * @param  a First input tensor.
  * @param  b Second input tensor.
  * @return Result as a new tensor.
  */
def cross[T: TF : IsReal](a: Tensor[T], b: Tensor[T]): Tensor[T] = {
  // Invoke the native kernel directly and wrap the resulting handle.
  val resultHandle = NativeTensorOpsMath.cross(
    executionContext.value.nativeHandle, a.nativeHandle, b.nativeHandle)
  Tensor.fromNativeHandle[T](resultHandle)
}
/** Dynamic version (i.e., where `numAxes` may be a tensor) of the `tensorDot` op.
  *
  * $OpDocMathTensorDot
  * @group MathOps
  * @param  a       First tensor.
  * @param  b       Second tensor.
  * @param  numAxes Number of axes to contract.
  * @return Created op output.
  * @throws InvalidShapeException If `numAxes` is not a scalar.
  */
@throws[InvalidShapeException]
def tensorDot[T: TF : IsNotQuantized](
    a: Tensor[T],
    b: Tensor[T],
    numAxes: Tensor[Int]
): Tensor[T] = {
  if (numAxes.rank != 0)
    throw InvalidShapeException("'numAxes' must be a scalar.")
  // Contract the trailing `numAxes` axes of `a` against the leading `numAxes` axes of `b`.
  val axesA = range(subtract(a.rank, numAxes), a.rank)
  val axesB = range(0, numAxes)
  tensorDot(a, b, axesA, axesB)
}
/** Dynamic version (i.e., where `axesA` and `axesB` may be tensors) of the `tensorDot` op.
  *
  * $OpDocMathTensorDot
  *
  * @group MathOps
  * @param  a     First tensor.
  * @param  b     Second tensor.
  * @param  axesA Axes to contract in `a`.
  * @param  axesB Axes to contract in `b`.
  * @return Created op output.
  * @throws InvalidShapeException If `axesA` or `axesB` is not a vector.
  */
@throws[InvalidShapeException]
def tensorDot[T: TF : IsNotQuantized](
    a: Tensor[T],
    b: Tensor[T],
    axesA: Tensor[Int],
    axesB: Tensor[Int]
): Tensor[T] = {
  if (axesA.rank != 1)
    throw InvalidShapeException("'axesA' must be a vector.")
  if (axesB.rank != 1)
    throw InvalidShapeException("'axesB' must be a vector.")

  /** Helper method to perform transpose and reshape for the tensor contraction op. This method is
    * helpful in reducing `tensorDot` to `matmul` using the `transpose` and the `reshape` ops. The
    * method takes a tensor and performs the correct transpose and reshape operations for the
    * provided indices. It returns the reshaped tensor as well as a list of indices necessary to
    * reshape the tensor back to its proper shape after the matrix multiplication.
    *
    * @param  a       Tensor being reshaped.
    * @param  axes    Sequence of unique indices of axes of `a`.
    * @param  flipped If `true`, the method assumes that `a` is the second argument in the
    *                 contraction operation.
    * @return Tuple that contains: (i) the reshaped tensor `a` that allows contraction via
    *         `matmul`, and (ii) a tensor that contains the shape of the free axes.
    */
  def tensorDotReshape(a: Tensor[T], axes: Tensor[Int], flipped: Boolean = false): (Tensor[T], Tensor[Int]) = {
    val shapeA = Basic.shape(a)
    val rankA = Basic.rank(a)
    // Normalize negative axis indices into the range [0, rankA) by adding the rank to them.
    val mappedAxes = ((axes >= 0).toInt * axes) + ((axes < 0).toInt * (axes + rankA.toInt))
    // "Free" axes are all axes of `a` that are not being contracted over.
    val (free, _) = Basic.listDiff(Math.range(0, rankA), mappedAxes, indicesDataType = Int)
    val freeAxes = Basic.gather(shapeA, free)
    val axesAxes = Basic.gather(shapeA, mappedAxes)
    // Collapse the free axes and the contracted axes into one dimension each.
    val prodFree = freeAxes.prod()
    val prodAxes = axesAxes.prod()
    // For the second operand the contracted axes go first; for the first operand they go last, so
    // that the contraction becomes a plain matrix multiplication.
    val (permutation, newShape) = {
      if (flipped) {
        val permutation = Basic.concatenate(Seq(mappedAxes, free), 0)
        val newShape = Basic.stack(Seq(prodAxes, prodFree))
        (permutation, newShape)
      } else {
        val permutation = Basic.concatenate(Seq(free, mappedAxes), 0)
        val newShape = Basic.stack(Seq(prodFree, prodAxes))
        (permutation, newShape)
      }
    }
    val reshapedA = Basic.reshape(Basic.transpose(a, permutation), newShape)
    (reshapedA, freeAxes.toInt)
  }

  val (reshapedA, freeA) = tensorDotReshape(a, axesA)
  val (reshapedB, freeB) = tensorDotReshape(b, axesB, flipped = true)
  // Contract via a single matrix multiplication, then restore the free axes of both inputs to
  // obtain the shape of the contraction result.
  val abMatmul = matmul(reshapedA, reshapedB)
  Basic.reshape(abMatmul, Basic.concatenate(Seq(freeA, freeB), 0))
}
//endregion Matrix Ops
//region Complex Ops
/** $OpDocMathComplex
  *
  * @group MathOps
  * @param  real Tensor containing the real component.
  * @param  imag Tensor containing the imaginary component.
  * @return Result as a new tensor.
  */
def complexFloat(real: Tensor[Float], imag: Tensor[Float]): Tensor[ComplexFloat] = {
  // Pair the real and imaginary parts into a COMPLEX64 tensor via the native kernel.
  val resultHandle = NativeTensorOpsMath.complex(
    executionContext.value.nativeHandle, real.nativeHandle, imag.nativeHandle, COMPLEX64.cValue)
  Tensor.fromNativeHandle[ComplexFloat](resultHandle)
}
/** $OpDocMathComplex
  *
  * @group MathOps
  * @param  real Tensor containing the real component.
  * @param  imag Tensor containing the imaginary component.
  * @return Result as a new tensor.
  */
def complexDouble(real: Tensor[Double], imag: Tensor[Double]): Tensor[ComplexDouble] = {
  // Pair the real and imaginary parts into a COMPLEX128 tensor via the native kernel.
  val resultHandle = NativeTensorOpsMath.complex(
    executionContext.value.nativeHandle, real.nativeHandle, imag.nativeHandle, COMPLEX128.cValue)
  Tensor.fromNativeHandle[ComplexDouble](resultHandle)
}
/** $OpDocMathReal
  *
  * @group MathOps
  * @param  input Input tensor.
  * @param  name  Name for the op (not referenced by this eager implementation).
  * @return Result as a new tensor.
  */
def realFloat[TL[A] <: TensorLike[A]](
    input: TL[ComplexFloat],
    name: String = "Real"
)(implicit
    ev: TensorOps.Aux[TL, ComplexFloat]
): TL[Float] = {
  ev.applyUnary(input, { tensor =>
    // Extract the real component of each COMPLEX64 entry as FLOAT32.
    val resultHandle = NativeTensorOpsMath.real(
      executionContext.value.nativeHandle, tensor.nativeHandle, FLOAT32.cValue)
    Tensor.fromNativeHandle[Float](resultHandle)
  })
}
/** $OpDocMathReal
  *
  * @group MathOps
  * @param  input Input tensor.
  * @param  name  Name for the op (not referenced by this eager implementation).
  * @return Result as a new tensor.
  */
def realDouble[TL[A] <: TensorLike[A]](
    input: TL[ComplexDouble],
    name: String = "Real"
)(implicit
    ev: TensorOps.Aux[TL, ComplexDouble]
): TL[Double] = {
  ev.applyUnary(input, { tensor =>
    // Extract the real component of each COMPLEX128 entry as FLOAT64.
    val resultHandle = NativeTensorOpsMath.real(
      executionContext.value.nativeHandle, tensor.nativeHandle, FLOAT64.cValue)
    Tensor.fromNativeHandle[Double](resultHandle)
  })
}
/** $OpDocMathImag
  *
  * @group MathOps
  * @param  input Input tensor.
  * @param  name  Name for the op (not referenced by this eager implementation).
  * @return Result as a new tensor.
  */
def imagFloat[TL[A] <: TensorLike[A]](
    input: TL[ComplexFloat],
    name: String = "Imag"
)(implicit
    ev: TensorOps.Aux[TL, ComplexFloat]
): TL[Float] = {
  ev.applyUnary(input, { tensor =>
    // Extract the imaginary component of each COMPLEX64 entry as FLOAT32.
    val resultHandle = NativeTensorOpsMath.imag(
      executionContext.value.nativeHandle, tensor.nativeHandle, FLOAT32.cValue)
    Tensor.fromNativeHandle[Float](resultHandle)
  })
}
/** $OpDocMathImag
  *
  * @group MathOps
  * @param  input Input tensor.
  * @param  name  Name for the op (not referenced by this eager implementation).
  * @return Result as a new tensor.
  */
def imagDouble[TL[A] <: TensorLike[A]](
    input: TL[ComplexDouble],
    name: String = "Imag"
)(implicit
    ev: TensorOps.Aux[TL, ComplexDouble]
): TL[Double] = {
  ev.applyUnary(input, { tensor =>
    // Extract the imaginary component of each COMPLEX128 entry as FLOAT64.
    val resultHandle = NativeTensorOpsMath.imag(
      executionContext.value.nativeHandle, tensor.nativeHandle, FLOAT64.cValue)
    Tensor.fromNativeHandle[Double](resultHandle)
  })
}
/** $OpDocMathAbs
  *
  * @group MathOps
  * @param  input Input tensor.
  * @param  name  Name for the op (not referenced by this eager implementation).
  * @return Result as a new tensor.
  */
def magnitudeFloat[TL[A] <: TensorLike[A]](
    input: TL[ComplexFloat],
    name: String = "Magnitude"
)(implicit
    ev: TensorOps.Aux[TL, ComplexFloat]
): TL[Float] = {
  ev.applyUnary(input, { tensor =>
    // Complex absolute value (magnitude) of each COMPLEX64 entry, as FLOAT32.
    val resultHandle = NativeTensorOpsMath.complexAbs(
      executionContext.value.nativeHandle, tensor.nativeHandle, FLOAT32.cValue)
    Tensor.fromNativeHandle[Float](resultHandle)
  })
}
/** $OpDocMathAbs
  *
  * @group MathOps
  * @param  input Input tensor.
  * @param  name  Name for the op (not referenced by this eager implementation).
  * @return Result as a new tensor.
  */
def magnitudeDouble[TL[A] <: TensorLike[A]](
    input: TL[ComplexDouble],
    name: String = "Magnitude"
)(implicit
    ev: TensorOps.Aux[TL, ComplexDouble]
): TL[Double] = {
  ev.applyUnary(input, { tensor =>
    // Complex absolute value (magnitude) of each COMPLEX128 entry, as FLOAT64.
    val resultHandle = NativeTensorOpsMath.complexAbs(
      executionContext.value.nativeHandle, tensor.nativeHandle, FLOAT64.cValue)
    Tensor.fromNativeHandle[Double](resultHandle)
  })
}
/** $OpDocMathAngle
  *
  * @group MathOps
  * @param  input Input tensor.
  * @param  name  Name for the op (not referenced by this eager implementation).
  * @return Result as a new tensor.
  */
def angleFloat[TL[A] <: TensorLike[A]](
    input: TL[ComplexFloat],
    name: String = "Angle"
)(implicit
    ev: TensorOps.Aux[TL, ComplexFloat]
): TL[Float] = {
  ev.applyUnary(input, { tensor =>
    // Argument (phase angle) of each COMPLEX64 entry, as FLOAT32.
    val resultHandle = NativeTensorOpsMath.angle(
      executionContext.value.nativeHandle, tensor.nativeHandle, FLOAT32.cValue)
    Tensor.fromNativeHandle[Float](resultHandle)
  })
}
/** $OpDocMathAngle
  *
  * @group MathOps
  * @param  input Input tensor.
  * @param  name  Name for the op (not referenced by this eager implementation).
  * @return Result as a new tensor.
  */
def angleDouble[TL[A] <: TensorLike[A]](
    input: TL[ComplexDouble],
    name: String = "Angle"
)(implicit
    ev: TensorOps.Aux[TL, ComplexDouble]
): TL[Double] = {
  ev.applyUnary(input, { tensor =>
    // Argument (phase angle) of each COMPLEX128 entry, as FLOAT64.
    val resultHandle = NativeTensorOpsMath.angle(
      executionContext.value.nativeHandle, tensor.nativeHandle, FLOAT64.cValue)
    Tensor.fromNativeHandle[Double](resultHandle)
  })
}
/** $OpDocMathConjugate
  *
  * @group MathOps
  * @param  input Input tensor.
  * @return Result as a new tensor.
  */
def conjugate[T: TF, TL[A] <: TensorLike[A]](
    input: TL[T]
)(implicit ev: TensorOps.Aux[TL, T]): TL[T] = {
  ev.applyUnary(input, { tensor =>
    // Conjugation only applies to complex data types; everything else passes through unchanged.
    if (!tensor.dataType.isComplex) {
      tensor
    } else {
      Tensor.fromNativeHandle[T](NativeTensorOpsMath.conj(
        executionContext.value.nativeHandle, tensor.nativeHandle))
    }
  })
}
//endregion Complex Ops
//region Quantization Ops
// TODO: [OPS] quantization
//endregion Quantization Ops
//region Bucketization Ops
/** $OpDocMathBucketize
  *
  * @group MathOps
  * @param  input      Numeric tensor to bucketize.
  * @param  boundaries Sorted sequence of numbers specifying the boundaries of the buckets.
  * @return Result as a new tensor.
  */
def bucketize[T: TF : IsIntOrLongOrFloatOrDouble](
    input: Tensor[T],
    boundaries: Seq[Float]
): Tensor[T] = {
  // The native kernel expects the boundaries as a primitive array.
  val boundaryArray = boundaries.toArray
  val resultHandle = NativeTensorOpsMath.bucketize(
    executionContext.value.nativeHandle, input.nativeHandle, boundaryArray)
  Tensor.fromNativeHandle[T](resultHandle)
}
//endregion Bucketization Ops
//region Other Ops
/** $OpDocMathZerosFraction
  *
  * @group MathOps
  * @param  input Input tensor.
  * @return Result as a new tensor.
  */
def zerosFraction[T: TF : IsNumeric](
    input: Tensor[T]
): Tensor[Float] = {
  // Compare against a scalar zero of the input's data type, then average the 0/1 indicator
  // tensor to obtain the fraction of zero entries.
  val zero = Tensor.zeros(input.dataType, Shape())
  val isZero = equal(input, zero).toFloat
  mean(isZero)
}
//endregion Other Ops
}
object Math extends Math {
private[tensors] trait Implicits {
implicit def tensorConvertibleToTensorMathOps[T, TC](value: TC)(implicit
f: TC => Tensor[T]
): TensorMathOps[T] = {
new TensorMathOps(f(value))
}
implicit def tensorConvertibleToFloatTensorMathOps[TC](
value: TC
)(implicit f: TC => Tensor[Float]): FloatTensorMathOps = {
new FloatTensorMathOps(f(value))
}
implicit def tensorConvertibleToDoubleTensorMathOps[TC](
value: TC
)(implicit f: TC => Tensor[Double]): DoubleTensorMathOps = {
new DoubleTensorMathOps(f(value))
}
implicit def tensorConvertibleToComplexFloatTensorMathOps[TC](
value: TC
)(implicit f: TC => Tensor[ComplexFloat]): ComplexFloatTensorMathOps = {
new ComplexFloatTensorMathOps(f(value))
}
implicit def tensorConvertibleToComplexDoubleTensorMathOps[TC](
value: TC
)(implicit f: TC => Tensor[ComplexDouble]): ComplexDoubleTensorMathOps = {
new ComplexDoubleTensorMathOps(f(value))
}
implicit class TensorMathOps[T](val tensor: Tensor[T]) {
protected implicit val evTTF: TF[T] = {
TF.fromDataType(tensor.dataType)
}
/** $OpDocMathSelect
*
* @group MathOps
* @param x Tensor which may have the same shape as `condition`. If `condition` has rank `1`, then `t` may have
* a higher rank, but its first dimension must match the size of `condition`.
* @param y Tensor with the same data type and shape as `t`.
* @return Created op output.
*/
def select[R: TF](
x: Tensor[R],
y: Tensor[R]
)(implicit ev: T =:= Boolean): Tensor[R] = {
Math.select(tensor.asInstanceOf[Tensor[Boolean]], x, y)
}
//region Unary Ops
/** $OpDocMathAbs
*
* @group MathOps
* @return Result as a new tensor.
*/
def abs(implicit ev: IsReal[T]): Tensor[T] = {
Math.abs(tensor)
}
/** $OpDocMathNegate
*
* @group MathOps
* @return Result as a new tensor.
*/
def negate(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.negate(tensor)
}
/** $OpDocMathReciprocal
*
* @group MathOps
* @return Result as a new tensor.
*/
def reciprocal(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.reciprocal(tensor)
}
/** $OpDocMathSquare
*
* @group MathOps
* @return Result as a new tensor.
*/
def square(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.square(tensor)
}
/** $OpDocMathSqrt
*
* @group MathOps
* @return Result as a new tensor.
*/
def sqrt(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.sqrt(tensor)
}
/** $OpDocMathRsqrt
*
* @group MathOps
* @return Result as a new tensor.
*/
def rsqrt(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.rsqrt(tensor)
}
/** $OpDocMathExp
*
* @group MathOps
* @return Result as a new tensor.
*/
def exp(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.exp(tensor)
}
/** $OpDocMathExpm1
*
* @group MathOps
* @return Result as a new tensor.
*/
def expm1(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.expm1(tensor)
}
/** $OpDocMathLog
*
* @group MathOps
* @return Result as a new tensor.
*/
def log(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.log(tensor)
}
/** $OpDocMathLog1p
*
* @group MathOps
* @return Result as a new tensor.
*/
def log1p(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.log1p(tensor)
}
/** $OpDocMathSin
*
* @group MathOps
* @return Result as a new tensor.
*/
def sin(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.sin(tensor)
}
/** $OpDocMathCos
*
* @group MathOps
* @return Result as a new tensor.
*/
def cos(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.cos(tensor)
}
/** $OpDocMathTan
*
* @group MathOps
* @return Result as a new tensor.
*/
def tan(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.tan(tensor)
}
/** $OpDocMathAsin
*
* @group MathOps
* @return Result as a new tensor.
*/
def asin(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.asin(tensor)
}
/** $OpDocMathAcos
*
* @group MathOps
* @return Result as a new tensor.
*/
def acos(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.acos(tensor)
}
/** $OpDocMathAtan
*
* @group MathOps
* @return Result as a new tensor.
*/
def atan(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.atan(tensor)
}
/** $OpDocMathSinh
*
* @group MathOps
* @return Result as a new tensor.
*/
def sinh(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.sinh(tensor)
}
/** $OpDocMathCosh
*
* @group MathOps
* @return Result as a new tensor.
*/
def cosh(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.cosh(tensor)
}
/** $OpDocMathTanh
*
* @group MathOps
* @return Result as a new tensor.
*/
def tanh(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.tanh(tensor)
}
/** $OpDocMathAsinh
*
* @group MathOps
* @return Result as a new tensor.
*/
def asinh(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.asinh(tensor)
}
/** $OpDocMathAcosh
*
* @group MathOps
* @return Result as a new tensor.
*/
def acosh(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.acosh(tensor)
}
/** $OpDocMathAtanh
*
* @group MathOps
* @return Result as a new tensor.
*/
def atanh(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.atanh(tensor)
}
/** $OpDocMathLogGamma
*
* @group MathOps
* @return Result as a new tensor.
*/
def logGamma(implicit ev: IsFloatOrDouble[T]): Tensor[T] = {
Math.logGamma(tensor)
}
/** $OpDocMathDigamma
*
* @group MathOps
* @return Result as a new tensor.
*/
def digamma(implicit ev: IsFloatOrDouble[T]): Tensor[T] = {
Math.digamma(tensor)
}
/** $OpDocMathErf
*
* @group MathOps
* @return Result as a new tensor.
*/
def erf(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.erf(tensor)
}
/** $OpDocMathErfc
*
* @group MathOps
* @return Result as a new tensor.
*/
def erfc(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.erfc(tensor)
}
/** $OpDocMathSigmoid
*
* @group MathOps
* @return Result as a new tensor.
*/
def sigmoid(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.sigmoid(tensor)
}
/** $OpDocMathLogSigmoid
*
* @group MathOps
* @return Result as a new tensor.
*/
def logSigmoid(implicit ev: IsDecimal[T]): Tensor[T] = {
Math.logSigmoid(tensor)
}
/** $OpDocMathSign
*
* @group MathOps
* @return Result as a new tensor.
*/
def sign(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.sign(tensor)
}
/** $OpDocMathRound
*
* @group MathOps
* @return Result as a new tensor.
*/
def round(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.round(tensor)
}
/** $OpDocMathRoundInt
*
* @group MathOps
* @return Result as a new tensor.
*/
def roundInt(implicit ev: IsHalfOrFloatOrDouble[T]): Tensor[T] = {
Math.roundInt(tensor)
}
/** $OpDocMathFloor
*
* @group MathOps
* @return Result as a new tensor.
*/
def floor(implicit ev: IsHalfOrFloatOrDouble[T]): Tensor[T] = {
Math.floor(tensor)
}
/** $OpDocMathCeil
*
* @group MathOps
* @return Result as a new tensor.
*/
def ceil(implicit ev: IsHalfOrFloatOrDouble[T]): Tensor[T] = {
Math.ceil(tensor)
}
/** $OpDocMathIsNaN
*
* @group MathOps
* @return Result as a new tensor.
*/
def isNaN(implicit ev: IsHalfOrFloatOrDouble[T]): Tensor[Boolean] = {
Math.isNaN(tensor)
}
/** $OpDocMathIsInf
*
* @group MathOps
* @return Result as a new tensor.
*/
def isInf(implicit ev: IsHalfOrFloatOrDouble[T]): Tensor[Boolean] = {
Math.isInf(tensor)
}
/** $OpDocMathIsFinite
*
* @group MathOps
* @return Result as a new tensor.
*/
def isFinite(implicit ev: IsHalfOrFloatOrDouble[T]): Tensor[Boolean] = {
Math.isFinite(tensor)
}
//endregion Unary Ops
//region Binary Ops
/** $OpDocMathAdd
*
* @group MathOps
* @return Result as a new tensor.
*/
def add(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.add(tensor, other)
}
/** $OpDocMathSubtract
*
* @group MathOps
* @return Result as a new tensor.
*/
def subtract(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.subtract(tensor, other)
}
/** $OpDocMathMultiply
*
* @group MathOps
* @return Result as a new tensor.
*/
def multiply(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.multiply(tensor, other)
}
/** $OpDocMathDivide
*
* @group MathOps
* @return Result as a new tensor.
*/
def divide(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.divide(tensor, other)
}
/** $OpDocMathFloorDivide
*
* @group MathOps
* @return Result as a new tensor.
*/
@deprecated("Use `truncateDivide` instead.", "0.1")
def floorDivide(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.floorDivide(tensor, other)
}
/** $OpDocMathTruncateDivide
*
* @group MathOps
* @return Result as a new tensor.
*/
def truncateDivide(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.truncateDivide(tensor, other)
}
/** $OpDocMathRealDivide
*
* @group MathOps
* @return Result as a new tensor.
*/
def realDivide(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.realDivide(tensor, other)
}
/** $OpDocMathSquaredDifference
*
* @group MathOps
* @return Result as a new tensor.
*/
def squaredDifference(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.squaredDifference(tensor, other)
}
/** $OpDocMathMod
*
* @group MathOps
* @return Result as a new tensor.
*/
def mod(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.mod(tensor, other)
}
/** $OpDocMathFloorMod
*
* @group MathOps
* @return Result as a new tensor.
*/
def floorMod(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.floorMod(tensor, other)
}
/** $OpDocMathTruncateMod
*
* @group MathOps
* @return Result as a new tensor.
*/
def truncateMod(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.truncateMod(tensor, other)
}
/** $OpDocMathPow
*
* @group MathOps
* @return Result as a new tensor.
*/
def pow(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.pow(tensor, other)
}
/** $OpDocMathIgammac
*
* @group MathOps
* @return Result as a new tensor.
*/
def igammac(other: Tensor[T])(implicit ev: IsFloatOrDouble[T]): Tensor[T] = {
Math.igammac(tensor, other)
}
/** $OpDocMathIgamma
*
* @group MathOps
* @return Result as a new tensor.
*/
def igamma(other: Tensor[T])(implicit ev: IsFloatOrDouble[T]): Tensor[T] = {
Math.igamma(tensor, other)
}
/** $OpDocMathZeta
*
* @group MathOps
* @return Result as a new tensor.
*/
def zeta(other: Tensor[T])(implicit ev: IsFloatOrDouble[T]): Tensor[T] = {
Math.zeta(tensor, other)
}
/** $OpDocMathPolygamma
*
* @group MathOps
* @return Result as a new tensor.
*/
def polygamma(other: Tensor[T])(implicit ev: IsFloatOrDouble[T]): Tensor[T] = {
Math.polygamma(tensor, other)
}
/** $OpDocMathAtan2
*
* @group MathOps
* @return Result as a new tensor.
*/
def atan2(other: Tensor[T])(implicit ev: IsFloatOrDouble[T]): Tensor[T] = {
Math.atan2(tensor, other)
}
/** $OpDocMathMinimum
*
* @group MathOps
* @return Result as a new tensor.
*/
def minimum(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.minimum(tensor, other)
}
/** $OpDocMathMaximum
*
* @group MathOps
* @return Result as a new tensor.
*/
def maximum(other: Tensor[T])(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.maximum(tensor, other)
}
//endregion Binary Ops
//region Logical Ops
/** $OpDocMathLogicalNot
*
* @group MathOps
* @return Result as a new tensor.
*/
def logicalNot(implicit ev: T =:= Boolean): Tensor[Boolean] = {
Math.logicalNot(tensor.asInstanceOf[Tensor[Boolean]])
}
/** $OpDocMathLogicalAnd
*
* @group MathOps
* @return Result as a new tensor.
*/
def logicalAnd(other: Tensor[Boolean])(implicit ev: T =:= Boolean): Tensor[Boolean] = {
Math.logicalAnd(tensor.asInstanceOf[Tensor[Boolean]], other)
}
/** $OpDocMathLogicalOr
*
* @group MathOps
* @return Result as a new tensor.
*/
def logicalOr(other: Tensor[Boolean])(implicit ev: T =:= Boolean): Tensor[Boolean] = {
Math.logicalOr(tensor.asInstanceOf[Tensor[Boolean]], other)
}
/** $OpDocMathLogicalXOr
*
* @group MathOps
* @return Result as a new tensor.
*/
def logicalXOr(other: Tensor[Boolean])(implicit ev: T =:= Boolean): Tensor[Boolean] = {
Math.logicalXOr(tensor.asInstanceOf[Tensor[Boolean]], other)
}
//endregion Logical Ops
//region Comparison Ops
/** $OpDocMathEqual
*
* @group MathOps
* @return Result as a new tensor.
*/
def equal(other: Tensor[T])(implicit ev: IsNumeric[T]): Tensor[Boolean] = {
Math.equal(tensor, other)
}
/** $OpDocMathNotEqual
*
* @group MathOps
* @return Result as a new tensor.
*/
def notEqual(other: Tensor[T])(implicit ev: IsNumeric[T]): Tensor[Boolean] = {
Math.notEqual(tensor, other)
}
/** $OpDocMathApproximatelyEqual
*
* @group MathOps
* @return Result as a new tensor.
*/
def approximatelyEqual(other: Tensor[T])(implicit ev: IsNumeric[T]): Tensor[Boolean] = {
Math.approximatelyEqual(tensor, other)
}
/** $OpDocMathLess
*
* @group MathOps
* @return Result as a new tensor.
*/
def less(other: Tensor[T])(implicit ev: IsNumeric[T]): Tensor[Boolean] = {
Math.less(tensor, other)
}
/** $OpDocMathLessEqual
*
* @group MathOps
* @return Result as a new tensor.
*/
def lessEqual(other: Tensor[T])(implicit ev: IsNumeric[T]): Tensor[Boolean] = {
Math.lessEqual(tensor, other)
}
/** $OpDocMathGreater
*
* @group MathOps
* @return Result as a new tensor.
*/
def greater(other: Tensor[T])(implicit ev: IsNumeric[T]): Tensor[Boolean] = {
Math.greater(tensor, other)
}
/** $OpDocMathGreaterEqual
*
* @group MathOps
* @return Result as a new tensor.
*/
def greaterEqual(other: Tensor[T])(implicit ev: IsNumeric[T]): Tensor[Boolean] = {
Math.greaterEqual(tensor, other)
}
//endregion Comparison Ops
//region Reduction Ops
/** $OpDocMathSum
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def sum[I: IntDefault : TF : IsIntOrLong](
axes: Tensor[I] = null,
keepDims: Boolean = false
)(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.sum(tensor, axes, keepDims)
}
/** $OpDocMathMean
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def mean[I: IntDefault : TF : IsIntOrLong](
axes: Tensor[I] = null,
keepDims: Boolean = false
)(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.mean(tensor, axes, keepDims)
}
/** $OpDocMathProd
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def prod[I: IntDefault : TF : IsIntOrLong](
axes: Tensor[I] = null,
keepDims: Boolean = false
)(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.prod(tensor, axes, keepDims)
}
/** $OpDocMathMin
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def min[I: IntDefault : TF : IsIntOrLong](
axes: Tensor[I] = null,
keepDims: Boolean = false
)(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.min(tensor, axes, keepDims)
}
/** $OpDocMathMax
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def max[I: IntDefault : TF : IsIntOrLong](
axes: Tensor[I] = null,
keepDims: Boolean = false
)(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.max(tensor, axes, keepDims)
}
/** $OpDocMathAll
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def all[I: IntDefault : TF : IsIntOrLong](
axes: Tensor[I] = null,
keepDims: Boolean = false
)(implicit ev: T =:= Boolean): Tensor[Boolean] = {
Math.all(tensor.asInstanceOf[Tensor[Boolean]], axes, keepDims)
}
/** $OpDocMathAny
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def any[I: IntDefault : TF : IsIntOrLong](
axes: Tensor[I] = null,
keepDims: Boolean = false
)(implicit ev: T =:= Boolean): Tensor[Boolean] = {
Math.any(tensor.asInstanceOf[Tensor[Boolean]], axes, keepDims)
}
/** $OpDocMathLogSumExp
*
* @group MathOps
* @param axes Integer sequence containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def logSumExp(
axes: Seq[Int] = null,
keepDims: Boolean = false
)(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.logSumExp(tensor, axes, keepDims)
}
/** $OpDocMathCountNonZero
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param keepDims If `true`, retain the reduced axes.
* @return Result as a new tensor.
*/
def countNonZero[I: IntDefault : TF : IsIntOrLong](
axes: Tensor[I] = null,
keepDims: Boolean = false
)(implicit ev: IsNotQuantized[T]): Tensor[Long] = {
Math.countNonZero(tensor, axes, keepDims)
}
/** $OpDocMathArgmin
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @return Result as a new tensor.
*/
def argmin[I: TF : IsIntOrLong](
axes: Tensor[I]
)(implicit ev: IsNotQuantized[T]): Tensor[Long] = {
Math.argmin(tensor, axes)
}
/** $OpDocMathArgmin
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param outputDataType Data type for the output tensor.
* @return Result as a new tensor.
*/
def argmin[I: TF : IsIntOrLong, IR: TF : IsIntOrLong](
axes: Tensor[I],
outputDataType: DataType[IR]
)(implicit ev: IsNotQuantized[T]): Tensor[IR] = {
Math.argmin(tensor, axes, outputDataType)
}
/** $OpDocMathArgmax
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @return Result as a new tensor.
*/
def argmax[I: TF : IsIntOrLong](
axes: Tensor[I]
)(implicit ev: IsNotQuantized[T]): Tensor[Long] = {
Math.argmax(tensor, axes)
}
/** $OpDocMathArgmax
*
* @group MathOps
* @param axes Integer tensor containing the axes to reduce. If `null`, then all axes are reduced.
* @param outputDataType Data type for the output tensor.
* @return Result as a new tensor.
*/
def argmax[I: TF : IsIntOrLong, IR: TF : IsIntOrLong](
axes: Tensor[I],
outputDataType: DataType[IR]
)(implicit ev: IsNotQuantized[T]): Tensor[IR] = {
Math.argmax(tensor, axes, outputDataType)
}
/** $OpDocMathCumsum
*
* @group MathOps
* @param axis Tensor containing the axis along which to perform the cumulative sum.
* @param exclusive Boolean value indicating whether to perform an exclusive cumulative sum.
* @param reverse Boolean value indicating whether to perform a reverse cumulative sum.
* @return Result as a new tensor.
*/
def cumsum[I: TF : IsIntOrLong](
axis: Tensor[I],
exclusive: Boolean = false,
reverse: Boolean = false
)(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.cumsum(tensor, axis, exclusive, reverse)
}
/** $OpDocMathCumprod
*
* @group MathOps
* @param axis Tensor containing the axis along which to perform the cumulative product.
* @param exclusive Boolean value indicating whether to perform an exclusive cumulative product.
* @param reverse Boolean value indicating whether to perform a reverse cumulative product.
* @return Result as a new tensor.
*/
def cumprod[I: TF : IsIntOrLong](
axis: Tensor[I],
exclusive: Boolean = false,
reverse: Boolean = false
)(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.cumprod(tensor, axis, exclusive, reverse)
}
//endregion Reduction Ops
/** $OpDocMathBinCount
*
* @group MathOps
* @param weights If not `null`, this tensor must have the same shape as `input`. For each value in `input`, the
* corresponding bin count will be incremented by the corresponding weight instead of `1`.
* @param minLength If not `null`, this ensures the output has length at least `minLength`, padding with zeros at
* the end, if necessary.
* @param maxLength If not `null`, this skips values in `input` that are equal or greater than `maxLength`,
* ensuring that the output has length at most `maxLength`.
* @param dataType If `weights` is `null`, this determines the data type used for the output tensor (i.e., the
* tensor containing the bin counts).
* @return Result as a new tensor.
*/
def binCount[R: TF : IsIntOrLongOrFloatOrDouble](
dataType: DataType[R],
weights: Tensor[R] = null,
minLength: Tensor[Int] = null,
maxLength: Tensor[Int] = null
)(implicit ev: T =:= Int): Tensor[R] = {
Math.binCount(tensor.asInstanceOf[Tensor[Int]], dataType, weights, minLength, maxLength)
}
//region Segment Ops
/** $OpDocMathSegmentSum
*
* @group MathOps
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @return Result as a new tensor.
*/
def segmentSum[I: TF : IsIntOrLong](
segmentIndices: Tensor[I]
)(implicit ev: IsNumeric[T]): Tensor[T] = {
Math.segmentSum(tensor, segmentIndices)
}
/** $OpDocMathSegmentMean
*
* @group MathOps
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @return Result as a new tensor.
*/
def segmentMean[I: TF : IsIntOrLong](
segmentIndices: Tensor[I]
)(implicit ev: IsNotQuantized[T]): Tensor[T] = {
Math.segmentMean(tensor, segmentIndices)
}
/** $OpDocMathSegmentProd
*
* @group MathOps
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @return Result as a new tensor.
*/
def segmentProd[I: TF : IsIntOrLong](
segmentIndices: Tensor[I]
)(implicit ev: IsNumeric[T]): Tensor[T] = {
Math.segmentProd(tensor, segmentIndices)
}
/** $OpDocMathSegmentMin
*
* @group MathOps
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @return Result as a new tensor.
*/
def segmentMin[I: TF : IsIntOrLong](
segmentIndices: Tensor[I]
)(implicit ev: IsReal[T]): Tensor[T] = {
Math.segmentMin(tensor, segmentIndices)
}
/** $OpDocMathSegmentMax
*
* @group MathOps
* @param segmentIndices Segment indices. Values should be sorted and can be repeated.
* @return Result as a new tensor.
*/
def segmentMax[I: TF : IsIntOrLong](
segmentIndices: Tensor[I]
)(implicit ev: IsReal[T]): Tensor[T] = {
Math.segmentMax(tensor, segmentIndices)
}
/** $OpDocMathUnsortedSegmentSum
  *
  * @group MathOps
  * @param segmentIndices Segment indices (need not be sorted).
  * @param segmentsNumber Number of segments.
  * @return New tensor containing the sum over each segment of this tensor.
  */
def unsortedSegmentSum[I1: TF : IsIntOrLong, I2: TF : IsIntOrLong](
    segmentIndices: Tensor[I1],
    segmentsNumber: Tensor[I2]
)(implicit ev: IsNumeric[T]): Tensor[T] =
  Math.unsortedSegmentSum(tensor, segmentIndices, segmentsNumber)

// TODO: [TENSORS] Missing 'unsortedSegmentMean'.
// TODO: [TENSORS] Missing 'unsortedSegmentProd'.
// TODO: [TENSORS] Missing 'unsortedSegmentMin'.

/** $OpDocMathUnsortedSegmentMax
  *
  * @group MathOps
  * @param segmentIndices Segment indices (need not be sorted).
  * @param segmentsNumber Number of segments.
  * @return New tensor containing the maximum over each segment of this tensor.
  */
def unsortedSegmentMax[I1: TF : IsIntOrLong, I2: TF : IsIntOrLong](
    segmentIndices: Tensor[I1],
    segmentsNumber: Tensor[I2]
)(implicit ev: IsReal[T]): Tensor[T] =
  Math.unsortedSegmentMax(tensor, segmentIndices, segmentsNumber)
/** $OpDocMathSparseSegmentSum
  *
  * @group MathOps
  * @param indices One-dimensional tensor with rank equal to that of `segmentIndices`.
  * @param segmentIndices Segment indices. Values should be sorted and can be repeated.
  * @param numSegments Optional scalar indicating the size of the output tensor (may be left `null`).
  * @return Result as a new tensor.
  */
def sparseSegmentSum[I1: TF : IsIntOrLong, I2: IntDefault : TF : IsIntOrLong](
    indices: Tensor[I1],
    segmentIndices: Tensor[Int],
    numSegments: Tensor[I2] = null
)(implicit ev: IsReal[T]): Tensor[T] =
  Math.sparseSegmentSum(tensor, indices, segmentIndices, numSegments)

/** $OpDocMathSparseSegmentMean
  *
  * @group MathOps
  * @param indices One-dimensional tensor with rank equal to that of `segmentIndices`.
  * @param segmentIndices Segment indices. Values should be sorted and can be repeated.
  * @param numSegments Optional scalar indicating the size of the output tensor (may be left `null`).
  * @return Result as a new tensor.
  */
def sparseSegmentMean[I1: TF : IsIntOrLong, I2: IntDefault : TF : IsIntOrLong](
    indices: Tensor[I1],
    segmentIndices: Tensor[Int],
    numSegments: Tensor[I2] = null
)(implicit ev: IsReal[T]): Tensor[T] =
  Math.sparseSegmentMean(tensor, indices, segmentIndices, numSegments)

/** $OpDocMathSparseSegmentSumSqrtN
  *
  * @group MathOps
  * @param indices One-dimensional tensor with rank equal to that of `segmentIndices`.
  * @param segmentIndices Segment indices. Values should be sorted and can be repeated.
  * @param numSegments Optional scalar indicating the size of the output tensor (may be left `null`).
  * @return Result as a new tensor.
  */
def sparseSegmentSumSqrtN[I1: TF : IsIntOrLong, I2: IntDefault : TF : IsIntOrLong](
    indices: Tensor[I1],
    segmentIndices: Tensor[Int],
    numSegments: Tensor[I2] = null
)(implicit ev: IsReal[T]): Tensor[T] =
  Math.sparseSegmentSumSqrtN(tensor, indices, segmentIndices, numSegments)
//endregion Segment Ops
//region Matrix Ops
/** $OpDocMathDiag
  *
  * @group MathOps
  * @return Result as a new tensor.
  */
def diag(implicit ev: IsNotQuantized[T]): Tensor[T] =
  Math.diag(tensor)

/** $OpDocMathDiagPart
  *
  * @group MathOps
  * @return Result as a new tensor.
  */
def diagPart(implicit ev: IsNotQuantized[T]): Tensor[T] =
  Math.diagPart(tensor)

/** $OpDocMathMatrixDiag
  *
  * @group MathOps
  * @return New tensor with rank `K + 1` and the shape of this tensor, with its last dimension duplicated.
  */
def matrixDiag: Tensor[T] =
  Math.matrixDiag(tensor)

/** $OpDocMathMatrixSetDiag
  *
  * @group MathOps
  * @param diagonal Rank-`K` tensor, where `K >= 1`.
  * @return New tensor with rank `K + 1` and shape equal to the shape of this tensor.
  */
def matrixSetDiag(diagonal: Tensor[T]): Tensor[T] =
  Math.matrixSetDiag(tensor, diagonal)

/** $OpDocMathMatrixDiagPart
  *
  * @group MathOps
  * @return New tensor containing the diagonal(s), with shape `input.shape[:-2] + [min(input.shape[-2:])]`.
  */
def matrixDiagPart: Tensor[T] =
  Math.matrixDiagPart(tensor)
/** $OpDocMathMatrixBandPart
  *
  * @group MathOps
  * @param numSubDiagonals Scalar tensor with the number of sub-diagonals to keep;
  *                        negative keeps the entire lower triangle.
  * @param numSuperDiagonals Scalar tensor with the number of super-diagonals to keep;
  *                          negative keeps the entire upper triangle.
  * @return New banded tensor with rank `K` and the same shape as this tensor.
  */
def matrixBandPart[I: TF : IsIntOrLong](
    numSubDiagonals: Tensor[I],
    numSuperDiagonals: Tensor[I]
): Tensor[T] =
  Math.matrixBandPart(tensor, numSubDiagonals, numSuperDiagonals)

/** $OpDocMathTrace
  *
  * @group MathOps
  * @return Result as a new tensor.
  */
def trace(implicit ev: IsNumeric[T]): Tensor[T] =
  Math.trace(tensor)
/** $OpDocMathMatmul
  *
  * @group MathOps
  * @param other Tensor to multiply with.
  * @param transposeA If `true`, this tensor is transposed before the multiplication.
  * @param transposeB If `true`, `other` is transposed before the multiplication.
  * @param conjugateA If `true`, this tensor is conjugated before the multiplication.
  * @param conjugateB If `true`, `other` is conjugated before the multiplication.
  * @param aIsSparse If `true`, this tensor is treated as a sparse matrix (i.e., assumed to contain many zeros).
  * @param bIsSparse If `true`, `other` is treated as a sparse matrix (i.e., assumed to contain many zeros).
  * @return Result as a new tensor.
  */
def matmul(
    other: Tensor[T],
    transposeA: Boolean = false,
    transposeB: Boolean = false,
    conjugateA: Boolean = false,
    conjugateB: Boolean = false,
    aIsSparse: Boolean = false,
    bIsSparse: Boolean = false
)(implicit ev: IsNotQuantized[T]): Tensor[T] =
  Math.matmul(tensor, other, transposeA, transposeB, conjugateA, conjugateB, aIsSparse, bIsSparse)

/** $OpDocMathCross
  *
  * @group MathOps
  * @param other Tensor to compute the cross product with.
  * @return Result as a new tensor.
  */
def cross(
    other: Tensor[T]
)(implicit ev: IsReal[T]): Tensor[T] =
  Math.cross(tensor, other)
/** Dynamic version (i.e., where `numAxes` may be a tensor) of the `tensorDot` op.
  *
  * $OpDocMathTensorDot
  *
  * @group MathOps
  * @param other Tensor to contract with.
  * @param numAxes Number of axes to contract.
  * @return Result as a new tensor.
  */
def tensorDot(
    other: Tensor[T],
    numAxes: Tensor[Int]
)(implicit ev: IsNotQuantized[T]): Tensor[T] =
  Math.tensorDot(tensor, other, numAxes)

/** Dynamic version (i.e., where `axesA` and `axesB` may be tensors) of the `tensorDot` op.
  *
  * $OpDocMathTensorDot
  *
  * @group MathOps
  * @param other Tensor to contract with.
  * @param axesA Axes to contract in `a`.
  * @param axesB Axes to contract in `b`.
  * @return Result as a new tensor.
  */
def tensorDot(
    other: Tensor[T],
    axesA: Tensor[Int],
    axesB: Tensor[Int]
)(implicit ev: IsNotQuantized[T]): Tensor[T] =
  Math.tensorDot(tensor, other, axesA, axesB)
//endregion Matrix Ops
//region Complex Ops
/** $OpDocMathConjugate
  *
  * @group MathOps
  * @return Result as a new tensor.
  */
def conjugate(implicit ev: IsComplex[T]): Tensor[T] =
  Math.conjugate(tensor)
//endregion Complex Ops
//region Bucketization Ops
/** $OpDocMathBucketize
  *
  * @group MathOps
  * @param boundaries Sorted sequence of numbers specifying the boundaries of the buckets.
  * @return Result as a new tensor.
  */
def bucketize(
    boundaries: Seq[Float]
)(implicit ev: IsIntOrLongOrFloatOrDouble[T]): Tensor[T] =
  Math.bucketize(tensor, boundaries)
//endregion Bucketization Ops
//region Other Ops
/** $OpDocMathZerosFraction
  *
  * @group MathOps
  * @return Result as a new `Float` tensor.
  */
def zerosFraction(implicit ev: IsNumeric[T]): Tensor[Float] =
  Math.zerosFraction(tensor)
//endregion Other Ops
}
implicit class FloatTensorMathOps(val tensor: Tensor[Float]) {
  /** Builds a complex tensor using this tensor as the real part.
    *
    * @param imag Imaginary part (defaults to zero).
    * @return Resulting tensor of complex numbers.
    */
  def toComplex(imag: Tensor[Float] = 0.0f): Tensor[ComplexFloat] =
    Math.complexFloat(tensor, imag)
}
implicit class DoubleTensorMathOps(val tensor: Tensor[Double]) {
  /** Builds a complex tensor using this tensor as the real part.
    *
    * @param imag Imaginary part (defaults to zero).
    * @return Resulting tensor of complex numbers.
    */
  def toComplex(imag: Tensor[Double] = 0.0): Tensor[ComplexDouble] =
    Math.complexDouble(tensor, imag)
}
implicit class ComplexFloatTensorMathOps(val tensor: Tensor[ComplexFloat]) {
  // `tensor` is already statically typed as Tensor[ComplexFloat], so the previous
  // `asInstanceOf[Tensor[ComplexFloat]]` self-casts were no-ops and have been removed.

  /** $OpDocMathReal
    *
    * @group MathOps
    * @return Result as a new tensor.
    */
  def real: Tensor[Float] =
    Math.realFloat(tensor)

  /** $OpDocMathImag
    *
    * @group MathOps
    * @return Result as a new tensor.
    */
  def imag: Tensor[Float] =
    Math.imagFloat(tensor)

  /** $OpDocMathAbs
    *
    * @group MathOps
    * @return Result as a new tensor.
    */
  def magnitude: Tensor[Float] =
    Math.magnitudeFloat(tensor)

  /** $OpDocMathAngle
    *
    * @group MathOps
    * @return Result as a new tensor.
    */
  def angle: Tensor[Float] =
    Math.angleFloat(tensor)
}
implicit class ComplexDoubleTensorMathOps(val tensor: Tensor[ComplexDouble]) {
  // `tensor` is already statically typed as Tensor[ComplexDouble], so the previous
  // `asInstanceOf[Tensor[ComplexDouble]]` self-casts were no-ops and have been removed.

  /** $OpDocMathReal
    *
    * @group MathOps
    * @return Result as a new tensor.
    */
  def real: Tensor[Double] =
    Math.realDouble(tensor)

  /** $OpDocMathImag
    *
    * @group MathOps
    * @return Result as a new tensor.
    */
  def imag: Tensor[Double] =
    Math.imagDouble(tensor)

  /** $OpDocMathAbs
    *
    * @group MathOps
    * @return Result as a new tensor.
    */
  def magnitude: Tensor[Double] =
    Math.magnitudeDouble(tensor)

  /** $OpDocMathAngle
    *
    * @group MathOps
    * @return Result as a new tensor.
    */
  def angle: Tensor[Double] =
    Math.angleDouble(tensor)
}
}
}
| eaplatanios/tensorflow_scala | modules/api/src/main/scala/org/platanios/tensorflow/api/tensors/ops/Math.scala | Scala | apache-2.0 | 113,332 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.validation
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.planner.expressions.utils.Func0
import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions.OverAgg0
import org.apache.flink.table.planner.utils.TableTestBase
import org.junit.Test
/** Verifies that calling user-defined functions with operand types that match
  * no declared signature is rejected with a descriptive [[ValidationException]]. */
class UserDefinedFunctionValidationTest extends TableTestBase {

  /** Scalar UDF `Func0` expects an `int`; passing the String column `b` must fail. */
  @Test
  def testScalarFunctionOperandTypeCheck(): Unit = {
    thrown.expect(classOf[ValidationException])
    thrown.expectMessage(
      "Given parameters of function 'func' do not match any signature. \\n" +
        "Actual: (java.lang.String) \\n" +
        "Expected: (int)")
    val util = scalaStreamTestUtil()
    util.addTableSource[(Int, String)]("t", 'a, 'b)
    util.tableEnv.registerFunction("func", Func0)
    util.verifyExplain("select func(b) from t")
  }

  /** Aggregate UDF `OverAgg0` accumulates `(long, int)`; calling it with
    * `(String, Integer)` operands must fail. */
  @Test
  def testAggregateFunctionOperandTypeCheck(): Unit = {
    thrown.expect(classOf[ValidationException])
    thrown.expectMessage(
      "Given parameters of function 'agg' do not match any signature. \\n" +
        "Actual: (org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions" +
        ".Accumulator0, java.lang.String, java.lang.Integer) \\n" +
        "Expected: (org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions" +
        ".Accumulator0, long, int)")
    val util = scalaStreamTestUtil()
    val agg = new OverAgg0
    util.addTableSource[(Int, String)]("t", 'a, 'b)
    util.tableEnv.registerFunction("agg", agg)
    util.verifyExplain("select agg(b, a) from t")
  }
}
| tillrohrmann/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/validation/UserDefinedFunctionValidationTest.scala | Scala | apache-2.0 | 2,426 |
package quizleague.web.model
import scala.scalajs.js
// Immutable model entry holding a piece of text plus the MIME type describing it.
class Text(
  val id:String,       // identifier for this text entry
  val text:String,     // the raw text content
  val mimeType:String  // content type of `text` — presumably e.g. "text/markdown"; confirm against callers
) extends Model
object Text {
  /** Factory mirroring the primary constructor of [[Text]]. */
  def apply(id: String, text: String, mimeType: String): Text =
    new Text(id, text, mimeType)
} | gumdrop/quizleague-maintain | js/src/main/scala/quizleague/web/model/Text.scala | Scala | mit | 253 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 MineFormers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package de.mineformers.kybology.core
import cpw.mods.fml.common.eventhandler.SubscribeEvent
import de.mineformers.kybology.core.window.WorldWindowData
import net.minecraftforge.common.MinecraftForge
import net.minecraftforge.event.world.ChunkWatchEvent
/**
 * CoreProxy
 *
 * Common-side proxy: subscribes itself to the Forge event bus during init and,
 * on the server, pushes world-window chunk data to players who start watching a chunk.
 *
 * @author PaleoCrafter
 */
class CoreProxy extends de.mineformers.core.mod.Proxy {
  /** Called during mod initialization; registers this proxy as a Forge event listener. */
  override def init(): Unit = {
    MinecraftForge.EVENT_BUS.register(this)
  }

  /** Syncs window data for a newly watched chunk to the watching player.
    * Only runs when `isRemote` is false — NOTE(review): presumably the server side;
    * confirm against the Minecraft API in use. */
  @SubscribeEvent
  def onChunkWatch(event: ChunkWatchEvent): Unit = {
    if (!event.player.worldObj.isRemote) {
      WorldWindowData(event.player.worldObj).syncChunk(event.player, event.chunk.chunkXPos, event.chunk.chunkZPos)
    }
  }
}
| MineFormers/Kybology | src/main/scala/de/mineformers/kybology/core/CoreProxy.scala | Scala | mit | 1,826 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.protocol.v5.client.socket
import akka.actor.Actor
import akka.util.Timeout
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.communication.security.SecurityActorType
import org.apache.toree.kernel.protocol.v5.client.{ActorLoader, Utilities}
import org.apache.toree.kernel.protocol.v5.{KernelMessage, UUID}
import Utilities._
import org.apache.toree.kernel.protocol.v5.client.execution.{DeferredExecution, DeferredExecutionManager}
import org.apache.toree.kernel.protocol.v5.content.ExecuteReply
import org.apache.toree.utils.LogLike
import scala.concurrent.Await
import scala.concurrent.duration._
import akka.pattern.ask
/**
 * The client endpoint for Shell messages specified in the IPython Kernel Spec
 * @param socketFactory A factory to create the ZeroMQ socket connection
 * @param actorLoader The loader used to retrieve actors
 * @param signatureEnabled Whether or not to check and provide signatures
 */
class ShellClient(
  socketFactory: SocketFactory,
  actorLoader: ActorLoader,
  signatureEnabled: Boolean
) extends Actor with LogLike {
  logger.debug("Created shell client actor")

  // Effectively "never" for the ask pattern below (~248 days).
  implicit val timeout = Timeout(21474835.seconds)

  // Underlying ZeroMQ shell socket; incoming frames are delivered to this actor.
  val socket = socketFactory.ShellClient(context.system, self)

  /** Resolves the deferred execution registered under `parentId` (if any) with
    * the `execute_reply` content carried by `kernelMessage`. */
  def receiveExecuteReply(parentId:String, kernelMessage: KernelMessage): Unit = {
    val deOption: Option[DeferredExecution] = DeferredExecutionManager.get(parentId)
    deOption match {
      case None =>
        logger.warn(s"No deferred execution for parent id ${parentId}")
      case Some(de) =>
        Utilities.parseAndHandle(kernelMessage.contentString,
          ExecuteReply.executeReplyReads, (er: ExecuteReply) => de.resolveReply(er))
    }
  }

  override def receive: Receive = {
    // from shell
    case message: ZMQMessage =>
      logger.debug("Received shell kernel message.")
      // Conversion ZMQMessage -> KernelMessage comes from the imported Utilities implicits.
      val kernelMessage: KernelMessage = message

      // TODO: Validate incoming message signature

      logger.trace(s"Kernel message is ${kernelMessage}")
      receiveExecuteReply(message.parentHeader.msg_id,kernelMessage)

    // from handler
    case message: KernelMessage =>
      logger.trace(s"Sending kernel message ${message}")
      val signatureManager =
        actorLoader.load(SecurityActorType.SignatureManager)

      import scala.concurrent.ExecutionContext.Implicits.global
      val messageWithSignature = if (signatureEnabled) {
        // NOTE(review): blocks this actor up to 100ms waiting for the signed message —
        // confirm the bound is sufficient under load.
        val signatureMessage = signatureManager ? message
        Await.result(signatureMessage, 100.milliseconds)
          .asInstanceOf[KernelMessage]
      } else message

      // Conversion KernelMessage -> ZMQMessage also via the Utilities implicits.
      val zMQMessage: ZMQMessage = messageWithSignature

      socket ! zMQMessage
  }
}
| chipsenkbeil/incubator-toree | client/src/main/scala/org/apache/toree/kernel/protocol/v5/client/socket/ShellClient.scala | Scala | apache-2.0 | 3,503 |
/*
* _____ _ __ _
* | __ | ___ ___ _| || | ___ ___ |_| ___
* | -|| -_|| .'|| . || |__ | . || . || || _|
* |__|__||___||__,||___||_____||___||_ ||_||___|
* |___|
* ReadLogic
*
* Copyright 2015 Anastasios Skarlatidis
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package readlogic.prolog
/** Base type of all logical formulas in this Prolog-style representation.
  * The variable/constant/function sets are computed lazily by aggregating
  * over `subFormulas` (atoms additionally collect from their own terms). */
sealed trait Formula extends Serializable {

  // Union of the variables of all sub-formulas.
  lazy val variables: Set[Variable] = subFormulas.foldRight(Set[Variable]())((a: Formula, b) => a.variables ++ b)

  // Union of the constants of all sub-formulas.
  lazy val constants: Set[Constant] = subFormulas.foldRight(Set[Constant]())((a: Formula, b) => a.constants ++ b)

  // Union of the term functions of all sub-formulas.
  lazy val functions: Set[TermFunction] = subFormulas.foldRight(Set[TermFunction]())((a: Formula, b) => a.functions ++ b)

  /**
   * Gives the sub-formulas that this formula contains
   */
  def subFormulas: Seq[Formula]

  /**
   * The textual representation of this formula
   */
  def toText: String

  /**
   * @return the number of AtomicFormulas
   */
  // NOTE(review): the fold starts at 1, so any non-atomic connective also counts
  // itself as one "atom" in addition to its children (Atom and Negation override
  // this to a constant 1). Confirm these are the intended semantics.
  def countAtoms: Int = subFormulas.foldRight(1)((current, rest) => current.countAtoms + rest)

  // True when this formula is a single unit (e.g. an atom).
  def isUnit: Boolean
}
/** Marker trait for constructs allowed in the body of a definite clause
  * (atoms, conjunctions and negations extend it below). */
sealed trait DefiniteClauseConstruct extends Formula with Serializable
/** An atomic formula: a predicate `symbol` applied to a list of terms. */
sealed case class Atom(symbol: String, args: List[_ <: Term]) extends Formula with DefiniteClauseConstruct with Serializable {

  // Predicate signature (symbol name plus arity).
  lazy val signature = AtomSignature(symbol, args.size)

  def arity = args.size

  override def isUnit: Boolean = true

  override def countAtoms = 1

  /**
   * Gives the sub-formulas that this formula contains
   */
  override def subFormulas: Seq[Formula] = Seq()

  /**
   * All variables of this atom
   */
  override lazy val variables: Set[Variable] = collectVariables(args)

  /**
   * All constants of this atom
   */
  override lazy val constants: Set[Constant] = collectConstants(args)

  /**
   * All functions of this atom
   */
  override lazy val functions: Set[TermFunction] = collectFunctions(args)

  // An atom is ground when it contains no variables.
  def isGround = variables.isEmpty

  // mkString replaces the previous hand-rolled reduceLeft concatenation;
  // output is identical and the empty-args case is handled by the guard.
  override def toText: String =
    if (args.nonEmpty) symbol + "(" + args.map(_.toText).mkString(", ") + ")"
    else symbol

  override def toString: String =
    if (args.nonEmpty) symbol + "(" + args.map(_.toString).mkString(", ") + ")"
    else symbol
}
/** Marker trait for logical connectives (conjunction and negation below). */
sealed trait LogicalConnective extends Formula with DefiniteClauseConstruct
/** Logical conjunction of two formulas, rendered in Prolog syntax as `left, right`. */
sealed case class Conjunction(left: Formula, right: Formula) extends LogicalConnective with DefiniteClauseConstruct with Serializable {

  // The checks test for null, so the messages now say "null" (previously "empty"),
  // matching the wording used by Negation and Rule.
  require(left ne null, "The left part of a conjunction cannot be null")
  require(right ne null, "The right part of a conjunction cannot be null")

  override def isUnit: Boolean = false

  /**
   * Gives the sub-formulas that this formula contains
   */
  override def subFormulas: Seq[Formula] = Seq(left, right)

  /**
   * The textual representation of this formula
   */
  override def toText: String = left.toText + ", " + right.toText
}
/*sealed case class Disjunction(left: Formula, right: Formula) extends LogicalConnective {
require(left ne null, "The left part of a disjunction cannot be null")
require(right ne null, "The right part of a disjunction cannot be null")
override def isUnit: Boolean = false
/**
* Gives the sub-formulas that this formula contains
*/
override def subFormulas: Seq[Formula] = Seq(left, right)
/**
* The textual representation of this formula
*/
override def toText: String = left.toText + " ; " + right.toText
}*/
/** Negation of a formula, rendered as `not(...)`. */
sealed case class Negation(formula: Formula) extends LogicalConnective with DefiniteClauseConstruct with Serializable {

  require(formula ne null, "The specified formula cannot be null")

  /**
   * Gives the sub-formulas that this formula contains
   */
  override def subFormulas: Seq[Formula] = Seq(formula)

  /**
   * The textual representation of this formula
   */
  override def toText = "not("+formula.toText+")"

  // A negation is a unit formula exactly when the negated formula is.
  override def isUnit: Boolean = formula.isUnit

  // NOTE(review): fixed at 1 even when `formula` itself contains several atoms —
  // confirm whether a negated sub-formula is meant to count as a single literal.
  override def countAtoms = 1
}
/** A definite clause `head :- body.` (headless or bodiless clauses are unsupported). */
sealed case class Rule(head: Atom, body: DefiniteClauseConstruct) extends Formula with Serializable {

  require(head ne null, "The head of a rule cannot be null (headless rules are not supported)")
  require(body ne null, "The body of a rule cannot be null (unit clauses are not supported)")

  // Bug fix: the previous implementation folded over `body.subFormulas`, which is
  // EMPTY when the body is a single Atom (Atom.subFormulas == Seq()), silently
  // dropping the body atom's own variables/constants/functions. Delegating to the
  // body's already-aggregated lazy sets is equivalent for connectives (their sets
  // are exactly the union over their sub-formulas) and correct for atomic bodies.
  override lazy val variables: Set[Variable] = head.variables ++ body.variables
  override lazy val constants: Set[Constant] = head.constants ++ body.constants
  override lazy val functions: Set[TermFunction] = head.functions ++ body.functions

  /**
   * Gives the sub-formulas that this formula contains
   */
  override def subFormulas: Seq[Formula] = Seq(head, body)

  /**
   * The textual representation of this formula
   */
  override def toText: String = head.toText + " :- " + body.toText +"."

  override def isUnit: Boolean = false
}
| anskarl/ReadLogic | src/main/scala/readlogic/prolog/Formula.scala | Scala | apache-2.0 | 5,588 |
package io.github.oxlade39.storrent.persistence
import org.scalatest.{BeforeAndAfterAll, WordSpecLike}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import akka.actor.{Props, ActorSystem}
import org.scalatest.MustMatchers
import io.github.oxlade39.storrent.test.util.{ForwardingParent, FileOps}
import java.io.File
import scala.util.Random
import akka.util.ByteString
import scala.io.Source
import org.scalatest.mockito.MockitoSugar
import io.github.oxlade39.storrent.core.{TorrentFile, Torrent}
import io.github.oxlade39.storrent.peer.{Block, DownloadPiece}
import org.mockito.Mockito
import org.apache.commons.io.FileUtils
/** Tests for FilePersistence: blocks written at arbitrary offsets (here shuffled)
  * must be reassembled into the expected file contents on disk. */
class FilePersistenceTest extends TestKit(ActorSystem("FilePersistenceTest"))
  with WordSpecLike with BeforeAndAfterAll with ImplicitSender with MustMatchers with FileOps {

  // Shut down the actor system once all tests in this suite have run.
  override def afterAll(): Unit = {
    system.terminate()
  }

  "FilePersistence" must {
    "write a file to disk" in {
      // Unique output path per run so repeated executions don't interfere.
      val testOut = new File("target") / "testOut" / s"FilePersistenceTest${Random.nextLong()}"
      testOut.delete()
      val fileContent =
        ByteString("""
          |Lorum ipsumLorum ipsumLorum ipsumLorum ipsumLorum ipsumLorum ipsumLorum ipsumLorum ipsum
          |This is the body of a test file which
          |
          |should be broken up into pieces
          |
          |and writtin to file
        """.stripMargin)

      val underTest = system.actorOf(Props(new ForwardingParent(
        FilePersistence.props(toPersistInto = testOut, fileOffset = FolderPersistence.FileOffset(0, fileContent.size)),
        testActor)))

      // Split the content into 10-byte chunks and deliver them in random order.
      val writes = for {
        (block, offsetIndex) <- Random.shuffle(fileContent.grouped(10).zipWithIndex)
      } yield FilePersistence.Write(block, offsetIndex * 10)

      writes foreach (underTest ! _)

      val done = expectMsgType[FilePersistence.Done]
      // The file on disk must match the original bytes exactly.
      val fileAsString = Source.fromFile(done.file).getLines().mkString(System.lineSeparator())
      val rawBytesAsString = fileContent.utf8String
      fileAsString mustEqual rawBytesAsString
    }

    "not complete prematurely when sent duplicate pieces" in {
      val testOut = new File("target") / "testOut" / s"FilePersistenceTest${Random.nextLong()}"
      testOut.delete()
      val fileContent =
        ByteString("""
          |Lorum ipsumLorum ipsumLorum ipsumLorum ipsumLorum ipsumLorum ipsumLorum ipsumLorum ipsum
          |This is the body of a test file which
          |
          |should be broken up into pieces
          |
          |and writtin to file
        """.stripMargin)

      val underTest = system.actorOf(Props(new ForwardingParent(
        FilePersistence.props(toPersistInto = testOut, fileOffset = FolderPersistence.FileOffset(0, fileContent.size)),
        testActor)))

      val writes = for {
        (block, offsetIndex) <- Random.shuffle(fileContent.grouped(10).zipWithIndex)
      } yield FilePersistence.Write(block, offsetIndex * 10)

      // Send every write twice; the first half alone must not trigger Done.
      // (Note: alphabetic `take`/`drop` bind looser than `+`, so both parse as size/2 + 1.)
      val doubledUp = writes.toList.flatMap(write => List(write, write))
      val firstHalf = doubledUp take (doubledUp.size / 2) + 1
      val secondHalf = doubledUp drop (doubledUp.size / 2) + 1

      firstHalf foreach (underTest ! _)
      expectNoMsg()
      secondHalf foreach (underTest ! _)
      expectMsgType[FilePersistence.Done]
    }
  }
}
/** Tests for FolderPersistence: pieces spanning several files must be written
  * out as the correct set of files within the target directory. */
class FolderPersistenceTest extends TestKit(ActorSystem("FolderPersistenceTest"))
  with WordSpecLike with BeforeAndAfterAll with ImplicitSender with MustMatchers with FileOps with MockitoSugar {

  // Ensure the shared output directory exists before any test runs.
  override def beforeAll(): Unit = {
    val testOut = new File("target") / "testOut"
    FileUtils.forceMkdir(testOut)
  }

  override def afterAll(): Unit = {
    system.terminate()
  }

  "FolderPersistence" must {
    // Sanity check of the fixture itself: reassembling the first two pieces must
    // reproduce the start of the concatenated stream.
    "input data must be consistent" in {
      val reconstituted =
        FolderPersistenceTest.downloadPieces.take(2).map(_.contiguousStream.get).reduce(_ ++ _)
      val expected = "0\\nfile 0 with some text\\n/0"
      val chopped = reconstituted.utf8String.substring(0, expected.size)
      chopped mustEqual expected
    }

    "write multiple files within a directory" in {
      val torrent = mock[Torrent]
      val testOut = new File("target") / "testOut" / s"FolderPersistenceTest${Random.nextLong()}"
      Mockito.when(torrent.files).thenReturn(FolderPersistenceTest.files)

      val underTest = watch(system.actorOf(Props(new ForwardingParent(
        FolderPersistence.props(torrent, testOut), testActor)), "multipleFiles"))

      // Feed every piece of the fixture torrent to the persistence actor.
      FolderPersistenceTest.downloadPieces foreach { p =>
        underTest ! Persistence.Persist(p)
      }

      val FolderPersistence.Done(dir) = expectMsgType[FolderPersistence.Done]

      // Every torrent file must exist on disk with exactly the expected contents.
      dir.listFiles().map(_.getName).toSet mustEqual FolderPersistenceTest.files.map(_.name).toSet
      dir.listFiles() foreach { written =>
        val result = FolderPersistenceTest.results.find {
          case (tf, _) => tf.name == written.getName
        }
        val Some((torrentFile, fileBytes)) = result
        val bytesToString = fileBytes.utf8String
        val fileToString = Source.fromFile(written).getLines().mkString("\\n")
        bytesToString mustEqual fileToString
      }
      expectTerminated(underTest)
    }
  }
}
/** Fixture data: 5 small files, concatenated, cut into 6-byte blocks and grouped
  * into hashed pieces of 3 blocks each. */
object FolderPersistenceTest {
  val numberOfFiles = 5
  val blockSize = 6
  val blocksPerPiece = 3
  val totalPieceSize = blockSize * blocksPerPiece

  // One (TorrentFile, contents) pair per fake file.
  val results: Seq[(TorrentFile, ByteString)] = 0.until(numberOfFiles).map { fileIndex =>
    val fileText = ByteString(
      s"$fileIndex\\nfile $fileIndex with some text\\n/$fileIndex"
    )
    val torrentFile = TorrentFile(name = s"file$fileIndex", fileText.size)
    (torrentFile, fileText)
  }

  // All file contents concatenated, as they appear in the torrent data stream.
  val sequentialBytes: ByteString = results.map(_._2).reduce(_ ++ _)

  // The stream cut into fixed-size blocks.
  val blockBytes: List[ByteString] = sequentialBytes.grouped(blockSize).toList

  // Blocks regrouped into pieces, each carrying its offset and SHA hash.
  val downloadPieces: List[DownloadPiece] = blockBytes.grouped(blocksPerPiece).toList.zipWithIndex.map {
    case (blocks, pieceIndex) =>
      val bytesInPiece = blocks.reduce(_ ++ _)
      DownloadPiece(pieceIndex, bytesInPiece.size, pieceIndex * totalPieceSize, Torrent.hash(bytesInPiece)) ++
        blocks.zipWithIndex.map(b => Block(b._2 * blockSize, b._1))
  }

  def files = results.map(_._1).toList
}
| oxlade39/STorrent | src/test/scala/io/github/oxlade39/storrent/persistence/FilePersistenceTest.scala | Scala | apache-2.0 | 6,263 |
package api.socket
import play.api.libs.json._
/**
 * Single stream response.
 */
case class StreamResponse(stream: models.Stream, correlation: Int)

object StreamResponse {
  // Serializes as the stream's own JSON object merged with "type" and "correlation" fields.
  implicit val writes = new Writes[StreamResponse] {
    def writes(x: StreamResponse): JsValue =
      Json.obj(
        "type" -> "Stream",
        "correlation" -> x.correlation) ++ Json.toJson(x.stream).as[JsObject]
  }
}
/**
 * Multiple stream response.
 */
case class StreamsResponse(streams: Seq[models.Stream], correlation: Int)

object StreamsResponse {
  // NOTE(review): named `statusWrites` while every sibling uses `writes` — likely a
  // copy/paste leftover; renaming would change the object's public API, so only flagged here.
  implicit val statusWrites = new Writes[StreamsResponse] {
    def writes(x: StreamsResponse): JsValue =
      Json.obj(
        "type" -> "Streams",
        "streams" -> x.streams,
        "correlation" -> x.correlation)
  }
}
/**
 * Current stream status response.
 */
case class StreamStatusResponse(uri: String, status: models.Status, correlation: Int)

object StreamStatusResponse {
  // Emits the stream's uri under the JSON key "url".
  implicit val writes = new Writes[StreamStatusResponse] {
    def writes(x: StreamStatusResponse): JsValue =
      Json.obj(
        "type" -> "StreamStatus",
        "url" -> x.uri,
        "status" -> x.status,
        "correlation" -> x.correlation)
  }
}
/**
 * Stream children response (all children of a stream).
 */
case class ApiChildrenResponse(uri: String, children: Seq[models.Stream], correlation: Int)

object ApiChildrenResponse {
  implicit val writes = new Writes[ApiChildrenResponse] {
    def writes(x: ApiChildrenResponse): JsValue =
      Json.obj(
        "type" -> "StreamChildren",
        "url" -> x.uri,
        "children" -> x.children,
        "correlation" -> x.correlation)
  }
}
/**
 * Single stream child response.
 */
// NOTE(review): the field is named `children` but holds a single Stream (serialized
// under the key "child"); renaming would break callers using named arguments, so
// the inconsistency is only flagged here.
case class ApiChildResponse(uri: String, children: models.Stream, correlation: Int)

object ApiChildResponse {
  implicit val writes = new Writes[ApiChildResponse] {
    def writes(x: ApiChildResponse): JsValue =
      Json.obj(
        "type" -> "StreamChild",
        "url" -> x.uri,
        "child" -> x.children,
        "correlation" -> x.correlation)
  }
}
/**
 * Websocket error response message.
 */
case class SocketError(error: String, correlation: Int)

object SocketError {
  implicit val writes = new Writes[SocketError] {
    def writes(x: SocketError): JsValue =
      Json.obj(
        "type" -> "Error",
        "error" -> x.error,
        "correlation" -> x.correlation)
  }
}
/**
 * Stream tags response (all tags of a stream).
 */
case class StreamTagsResponse(uri: String, tags: Seq[models.StreamTag], correlation: Int)

object StreamTagsResponse {
  implicit val writes = new Writes[StreamTagsResponse] {
    def writes(x: StreamTagsResponse): JsValue =
      Json.obj(
        "type" -> "StreamTags",
        "url" -> x.uri,
        "tags" -> x.tags,
        "correlation" -> x.correlation)
  }
}
/**
 * Stream single tag response.
 */
case class StreamTagResponse(uri: String, tag: models.StreamTag, correlation: Int)

object StreamTagResponse {
  // The tag's own JSON fields are merged into the envelope object.
  implicit val writes = new Writes[StreamTagResponse] {
    def writes(x: StreamTagResponse): JsValue =
      Json.obj(
        "type" -> "StreamTag",
        "url" -> x.uri,
        "correlation" -> x.correlation) ++ Json.toJson(x.tag).as[JsObject]
  }
} | Blotre/blotre | app/api/socket/Responses.scala | Scala | mit | 3,158 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.nscplugin.test
import org.scalajs.nscplugin.test.util._
import org.junit.Test
// scalastyle:off line.size.limit
/** Compiler tests for the reflective-instantiation annotation: it must be
  * rejected on every flavor of JS type. The expected-error strings below are
  * matched against actual compiler output, so they must not be edited. */
class ReflectTest extends DirectTest with TestHelpers {

  // Prepended to every snippet compiled by the tests below.
  override def preamble: String =
    """import scala.scalajs.js, js.annotation._
    import scala.scalajs.reflect.annotation._
    """

  // Covers Scala.js-defined classes/traits/objects and native classes/traits/objects.
  @Test
  def noEnableReflectiveInstantiationOnJSType: Unit = {
    """
    @EnableReflectiveInstantiation
    class A extends js.Object

    @EnableReflectiveInstantiation
    trait B extends js.Object

    @EnableReflectiveInstantiation
    object C extends js.Object

    @EnableReflectiveInstantiation
    @js.native
    @JSGlobal
    class D extends js.Object

    @EnableReflectiveInstantiation
    @js.native
    trait E extends js.Object

    @EnableReflectiveInstantiation
    @js.native
    @JSGlobal
    object F extends js.Object
    """ hasErrors
    """
    |newSource1.scala:4: error: @EnableReflectiveInstantiation cannot be used on types extending js.Any.
    | @EnableReflectiveInstantiation
    | ^
    |newSource1.scala:7: error: @EnableReflectiveInstantiation cannot be used on types extending js.Any.
    | @EnableReflectiveInstantiation
    | ^
    |newSource1.scala:10: error: @EnableReflectiveInstantiation cannot be used on types extending js.Any.
    | @EnableReflectiveInstantiation
    | ^
    |newSource1.scala:13: error: @EnableReflectiveInstantiation cannot be used on types extending js.Any.
    | @EnableReflectiveInstantiation
    | ^
    |newSource1.scala:18: error: @EnableReflectiveInstantiation cannot be used on types extending js.Any.
    | @EnableReflectiveInstantiation
    | ^
    |newSource1.scala:22: error: @EnableReflectiveInstantiation cannot be used on types extending js.Any.
    | @EnableReflectiveInstantiation
    | ^
    """
  }
}
| scala-js/scala-js | compiler/src/test/scala/org/scalajs/nscplugin/test/ReflectTest.scala | Scala | apache-2.0 | 2,171 |
/*
* Copyright (c) 2015 Alpine Data Labs
* All rights reserved.
*/
package com.alpine.model.pack.preprocess
import com.alpine.features.{StringType, FeatureDesc, IntType}
import com.alpine.json.JsonTestUtil
import org.scalatest.FunSuite
/**
* Tests serialization of NullValueReplacement
* and application of NullValueReplacer.
*/
class NullValueReplacementTest extends FunSuite {
  // Replacement model: nulls in "humidity" become 70, nulls in "outlook" become "sunny".
  val model = NullValueReplacement(
    Seq[Any](70, "sunny"),
    Seq[FeatureDesc[_]](
      FeatureDesc("humidity", IntType()),
      FeatureDesc("outlook", StringType())
    )
  )
  test("Should serialize correctly") {
    // Round-trips the model through its JSON representation.
    JsonTestUtil.testJsonization(model)
  }
  test("Should apply transformation correctly") {
    val t = model.transformer
    // All four combinations of null / present values across the two columns;
    // non-null inputs must pass through untouched.
    assert(Seq(70,"sunny") === t.apply(Seq[Any](null, null)))
    assert(Seq(65,"sunny") === t.apply(Seq[Any](65, null)))
    assert(Seq(70,"rainy") === t.apply(Seq[Any](null, "rainy")))
    assert(Seq(65,"rainy") === t.apply(Seq[Any](65, "rainy")))
  }
}
| holdenk/PluginSDK | alpine-model-pack/src/test/scala/com/alpine/model/pack/preprocess/NullValueReplacementTest.scala | Scala | apache-2.0 | 992 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin
import collector.ZipkinCollector
import gen.LogEntry
import org.specs.Specification
import org.specs.mock.{ClassMocker, JMocker}
import com.twitter.io.TempFile
import com.twitter.finagle.builder.ClientBuilder
import org.apache.thrift.protocol.TBinaryProtocol
import com.twitter.util.Eval
import com.twitter.cassie.tests.util.FakeCassandra
import com.twitter.zipkin.query.ZipkinQuery
import com.twitter.common.zookeeper.ServerSet
import com.twitter.common.net.pool.DynamicHostSet.HostChangeMonitor
import java.net.{InetSocketAddress, InetAddress}
import java.util.Map
import com.twitter.thrift.{Status, ServiceInstance}
import com.twitter.zipkin.config.{ZipkinQueryConfig, CassandraStorageConfig, ZipkinCollectorConfig}
import org.apache.zookeeper.server.persistence.FileTxnSnapLog
import org.apache.zookeeper.server.ZooKeeperServer.BasicDataTreeBuilder
import org.apache.zookeeper.server.{NIOServerCnxn, ZooKeeperServer}
import com.twitter.common.io.FileUtils
import com.twitter.finagle.Service
import com.twitter.finagle.thrift.{ThriftClientRequest, ThriftClientFramedCodec}
// End-to-end spec: boots a fake Cassandra, an in-process ZooKeeper, a
// ZipkinCollector and a ZipkinQuery, then round-trips a span through the
// collector and reads it back via the query service.
class ZipkinSpec extends Specification with JMocker with ClassMocker {
  object FakeServer extends FakeCassandra
  // Shared fixtures: created in doBefore, torn down in doAfter.
  var collector: ZipkinCollector = null
  var collectorTransport: Service[ThriftClientRequest, Array[Byte]] = null
  var query: ZipkinQuery = null
  var queryTransport: Service[ThriftClientRequest, Array[Byte]] = null
  "ZipkinCollector and ZipkinQuery" should {
    doBefore {
      // fake cassandra node
      FakeServer.start()
      // start a temporary zookeeper server
      val zkPort = 2181 // TODO pick another port?
      val tmpDir = FileUtils.createTempDir()
      val zooKeeperServer =
        new ZooKeeperServer(new FileTxnSnapLog(tmpDir, tmpDir), new BasicDataTreeBuilder())
      val connectionFactory = new NIOServerCnxn.Factory(new InetSocketAddress(zkPort))
      connectionFactory.startup(zooKeeperServer)
      // no need to register in serversets
      val nullServerSetsImpl = new ServerSet() {
        def join(p1: InetSocketAddress, p2: Map[String, InetSocketAddress], p3: Status) = null
        def monitor(p1: HostChangeMonitor[ServiceInstance]) {}
      }
      // start a collector that uses the local zookeeper and fake cassandra
      val collectorConfigFile = TempFile.fromResourcePath("/TestCollector.scala")
      val collectorConfig = new Eval().apply[ZipkinCollectorConfig](collectorConfigFile)
      collectorConfig.zkConfig.servers = List("localhost:" + zkPort)
      collectorConfig.storageConfig.asInstanceOf[CassandraStorageConfig].cassandraConfig.port = FakeServer.port.get
      val collectorPort = collectorConfig.serverPort
      // start a query service that uses the local zookeeper and fake cassandra
      val queryFile = TempFile.fromResourcePath("/TestQuery.scala")
      val queryConfig = new Eval().apply[ZipkinQueryConfig](queryFile)
      queryConfig.zkConfig.servers = List("localhost:" + zkPort)
      queryConfig.storageConfig.asInstanceOf[CassandraStorageConfig].cassandraConfig.port = FakeServer.port.get
      val queryPort = queryConfig.serverPort
      collector = new ZipkinCollector(collectorConfig)
      collector.start()
      query = new ZipkinQuery(queryConfig, nullServerSetsImpl, queryConfig.storage, queryConfig.index)
      query.start()
      // finagle thrift clients pointed at the two freshly started services
      queryTransport = ClientBuilder()
        .hosts(InetAddress.getLocalHost.getHostName + ":" + queryPort)
        .hostConnectionLimit(1)
        .codec(ThriftClientFramedCodec())
        .build()
      collectorTransport = ClientBuilder()
        .hosts(InetAddress.getLocalHost.getHostName + ":" + collectorPort)
        .hostConnectionLimit(1)
        .codec(ThriftClientFramedCodec())
        .build()
    }
    doAfter {
      // release client transports first, then stop the services they point at
      collectorTransport.release()
      collector.shutdown()
      queryTransport.release()
      query.shutdown()
      FakeServer.stop()
    }
    "collect a trace, then return it when requested from the query daemon" in {
      val protocol = new TBinaryProtocol.Factory()
      // Opaque base64/thrift-serialized span fixture; the assertions below
      // expect it to carry trace id 123.
      val span = "CgABAAAAAAAAAHsLAAMAAAAGbWV0aG9kCgAEAAAAAAAAAHsKAAUAAAAAAAAAew8ABgwAA" +
        "AACCgABAAAAAAdU1MALAAIAAAACY3MMAAMIAAEBAQEBBgACAAELAAMAAAAHc2VydmljZQAACgABAAAAAA" +
        "dU1MALAAIAAAACY3IMAAMIAAEBAQEBBgACAAELAAMAAAAHc2VydmljZQAADwAIDAAAAAELAAEAAAADa2V" +
        "5CwACAAAABXZhbHVlCAADAAAAAQwABAgAAQEBAQEGAAIAAQsAAwAAAAdzZXJ2aWNlAAAA"
      // let's send off a tracing span to the collector
      val collectorClient = new gen.ZipkinCollector.FinagledClient(collectorTransport, protocol)
      collectorClient.log(Seq(LogEntry("zipkin", span)))()
      // let's check that the trace we just sent has been stored and indexed properly
      val queryClient = new gen.ZipkinQuery.FinagledClient(queryTransport, protocol)
      val traces = queryClient.getTracesByIds(Seq(123), Seq())()
      traces.isEmpty mustEqual false
      traces(0).spans.isEmpty mustEqual false
      traces(0).spans(0).traceId mustEqual 123
    }
  }
}
| lanrion/zipkin | zipkin-test/src/test/scala/com/twitter/zipkin/ZipkinSpec.scala | Scala | apache-2.0 | 5,613 |
/*
* Copyright (c) 2012-2013 SnowPlow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.hadoop.hive
// Scala
import scala.collection.JavaConversions
// Specs2
import org.specs2.mutable.Specification
// Hive
import org.apache.hadoop.hive.serde2.SerDeException
// SnowPlow Utils
import com.snowplowanalytics.util.Tap._
// Deserializer
import test.{SnowPlowDeserializer, SnowPlowEvent, SnowPlowTest}
// Deserializes one CloudFront access-log line produced by the No-JS (pixel)
// tracker and checks every extracted field against expectations.
class NoJsTrackerTest extends Specification {
  // Toggle if tests are failing and you want to inspect the struct contents
  implicit val _DEBUG = false
  // Raw CloudFront log line: a GET of /ice.png whose querystring carries the
  // url-encoded tracker payload (e=pv, page, url, aid, p, tv).
  val row = "2012-05-24 11:35:53 DFW3 3343 99.116.172.58 GET d3gs014xn8p70.cloudfront.net /ice.png 200 - Mozilla/5.0%20(Windows%20NT%206.1;%20WOW64;%20rv:12.0)%20Gecko/20100101%20Firefox/12.0 &e=pv&page=root%20readme&url=https%3A%2F%2Fgithub.com%2Fsnowplow%2Fsnowplow&aid=snowplow&p=web&tv=no-js-0.1.0"
  // Field values the deserializer is expected to produce from `row`.
  val expected = new SnowPlowEvent().tap { e =>
    e.app_id = "snowplow"
    e.platform = "web"
    e.dt = "2012-05-24"
    e.collector_tm = "11:35:53"
    e.event = "page_view"
    e.page_title = "root readme"
    e.page_url = "https://github.com/snowplow/snowplow"
  }
  "The SnowPlow page view row from the No-JS tracker \\"%s\\"".format(row) should {
    val actual = SnowPlowDeserializer.deserialize(row)
    // Check all of the field values
    // The application (site, game, app etc) this event belongs to, and the tracker platform
    "have app_id (Application ID) = %s".format(expected.app_id) in {
      actual.app_id must_== expected.app_id
    }
    "have platform (Platform) = %s".format(expected.platform) in {
      actual.platform must_== expected.platform
    }
    // Date/time
    "have dt (Legacy Hive Date) = %s".format(expected.dt) in {
      actual.dt must_== expected.dt
    }
    "have collector_tm (Collector Time) = %s".format(expected.collector_tm) in {
      actual.collector_tm must_== expected.collector_tm
    }
    // Event and transaction
    "have event (Event Type) = %s".format(expected.event) in {
      actual.event must_== expected.event
    }
    "have a valid (stringly-typed UUID) event_id" in {
      SnowPlowTest.stringlyTypedUuid(actual.event_id) must_== actual.event_id
    }
    // Page
    "have page_url (Page URL) = %s".format(expected.page_url) in {
      actual.page_url must_== expected.page_url
    }
    // Tracking a page view, so we have a page title
    "have page_title (Page Title) = %s".format(expected.page_title) in {
      actual.page_title must_== expected.page_title
    }
  }
} | richo/snowplow | 3-etl/hive-etl/snowplow-log-deserializers/src/test/scala/com/snowplowanalytics/snowplow/hadoop/hive/NoJsTrackerTest.scala | Scala | apache-2.0 | 3,143 |
/*
* Copyright (c) 2017 Magomed Abdurakhmanov, Hypertino
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
package com.hypertino.facade.filters.annotated
import com.hypertino.binders.annotations.fieldName
import com.hypertino.binders.value.Value
import com.hypertino.facade.filter.chain.SimpleFilterChain
import com.hypertino.facade.filter.model._
import com.hypertino.facade.filter.parser.{ExpressionEvaluator, PreparedExpression}
import com.hypertino.facade.raml._
import com.hypertino.hyperbus.model.HRL
import com.typesafe.config.Config
import scaldi.Injectable
// Parsed form of the RAML "rewrite" annotation: an optional guard expression
// ("if") plus the destination location/query to rewrite requests to.
case class RewriteAnnotation(
  @fieldName("if") predicate: Option[PreparedExpression],
  location: String,
  query: Value
) extends RamlAnnotation {
  def name: String = "rewrite"
}
// Builds the request/event filter chain for a "rewrite"-annotated RAML
// resource or method.
class RewriteFilterFactory(config: Config, protected val predicateEvaluator: ExpressionEvaluator) extends RamlFilterFactory with Injectable {
  override def createFilters(target: RamlFilterTarget): SimpleFilterChain = {
    // "rewrite" may be attached to a whole resource or to a single method;
    // any other target is a RAML configuration error.
    val (sourceLocation, ramlMethod, destinationLocation, query) = target match {
      case ResourceTarget(uri, RewriteAnnotation(_, l, q)) ⇒ (uri, None, l, q)
      case MethodTarget(uri, method, RewriteAnnotation(_, l, q)) ⇒ (uri, Some(Method(method)), l, q)
      case otherTarget ⇒ throw RamlConfigException(s"Annotation 'rewrite' cannot be assigned to $otherTarget")
    }
    val sourceHRL = HRL(sourceLocation)
    val destinationHRL = HRL(destinationLocation, query)
    // NOTE(review): global mutable index, updated as a side effect of filter
    // construction.
    RewriteIndexHolder.updateRewriteIndex(sourceHRL, destinationHRL, ramlMethod)
    SimpleFilterChain(
      requestFilters = Seq(new RewriteRequestFilter(sourceHRL, destinationHRL, predicateEvaluator)),
      responseFilters = Seq.empty,
      eventFilters = Seq(new RewriteEventFilter(predicateEvaluator))
    )
  }
  override def createRamlAnnotation(name: String, value: Value): RamlAnnotation = {
    value.to[RewriteAnnotation]
  }
}
| hypertino/hyperfacade | src/main/scala/com/hypertino/facade/filters/annotated/RewriteFilterFactory.scala | Scala | mpl-2.0 | 2,170 |
package io.aos.scala.pmatch;
object MatchSpl1 {

  /** Maps 1 and 2 to their English names; anything else is "many". */
  def matchTest(x: Int): String = x match {
    case 1 => "one"
    case 2 => "two"
    case _ => "many"
  }

  // Explicit main instead of the deprecated `Application` trait (removed in
  // Scala 2.11; its DelayedInit-based body had initialization pitfalls).
  def main(args: Array[String]): Unit =
    println(matchTest(3))
}
| XClouded/t4f-core | scala/src/main/scala/io/aos/scala/pmatch/MatchSpl1.scala | Scala | apache-2.0 | 209 |
package net.nomadicalien.ch3
/** Binary tree ADT: a node is either a [[Leaf]] holding a value or a
  * [[Branch]] with two subtrees. Values live only in the leaves, so every
  * tree is non-empty.
  */
sealed trait Tree[+A]
case class Leaf[A](value: A) extends Tree[A]
case class Branch[A](left: Tree[A], right: Tree[A]) extends Tree[A]

/** Operations over [[Tree]], each implemented twice: by direct structural
  * recursion, and (the *2 variants) via the generic [[fold]].
  */
object Tree {

  /** Number of nodes (leaves and branches) in the tree. */
  def size[A](as: Tree[A]): Int = as match {
    case Leaf(_) => 1
    case Branch(l, r) => 1 + size(l) + size(r)
  }

  /** Largest value in the tree.
    *
    * FIX applied: the previous version iterated with a -1 sentinel and so
    * returned -1 (wrong) for trees containing only negative values; a tree
    * always has at least one leaf, so no sentinel is needed. This also makes
    * the result agree with the fold-based [[maximum2]].
    */
  def maximum(as: Tree[Int]): Int = as match {
    case Leaf(v) => v
    case Branch(l, r) => maximum(l) max maximum(r)
  }

  /** Length (in nodes) of the longest root-to-leaf path. */
  def depth[A](as: Tree[A]): Int = as match {
    case Leaf(_) => 1
    case Branch(l, r) => 1 + depth(l).max(depth(r))
  }

  /** Applies f to every leaf value, preserving the tree's shape. */
  def map[A,B](as: Tree[A])(f: A => B): Tree[B] = as match {
    case Leaf(a) => Leaf(f(a))
    case Branch(l, r) => Branch(map(l)(f), map(r)(f))
  }

  /** Generic catamorphism: f handles leaves, g combines branch results. */
  def fold[A,B](t: Tree[A])(f: A => B)(g: (B,B) => B): B = t match {
    case Leaf(a) => f(a)
    case Branch(l,r) => g(fold(l)(f)(g), fold(r)(f)(g))
  }

  /** [[size]] expressed with [[fold]]. */
  def size2[A](as: Tree[A]): Int = fold(as)(a => 1)(1 + _ + _)

  /** [[maximum]] expressed with [[fold]]; correct for negative values. */
  def maximum2(as: Tree[Int]): Int = fold(as)(a => a)(_ max _)

  /** [[depth]] expressed with [[fold]]. */
  def depth2[A](as: Tree[A]): Int = fold(as)(a => 1)((d1, d2) => 1 + (d1 max d2))

  /** [[map]] expressed with [[fold]]. */
  def map2[A,B](as: Tree[A])(f: A => B): Tree[B] = fold(as)(a => Leaf(f(a)): Tree[B])(Branch(_, _))
}
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.tools
// ANSI SGR escape sequences used to colorize reporter console output; the
// octal escape "\033" is ESC, and each code selects a foreground color.
private[scalatest] sealed trait AnsiColor {
  val code: String
}
private[scalatest] case object AnsiGreen extends AnsiColor {
  val code: String = "\033[32m"
}
private[scalatest] case object AnsiCyan extends AnsiColor {
  val code: String = "\033[36m"
}
private[scalatest] case object AnsiYellow extends AnsiColor {
  val code: String = "\033[33m"
}
private[scalatest] case object AnsiRed extends AnsiColor {
  val code: String = "\033[31m"
}
| travisbrown/scalatest | src/main/scala/org/scalatest/tools/AnsiColor.scala | Scala | apache-2.0 | 1,078 |
package smr.collection
import smr.Shard
import collection.mutable.Builder
/**
 * Builder for a distributed collection: elements are accumulated per shard
 * via [[LocalBuilder]]s and the per-shard summaries are then combined into
 * the final result.
 *
 * @author dlwh
 */
trait DistributedBuilder[Elem, +To] extends Builder[Elem,To] {
  // Opaque per-shard accumulation state produced by the local builders.
  type LocalSummary
  // Fresh builder to run on a single shard.
  def localBuilder:LocalBuilder[Elem,Iterable[Elem],LocalSummary];
  // Combines the (shards, summary) pairs from every shard into the result.
  def resultFromSummaries(summaries: IndexedSeq[(Iterable[Shard],LocalSummary)]):To;
}
// Shard-local builder; Serializable so it can be shipped to workers.
// summary() extracts the partial result; copy yields an independent builder
// for another shard.
trait LocalBuilder[-Elem, +To, LocalSummary] extends Builder[Elem, To] with Serializable {
  def summary():LocalSummary;
  def copy:LocalBuilder[Elem,To,LocalSummary]
}
| dlwh/smr | src/main/scala/smr/collection/DistributedBuilder.scala | Scala | apache-2.0 | 512 |
/*
* Copyright (C) 2014 - 2016 Softwaremill <http://softwaremill.com>
* Copyright (C) 2016 - 2019 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.kafka.internal
import java.net.URLEncoder
import akka.kafka.tests.scaladsl.LogCapturing
import akka.kafka.{Subscription, Subscriptions}
import akka.util.ByteString
import org.apache.kafka.common.TopicPartition
import org.scalatest.{Matchers, WordSpec}
// Checks the URL-encodable rendering (renderStageAttribute) of every
// Subscription variant: topics, patterns, assignments, and assignments with
// offsets or timestamps.
class SubscriptionsSpec extends WordSpec with Matchers with LogCapturing {
  "URL encoded subscription" should {
    "be readable for topics" in {
      encode(Subscriptions.topics(Set("topic1", "topic2"))) should be(
        "topic1+topic2"
      )
    }
    "be readable for patterns" in {
      encode(Subscriptions.topicPattern("topic.*")) should be("pattern+topic.*")
    }
    "be readable for assignments" in {
      encode(Subscriptions.assignment(Set(new TopicPartition("topic1", 1)))) should be("topic1-1")
    }
    "be readable for assignments with offset" in {
      encode(Subscriptions.assignmentWithOffset(Map(new TopicPartition("topic1", 1) -> 123L))) should be(
        "topic1-1+offset123"
      )
    }
    "be readable for multiple assignments with offset" in {
      encode(
        Subscriptions.assignmentWithOffset(
          Map(new TopicPartition("topic1", 1) -> 123L, new TopicPartition("A-Topic-Name", 2) -> 456L)
        )
      ) should be(
        "topic1-1+offset123+A-Topic-Name-2+offset456"
      )
    }
    "be readable for multiple assignments with timestamp" in {
      encode(
        Subscriptions.assignmentOffsetsForTimes(
          Map(new TopicPartition("topic1", 1) -> 12345L, new TopicPartition("Another0Topic", 1) -> 998822L)
        )
      ) should be(
        "topic1-1+timestamp12345+Another0Topic-1+timestamp998822"
      )
    }
  }
  // Renders the subscription's stage attribute and URL-encodes it as UTF-8.
  private def encode(subscription: Subscription) =
    URLEncoder.encode(subscription.renderStageAttribute, ByteString.UTF_8)
}
| softwaremill/reactive-kafka | tests/src/test/scala/akka/kafka/internal/SubscriptionsSpec.scala | Scala | apache-2.0 | 1,938 |
/*
* Copyright 2014 – 2018 Paul Horn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalax.transducers.internal
import java.util.concurrent.atomic.AtomicBoolean
/** One-shot latch recording that a reduction has signalled termination.
  *
  * The first call to `apply` trips the flag and passes its argument through
  * unchanged; any further call is a contract violation and throws. `?`
  * reports whether the flag has been tripped.
  */
class Reduced {
  private[this] final val signalled = new AtomicBoolean()

  /** Marks the reduced state and echoes `x`; throws on a second invocation. */
  def apply[T](x: T): T =
    if (signalled.compareAndSet(false, true)) x
    else throw new IllegalStateException("ContractViolation: Reduced state signaled multiple times, check for bugs in code.")

  override def toString: String = s"Reduced(${signalled.get()})"

  /** True once the reduced state has been signalled. */
  def ? : Boolean = signalled.get()
}
| knutwalker/transducers-scala | api/src/main/scala/scalax/transducers/internal/Reduced.scala | Scala | apache-2.0 | 1,070 |
package jp.co.cyberagent.aeromock.template.freemarker.method
import jp.co.cyberagent.aeromock.core.http.VariableManager
import jp.co.cyberagent.aeromock.core.script.GroovyDirectiveScriptRunner
import freemarker.template.TemplateMethodModelEx
import groovy.lang.Binding
/**
* Implementation of [[freemarker.template.TemplateMethodModelEx]] to define function in Groovy.
* @author stormcat24
*/
class AeromockCustomMethod(runner: GroovyDirectiveScriptRunner, scriptName: String)
  extends TemplateMethodModelEx {
  /**
   * @inheritdoc
   */
  override def exec(arguments: java.util.List[_]): AnyRef = {
    // Expose the FreeMarker call arguments plus the current request variables
    // (and the data map under "_data") to the Groovy script, then delegate
    // execution of `scriptName` to the runner.
    val binding = new Binding
    binding.setVariable("arguments", arguments)
    VariableManager.getRequestMap().foreach(entry => binding.setVariable(entry._1, entry._2))
    binding.setVariable("_data", VariableManager.getDataMap())
    runner.run[AnyRef](scriptName, binding)
  }
}
| CyberAgent/aeromock | aeromock-freemarker/src/main/scala/jp/co/cyberagent/aeromock/template/freemarker/method/AeromockCustomMethod.scala | Scala | mit | 894 |
package io.aigar.game
import com.github.jpbetz.subspace.Vector2
import java.util.Random
object Grid {
  // Field dimensions scale linearly with the number of players.
  final val WidthPerPlayer = 100
  final val HeightPerPlayer = 100
}
// Rectangular game area providing random spawn positions.
class Grid(val width: Int, val height: Int) {
  val random = new Random()
  // Uniformly random point anywhere in the width x height rectangle.
  def randomPosition: Vector2 = {
    val x = random.nextFloat() * width
    val y = random.nextFloat() * height
    Vector2(x, y)
  }
  // Logic coming from http://stackoverflow.com/a/5838055/395386
  // Random point inside the ellipse inscribed in the grid: random angle plus
  // a triangular-sampled radius (sum of two uniforms, folded back when > 1)
  // gives a uniform density over the disc instead of clustering at the centre.
  def randomRadiusPosition: Vector2 = {
    val angle = 2 * Math.PI * random.nextFloat()
    val a = random.nextFloat() + random.nextFloat()
    val radius = if (a > 1) 2-a else a
    val x = radius * Math.cos(angle).toFloat * (width/2) + width/2
    val y = radius * Math.sin(angle).toFloat * (height/2) + height/2
    Vector2(x, y)
  }
  // Serializable snapshot of the grid dimensions for the game state.
  def state: serializable.Dimensions = {
    serializable.Dimensions(width, height)
  }
}
| DrPandemic/aigar.io | game/src/main/scala/io/aigar/game/Grid.scala | Scala | mit | 880 |
/*
* Copyright 2013 agwlvssainokuni
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import java.util.Date
import models.DriveLog
import play.api.data._
import play.api.data.Forms._
// Play form definition and field constraints for a single drive-log entry.
object DriveLogForm {
  // Field names and validation bounds. For the bigDecimal fields, PRECISION
  // is the total number of digits and SCALE the number of decimal places.
  val DATE = "date"
  val DATE_PATTERN = "yyyy-MM-dd"
  val TRIPMETER = "tripmeter"
  val TRIPMETER_PRECISION = 5
  val TRIPMETER_SCALE = 1
  val FUELOMETER = "fuelometer"
  val FUELOMETER_PRECISION = 3
  val FUELOMETER_SCALE = 1
  val REMAINING = "remaining"
  val REMAINING_PRECISION = 4
  val REMAINING_SCALE = 0
  val ODOMETER = "odometer"
  val ODOMETER_PRECISION = 9
  val ODOMETER_SCALE = 0
  val NOTE = "note"
  val NOTE_MIN = 1
  val NOTE_MAX = 1024
  // Binds/unbinds DriveLog to the HTTP form using the constraints above.
  val driveLogForm: Form[DriveLog] = Form(mapping(
    DATE -> date(DATE_PATTERN),
    TRIPMETER -> bigDecimal(TRIPMETER_PRECISION, TRIPMETER_SCALE),
    FUELOMETER -> bigDecimal(FUELOMETER_PRECISION, FUELOMETER_SCALE),
    REMAINING -> bigDecimal(REMAINING_PRECISION, REMAINING_SCALE),
    ODOMETER -> bigDecimal(ODOMETER_PRECISION, ODOMETER_SCALE),
    NOTE -> optional(text(NOTE_MIN, NOTE_MAX)))(apply)(unapply))
  // Bridges the form fields to the DriveLog model (the form's "date" maps to
  // DriveLog.dt).
  def apply(date: Date, tripmeter: BigDecimal, fuelometer: BigDecimal, remaining: BigDecimal, odometer: BigDecimal, note: Option[String]): DriveLog = {
    DriveLog(date, tripmeter, fuelometer, remaining, odometer, note)
  }
  def unapply(item: DriveLog): Option[(Date, BigDecimal, BigDecimal, BigDecimal, BigDecimal, Option[String])] = {
    Some((item.dt, item.tripmeter, item.fuelometer, item.remaining, item.odometer, item.note))
  }
}
| agwlvssainokuni/lifelog | lifelog-website/app/controllers/DriveLogForm.scala | Scala | apache-2.0 | 2,067 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.commons.shared.unstable.util
import java.io.{ File, FileNotFoundException }
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers
// Checks that key/trust stores load both from a filesystem path and from the
// classpath, and that a missing classpath resource fails fast.
class SslSpec extends AnyFlatSpecLike with Matchers {
  private val keystore = "testkeystore"
  private val password = "123456"
  private val classLoader = this.getClass.getClassLoader
  // Resolves a classpath resource to its canonical filesystem path.
  private def fileFromResource(classPathResource: String) =
    new File(classLoader.getResource(classPathResource).getFile).getCanonicalPath
  "SSLHelperSpec" should "load keystore from file" in {
    val keystoreFile = fileFromResource(keystore)
    val keyManagers = Ssl.newKeyManagerFactory(None, keystoreFile, password, None).getKeyManagers
    keyManagers should have size 1
  }
  it should "load keystore from classpath" in {
    val keyManagers = Ssl.newKeyManagerFactory(None, keystore, password, None).getKeyManagers
    keyManagers should have size 1
  }
  it should "throw FileNotFoundException when load non-existing keystore from classpath" in {
    a[FileNotFoundException] shouldBe thrownBy(Ssl.newKeyManagerFactory(None, "some/non/existing", password, None))
  }
  it should "load truststore from file" in {
    val truststoreFile = fileFromResource(keystore)
    val trustManagers = Ssl.newTrustManagerFactory(None, truststoreFile, password, None).getTrustManagers
    trustManagers should have size 1
  }
  it should "load truststore from classpath" in {
    val trustManagers = Ssl.newTrustManagerFactory(None, keystore, password, None).getTrustManagers
    trustManagers should have size 1
  }
  it should "throw FileNotFoundException when load non-existing truststore from classpath" in {
    a[FileNotFoundException] shouldBe thrownBy(Ssl.newTrustManagerFactory(None, "some/non/existing", password, None))
  }
}
| gatling/gatling | gatling-commons-shared-unstable/src/test/scala/io/gatling/commons/shared/unstable/util/SslSpec.scala | Scala | apache-2.0 | 2,440 |
package tests.distribution.delta.crdt.basic
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.freespec.AnyFreeSpec
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import rescala.extra.lattices.delta.crdt.basic.Network
// ScalaCheck generator for Network instances with arbitrary loss, duplicate
// and delay probabilities, each drawn uniformly from [0, 1].
object NetworkGenerators {
  val genNetwork: Gen[Network] = for {
    lossChance <- Gen.choose(0.0, 1.0)
    duplicateChance <- Gen.choose(0.0, 1.0)
    delayChance <- Gen.choose(0.0, 1.0)
  } yield new Network(lossChance, duplicateChance, delayChance)
  implicit val arbNetwork: Arbitrary[Network] = Arbitrary(genNetwork)
}
// Property tests for the simulated lossy/duplicating/delaying message
// Network: a perfect network (0,0,0) delivers exactly once to the right id,
// while each probability at 1.0 forces its failure mode deterministically.
class NetworkTest extends AnyFreeSpec with ScalaCheckDrivenPropertyChecks {
  import NetworkGenerators._
  // Perfect network: messages sent to "a" arrive only for "a", and only once.
  "sendMessage/receiveMessages" in forAll { (msgs: List[Array[Byte]], replicaID: String) =>
    val network = new Network(0, 0, 0)
    msgs.foreach(network.sendMessage("a", _))
    val rcvdOther = network.receiveMessages(replicaID)
    assert(
      replicaID == "a" || rcvdOther.isEmpty,
      s"""For ids other than "a" no messages should be received, but $replicaID received $rcvdOther"""
    )
    val rcvd = network.receiveMessages("a")
    msgs.foreach { msg =>
      assert(
        rcvd.contains(msg),
        s"""For id "a" the sent messages should be received, but $rcvd does not contain ${msg.mkString(
          "Array(",
          ", ",
          ")"
        )}"""
      )
    }
    val rcvdAfter = network.receiveMessages("a")
    assert(
      rcvdAfter.isEmpty,
      s"After receiving the messages there should not be any messages left to received, but $rcvdAfter is not empty"
    )
  }
  // 100% loss: nothing is ever delivered.
  "loss" in forAll { msg: Array[Byte] =>
    val network = new Network(1, 0, 0)
    network.sendMessage("a", msg)
    val rcvd = network.receiveMessages("a")
    assert(
      rcvd.isEmpty,
      s"In a network with 100% loss chance no messages should be received, but $rcvd is not empty"
    )
  }
  // 100% duplication: the same message keeps being redelivered.
  "duplicate" in forAll { msg: Array[Byte] =>
    val network = new Network(0, 1, 0)
    network.sendMessage("a", msg)
    val rcvd1 = network.receiveMessages("a")
    val rcvd2 = network.receiveMessages("a")
    val rcvd3 = network.receiveMessages("a")
    val rcvd4 = network.receiveMessages("a")
    val rcvd5 = network.receiveMessages("a")
    assert(
      rcvd1.contains(msg),
      s"In a network with 100% duplicate chance a message should be able to be received infinitely often, but after receiving 1 time $rcvd1 does not contain ${msg.mkString("Array(", ", ", ")")}"
    )
    assert(
      rcvd2.contains(msg),
      s"In a network with 100% duplicate chance a message should be able to be received infinitely often, but after receiving 2 time $rcvd1 does not contain ${msg.mkString("Array(", ", ", ")")}"
    )
    assert(
      rcvd3.contains(msg),
      s"In a network with 100% duplicate chance a message should be able to be received infinitely often, but after receiving 3 time $rcvd1 does not contain ${msg.mkString("Array(", ", ", ")")}"
    )
    assert(
      rcvd4.contains(msg),
      s"In a network with 100% duplicate chance a message should be able to be received infinitely often, but after receiving 4 time $rcvd1 does not contain ${msg.mkString("Array(", ", ", ")")}"
    )
    assert(
      rcvd5.contains(msg),
      s"In a network with 100% duplicate chance a message should be able to be received infinitely often, but after receiving 5 time $rcvd1 does not contain ${msg.mkString("Array(", ", ", ")")}"
    )
  }
  // 100% delay: messages are held back forever.
  "delay" in forAll { msg: Array[Byte] =>
    val network = new Network(0, 0, 1)
    network.sendMessage("a", msg)
    val rcvd = network.receiveMessages("a")
    assert(
      rcvd.isEmpty,
      s"In a network with 100% delay chance no message should ever arrive, but $rcvd is not empty"
    )
  }
  // Reliable phase overrides whatever failure probabilities the network has.
  "reliablePhase" in forAll { (msg: Array[Byte], network: Network) =>
    network.startReliablePhase()
    network.sendMessage("a", msg)
    val rcvd = network.receiveMessages("a")
    assert(
      rcvd.contains(msg),
      s"When the network is in a reliable phase all sent messages should be received, but $rcvd does not contain ${msg.mkString("Array(", ", ", ")")}"
    )
  }
}
| guidosalva/REScala | Code/Extensions/Replication/src/test/scala/tests/distribution/delta/crdt/basic/NetworkTest.scala | Scala | apache-2.0 | 4,153 |
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Sat Sep 24 20:46:45 EDT 2011
* @see LICENSE (MIT style license file).
*/
package scalation.math
// U N D E R D E V E L O P M E N T
import scalation.util.Error
import ExtremeD.approx
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ProbNum` class is used to represent probabilistic numbers '(x, p)'
* where 'x' is a real number and 'p' is its probability of occurrence.
* FIX: Currently this class is half-baked!!!
* @see http://okmij.org/ftp/Computation/monads.html#random-var-monad
* @param x the real number (double precision)
* @param p the probability of its occurrence [0, 1]
*/
case class ProbNum (x: Double, p: Double = 1.0)
     extends Numeric [ProbNum] with Ordered [ProbNum] with Error
{
    if (p < 0.0 || p > 1.0) flaw ("constructor", "p is not a probability " + p)

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the unary minus (-): negate the value, keeping its probability.
     */
    def unary_- (): ProbNum = ProbNum (-x, p)

    def negate (xp: ProbNum): ProbNum = -xp

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add two probabilistic numbers, weighting each value by its probability.
     *  NOTE(review): the combined probability 'p + xp.p' is not clamped to
     *  [0, 1]; the semantics are admittedly half-baked (see class comment).
     *  @param xp  add 'xp' to this
     */
    def + (xp: ProbNum): ProbNum = ProbNum (x * p + xp.x * xp.p, p + xp.p)

    def plus (xp: ProbNum, yq: ProbNum): ProbNum = xp + yq

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Subtract two probabilistic numbers.
     *  FIX applied: formerly weighted the subtrahend by 'p' (this number's
     *  probability) instead of 'xp.p', inconsistent with '+'.
     *  @param xp  subtract 'xp' from this
     */
    def - (xp: ProbNum): ProbNum = ProbNum (x * p - xp.x * xp.p, p + xp.p)

    def minus (xp: ProbNum, yq: ProbNum): ProbNum = xp - yq

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply two probabilistic numbers (probability-weighted values).
     *  NOTE(review): the result probability is 'p + xp.p'; for independent
     *  events 'p * xp.p' would be expected - kept as-is pending a design fix.
     *  @param xp  multiply this times 'xp'
     */
    def * (xp: ProbNum): ProbNum = ProbNum (x * p * xp.x * xp.p, p + xp.p)

    def times (xp: ProbNum, yq: ProbNum): ProbNum = xp * yq

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Determine whether the probabilistic number is certain (probability ~ 1,
     *  up to the tolerance used by 'approx').
     */
    def isCertain: Boolean = approx (p, 1.0)

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compare two probabilistic numbers (negative for <, zero for ==, positive for >).
     *  Ordering considers the value 'x' only, not the probability.
     *  @param xp  the first probabilistic number to compare
     *  @param yq  the second probabilistic number to compare
     */
    def compare (xp: ProbNum, yq: ProbNum): Int = xp.x compare yq.x

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compare this probabilistic number with that probabilistic number 'yq'.
     *  @param yq  that probabilistic number
     */
    def compare (yq: ProbNum): Int = x compare yq.x

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert the probabilistic number to a `Double` (drops the probability).
     *  @param xp  the probabilistic number to convert
     */
    def toDouble (xp: ProbNum): Double = xp.x

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert the probabilistic number to a `Float`.
     *  @param xp  the probabilistic number to convert
     */
    def toFloat (xp: ProbNum): Float = xp.x.toFloat

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert the probabilistic number to a `Long`.
     *  FIX applied: formerly 'xp.asInstanceOf [Long]', which casts the ProbNum
     *  reference itself and throws ClassCastException at runtime.
     *  @param xp  the probabilistic number to convert
     */
    def toLong (xp: ProbNum): Long = xp.x.toLong

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert the probabilistic number to an `Int`.
     *  FIX applied: formerly 'xp.asInstanceOf [Int]' (runtime ClassCastException).
     *  @param xp  the probabilistic number to convert
     */
    def toInt (xp: ProbNum): Int = xp.x.toInt

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a probabilistic number from an `Int`.
     *  FIX applied: formerly used probability 0.0 ("never occurs"); a number
     *  lifted from an Int is certain, matching the constructor default.
     *  @param n  the integer used to create the probabilistic number.
     */
    def fromInt (n: Int): ProbNum = ProbNum (n, 1.0)

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert this probabilistic number to a `String`.
     */
    override def toString = "ProbNum ( " + x + " , " + p + " )"

} // ProbNum class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ProbNumTest` object is used to test the `ProbNum` class.
* > run-main scalation.math.ProbNumTest
*/
object ProbNumTest extends App
{
    import scalation.math.ProbNum._

    // Two sample values, each occurring with probability one half.
    val xp = ProbNum (2.0, .5)
    val yq = ProbNum (4.0, .5)

    println ("xp = " + xp)
    println ("yq = " + yq)
    // Exercise the arithmetic operators; results are probability-weighted.
    println ("xp + yq = " + (xp + yq))
    println ("xp - yq = " + (xp - yq))
    println ("xp * yq = " + (xp * yq))

} // ProbNumTest
| NBKlepp/fda | scalation_1.2/src/main/scala/scalation/math/ProbNum.scala | Scala | mit | 4,882 |
package pages.theme
import java.util.UUID
import net.liftweb.builtin.snippet.Tail
import net.liftweb.common.Full
import net.liftweb.http.SHtml.ElemAttr
import net.liftweb.http.js.JE.{ValById, JsRaw}
import net.liftweb.http.js.jquery.JqJsCmds.{Hide, Show}
import net.liftweb.http.js.{JsCmds, JsCmd}
import net.liftweb.http.js.JsCmds._
import net.liftweb.http.{SHtml}
import net.liftweb.json.JsonAST.{JNull, JNothing, JString, JArray}
import net.liftweb.util.Helpers
import net.liftweb.util.Helpers._
import controllers.RootController.XSPageHandle
import scala.xml.NodeSeq
trait SForms extends SBtns {
abstract class DefaultForm()(implicit val xsh: XSPageHandle) extends Form
trait Form extends Id {
implicit val xsh: XSPageHandle
val field: Field
var saveFailed = false
def onChangeClientSide(): JsCmd = JsCmds.Noop
def onChangeServerSide(f: Field): JsCmd = field.update() & field.onChangedField(f)
def afterSucessfullSave(): JsCmd = JsCmds.Noop
def onSave(): JsCmd = {
val errors = field.errors()
if (errors.isEmpty) {
saveFailed = false
field.doSave() & field.update() & afterSucessfullSave()
} else {
saveFailed = true
field.update()
}
}
def rendered: NodeSeq = {
<form method="POST" action="javascript:void(0);" role="form">
{field.rendered}
</form>
}
    /** A single element of a form: something that renders, validates, saves,
     *  and reacts to changes in other fields. */
    trait Field {
      // Unique DOM id for the element this field renders into.
      val id = Helpers.nextFuncName
      /** JS that refreshes this field's visibility / error markup on the client. */
      def update(): JsCmd
      /** Direct sub-fields; group fields override this. */
      def children: Seq[Field] = Nil
      /** React to another field changing: re-render if we depend on it,
       *  otherwise propagate the notification to our children. */
      def onChangedField(f: Field): JsCmd = if (deps.contains(f)) Replace(id, rendered) else {children.map(_.onChangedField(f)).reduceOption(_ & _).getOrElse(JsCmds.Noop)}
      /** Whether the user has touched this field since the last save. */
      def modified: Boolean
      /** Persist the current value, returning follow-up JS. */
      def doSave(): JsCmd
      /** Validation errors, paired with the field that produced each one. */
      def errors(): Seq[(Field, String)]
      /** The field's markup. */
      def rendered: NodeSeq
      /** Visibility predicate, re-evaluated on every update(). */
      def enabled: () => Boolean
      /** Fields whose changes force a full re-render of this one. */
      def deps: Seq[Field]
    }
    /** A group of fields rendered one under another; delegates everything to
     *  its members. */
    trait VerticalGroupBase extends Field {
      def fields: Seq[Field]
      override def children: Seq[Field] = fields
      // The group is modified / invalid as soon as any member is.
      def modified: Boolean = fields.exists(_.modified)
      def errors(): Seq[(Field, String)] = fields.flatMap(_.errors())
      // Save every member, chaining the resulting JS commands.
      def doSave(): JsCmd = fields.map(_.doSave()).foldLeft[JsCmd](JsCmds.Noop)(_ & _)
      def update(): JsCmd = (if (enabled()) Show(id) else Hide(id)) & fields.map(_.update()).reduceOption(_ & _).getOrElse(JsCmds.Noop)
      def rendered: NodeSeq = {
        // Disabled groups stay in the DOM but hidden, so update() can Show them later.
        <div style={if (!enabled()) "display:none;" else ""} id={id}>{fields.map(_.rendered).reduceOption[NodeSeq](_ ++ _).getOrElse(NodeSeq.Empty)}</div>
      }
    }
case class VerticalGroup(enabled: () => Boolean = () => true, deps: Seq[Field] = Nil)(val fields: Field*) extends VerticalGroupBase
trait HorizontalGroupBase extends Field {
def fields: Seq[Field]
override def children: Seq[Field] = fields
def modified: Boolean = fields.exists(_.modified)
def errors(): Seq[(Field, String)] = fields.flatMap(_.errors())
def doSave(): JsCmd = fields.map(_.doSave()).foldLeft[JsCmd](JsCmds.Noop)(_ & _)
def update(): JsCmd = (if (enabled()) Show(id) else Hide(id)) & fields.map(_.update()).reduceOption(_ & _).getOrElse(JsCmds.Noop)
def size = "md"
def rendered: NodeSeq = {
val width = fields.filter(_.enabled()).size match {
case 0 => "12"
case 1 => "12"
case 2 => "6"
case 3 => "4"
case 4 => "3"
}
<div style={if (!enabled()) "display:none;" else ""} id={id} class="row">
{fields.map(f => <div class={s"col-$size-$width"}>{f.rendered}</div>).reduceOption[NodeSeq](_ ++ _).getOrElse(NodeSeq.Empty)}
</div>
}
}
case class HorizontalGroup(
enabled: () => Boolean = () => true,
deps: Seq[Field] = Nil,
override val size: String = "md"
)(val fields: Field*) extends HorizontalGroupBase
    /** A single-choice radio-button group; the selected option's index is sent
     *  to the server via a jQuery change handler wired in after page load. */
    trait RadioFieldBase[T] extends Field {
      // Set once the user picks a button; gates error display in errorsNs().
      var modified = false
      def labelText: String
      /** Caption shown next to each radio button. */
      def labelFor: T => String
      def allOptions: Seq[T]
      /** Persists the chosen option. */
      def save: T => JsCmd
      // Current selection, initialised from the getter at construction time.
      var value = get()
      def get: () => T
      def errors(): Seq[(Field, String)] = Nil
      // Render the first error, but only after the field was touched or a save failed.
      def errorsNs() = errors().filter(_ => modified || saveFailed).headOption.map(error => <span class="text-red help-block">{error._2}</span>).getOrElse(NodeSeq.Empty)
      override def doSave(): JsCmd = { modified = false; save(value) }
      def update(): JsCmd = (if (enabled()) Show(id) else Hide(id)) & SetHtml(id + "errors", errorsNs())
      override def rendered: NodeSeq = {
        val renderedOptions = allOptions
        <div style={if (!enabled()) "display:none;" else ""} id={id} class="form-group pi-padding-bottom-10">
          <label>{labelText}:</label>
          {
            // Each radio's value is the option's index into renderedOptions.
            renderedOptions.zipWithIndex.map(tuple => {
              <div class="radio">
                <label>
                  <input type="radio" name={id + "input"} value={tuple._2.toString} onchange="" checked={if (tuple._1 == value) "checked" else null }/> {labelFor(tuple._1)}
                </label>
              </div>
            }).reduceOption[NodeSeq](_ ++ _).getOrElse(NodeSeq.Empty)
          }
          <span id={id + "errors"}>{errorsNs}</span>
        </div> ++
        // Attach the change handler after page load; the posted index is
        // resolved back into the option server-side.
        Tail.render(Script(OnLoad(Run(
          s"""
             |$$('input[type=radio][name=${id + "input"}]')
             | .on('change', function() {
             | ${xsh.ajaxCall(JsRaw("$(this).val()"), v => {value = renderedOptions(v.toInt); modified = true; onChangeServerSide(this)}).toJsCmd}
             | });
             |""".stripMargin
        ))))
      }
    }
case class RadioField[T](
labelText: String,
allOptions: Seq[T],
get: () => T,
save: T => JsCmd,
labelFor: T => String = (_: T).toString,
enabled: () => Boolean = () => true,
req: Boolean = false,
deps: Seq[Field] = Nil
) extends RadioFieldBase[T]
    /** A multi-choice checkbox group; ticking/unticking a box adds/removes the
     *  corresponding option from `value` via an AJAX round-trip. */
    trait MultiCheckboxFieldBase[T] extends Field {
      // Set once the user toggles a box; gates error display in errorsNs().
      var modified = false
      def labelText: String
      /** Caption shown next to each checkbox. */
      def labelFor: T => String
      def allOptions: Seq[T]
      /** Persists the chosen option set. */
      def save: Set[T] => JsCmd
      // Currently ticked options, initialised from the getter.
      var value = get()
      def get: () => Set[T]
      def errors(): Seq[(Field, String)] = Nil
      // Render the first error, but only after the field was touched or a save failed.
      def errorsNs() = errors().filter(_ => modified || saveFailed).headOption.map(error => <span class="text-red help-block">{error._2}</span>).getOrElse(NodeSeq.Empty)
      override def doSave(): JsCmd = { modified = false; save(value) }
      def update(): JsCmd = (if (enabled()) Show(id) else Hide(id)) & SetHtml(id + "errors", errorsNs())
      override def rendered: NodeSeq = {
        // NOTE(review): no `checked` attribute is emitted, so a pre-populated
        // `value` set is not reflected in the initial markup, and the `value`
        // attribute is a throwaway random token -- confirm both are intentional.
        <div style={if (!enabled()) "display:none;" else ""} id={id} class="form-group pi-padding-bottom-10">
          <label for={id + "input"}>{labelText}:</label>
          {
            allOptions.map(option => {
              val idcheckbox = Helpers.nextFuncName
              <div class="checkbox">
                <label>
                  <input type="checkbox" id={idcheckbox} value={Helpers.nextFuncName} onchange={xsh.ajaxCall(JsRaw(s"$$('#${idcheckbox}').is(':checked')"), v => {
                    if (v == "true") {value = value + option} else {value = value - option}
                    modified = true;
                    onChangeServerSide(this)
                  }).toJsCmd}/> {labelFor(option)}
                </label>
              </div>
            }).reduceOption[NodeSeq](_ ++ _).getOrElse(NodeSeq.Empty)
          }
          <span id={id + "errors"}>{errorsNs}</span>
        </div>
      }
    }
case class MultiCheckboxField[T](
labelText: String,
allOptions: Seq[T],
get: () => Set[T],
save: Set[T] => JsCmd,
labelFor: T => String = (_: T).toString,
enabled: () => Boolean = () => true,
req: Boolean = false,
deps: Seq[Field] = Nil
) extends MultiCheckboxFieldBase[T]
    /** A single-line text input for any type T that can round-trip through a
     *  String; concrete subclasses supply the conversions. */
    trait GenTextFieldBase[T] extends Field {
      /** Render the typed value into the text input. */
      def toStr(v: T): String
      /** Parse user input; None means "unparseable", leaving `value` untouched. */
      def fromStr(s: String): Option[T]
      // Set once the user edits the input; gates error display in errorsNs().
      var modified = false
      def placeholderText: String
      def labelText: String
      /** Optional HTML `name` attribute for the input element. */
      def inputName: Option[String]
      def get: () => T
      def update(): JsCmd = (if (enabled()) Show(id) else Hide(id)) & SetHtml(id + "errors", errorsNs())
      // Current value, initialised from the getter at construction time.
      var value = get()
      def errors(): Seq[(Field, String)] = Nil
      /** Persists the current value. */
      def save: T => JsCmd
      override def doSave(): JsCmd = { modified = false; save(value) }
      // Render the first error, but only after the field was touched or a save failed.
      def errorsNs() = errors().filter(_ => modified || saveFailed).headOption.map(error => <span class="text-red help-block">{error._2}</span>).getOrElse(NodeSeq.Empty)
      def rendered: NodeSeq = {
        // onchange ships the raw input to the server; unparseable input is a no-op.
        <div style={if (!enabled()) "display:none;" else ""} id={id} class="form-group pi-padding-bottom-10">
          <label for={id + "input"}>{labelText}:</label>
          <input name={inputName.orNull} placeholder={placeholderText} value={toStr(value)} onchange={(onChangeClientSide() & xsh.ajaxCall(ValById(id + "input"), (v: String) => {fromStr(v).map(v => {value = v; modified = true; onChangeServerSide(this)}).getOrElse(JsCmds.Noop)})).toJsCmd} id={id + "input"} class="form-control" type="text"/>
          <span id={id + "errors"}>{errorsNs}</span>
        </div>
      }
    }
case class TextField(labelText: String, get: () => String, save: String => JsCmd, placeholderText: String = "", enabled: () => Boolean = () => true, inputName: Option[String] = None, req: Boolean = false, deps: Seq[Field] = Nil) extends GenTextFieldBase[String] {
def toStr(v: String): String = v
def fromStr(s: String): Option[String] = Some(s)
override def errors(): Seq[(Field, String)] = if (req && value == "") List(this -> "Required") else Nil
}
case class DoubleField(
labelText: String,
get: () => Double,
save: Double => JsCmd,
placeholderText: String = "",
enabled: () => Boolean = () => true,
inputName: Option[String] = None,
req: Boolean = false,
fmt: String = "%.2f",
deps: Seq[Field] = Nil) extends GenTextFieldBase[Double] {
def toStr(v: Double): String = v.formatted(fmt)
def fromStr(s: String): Option[Double] = scala.util.Try(s.toDouble).toOption
}
case class IntField(
labelText: String,
get: () => Int,
save: Int => JsCmd,
placeholderText: String = "",
enabled: () => Boolean = () => true,
inputName: Option[String] = None,
req: Boolean = false,
deps: Seq[Field] = Nil) extends GenTextFieldBase[Int] {
def toStr(v: Int): String = v.toString
def fromStr(s: String): Option[Int] = scala.util.Try(s.toInt).toOption
}
case class LongField(
labelText: String,
get: () => Long,
save: Long => JsCmd,
placeholderText: String = "",
enabled: () => Boolean = () => true,
inputName: Option[String] = None,
req: Boolean = false,
deps: Seq[Field] = Nil) extends GenTextFieldBase[Long] {
def toStr(v: Long): String = v.toString
def fromStr(s: String): Option[Long] = scala.util.Try(s.toLong).toOption
}
case class EmptyField() extends Field {
override def doSave(): JsCmd = JsCmds.Noop
override def enabled: () => Boolean = () => true
override def rendered: NodeSeq = <span id={id}></span>
override def errors(): Seq[(Field, String)] = Nil
override def update(): JsCmd = JsCmds.Noop
override def modified: Boolean = false
override def deps: Seq[Field] = Nil
}
    /** A single-choice drop-down; the selection is pushed to the server via
     *  Lift's ajaxSelectElem. */
    trait SelectFieldBase[T] extends Field {
      // Set once the user changes the selection; gates error display.
      var modified = false
      def labelText: String
      /** Caption shown for each option. */
      def labelFor: T => String
      /** Optional HTML `name` attribute for the select element. */
      def inputName: Option[String]
      def allOptions: Seq[T]
      def get: () => T
      def update(): JsCmd = (if (enabled()) Show(id) else Hide(id)) & SetHtml(id + "errors", errorsNs())
      def errors(): Seq[(Field, String)] = Nil
      // Render the first error, but only after the field was touched or a save failed.
      def errorsNs() = errors().filter(_ => modified || saveFailed).headOption.map(error => <span class="text-red help-block">{error._2}</span>).getOrElse(NodeSeq.Empty)
      // Current selection, initialised from the getter at construction time.
      var value: T = get()
      /** Persists the current selection. */
      def save: T => JsCmd
      override def doSave(): JsCmd = save(value)
      // Attributes handed to ajaxSelectElem; the inline color overrides the theme.
      def attrs: Seq[ElemAttr] = Seq("class" -> "form-control", "id" -> (id + "input"), "style" -> "color: rgb(33, 37, 43);") ++ inputName.map("name" -> _).toSeq
      def rendered: NodeSeq = {
        <div style={if (!enabled()) "display:none;" else ""} id={id} class="form-group pi-padding-bottom-10">
          <label for={id + "input"}>{labelText}:</label>
          {
            xsh.ajaxSelectElem[T](allOptions, Full(value), attrs: _*)(v => {
              value = v
              modified = true
              onChangeClientSide() & onChangeServerSide(this)
            })((v: T) => labelFor(v))
          }
          <span id={id + "errors"}>{errorsNs}</span>
        </div>
      }
    }
case class SelectField[T](
labelText: String,
allOptions: Seq[T],
get: () => T,
save: T => JsCmd,
labelFor: T => String = (_: T).toString,
enabled: () => Boolean = () => true,
inputName: Option[String] = None,
deps: Seq[Field] = Nil
) extends SelectFieldBase[T]
trait MultiSelectFieldBase[T] extends Field {
var modified = false
def labelText: String
def labelFor: T => String
def inputName: Option[String]
def allOptions: Seq[T]
def update(): JsCmd = (if (enabled()) Show(id) else Hide(id)) & SetHtml(id + "errors", errorsNs())
def errors(): Seq[(Field, String)] = Nil
def errorsNs() = errors().filter(_ => modified || saveFailed).headOption.map(error => <span class="text-red help-block">{error._2}</span>).getOrElse(NodeSeq.Empty)
def save: Set[T] => JsCmd
var value = get()
def get: () => Set[T]
override def doSave(): JsCmd = save(value)
def attrs: Seq[ElemAttr] = Seq("class" -> "form-control", "multiple" -> "multiple", "id" -> (id + "input"), "style" -> "color: rgb(33, 37, 43);") ++ inputName.map("name" -> _).toSeq
def rendered: NodeSeq = {
val randomId = Helpers.nextFuncName
val renderedOptions = allOptions
<div style={if (!enabled()) "display:none;" else ""} id={id} class="form-group pi-padding-bottom-10">
<label for={id + "input"}>{labelText}:</label>
<select class="form-control" id={id + "input"} multiple="multiple" style="color: rgb(33, 37, 43);" onchange={xsh.jsonCall(JsRaw(s"$$('#${id + "input"}').val()"), v => v match {
case JArray(fields: List[JString]) =>
value = fields.map(v => renderedOptions(v.s.toInt)).toSet
modified = true
onChangeServerSide(this)
case JNothing | JNull =>
value = Set()
modified = true
onChangeServerSide(this)
}).toJsCmd}>
{renderedOptions.zipWithIndex.map(tuple => <option value={tuple._2.toString} selected={if (renderedOptions.contains(tuple._1)) "selected" else null}>{labelFor(tuple._1)}</option>).reduceOption[NodeSeq](_ ++ _).getOrElse(NodeSeq.Empty)}
</select>
<span id={id + "errors"}>{errorsNs}</span>
</div>
}
}
case class MultiSelectField[T](
labelText: String,
allOptions: Seq[T],
get: () => Set[T],
save: Set[T] => JsCmd,
labelFor: T => String = (_: T).toString,
enabled: () => Boolean = () => true,
inputName: Option[String] = None,
deps: Seq[Field] = Nil
) extends MultiSelectFieldBase[T]
trait SelectOptFieldBase[T] extends SelectFieldBase[Option[T]] {
def noneLabel: String
def someLabel: T => String
def labelFor: Option[T] => String = _.map(someLabel).getOrElse(noneLabel)
def allOptOptions: Seq[T]
def allOptions: Seq[Option[T]] = None +: allOptOptions.map(Some(_))
def req: Boolean
override def errors(): Seq[(Field, String)] = super.errors() ++ (if (value.isEmpty) List(this -> "Required") else Nil)
}
case class SelectOptField[T](
labelText: String,
allOptOptions: Seq[T],
get: () => Option[T],
save: Option[T] => JsCmd,
noneLabel: String,
someLabel: T => String = (_: T).toString,
enabled: () => Boolean = () => true,
inputName: Option[String] = None,
req: Boolean = false,
deps: Seq[Field] = Nil
) extends SelectOptFieldBase[T]
    /** A Bootstrap datetimepicker bound to an epoch-millisecond Long value. */
    trait DatePickerFieldBase extends Field {
      // NOTE(review): `modified` is never set to true by the dp.change handler
      // below -- confirm whether error display should rely on saveFailed only.
      var modified = false
      /** Getter works in epoch milliseconds. */
      def get: () => Long
      /** Persists the epoch-millisecond value. */
      def save: Long => JsCmd
      var value = get()
      // Separate DOM id for the datetimepicker widget container.
      var datePickerId = UUID.randomUUID().toString
      def labelText: String
      def update(): JsCmd = (if (enabled()) Show(id) else Hide(id)) & SetHtml(id + "errors", errorsNs())
      def errors(): Seq[(Field, String)] = Nil
      // Render the first error, but only after the field was touched or a save failed.
      def errorsNs() = errors().filter(_ => modified || saveFailed).headOption.map(error => <span class="text-red help-block">{error._2}</span>).getOrElse(NodeSeq.Empty)
      override def doSave(): JsCmd = save(value)
      override def rendered: NodeSeq = {
        // The result is an XML sequence: label, markup, and a <tail> script that
        // initialises the picker and wires dp.change back to the server.
        <label id={id}>
          {labelText}
        </label> <br/>
          <div class="form-group">
            <div class="input-group date" id={datePickerId}>
              <input type='text' class="form-control" />
              <span class="input-group-addon">
                <span class="glyphicon glyphicon-calendar"></span></span>
            </div>
            <span id={id + "errors"}>{errorsNs}</span>
          </div>
          <tail>
            {Script(OnLoad(Run(
              s"""
                 |$$('#${datePickerId}')
                 |.datetimepicker({
                 | locale: 'en-gb',
                 | defaultDate: new Date($value)
                 |})
                 |.on('dp.change', function(evt) {
                 | ${xsh.ajaxCall(JsRaw(s"evt.date.valueOf()"), v => {value = v.toLong; onChangeServerSide(this)}).toJsCmd}
                 |});
                 |""".stripMargin
            )))}
          </tail>
      }
    }
case class DateTimePickerField(
labelText: String,
get: () => Long,
save: Long => JsCmd,
enabled: () => Boolean = () => true,
req: Boolean = false,
deps: Seq[Field] = Nil
) extends DatePickerFieldBase
    /** The form's submit control: clicking triggers the enclosing Form's
     *  onSave() via an AJAX invoke. */
    trait SubmitButtonBase extends Field {
      // A submit button never carries unsaved user input itself.
      val modified = false
      def labelText: String
      /** CSS classes for the button, re-evaluated on every update(). */
      def btnClass: () => String
      def update(): JsCmd = (if (enabled()) Show(id) else Hide(id)) & JsCmds.Run(s"""$$('#$id button').attr('class', ${btnClass().encJs});""")
      override def errors(): Seq[(Field, String)] = Nil
      override def doSave(): JsCmd = JsCmds.Noop
      override def rendered: NodeSeq =
        <div id={id}>
          <div class="clearfix"><button style={(if (!enabled()) "display:none;" else "")} onclick={xsh.ajaxInvoke(() => onSave()).toJsCmd} class={btnClass()}>{labelText}</button></div>
        </div>
    }
case class SubmitButton(labelText: String = "Save", btnClass: () => String = () => TH.Btn().Primary.right.clas, enabled: () => Boolean = () => true, deps: Seq[Field] = Nil) extends SubmitButtonBase
}
} | slynx-fw/slynx-demo | app/pages/theme/SForms.scala | Scala | apache-2.0 | 21,272 |
package com.arcusys.valamis.web.servlet.base
import javax.servlet.http.HttpServletRequest
import com.arcusys.learn.liferay.LiferayClasses._
import com.arcusys.learn.liferay.services._
import com.arcusys.learn.liferay.util.{PortalUtilHelper, PortletName}
import com.arcusys.valamis.web.portlet.base.{Permission, PermissionBase}
import com.arcusys.valamis.web.servlet.base.exceptions._
import org.apache.http.ParseException
import org.scalatra._
import org.slf4j.LoggerFactory
case class PermissionCredentials(groupId: Long, portletId: String, primaryKey: String)
/** Permission helper that resolves the course id from the HTTP request, with a
 *  fallback to the Scalatra route params. */
class ScalatraPermissionUtil(scalatra: ScalatraBase) extends PermissionUtil {
  def getCourseIdFromRequest(implicit request: HttpServletRequest): Long = {
    val raw = Option(request.getParameter("courseId")).orElse(scalatra.params.get("courseId"))
    raw match {
      case Some(courseId) => parseCourseId(courseId)
      // A missing course id is treated as an access violation, not a 400.
      case None           => throw AccessDeniedException("courseId is empty")
    }
  }
}
/** Default permission helper: the course id must come from the HTTP request
 *  parameter directly. */
object PermissionUtil extends PermissionUtil {
  def getCourseIdFromRequest(implicit request: HttpServletRequest): Long =
    Option(request.getParameter("courseId")) match {
      case Some(raw) => parseCourseId(raw)
      // A missing course id is treated as an access violation.
      case None      => throw AccessDeniedException("courseId is empty")
    }
}
trait PermissionUtil {
val logger = LoggerFactory.getLogger(PermissionUtil.getClass)
def getCourseIdFromRequest(implicit request: HttpServletRequest): Long
def requireCurrentLoggedInUser(userId: Long) = {
if (getUserId != userId)
throw AccessDeniedException()
}
def getUserId: Long = ServiceContextHelper.getServiceContext.getUserId
def getCompanyId: Long = PermissionHelper.getPermissionChecker().getCompanyId
def getCourseId: Long = ServiceContextHelper.getServiceContext.getRequest.getParameter("courseId").toLong
def requireLogin() = {
if (!isAuthenticated)
throw new NotAuthorizedException
}
def getLiferayUser = UserLocalServiceHelper().getUser(PermissionHelper.getPermissionChecker().getUserId)
def isAuthenticated: Boolean = PermissionHelper.getPermissionChecker().isSignedIn
def hasPermissionApi(permission: PermissionBase, portlets: PortletName*)(implicit r: HttpServletRequest): Boolean = {
hasPermissionApiSeq(PermissionHelper.getPermissionChecker(), permission, portlets)
}
def hasPermissionApi(user: LUser, permission: PermissionBase, portlets: PortletName*)
(implicit r: HttpServletRequest): Boolean = {
hasPermissionApiSeq(PermissionHelper.getPermissionChecker(user), permission, portlets)
}
def hasPermissionApi(courseId: Long, user: LUser, permission: PermissionBase, portlets: PortletName*): Boolean = {
hasPermissionApiSeq(PermissionHelper.getPermissionChecker(user), permission, portlets, courseId)
}
def requirePermissionApi(permission: PermissionBase, portlets: PortletName*)(implicit r: HttpServletRequest): Unit = {
val companyId = PortalUtilHelper.getCompanyId(r)
val user = Option(PortalUtilHelper.getUser(r)).getOrElse {
UserLocalServiceHelper().getUser(UserLocalServiceHelper().getDefaultUserId(companyId))
}
if (!hasPermissionApiSeq(PermissionHelper.getPermissionChecker(user), permission, portlets)) {
throw AccessDeniedException(s"no ${permission.name} permission for ${portlets.mkString(", ")}")
}
}
def requirePermissionApi(permissions: Permission*)(implicit r: HttpServletRequest): Unit = {
if (!permissions.foldLeft(false) { (acc, permission) =>
acc || hasPermissionApiSeq(PermissionHelper.getPermissionChecker(), permission.permission, permission.portlets)
}) throw AccessDeniedException("You don't have required permissions")
}
def requirePermissionApi(user: LUser, permission: PermissionBase, portlets: PortletName*)
(implicit r: HttpServletRequest): Unit = {
if (!hasPermissionApiSeq(PermissionHelper.getPermissionChecker(user), permission, portlets)) {
throw AccessDeniedException(s"no ${permission.name} permission for ${portlets.mkString(", ")}")
}
}
protected def parseCourseId(raw: String): Long = {
try {
raw.toLong
} catch {
case e: NumberFormatException => throw new BadRequestException("courseId is incorrect")
}
}
  /** Checks the permission relative to the page the request came from; without
   *  a current layout, falls back to scanning the requested course/site. */
  private def hasPermissionApiSeq(checker: LPermissionChecker,
                                  permission: PermissionBase,
                                  portlets: Seq[PortletName])
                                 (implicit r: HttpServletRequest): Boolean = {
    val keys = portlets.map(_.key)
    getCurrentLayout match {
      case Some(layout) =>
        // A page is known: only that page's portlet instances matter.
        check(checker, permission, keys, Seq(layout))
      case None =>
        val courseId = getCourseIdFromRequest
        hasPermissionApiSeq(checker, permission, portlets, courseId)
    }
  }
  /** Checks the permission for a whole course: first at group/company scope,
   *  then on every private and public page of the site. */
  private def hasPermissionApiSeq(checker: LPermissionChecker,
                                  permission: PermissionBase,
                                  portlets: Seq[PortletName],
                                  courseId: Long): Boolean = {
    val portletIds = portlets.map(_.key)
    //at first, check the permission at group/company scope
    if (portletIds.exists(hasPermission(checker, courseId, _, None, permission))) {
      true
    } else {//then look for the permission on any page of the site
      // Lazy so the layout queries only run when the scope check failed.
      lazy val privateLayouts = LayoutLocalServiceHelper.getLayouts(courseId, privateLayout = true)
      lazy val publicLayouts = LayoutLocalServiceHelper.getLayouts(courseId, privateLayout = false)
      //TODO get rid of it - we don't need this actually, because we
      //we can use current layout. And if there is no current layout, then
      //checking by group/company can be used
      check(checker, permission, portletIds, privateLayouts) ||
        check(checker, permission, portletIds, publicLayouts)
    }
  }
private def check(checker: LPermissionChecker, permission: PermissionBase, keys: Seq[String], allLayouts: Seq[LLayout]): Boolean = {
for (
layout <- allLayouts;
plid = layout.getPlid;
portletId <- LayoutLocalServiceHelper.getPortletIds(layout)
) {
if (keys.contains(portletId)) {
val primaryKey = plid + LLiferayPortletSession.LayoutSeparator + portletId
if (hasPermission(checker, layout.getGroupId, portletId, Some(primaryKey), permission)) {
return true
}
}
}
false
}
  /** Asks the Liferay permission checker whether `action` is allowed on the
   *  given portlet; unknown actions or malformed lookups yield false. */
  def hasPermission(checker: LPermissionChecker, groupId: Long, portletId: String, primaryKey: Option[String],
                    action: PermissionBase): Boolean = {
    try {
      // Ensure the action is registered for this portlet before asking the
      // checker; getResourceAction throws when it is unknown.
      ResourceActionLocalServiceHelper.getResourceAction(portletId, action.name)
      //if primaryKey == portletId, then it means that we want to check permission
      //in group/company scope (figured out empirically)
      checker.hasPermission(groupId, portletId, primaryKey.getOrElse(portletId), action.name)
    } catch {
      case ex: IllegalArgumentException =>
        // Treat malformed lookups as "no permission", keeping a trace for debugging.
        logger.debug("Failed to check permission", ex)
        false
      case _: LNoSuchResourceActionException =>
        // The action simply does not exist for this portlet.
        false
    }
  }
  /** Resolves the page ("plid", portlet layout id) the request refers to,
   *  if any. */
  private def getCurrentLayout(implicit request: HttpServletRequest): Option[LLayout] = {
    val plid = request.getParameter("plid")
    Option(plid).filter(_.nonEmpty) flatMap { id =>
      try {
        LayoutLocalServiceHelper.fetchLayout(id.toLong)
      } catch {
        // NOTE(review): raising org.apache.http.ParseException here is an
        // unusual choice for request validation -- confirm callers expect it.
        case _: NumberFormatException => throw new ParseException("Bad plid value: " + id)
      }
    }
  }
}
| arcusys/JSCORM | valamis-portlets/src/main/scala/com/arcusys/valamis/web/servlet/base/PermissionUtil.scala | Scala | gpl-3.0 | 7,406 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.spi
import org.neo4j.cypher.internal.compiler.v2_3.planner.logical.{Cardinality, Selectivity}
import org.neo4j.cypher.internal.frontend.v2_3.{LabelId, PropertyKeyId, RelTypeId}
/** Fallback selectivity/cardinality constants used by the planner when the
 *  store cannot provide real statistics. */
object GraphStatistics {
  val DEFAULT_RANGE_SELECTIVITY = Selectivity.of(0.3).get
  val DEFAULT_PREDICATE_SELECTIVITY = Selectivity.of(0.75).get
  val DEFAULT_PROPERTY_SELECTIVITY = Selectivity.of(0.5).get
  val DEFAULT_EQUALITY_SELECTIVITY = Selectivity.of(0.1).get
  val DEFAULT_NUMBER_OF_ID_LOOKUPS = Cardinality(25)
  val DEFAULT_NUMBER_OF_INDEX_LOOKUPS = Cardinality(25)
  val DEFAULT_LIMIT_CARDINALITY = Cardinality(75)
  // FIX: `1 / 100` was integer division (== 0), making this selectivity a
  // meaningless 1.0; the intended value per the rel-cardinality comment is
  // 1 - 1/100 = 0.99.
  val DEFAULT_REL_UNIQUENESS_SELECTIVITY = Selectivity.of(1.0 - 1.0 / 100 /*rel-cardinality*/).get
  val DEFAULT_RANGE_SEEK_FACTOR = 0.03
  val DEFAULT_PREFIX_LENGTH = 6
}
/** Read-only statistics about the graph, consumed by the cost-based planner. */
trait GraphStatistics {
  /** Number of nodes carrying the given label; None counts all nodes. */
  def nodesWithLabelCardinality(labelId: Option[LabelId]): Cardinality

  /** Number of relationships matching (:fromLabel)-[:relTypeId]->(:toLabel);
   *  None in any position acts as a wildcard. */
  def cardinalityByLabelsAndRelationshipType(fromLabel: Option[LabelId], relTypeId: Option[RelTypeId], toLabel: Option[LabelId]): Cardinality

  /*
      Probability of any node with the given label, to have a given property with a particular value

      indexSelectivity(:X, prop) = s => |MATCH (a:X)| * s = |MATCH (a:X) WHERE x.prop = '*'|
   */
  def indexSelectivity(label: LabelId, property: PropertyKeyId): Option[Selectivity]

  /*
      Probability of any node with the given label, to have a particular property

      indexPropertyExistsSelectivity(:X, prop) = s => |MATCH (a:X)| * s = |MATCH (a:X) WHERE has(x.prop)|
   */
  def indexPropertyExistsSelectivity(label: LabelId, property: PropertyKeyId): Option[Selectivity]
}
/** Forwards every statistics query to `delegate`; base class for decorators
 *  that want to override only some queries. */
class DelegatingGraphStatistics(delegate: GraphStatistics) extends GraphStatistics {
  override def nodesWithLabelCardinality(labelId: Option[LabelId]): Cardinality =
    delegate.nodesWithLabelCardinality(labelId)

  override def cardinalityByLabelsAndRelationshipType(fromLabel: Option[LabelId], relTypeId: Option[RelTypeId], toLabel: Option[LabelId]): Cardinality =
    delegate.cardinalityByLabelsAndRelationshipType(fromLabel, relTypeId, toLabel)

  override def indexSelectivity(label: LabelId, property: PropertyKeyId): Option[Selectivity] =
    delegate.indexSelectivity(label, property)

  override def indexPropertyExistsSelectivity(label: LabelId, property: PropertyKeyId): Option[Selectivity] =
    delegate.indexPropertyExistsSelectivity(label, property)
}
/** Compensates for missing fully-labelled relationship counts in the store by
 *  bounding them with the two partially-labelled counts. */
class StatisticsCompletingGraphStatistics(delegate: GraphStatistics)
  extends DelegatingGraphStatistics(delegate) {

  override def cardinalityByLabelsAndRelationshipType(fromLabel: Option[LabelId], relTypeId: Option[RelTypeId], toLabel: Option[LabelId]): Cardinality =
    (fromLabel, toLabel) match {
      case (Some(_), Some(_)) =>
        // TODO: read real counts from readOperations when they are gonna be properly computed and updated
        // The fully-qualified count can never exceed either partially-qualified
        // count, so their minimum is the tightest estimate available.
        Cardinality.min(
          super.cardinalityByLabelsAndRelationshipType(fromLabel, relTypeId, None),
          super.cardinalityByLabelsAndRelationshipType(None, relTypeId, toLabel)
        )
      case _ =>
        super.cardinalityByLabelsAndRelationshipType(fromLabel, relTypeId, toLabel)
    }
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/spi/GraphStatistics.scala | Scala | apache-2.0 | 4,044 |
package code
package model
import net.liftweb.mapper._
import net.liftweb.common._
import net.liftweb.sitemap.Loc._
import net.liftmodules.fobobs.mapper._
/**
* The singleton that has methods for accessing the database
*/
object User
    extends User
    with MetaMegaProtoUser[User]
    with BootstrapMegaMetaProtoUser[User] {

  override def dbTableName = "users" // define the DB table name

  // Wrap every ProtoUser-generated screen in the site's default template.
  override def screenWrap = Full(<lift:surround with="default" at="content">
      <lift:bind /></lift:surround>)

  // define the order fields will appear in forms and output
  override def fieldOrder =
    List(id, firstName, lastName, email, locale, timezone, password, textArea)

  // comment this line out to require email validations
  override def skipEmailValidation = true

  //add a loc group to the user menu
  override def globalUserLocParams: List[LocParam[Unit]] =
    List(LocGroup("user"))

  // Password-reset and email-validation menu entries are disabled.
  override def resetPasswordMenuLoc: Box[net.liftweb.sitemap.Menu] = Box(Empty)
  override def validateUserMenuLoc: Box[net.liftweb.sitemap.Menu] = Box(Empty)
}
/**
* An O-R mapped "User" class that includes first name, last name, password and we add a "Personal Essay" to it
*/
class User extends MegaProtoUser[User] {
  def getSingleton = User // what's the "meta" server

  // define an additional field for a personal essay
  object textArea extends MappedTextarea(this, 2048) {
    // Dimensions of the rendered <textarea>.
    override def textareaRows = 10
    override def textareaCols = 50
    override def displayName = "Personal Essay"
  }
}
| karma4u101/FoBo-Demo | pimping-lift-advanced-bs3/src/main/scala/code/model/User.scala | Scala | apache-2.0 | 1,521 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.rdd
import java.util
import java.util.concurrent._
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.execution.command.{CompactionCallableModel, CompactionModel, SplitPartitionCallableModel}
import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.locks.{CarbonLockFactory, CarbonLockUtil, LockUsage}
import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier, CarbonTableIdentifier}
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
import org.apache.carbondata.core.mutate.CarbonUpdateUtil
import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentStatusManager}
import org.apache.carbondata.processing.merger.{CarbonDataMergerUtil, CompactionType}
import org.apache.carbondata.processing.model.{CarbonDataLoadSchema, CarbonLoadModel}
import org.apache.carbondata.spark._
import org.apache.carbondata.spark.compaction.CompactionCallable
import org.apache.carbondata.spark.load._
import org.apache.carbondata.spark.partition.SplitPartitionCallable
import org.apache.carbondata.spark.util.{CommonUtil, LoadMetadataUtil}
/**
* Common functions for data life cycle management
*/
object DataManagementFunc {
  private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
  /**
   * Deletes loaded segments whose data matches the given date predicate, then rewrites the
   * table-status metadata under the table METADATA lock.
   *
   * Segments in which *every* matching row is marked for delete become MARKED_FOR_DELETE;
   * segments with a mix become MARKED_FOR_UPDATE. Fails with sys.error when the distributed
   * delete job reports a single empty-key result (empty store / invalid column type).
   */
  def deleteLoadByDate(
      sqlContext: SQLContext,
      schema: CarbonDataLoadSchema,
      databaseName: String,
      tableName: String,
      storePath: String,
      dateField: String,
      dateFieldActualName: String,
      dateValue: String) {
    val sc = sqlContext
    // Delete the records based on data
    val table = schema.getCarbonTable
    val loadMetadataDetailsArray =
      SegmentStatusManager.readLoadMetadata(table.getMetaDataFilepath).toList
    // Runs the distributed delete; results are grouped by load (segment) name so each
    // segment's per-row statuses can be inspected below.
    // NOTE(review): both `databaseName` and `table.getDatabaseName` are passed as consecutive
    // arguments — confirm against CarbonDeleteLoadByDateRDD's constructor that this is intended.
    val resultMap = new CarbonDeleteLoadByDateRDD(
      sc.sparkContext,
      new DeletedLoadResultImpl(),
      databaseName,
      table.getDatabaseName,
      dateField,
      dateFieldActualName,
      dateValue,
      table.getFactTableName,
      tableName,
      storePath,
      loadMetadataDetailsArray).collect.groupBy(_._1)
    // Accumulates segments that ended up MARKED_FOR_UPDATE (partially deleted).
    var updatedLoadMetadataDetailsList = new ListBuffer[LoadMetadataDetails]()
    if (resultMap.nonEmpty) {
      // A single result keyed by the empty string signals a failed job.
      if (resultMap.size == 1) {
        if (resultMap.contains("")) {
          LOGGER.error("Delete by Date request is failed")
          sys.error("Delete by Date request is failed, potential causes " +
                    "Empty store or Invalid column type, For more details please refer logs.")
        }
      }
      // Re-stamp and re-status every segment touched by the delete job.
      val updatedloadMetadataDetails = loadMetadataDetailsArray.map { elem => {
        var statusList = resultMap.get(elem.getLoadName)
        // check for the merged load folder.
        if (statusList.isEmpty && null != elem.getMergedLoadName) {
          statusList = resultMap.get(elem.getMergedLoadName)
        }
        if (statusList.isDefined) {
          elem.setModificationOrdeletionTimesStamp(elem.getTimeStamp(CarbonLoaderUtil
            .readCurrentTime()))
          // if atleast on CarbonCommonConstants.MARKED_FOR_UPDATE status exist,
          // use MARKED_FOR_UPDATE
          if (statusList.get
            .forall(status => status._2 == CarbonCommonConstants.MARKED_FOR_DELETE)) {
            elem.setLoadStatus(CarbonCommonConstants.MARKED_FOR_DELETE)
          } else {
            elem.setLoadStatus(CarbonCommonConstants.MARKED_FOR_UPDATE)
            updatedLoadMetadataDetailsList += elem
          }
          elem
        } else {
          elem
        }
      }
      }
      // Save the load metadata
      val carbonLock = CarbonLockFactory
        .getCarbonLockObj(table.getAbsoluteTableIdentifier.getCarbonTableIdentifier,
          LockUsage.METADATA_LOCK
        )
      try {
        if (carbonLock.lockWithRetries()) {
          LOGGER.info("Successfully got the table metadata file lock")
          if (updatedLoadMetadataDetailsList.nonEmpty) {
            // TODO: Load Aggregate tables after retention.
          }
          // write
          // NOTE(review): `table.getDatabaseName` is passed where a table name appears
          // expected — confirm writeLoadMetadata's parameter order.
          CarbonLoaderUtil.writeLoadMetadata(
            storePath,
            databaseName,
            table.getDatabaseName,
            updatedloadMetadataDetails.asJava
          )
        }
      } finally {
        // Unlock unconditionally; failure to release is logged but not rethrown.
        if (carbonLock.unlock()) {
          LOGGER.info("unlock the table metadata file successfully")
        } else {
          LOGGER.error("Unable to unlock the metadata lock")
        }
      }
    } else {
      LOGGER.error("Delete by Date request is failed")
      LOGGER.audit(s"The delete load by date is failed for $databaseName.$tableName")
      sys.error("Delete by Date request is failed, potential causes " +
                "Empty store or Invalid column type, For more details please refer logs.")
    }
  }
  /**
   * Repeatedly identifies mergeable segments and runs compaction jobs on the given executor
   * until no candidates remain. For MAJOR compaction only the first scan's segment set is
   * considered (newly added segments are filtered out); IUD_UPDDEL_DELTA_COMPACTION runs a
   * single pass. Rethrows the first failure from any compaction future.
   */
  def executeCompaction(carbonLoadModel: CarbonLoadModel,
      storePath: String,
      compactionModel: CompactionModel,
      executor: ExecutorService,
      sqlContext: SQLContext,
      storeLocation: String): Unit = {
    val sortedSegments: util.List[LoadMetadataDetails] = new util.ArrayList[LoadMetadataDetails](
      carbonLoadModel.getLoadMetadataDetails
    )
    CarbonDataMergerUtil.sortSegments(sortedSegments)
    var segList = carbonLoadModel.getLoadMetadataDetails
    var loadsToMerge = CarbonDataMergerUtil.identifySegmentsToBeMerged(
      storePath,
      carbonLoadModel,
      compactionModel.compactionSize,
      segList,
      compactionModel.compactionType
    )
    // Normal compaction needs >= 2 segments to merge; IUD delta compaction proceeds with one.
    while (loadsToMerge.size() > 1 ||
           (compactionModel.compactionType.name().equals("IUD_UPDDEL_DELTA_COMPACTION") &&
            loadsToMerge.size() > 0)) {
      val lastSegment = sortedSegments.get(sortedSegments.size() - 1)
      // Clean stale/partial segment folders before each round.
      deletePartialLoadsInCompaction(carbonLoadModel)
      val futureList: util.List[Future[Void]] = new util.ArrayList[Future[Void]](
        CarbonCommonConstants
          .DEFAULT_COLLECTION_SIZE
      )
      scanSegmentsAndSubmitJob(futureList,
        loadsToMerge,
        executor,
        storePath,
        sqlContext,
        compactionModel,
        carbonLoadModel,
        storeLocation
      )
      try {
        // Block until every submitted compaction job of this round completes.
        futureList.asScala.foreach(future => {
          future.get
        }
        )
      } catch {
        case e: Exception =>
          LOGGER.error(e, s"Exception in compaction thread ${ e.getMessage }")
          throw e
      }
      // scan again and determine if anything is there to merge again.
      CommonUtil.readLoadMetadataDetails(carbonLoadModel, storePath)
      segList = carbonLoadModel.getLoadMetadataDetails
      // in case of major compaction we will scan only once and come out as it will keep
      // on doing major for the new loads also.
      // excluding the newly added segments.
      if (compactionModel.compactionType == CompactionType.MAJOR_COMPACTION) {
        segList = CarbonDataMergerUtil
          .filterOutNewlyAddedSegments(carbonLoadModel.getLoadMetadataDetails, lastSegment)
      }
      if (compactionModel.compactionType == CompactionType.IUD_UPDDEL_DELTA_COMPACTION) {
        loadsToMerge.clear()
      } else if (segList.size > 0) {
        loadsToMerge = CarbonDataMergerUtil.identifySegmentsToBeMerged(
          storePath,
          carbonLoadModel,
          compactionModel.compactionSize,
          segList,
          compactionModel.compactionType
        )
      }
      else {
        loadsToMerge.clear()
      }
    }
  }
  /**
   * This will submit the loads to be merged into the executor.
   *
   * @param futureList receives the single Future created for the submitted compaction job
   */
  private def scanSegmentsAndSubmitJob(futureList: util.List[Future[Void]],
      loadsToMerge: util
      .List[LoadMetadataDetails],
      executor: ExecutorService,
      storePath: String,
      sqlContext: SQLContext,
      compactionModel: CompactionModel,
      carbonLoadModel: CarbonLoadModel,
      storeLocation: String): Unit = {
    loadsToMerge.asScala.foreach(seg => {
      LOGGER.info("loads identified for merge is " + seg.getLoadName)
    }
    )
    val compactionCallableModel = CompactionCallableModel(storePath,
      carbonLoadModel,
      storeLocation,
      compactionModel.carbonTable,
      loadsToMerge,
      sqlContext,
      compactionModel.compactionType
    )
    val future: Future[Void] = executor.submit(new CompactionCallable(compactionCallableModel))
    futureList.add(future)
  }
  /**
   * Submits a partition-split job for the given segment/partition and blocks until it
   * completes, rethrowing any failure after logging it.
   */
  def executePartitionSplit( sqlContext: SQLContext,
      carbonLoadModel: CarbonLoadModel,
      executor: ExecutorService,
      storePath: String,
      segment: String,
      partitionId: String,
      oldPartitionIdList: List[Int]): Unit = {
    val futureList: util.List[Future[Void]] = new util.ArrayList[Future[Void]](
      CarbonCommonConstants.DEFAULT_COLLECTION_SIZE
    )
    scanSegmentsForSplitPartition(futureList, executor, storePath, segment, partitionId,
      sqlContext, carbonLoadModel, oldPartitionIdList)
    try {
      futureList.asScala.foreach(future => {
        future.get
      }
      )
    } catch {
      case e: Exception =>
        LOGGER.error(e, s"Exception in partition split thread ${ e.getMessage }")
        throw e
    }
  }
  // Builds the SplitPartitionCallableModel, submits it, and records the resulting future.
  private def scanSegmentsForSplitPartition(futureList: util.List[Future[Void]],
      executor: ExecutorService,
      storePath: String,
      segmentId: String,
      partitionId: String,
      sqlContext: SQLContext,
      carbonLoadModel: CarbonLoadModel,
      oldPartitionIdList: List[Int]): Unit = {
    val splitModel = SplitPartitionCallableModel(storePath,
      carbonLoadModel,
      segmentId,
      partitionId,
      oldPartitionIdList,
      sqlContext)
    val future: Future[Void] = executor.submit(new SplitPartitionCallable(splitModel))
    futureList.add(future)
  }
  /**
   * Populates a CarbonLoadModel from the table's identifier/schema, reads the current load
   * metadata, and stamps the fact-load start time.
   * NOTE(review): setTableName is invoked twice (fact table name, then identifier table
   * name) — the second call wins; confirm the first call is intentional.
   */
  def prepareCarbonLoadModel(storePath: String,
      table: CarbonTable,
      newCarbonLoadModel: CarbonLoadModel): Unit = {
    newCarbonLoadModel.setTableName(table.getFactTableName)
    val dataLoadSchema = new CarbonDataLoadSchema(table)
    // Need to fill dimension relation
    newCarbonLoadModel.setCarbonDataLoadSchema(dataLoadSchema)
    newCarbonLoadModel.setTableName(table.getCarbonTableIdentifier.getTableName)
    newCarbonLoadModel.setDatabaseName(table.getCarbonTableIdentifier.getDatabaseName)
    newCarbonLoadModel.setStorePath(table.getStorePath)
    CommonUtil.readLoadMetadataDetails(newCarbonLoadModel, storePath)
    val loadStartTime = CarbonUpdateUtil.readCurrentTime();
    newCarbonLoadModel.setFactTimeStamp(loadStartTime)
  }
  /**
   * Best-effort cleanup of partially loaded segment folders that have no table-status entry.
   * Failures are logged and deliberately swallowed so compaction can proceed.
   */
  def deletePartialLoadsInCompaction(carbonLoadModel: CarbonLoadModel): Unit = {
    // Deleting the any partially loaded data if present.
    // in some case the segment folder which is present in store will not have entry in
    // status.
    // so deleting those folders.
    try {
      CarbonLoaderUtil.deletePartialLoadDataIfExist(carbonLoadModel, true)
    } catch {
      case e: Exception =>
        LOGGER.error(s"Exception in compaction thread while clean up of stale segments" +
                     s" ${ e.getMessage }")
    }
  }
  /**
   * Physically deletes marked-for-delete load folders and, when anything was removed,
   * rewrites the table-status file under the TABLE_STATUS lock. Throws when the lock
   * cannot be acquired.
   */
  def deleteLoadsAndUpdateMetadata(
      dbName: String,
      tableName: String,
      storePath: String,
      isForceDeletion: Boolean,
      carbonTable: CarbonTable): Unit = {
    if (LoadMetadataUtil.isLoadDeletionRequired(carbonTable.getMetaDataFilepath)) {
      val details = SegmentStatusManager.readLoadMetadata(carbonTable.getMetaDataFilepath)
      val carbonTableStatusLock =
        CarbonLockFactory.getCarbonLockObj(
          new CarbonTableIdentifier(dbName, tableName, ""),
          LockUsage.TABLE_STATUS_LOCK
        )
      // Delete marked loads
      val isUpdationRequired =
        DeleteLoadFolders.deleteLoadFoldersFromFileSystem(
          dbName,
          tableName,
          storePath,
          isForceDeletion,
          details
        )
      if (isUpdationRequired) {
        try {
          // Update load metadate file after cleaning deleted nodes
          if (carbonTableStatusLock.lockWithRetries()) {
            LOGGER.info("Table status lock has been successfully acquired.")
            // read latest table status again.
            val latestMetadata = SegmentStatusManager
              .readLoadMetadata(carbonTable.getMetaDataFilepath)
            // update the metadata details from old to new status.
            val latestStatus = CarbonLoaderUtil
              .updateLoadMetadataFromOldToNew(details, latestMetadata)
            CarbonLoaderUtil.writeLoadMetadata(storePath, dbName, tableName, latestStatus)
          } else {
            val errorMsg = "Clean files request is failed for " +
                           s"$dbName.$tableName" +
                           ". Not able to acquire the table status lock due to other operation " +
                           "running in the background."
            LOGGER.audit(errorMsg)
            LOGGER.error(errorMsg)
            throw new Exception(errorMsg + " Please try after some time.")
          }
        } finally {
          CarbonLockUtil.fileUnlock(carbonTableStatusLock, LockUsage.TABLE_STATUS_LOCK)
        }
      }
    }
  }
  /**
   * Entry point for the CLEAN FILES command. Under the CLEAN_FILES lock, either removes the
   * whole table directory (forceTableClean) or force-deletes stale loads and update-delta
   * files. Throws when the clean-files lock cannot be acquired.
   */
  def cleanFiles(
      dbName: String,
      tableName: String,
      storePath: String,
      carbonTable: CarbonTable,
      forceTableClean: Boolean): Unit = {
    val identifier = new CarbonTableIdentifier(dbName, tableName, "")
    val carbonCleanFilesLock =
      CarbonLockFactory.getCarbonLockObj(identifier, LockUsage.CLEAN_FILES_LOCK)
    try {
      if (carbonCleanFilesLock.lockWithRetries()) {
        LOGGER.info("Clean files lock has been successfully acquired.")
        if (forceTableClean) {
          // Drop the whole table path recursively.
          val absIdent = AbsoluteTableIdentifier.from(storePath, dbName, tableName)
          FileFactory.deleteAllCarbonFilesOfDir(
            FileFactory.getCarbonFile(absIdent.getTablePath,
              FileFactory.getFileType(absIdent.getTablePath)))
        } else {
          deleteLoadsAndUpdateMetadata(dbName, tableName, storePath,
            isForceDeletion = true, carbonTable)
          CarbonUpdateUtil.cleanUpDeltaFiles(carbonTable, true)
        }
      } else {
        val errorMsg = "Clean files request is failed for " +
                       s"$dbName.$tableName" +
                       ". Not able to acquire the clean files lock due to another clean files " +
                       "operation is running in the background."
        LOGGER.audit(errorMsg)
        LOGGER.error(errorMsg)
        throw new Exception(errorMsg + " Please try after some time.")
      }
    } finally {
      CarbonLockUtil.fileUnlock(carbonCleanFilesLock, LockUsage.CLEAN_FILES_LOCK)
    }
  }
}
| aniketadnaik/carbondataStreamIngest | integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/DataManagementFunc.scala | Scala | apache-2.0 | 15,574 |
/*
* Copyright (C) 2016 Department for Business, Energy and Industrial Strategy
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package services
import play.api.Logger
import play.api.data.validation.ValidationError
import play.api.libs.json._
import play.api.libs.ws.{WSClient, WSRequest, WSResponse}
import services.RestService.{JsonParseException, RestFailure}
import scala.concurrent.{ExecutionContext, Future}
/**
 * Thin JSON-over-HTTP client helpers built on Play WS.
 *
 * GET variants map 200 to a parsed body, 404 to absence, and anything else to
 * [[RestFailure]]; a 200 body that fails validation raises [[JsonParseException]].
 * NOTE(review): post/put/delete ignore the response status entirely — confirm
 * this fire-and-forget behaviour is intended by callers.
 */
trait RestService {
  def ws: WSClient
  implicit def ec: ExecutionContext
  /** Validates a JSON body, raising JsonParseException (tagged with the HTTP method) on failure. */
  private def parseBody[A: Reads](method: String, request: WSRequest, response: WSResponse): A =
    response.json.validate[A] match {
      case JsSuccess(value, _) => value
      case JsError(errors) => throw JsonParseException(method, request, response, errors)
    }
  /** GET a single entity; None on 404. */
  def getOpt[A: Reads](url: String): Future[Option[A]] = {
    val request: WSRequest = ws.url(url)
    request.get.map { response =>
      response.status match {
        case 200 => Some(parseBody[A]("GET", request, response))
        case 404 => None
        case _ => throw RestFailure("GET", request, response)
      }
    }
  }
  /** GET a collection; unlike getOpt, a 404 is treated as a failure. */
  def getMany[A: Reads](url: String): Future[Seq[A]] = {
    val request: WSRequest = ws.url(url)
    request.get.map { response =>
      response.status match {
        case 200 => parseBody[Seq[A]]("GET", request, response)
        case _ => throw RestFailure("GET", request, response)
      }
    }
  }
  /** POST the JSON-encoded body, discarding the response. */
  def post[A: Writes](url: String, body: A): Future[Unit] =
    ws.url(url).post(Json.toJson(body)).map(_ => ())
  /** PUT the JSON-encoded body, discarding the response. */
  def put[A: Writes](url: String, body: A): Future[Unit] =
    ws.url(url).put(Json.toJson(body)).map(_ => ())
  /** DELETE the resource, discarding the response. */
  def delete(url: String): Future[Unit] =
    ws.url(url).delete().map(_ => ())
  /** POST the JSON-encoded body and parse the response; None on 404. */
  def postWithResult[A: Reads, B: Writes](url: String, body: B): Future[Option[A]] = {
    val request: WSRequest = ws.url(url)
    request.post(Json.toJson(body)).map { response =>
      response.status match {
        case 200 => Some(parseBody[A]("POST", request, response))
        case 404 => None
        case _ => throw RestFailure("POST", request, response)
      }
    }
  }
}
object RestService {
  // Raised when a 200 response body fails play-json validation; `errs` carries the
  // per-path validation errors and `method` the HTTP verb that was used.
  case class JsonParseException(method: String, request: WSRequest, response: WSResponse, errs: Seq[(JsPath, Seq[ValidationError])]) extends Exception
  // Raised for any unexpected HTTP status; the status code is captured eagerly.
  case class RestFailure(method: String, request: WSRequest, response: WSResponse) extends Exception {
    val status = response.status
  }
}
| UKGovernmentBEIS/rifs-frontend-play | src/main/scala/services/RestService.scala | Scala | gpl-3.0 | 3,245 |
import scala.reflect.ClassManifest
// Compiler "pos" test for @specialized: each method below probes a different interaction
// between a specialized class type parameter and specialized/bounded method type parameters.
// The inline comments state the expected specialization outcome; do not "fix" the code —
// the point is that it must merely compile.
// NOTE(review): ClassManifest is the pre-2.10 API (hence the "-old" test name); assumes
// this file is kept for legacy-manifest coverage — confirm before modernizing.
class Foo[@specialized A: ClassManifest] {
  // conflicting in bounds, expect a normalized member calling m
  // and bridge + implementation in specialized subclasses
  // and overloads here according to specialization on A
  def m1[@specialized B <: A](x: B, y: A) =
    goal(x)
  // conflicting, unsolvable, expect a warning
  def m2[@specialized B <: String](x: B) = x.concat("a")
  // conflicting in bounds, no mention of other spec members
  // expect an overload here plus implementation in
  // compatible specialized subclasses
  def m3[@specialized B >: A](x: B) = ()
  // non-conflicting, expect a normalized overload implementation here
  def m4[@specialized T, U <: Ordered[T]](x: T, y: U) = ()
  // non-conflicting, expect a normalized overload implementation here
  def m5[@specialized B](x: B) = x
  // non-conflicting, expect a normalized implementation here
  // and specialized implementations for all expansions in specialized subclasses
  def m6[@specialized B](x: B, y: A) =
    goal(y)
  // Writes x into a fresh Array[A]; exercises the ClassManifest-backed array creation.
  def goal(x: A) = {
    val xs = new Array[A](1)
    xs(0) = x
  }
}
| scala/scala | test/files/pos/spec-params-old.scala | Scala | apache-2.0 | 1,121 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.orc
import java.io._
import java.net.URI
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, Path}
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapreduce._
import org.apache.hadoop.mapreduce.lib.input.FileSplit
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl
import org.apache.orc._
import org.apache.orc.OrcConf.{COMPRESS, MAPRED_OUTPUT_SCHEMA}
import org.apache.orc.mapred.OrcStruct
import org.apache.orc.mapreduce._
import org.apache.spark.TaskContext
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types._
import org.apache.spark.util.SerializableConfiguration
private[sql] object OrcFileFormat {
  /**
   * Validates a single column name by asking ORC to parse a one-field struct
   * schema containing it; an ORC parse failure is surfaced as AnalysisException.
   */
  private def checkFieldName(name: String): Unit = {
    try {
      TypeDescription.fromString(s"struct<$name:int>")
    } catch {
      case _: IllegalArgumentException =>
        val message =
          s"""Column name "$name" contains invalid character(s).
             |Please use alias to rename it.
           """.stripMargin.split("\\n").mkString(" ").trim
        throw new AnalysisException(message)
    }
  }
  /** Validates every column name; throws on the first invalid one. */
  def checkFieldNames(names: Seq[String]): Unit = {
    names.foreach(name => checkFieldName(name))
  }
  /**
   * Renders a Spark DataType as an ORC schema string with struct field names
   * back-quoted, so names containing special characters survive round-tripping.
   */
  def getQuotedSchemaString(dataType: DataType): String = {
    def render(dt: DataType): String = dt match {
      case _: AtomicType => dt.catalogString
      case StructType(fields) =>
        val columns = fields.map(field => s"`${field.name}`:${render(field.dataType)}")
        columns.mkString("struct<", ",", ">")
      case ArrayType(elementType, _) => s"array<${render(elementType)}>"
      case MapType(keyType, valueType, _) => s"map<${render(keyType)},${render(valueType)}>"
      case other => other.catalogString // UDTs and anything else fall back to catalogString
    }
    render(dataType)
  }
}
/**
* New ORC File Format based on Apache ORC.
*/
class OrcFileFormat
  extends FileFormat
  with DataSourceRegister
  with Serializable {
  // Data-source short name used in `spark.read.format("orc")`.
  override def shortName(): String = "orc"
  override def toString: String = "ORC"
  override def hashCode(): Int = getClass.hashCode()
  // All instances are interchangeable: equality is by class only.
  override def equals(other: Any): Boolean = other.isInstanceOf[OrcFileFormat]
  /** Infers the table schema by reading ORC footers of the given files. */
  override def inferSchema(
      sparkSession: SparkSession,
      options: Map[String, String],
      files: Seq[FileStatus]): Option[StructType] = {
    OrcUtils.readSchema(sparkSession, files)
  }
  /**
   * Configures the Hadoop job for writing ORC (output schema + compression codec)
   * and returns a factory producing per-task OrcOutputWriters.
   */
  override def prepareWrite(
      sparkSession: SparkSession,
      job: Job,
      options: Map[String, String],
      dataSchema: StructType): OutputWriterFactory = {
    val orcOptions = new OrcOptions(options, sparkSession.sessionState.conf)
    val conf = job.getConfiguration
    conf.set(MAPRED_OUTPUT_SCHEMA.getAttribute, OrcFileFormat.getQuotedSchemaString(dataSchema))
    conf.set(COMPRESS.getAttribute, orcOptions.compressionCodec)
    conf.asInstanceOf[JobConf]
      .setOutputFormat(classOf[org.apache.orc.mapred.OrcOutputFormat[OrcStruct]])
    new OutputWriterFactory {
      override def newInstance(
          path: String,
          dataSchema: StructType,
          context: TaskAttemptContext): OutputWriter = {
        new OrcOutputWriter(path, dataSchema, context)
      }
      // File extension reflects the codec, e.g. ".snappy.orc".
      override def getFileExtension(context: TaskAttemptContext): String = {
        val compressionExtension: String = {
          val name = context.getConfiguration.get(COMPRESS.getAttribute)
          OrcUtils.extensionsForCompressionCodecNames.getOrElse(name, "")
        }
        compressionExtension + ".orc"
      }
    }
  }
  /**
   * Columnar (vectorized) batch reading is possible only when enabled in the conf,
   * whole-stage codegen is on, the schema is small enough, and every column is atomic.
   */
  override def supportBatch(sparkSession: SparkSession, schema: StructType): Boolean = {
    val conf = sparkSession.sessionState.conf
    conf.orcVectorizedReaderEnabled && conf.wholeStageEnabled &&
      schema.length <= conf.wholeStageMaxNumFields &&
      schema.forall(_.dataType.isInstanceOf[AtomicType])
  }
  // ORC files can always be split at stripe boundaries.
  override def isSplitable(
      sparkSession: SparkSession,
      options: Map[String, String],
      path: Path): Boolean = {
    true
  }
  /**
   * Builds the per-partition read function. Pushes down supported filters as an ORC
   * search argument, then per file chooses the vectorized batch reader (when
   * supportBatch holds) or the row-based OrcStruct reader, projecting only the
   * requested columns and appending partition values.
   */
  override def buildReaderWithPartitionValues(
      sparkSession: SparkSession,
      dataSchema: StructType,
      partitionSchema: StructType,
      requiredSchema: StructType,
      filters: Seq[Filter],
      options: Map[String, String],
      hadoopConf: Configuration): (PartitionedFile) => Iterator[InternalRow] = {
    if (sparkSession.sessionState.conf.orcFilterPushDown) {
      OrcFilters.createFilter(dataSchema, filters).foreach { f =>
        OrcInputFormat.setSearchArgument(hadoopConf, f, dataSchema.fieldNames)
      }
    }
    val resultSchema = StructType(requiredSchema.fields ++ partitionSchema.fields)
    val sqlConf = sparkSession.sessionState.conf
    val enableVectorizedReader = supportBatch(sparkSession, resultSchema)
    val capacity = sqlConf.orcVectorizedReaderBatchSize
    // Broadcast the conf once; the closure below runs on executors.
    val broadcastedConf =
      sparkSession.sparkContext.broadcast(new SerializableConfiguration(hadoopConf))
    val isCaseSensitive = sparkSession.sessionState.conf.caseSensitiveAnalysis
    (file: PartitionedFile) => {
      val conf = broadcastedConf.value.value
      val filePath = new Path(new URI(file.filePath))
      val fs = filePath.getFileSystem(conf)
      val readerOptions = OrcFile.readerOptions(conf).filesystem(fs)
      val reader = OrcFile.createReader(filePath, readerOptions)
      // Empty result means the physical file has no matching columns (e.g. empty file).
      val requestedColIdsOrEmptyFile = OrcUtils.requestedColumnIds(
        isCaseSensitive, dataSchema, requiredSchema, reader, conf)
      if (requestedColIdsOrEmptyFile.isEmpty) {
        Iterator.empty
      } else {
        val requestedColIds = requestedColIdsOrEmptyFile.get
        assert(requestedColIds.length == requiredSchema.length,
          "[BUG] requested column IDs do not match required schema")
        val taskConf = new Configuration(conf)
        // -1 marks a required column missing from the file; only real ids are read.
        taskConf.set(OrcConf.INCLUDE_COLUMNS.getAttribute,
          requestedColIds.filter(_ != -1).sorted.mkString(","))
        val fileSplit = new FileSplit(filePath, file.start, file.length, Array.empty)
        val attemptId = new TaskAttemptID(new TaskID(new JobID(), TaskType.MAP, 0), 0)
        val taskAttemptContext = new TaskAttemptContextImpl(taskConf, attemptId)
        if (enableVectorizedReader) {
          val batchReader = new OrcColumnarBatchReader(capacity)
          // SPARK-23399 Register a task completion listener first to call `close()` in all cases.
          // There is a possibility that `initialize` and `initBatch` hit some errors (like OOM)
          // after opening a file.
          val iter = new RecordReaderIterator(batchReader)
          Option(TaskContext.get()).foreach(_.addTaskCompletionListener[Unit](_ => iter.close()))
          // Column-id arrays: data columns first, then -1 slots that the batch reader
          // fills from partition values (and vice versa for the partition-id array).
          val requestedDataColIds = requestedColIds ++ Array.fill(partitionSchema.length)(-1)
          val requestedPartitionColIds =
            Array.fill(requiredSchema.length)(-1) ++ Range(0, partitionSchema.length)
          batchReader.initialize(fileSplit, taskAttemptContext)
          batchReader.initBatch(
            reader.getSchema,
            resultSchema.fields,
            requestedDataColIds,
            requestedPartitionColIds,
            file.partitionValues)
          iter.asInstanceOf[Iterator[InternalRow]]
        } else {
          // Row-based path: deserialize OrcStruct rows, then project to unsafe rows,
          // joining in partition values when the table is partitioned.
          val orcRecordReader = new OrcInputFormat[OrcStruct]
            .createRecordReader(fileSplit, taskAttemptContext)
          val iter = new RecordReaderIterator[OrcStruct](orcRecordReader)
          Option(TaskContext.get()).foreach(_.addTaskCompletionListener[Unit](_ => iter.close()))
          val fullSchema = requiredSchema.toAttributes ++ partitionSchema.toAttributes
          val unsafeProjection = GenerateUnsafeProjection.generate(fullSchema, fullSchema)
          val deserializer = new OrcDeserializer(dataSchema, requiredSchema, requestedColIds)
          if (partitionSchema.length == 0) {
            iter.map(value => unsafeProjection(deserializer.deserialize(value)))
          } else {
            val joinedRow = new JoinedRow()
            iter.map(value =>
              unsafeProjection(joinedRow(deserializer.deserialize(value), file.partitionValues)))
          }
        }
      }
    }
  }
  /** Recursively checks whether a Spark data type can be written/read as ORC. */
  override def supportDataType(dataType: DataType): Boolean = dataType match {
    case _: AtomicType => true
    case st: StructType => st.forall { f => supportDataType(f.dataType) }
    case ArrayType(elementType, _) => supportDataType(elementType)
    case MapType(keyType, valueType, _) =>
      supportDataType(keyType) && supportDataType(valueType)
    case udt: UserDefinedType[_] => supportDataType(udt.sqlType)
    case _ => false
  }
}
| WindCanDie/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFileFormat.scala | Scala | apache-2.0 | 9,647 |
package controllers.game
import com.artclod.mathml.{Match, Yes, No, Inconclusive}
import com.artclod.mathml.Match._
import com.artclod.mathml.MathML
import com.artclod.mathml.scalar.MathMLElem
import com.artclod.play.CommonsMailerHelper
import com.artclod.slick.JodaUTC
import com.artclod.util._
import controllers.game.GamesController._
import controllers.game.GamesEmail._
import controllers.quiz.QuestionsController
import controllers.quiz.derivative.{DerivativeAnswerForm, DerivativeQuestionForm}
import controllers.quiz.derivativegraph.{DerivativeGraphQuestionForm, DerivativeGraphAnswerForm}
import controllers.quiz.graphmatch.{GraphMatchAnswerForm, GraphMatchQuestionForm}
import controllers.quiz.multiplechoice.{MultipleChoiceQuestionForm, MultipleChoiceAnswerForm}
import controllers.quiz.multiplefunction.{MultipleFunctionQuestionForm, MultipleFunctionAnswerForm }
import controllers.quiz.polynomialzone.{PolynomialZoneAnswerForm, PolynomialZoneQuestionForm}
import controllers.quiz.tangent.{TangentAnswerForm, TangentQuestionForm}
import controllers.support.SecureSocialConsented
import models.game.GameRole._
import models.game._
import models.game.mask.{GameMask, MyStillAnswering, MyQuizFinished, MyQuizUnfinished}
import models.quiz.Quiz
import models.quiz.answer._
import models.quiz.question._
import models.support._
import models.user.{Alerts, User}
import play.api.data.Form
import play.api.data.Forms._
import play.api.db.slick.Config.driver.simple.Session
import play.api.mvc.{Controller, Result}
import scala.util.{Left, Right}
trait GamesPlayerController extends Controller with SecureSocialConsented {
// ===== Abstract "To Implement" =====
protected val playerType: String
protected def createdQuiz(game: Game)(implicit session: Session): Option[Quiz]
protected def createdQuizEnsured(game: Game)(implicit user: User, session: Session): (Game, Quiz)
protected def quizToAnswer(game: Game)(implicit session: Session): Option[Quiz]
protected def questionToAnswer(gameId: GameId, questionId: QuestionId)(implicit session: Session): Either[Result, (Game, Quiz, Question)]
// ===== Concrete =====
protected def finalizeQuizInternal(game: Game)(implicit user: User, session: Session) {
val gameState = game.toMask(user) match {
case g: MyQuizUnfinished => g
case s => throw new IllegalStateException("Mask should have been subclass of " + classOf[MyQuizUnfinished].getName + " but was " + s)
}
Games.update(gameState.finalizeMyQuiz)
}
protected def finalizeAnswersInternal(game: Game)(implicit user: User, session: Session) {
val gameState = game.toMask(user) match {
case g: GameMask with MyQuizFinished with MyStillAnswering => g
case s => throw new IllegalStateException("Mask should have been subclass of " + classOf[MyQuizFinished].getName + " with " + classOf[MyStillAnswering].getName + " but was " + s)
}
val updatedGame = gameState.doneAnswering
Games.update(updatedGame)
Alerts.gameAlert(updatedGame.toMask(user))
}
protected def questionView(game: Game, quiz: Quiz, question: Question, unfinishedAnswer: Answer)(implicit user: models.user.User, session: Session) : Result = {
GamesController.questionView(game.toMask(user), quiz, question, Some(Left(unfinishedAnswer)))
}
def apply(gameId: GameId, questionId: QuestionId)(implicit session: Session): Either[Result, (Game, Quiz, Question)] =
Games(gameId) match {
case None => Left(NotFound(views.html.errors.notFoundPage("There was no game for id=[" + gameId + "]")))
case Some(game) => createdQuiz(game) match {
case None => Left(NotFound(views.html.errors.notFoundPage("The game with id=[" + gameId + "] does not have a " + playerType + " Quiz")))
case Some(quiz) => Right[Result, (Game, Quiz)]((game, quiz)) + QuestionsController(quiz.id, questionId)
}
}
// ===== Add =====
def addQuestion(gameId: GameId) = ConsentedAction { implicit request => implicit user => implicit session =>
GamesController(gameId) match {
case Left(notFoundResult) => notFoundResult
case Right(game) => {
game.toMask(user) match {
case mask : mask.MyQuizUnfinished => {
GameAddQuestion.form.bindFromRequest.fold(
errors => BadRequest(views.html.errors.formErrorPage(errors)),
questionIdNum => {
Questions(QuestionId(questionIdNum)) match {
case Some(question) => {
val (updatedGame, quiz) = createdQuizEnsured(game)
question.attach(quiz.id)
Redirect(routes.GamesController.game(updatedGame.id, None))
}
case None => BadRequest(views.html.errors.errorPage(new IllegalStateException("No question found for [" + QuestionId(questionIdNum) + "]")))
}
}
)
}
case _ => BadRequest(views.html.errors.errorPage(new IllegalStateException("Can only add to game quizzes when they are unfinished state [" + gameId + "]")))
}
}
}
}
// ===== Start Create =====
def createDerivativeQuestion(gameId: GameId) = ConsentedAction { implicit request => implicit user => implicit session =>
GamesController(gameId) match {
case Left(notFoundResult) => notFoundResult
case Right(game) =>
DerivativeQuestionForm.values.bindFromRequest.fold(
errors =>
game.toMask(user) match {
case mask : models.game.mask.MyQuizUnfinished => BadRequest(views.html.game.play.createQuiz(mask, controllers.quiz.QuestionForms.derivative(errors)))
case _ => BadRequest(views.html.errors.formErrorPage(errors))
},
form => {
val (updatedGame, quiz) = createdQuizEnsured(game)
DerivativeQuestions.create(DerivativeQuestionForm.toQuestion(user, form), quiz.id)
Redirect(routes.GamesController.game(updatedGame.id, None))
})
}
}
def createDerivativeGraphQuestion(gameId: GameId) = ConsentedAction { implicit request => implicit user => implicit session =>
GamesController(gameId) match {
case Left(notFoundResult) => notFoundResult
case Right(game) =>
DerivativeGraphQuestionForm.values.bindFromRequest.fold(
errors =>
game.toMask(user) match {
case mask : models.game.mask.MyQuizUnfinished => BadRequest(views.html.game.play.createQuiz(mask, controllers.quiz.QuestionForms.derivativeGraph(errors)))
case _ => BadRequest(views.html.errors.formErrorPage(errors))
},
form => {
val (updatedGame, quiz) = createdQuizEnsured(game)
DerivativeGraphQuestions.create(DerivativeGraphQuestionForm.toQuestion(user, form), quiz.id)
Redirect(routes.GamesController.game(updatedGame.id, None))
})
}
}
def createTangentQuestion(gameId: GameId) = ConsentedAction { implicit request => implicit user => implicit session =>
GamesController(gameId) match {
case Left(notFoundResult) => notFoundResult
case Right(game) =>
TangentQuestionForm.values.bindFromRequest.fold(
errors =>
game.toMask(user) match {
case mask : models.game.mask.MyQuizUnfinished => BadRequest(views.html.game.play.createQuiz(mask, controllers.quiz.QuestionForms.tangent(errors)))
case _ => BadRequest(views.html.errors.formErrorPage(errors))
},
form => {
val (updatedGame, quiz) = createdQuizEnsured(game)
TangentQuestions.create(TangentQuestionForm.toQuestion(user, form), quiz.id)
Redirect(routes.GamesController.game(updatedGame.id, None))
})
}
}
def createGraphMatchQuestion(gameId: GameId) = ConsentedAction { implicit request => implicit user => implicit session =>
GamesController(gameId) match {
case Left(notFoundResult) => notFoundResult
case Right(game) =>
GraphMatchQuestionForm.values.bindFromRequest.fold(
errors =>
game.toMask(user) match {
case mask : models.game.mask.MyQuizUnfinished => BadRequest(views.html.game.play.createQuiz(mask, controllers.quiz.QuestionForms.graphMatch(errors)))
case _ => BadRequest(views.html.errors.formErrorPage(errors))
},
form => {
val (updatedGame, quiz) = createdQuizEnsured(game)
GraphMatchQuestions.create(GraphMatchQuestionForm.toQuestion(user, form), quiz.id)
Redirect(routes.GamesController.game(updatedGame.id, None))
})
}
}
  /** POST handler: adds a polynomial-zone question to the requesting user's unfinished quiz
    * for game `gameId`. Same shape as the other create* handlers: re-render on binding
    * errors, persist and redirect on success.
    */
  def createPolynomialZoneQuestion(gameId: GameId) = ConsentedAction { implicit request => implicit user => implicit session =>
    GamesController(gameId) match {
      case Left(notFoundResult) => notFoundResult
      case Right(game) =>
        PolynomialZoneQuestionForm.values.bindFromRequest.fold(
          errors =>
            game.toMask(user) match {
              case mask : models.game.mask.MyQuizUnfinished => BadRequest(views.html.game.play.createQuiz(mask, controllers.quiz.QuestionForms.polynomialZone(errors)))
              case _ => BadRequest(views.html.errors.formErrorPage(errors))
            },
          form => {
            val (updatedGame, quiz) = createdQuizEnsured(game)
            PolynomialZoneQuestions.create(PolynomialZoneQuestionForm.toQuestion(user, form), quiz.id)
            Redirect(routes.GamesController.game(updatedGame.id, None))
          })
    }
  }
  /** POST handler: adds a multiple-choice question (question text plus its answer options)
    * to the requesting user's unfinished quiz for game `gameId`.
    */
  def createMultipleChoiceQuestion(gameId: GameId) = ConsentedAction { implicit request => implicit user => implicit session =>
    GamesController(gameId) match {
      case Left(notFoundResult) => notFoundResult
      case Right(game) =>
        MultipleChoiceQuestionForm.values.bindFromRequest.fold(
          errors =>
            game.toMask(user) match {
              case mask : models.game.mask.MyQuizUnfinished => BadRequest(views.html.game.play.createQuiz(mask, controllers.quiz.QuestionForms.multipleChoice(errors)))
              case _ => BadRequest(views.html.errors.formErrorPage(errors))
            },
          form => {
            val (updatedGame, quiz) = createdQuizEnsured(game)
            // options are created alongside the question in a single call
            MultipleChoiceQuestions.create(MultipleChoiceQuestionForm.toQuestion(user, form), MultipleChoiceQuestionForm.toOptions(form), quiz.id)
            Redirect(routes.GamesController.game(updatedGame.id, None))
          })
    }
  }
  /** POST handler: adds a multiple-function question to the requesting user's unfinished quiz
    * for game `gameId`.
    */
  def createMultipleFunctionQuestion(gameId: GameId) = ConsentedAction { implicit request => implicit user => implicit session =>
    GamesController(gameId) match {
      case Left(notFoundResult) => notFoundResult
      case Right(game) =>
        MultipleFunctionQuestionForm.values.bindFromRequest.fold(
          errors =>
            game.toMask(user) match {
              case mask : models.game.mask.MyQuizUnfinished => BadRequest(views.html.game.play.createQuiz(mask, controllers.quiz.QuestionForms.multipleFunction(errors)))
              case _ => BadRequest(views.html.errors.formErrorPage(errors))
            },
          form => {
            val (updatedGame, quiz) = createdQuizEnsured(game)
            // NOTE(review): toOptions(form).get throws if the options are absent — presumably
            // the form validation guarantees at least one option; TODO confirm.
            MultipleFunctionQuestions.create(MultipleFunctionQuestionForm.toQuestion(user, form), MultipleFunctionQuestionForm.toOptions(form).get, quiz.id)
            Redirect(routes.GamesController.game(updatedGame.id, None))
          })
    }
  }
// ===== End Create =====
  /** POST handler: removes a question (identified by the bound `removeId` field, see
    * [[GameRemoveQuestion]]) from the requesting user's quiz for game `gameId`, then
    * redirects back to the game page. Unknown question ids are silently ignored.
    */
  def removeQuestion(gameId: GameId) = ConsentedAction { implicit request => implicit user => implicit session =>
    GamesController(gameId) match {
      case Left(notFoundResult) => notFoundResult
      case Right(game) =>
        GameRemoveQuestion.form.bindFromRequest.fold(
          errors => BadRequest(views.html.errors.formErrorPage(errors)),
          questionId => {
            val (updatedGame, quiz) = createdQuizEnsured(game)
            // Questions(questionId) yields at most one question; nothing happens if it is not found
            for (question <- Questions(questionId)) { quiz.remove(question) }
            Redirect(routes.GamesController.game(updatedGame.id, None))
          })
    }
  }
  /** Marks the requesting user's quiz for game `gameId` as finalized, emails the other
    * player (if they have opted in to email) that a quiz is ready, and redirects to the
    * game page.
    */
  def finalizeCreatedQuiz(gameId: GameId) = ConsentedAction { implicit request => implicit user => implicit session =>
    GamesController(gameId) match {
      case Left(notFoundResult) => notFoundResult
      case Right(game) => {
        finalizeQuizInternal(game)
        // maybeSendEmail is empty when the other player has not opted in, so no mail is sent
        for(mail <- game.otherPlayer(user).maybeSendEmail.map(otherMail => CommonsMailerHelper.defaultMailSetup(otherMail))) {
          val userName = user.nameDisplay
          mail.setSubject(userName + " created a CalcTutor game quiz for you")
          mail.sendHtml(userName + " created a game quiz for you in the " + serverLinkEmail(request) + " (" + goToGameLinkEmail(request, game) + ").")
        }
        Redirect(routes.GamesController.game(game.id, None))
      }
    }
  }
// ===== Start Answer =====
  /** POST handler: grades a submitted derivative answer for `questionId` in game `gameId`.
    * Yes → redirect to the game page carrying the correct answer's id; No → redirect to the
    * answer-review page; Inconclusive → re-render the question so the user can revise.
    * The second Right case catches a question of the wrong type.
    */
  def answerDerivativeQuestion(gameId: GameId, questionId: QuestionId) = ConsentedAction { implicit request => implicit user => implicit session =>
    questionToAnswer(gameId, questionId) match {
      case Left(notFoundResult) => notFoundResult
      case Right((game, quiz, question : DerivativeQuestion)) => {
        DerivativeAnswerForm.values.bindFromRequest.fold(
          errors => BadRequest(views.html.errors.formErrorPage(errors)),
          form => {
            // unfinishedAnswer is a Boolean => Answer factory; the flag records correctness
            val unfinishedAnswer = DerivativeAnswerForm.toAnswerUnfinished(user, question, form)
            DerivativeAnswers.correct(question, form.functionMathML) match {
              case Yes => Redirect(routes.GamesController.game(game.id, Some(DerivativeAnswers.createAnswer(unfinishedAnswer(true)).id)))
              case No => Redirect(routes.GamesController.answer(game.id, question.id, DerivativeAnswers.createAnswer(unfinishedAnswer(false)).id))
              case Inconclusive => questionView(game, quiz, question, unfinishedAnswer(false))
            }
          })
      }
      case Right((game, quiz, _)) => Ok(views.html.errors.notFoundPage("Question was not a derivative question " + questionId))
    }
  }
  /** POST handler: grades a submitted derivative-graph answer for `questionId` in game
    * `gameId`. Same Yes/No/Inconclusive routing as [[answerDerivativeQuestion]].
    */
  def answerDerivativeGraphQuestion(gameId: GameId, questionId: QuestionId) = ConsentedAction { implicit request => implicit user => implicit session =>
    questionToAnswer(gameId, questionId) match {
      case Left(notFoundResult) => notFoundResult
      case Right((game, quiz, question: DerivativeGraphQuestion)) => {
        DerivativeGraphAnswerForm.values.bindFromRequest.fold(
          errors => BadRequest(views.html.errors.formErrorPage(errors)),
          form => {
            val unfinishedAnswer = DerivativeGraphAnswerForm.toAnswerUnfinished(user, question, form)
            DerivativeGraphAnswers.correct(question, form.derivativeOrder) match {
              case Yes => Redirect(routes.GamesController.game(game.id, Some(DerivativeGraphAnswers.createAnswer(unfinishedAnswer(true)).id)))
              case No => Redirect(routes.GamesController.answer(game.id, question.id, DerivativeGraphAnswers.createAnswer(unfinishedAnswer(false)).id))
              case Inconclusive => questionView(game, quiz, question, unfinishedAnswer(false))
            }
          })
      }
      case Right((game, quiz, _)) => Ok(views.html.errors.notFoundPage("Question was not a derivative graph question " + questionId))
    }
  }
  /** POST handler: grades a submitted tangent-line answer (slope and intercept) for
    * `questionId` in game `gameId`. Same Yes/No/Inconclusive routing as the other
    * answer* handlers.
    */
  def answerTangentQuestion(gameId: GameId, questionId: QuestionId) = ConsentedAction { implicit request => implicit user => implicit session =>
    questionToAnswer(gameId, questionId) match {
      case Left(notFoundResult) => notFoundResult
      case Right((game, quiz, question: TangentQuestion)) => {
        TangentAnswerForm.values.bindFromRequest.fold(
          errors => BadRequest(views.html.errors.formErrorPage(errors)),
          form => {
            val unfinishedAnswer = TangentAnswerForm.toAnswerUnfinished(user, question, form)
            TangentAnswers.correct(question, form.slopeMathML, form.interceptMathML) match {
              case Yes => Redirect(routes.GamesController.game(game.id, Some(TangentAnswers.createAnswer(unfinishedAnswer(true)).id)))
              case No => Redirect(routes.GamesController.answer(game.id, question.id, TangentAnswers.createAnswer(unfinishedAnswer(false)).id))
              case Inconclusive => questionView(game, quiz, question, unfinishedAnswer(false))
            }
          })
      }
      case Right((game, quiz, _)) => Ok(views.html.errors.notFoundPage("Question was not a tangent question " + questionId))
    }
  }
  /** POST handler: grades a submitted graph-match answer (an index into the candidate
    * graphs) for `questionId` in game `gameId`.
    */
  def answerGraphMatchQuestion(gameId: GameId, questionId: QuestionId) = ConsentedAction { implicit request => implicit user => implicit session =>
    questionToAnswer(gameId, questionId) match {
      case Left(notFoundResult) => notFoundResult
      case Right((game, quiz, question: GraphMatchQuestion)) => {
        GraphMatchAnswerForm.values.bindFromRequest.fold(
          errors => BadRequest(views.html.errors.formErrorPage(errors)),
          form => {
            val unfinishedAnswer = GraphMatchAnswerForm.toAnswerUnfinished(user, question, form)
            GraphMatchAnswers.correct(question, form.guessIndex) match {
              case Yes => Redirect(routes.GamesController.game(game.id, Some(GraphMatchAnswers.createAnswer(unfinishedAnswer(true)).id)))
              case No => Redirect(routes.GamesController.answer(game.id, question.id, GraphMatchAnswers.createAnswer(unfinishedAnswer(false)).id))
              case Inconclusive => questionView(game, quiz, question, unfinishedAnswer(false))
            }
          })
      }
      case Right((game, quiz, _)) => Ok(views.html.errors.notFoundPage("Question was not a graph match question " + questionId))
    }
  }
  /** POST handler: grades a submitted polynomial-zone answer (a set of intervals) for
    * `questionId` in game `gameId`. Unlike the MathML-based graders, `correct` here
    * returns a plain Boolean, so there is no Inconclusive branch.
    */
  def answerPolynomialZoneQuestion(gameId: GameId, questionId: QuestionId) = ConsentedAction { implicit request => implicit user => implicit session =>
    questionToAnswer(gameId, questionId) match {
      case Left(notFoundResult) => notFoundResult
      case Right((game, quiz, question: PolynomialZoneQuestion)) => {
        PolynomialZoneAnswerForm.values.bindFromRequest.fold(
          errors => BadRequest(views.html.errors.formErrorPage(errors)),
          form => {
            val unfinishedAnswer = PolynomialZoneAnswerForm.toAnswerUnfinished(user, question, form)
            PolynomialZoneAnswers.correct(question, form.intervals) match {
              case true => Redirect(routes.GamesController.game(game.id, Some(PolynomialZoneAnswers.createAnswer(unfinishedAnswer(true)).id)))
              case false => Redirect(routes.GamesController.answer(game.id, question.id, PolynomialZoneAnswers.createAnswer(unfinishedAnswer(false)).id))
            }
          })
      }
      case Right((game, quiz, _)) => Ok(views.html.errors.notFoundPage("Question was not a polynomial zone question " + questionId))
    }
  }
  /** POST handler: grades a submitted multiple-choice answer (a guess index) for
    * `questionId` in game `gameId`. `correct` returns a Boolean here, so the
    * Inconclusive branch of the other handlers does not apply (it is left commented out).
    */
  def answerMultipleChoiceQuestion(gameId: GameId, questionId: QuestionId) = ConsentedAction { implicit request => implicit user => implicit session =>
    questionToAnswer(gameId, questionId) match {
      case Left(notFoundResult) => notFoundResult
      case Right((game, quiz, question: MultipleChoiceQuestion)) => {
        MultipleChoiceAnswerForm.values.bindFromRequest.fold(
          errors => BadRequest(views.html.errors.formErrorPage(errors)),
          form => {
            val unfinishedAnswer = MultipleChoiceAnswerForm.toAnswerUnfinished(user, question, form)
            MultipleChoiceAnswers.correct(question, form.guessIndex) match {
              case true => Redirect(routes.GamesController.game(game.id, Some(MultipleChoiceAnswers.createAnswer(unfinishedAnswer(true)).id)))
              case false => Redirect(routes.GamesController.answer(game.id, question.id, MultipleChoiceAnswers.createAnswer(unfinishedAnswer(false)).id))
              // case Inconclusive => questionView(game, quiz, question, unfinishedAnswer(false))
            }
          })
      }
      case Right((game, quiz, _)) => Ok(views.html.errors.notFoundPage("Question was not a multiple choice question " + questionId))
    }
  }
def answerMultipleFunctionQuestion(gameId: GameId, questionId: QuestionId) = ConsentedAction { implicit request => implicit user => implicit session =>
questionToAnswer(gameId, questionId) match {
case Left(notFoundResult) => notFoundResult
case Right((game, quiz, question: MultipleFunctionQuestion)) => {
MultipleFunctionAnswerForm.values.bindFromRequest.fold(
errors => BadRequest(views.html.errors.formErrorPage(errors)),
form => {
val unfinishedAnswer = MultipleFunctionAnswerForm.toAnswerUnfinished(user, question, form, JodaUTC.now)
val answerOptions = MultipleFunctionAnswers.answerOptions(question, form)
Match.from(answerOptions.map(_.correctNum).max) match {
case Yes => Redirect(routes.GamesController.game(game.id, Some(MultipleFunctionAnswers.createAnswer(unfinishedAnswer(true), answerOptions).id)))
case No => Redirect(routes.GamesController.answer(game.id, question.id, MultipleFunctionAnswers.createAnswer(unfinishedAnswer(false), answerOptions).id))
case Inconclusive => questionView(game, quiz, question, unfinishedAnswer(false))
}
})
}
case Right((game, quiz, _)) => Ok(views.html.errors.notFoundPage("Question was not a multiple choice question " + questionId))
}
}
// ===== End Answer =====
  /** Marks the requesting user's answers for game `gameId` as finalized, emails the other
    * player (if they have opted in to email), and redirects to the game page.
    */
  def finalizeAnswers(gameId: GameId) = ConsentedAction { implicit request => implicit user => implicit session =>
    GamesController(gameId) match {
      case Left(notFoundResult) => notFoundResult
      case Right(game) => {
        finalizeAnswersInternal(game)
        // maybeSendEmail is empty when the other player has not opted in, so no mail is sent
        for(mail <- game.otherPlayer(user).maybeSendEmail.map(otherMail => CommonsMailerHelper.defaultMailSetup(otherMail))) {
          val userName = user.nameDisplay
          mail.setSubject(userName + " finished answering your CalcTutor game quiz")
          mail.sendHtml(userName + " finished answering your game quiz in the " + serverLinkEmail(request) + " (" + goToGameLinkEmail(request, game) + ").")
        }
        Redirect(routes.GamesController.game(game.id, None))
      }
    }
  }
}
/** Single-field form used by `removeQuestion`: binds the `removeId` request parameter
  * with the `questionId` mapping (defined elsewhere in this package).
  */
object GameRemoveQuestion {
  val removeId = "removeId"
  val form = Form(removeId -> questionId)
}
| kristiankime/web-education-games | app/controllers/game/GamesPlayerController.scala | Scala | mit | 22,348 |
package com.datastax.spark.connector.writer
import com.datastax.spark.connector.embedded.SparkTemplate._
import scala.collection.immutable.Map
import org.apache.cassandra.dht.IPartitioner
import com.datastax.spark.connector.cql.{CassandraConnector, Schema}
import com.datastax.spark.connector.embedded.EmbeddedCassandra
import com.datastax.spark.connector.{CassandraRow, SparkCassandraITFlatSpecBase}
/** Integration spec: verifies that RoutingKeyGenerator produces routing keys whose token
  * (under the cluster's configured partitioner) matches the token Cassandra itself computes
  * via TOKEN(...), for both single-column and composite partition keys. Runs against an
  * embedded Cassandra instance.
  */
class RoutingKeyGeneratorSpec extends SparkCassandraITFlatSpecBase {
  useCassandraConfig(Seq("cassandra-default.yaml.template"))
  val conn = CassandraConnector(defaultConf)
  val ks = "RoutingKeyGeneratorSpec"
  // test fixtures: one table with a single partition key, one with a composite key
  conn.withSessionDo { session =>
    session.execute(s"""CREATE KEYSPACE IF NOT EXISTS "$ks" WITH REPLICATION = { 'class': 'SimpleStrategy', 'replication_factor': 1 }""")
    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".one_key (id INT PRIMARY KEY, value TEXT)""")
    session.execute(s"""CREATE TABLE IF NOT EXISTS "$ks".two_keys (id INT, id2 TEXT, value TEXT, PRIMARY KEY ((id, id2)))""")
  }
  implicit val protocolVersion = conn.withClusterDo(_.getConfiguration.getProtocolOptions.getProtocolVersionEnum)
  // instantiate the same partitioner class the cluster reports, to tokenize routing keys locally
  val cp = conn.withClusterDo(cluster => Class.forName(cluster.getMetadata.getPartitioner).newInstance().asInstanceOf[IPartitioner])
  "RoutingKeyGenerator" should "generate proper routing keys when there is one partition key column" in {
    val schema = Schema.fromCassandra(conn, Some(ks), Some("one_key"))
    val rowWriter = RowWriterFactory.defaultRowWriterFactory[(Int, String)].rowWriter(schema.tables.head, IndexedSeq("id", "value"))
    val rkg = new RoutingKeyGenerator(schema.tables.head, Seq("id", "value"))
    conn.withSessionDo { session =>
      val pStmt = session.prepare(s"""INSERT INTO "$ks".one_key (id, value) VALUES (:id, :value)""")
      val bStmt = pStmt.bind(1: java.lang.Integer, "first row")
      session.execute(bStmt)
      // compare the locally computed token with the token Cassandra reports for the same row
      val row = session.execute(s"""SELECT TOKEN(id) FROM "$ks".one_key WHERE id = 1""").one()
      val readTokenStr = CassandraRow.fromJavaDriverRow(row, Array("token(id)")).getString(0)
      val rk = rkg.apply(bStmt)
      val rkToken = cp.getToken(rk)
      rkToken.getTokenValue.toString should be(readTokenStr)
    }
  }
  "RoutingKeyGenerator" should "generate proper routing keys when there are more partition key columns" in {
    val schema = Schema.fromCassandra(conn, Some(ks), Some("two_keys"))
    val rowWriter = RowWriterFactory.defaultRowWriterFactory[(Int, String, String)].rowWriter(schema.tables.head, IndexedSeq("id", "id2", "value"))
    val rkg = new RoutingKeyGenerator(schema.tables.head, Seq("id", "id2", "value"))
    conn.withSessionDo { session =>
      val pStmt = session.prepare(s"""INSERT INTO "$ks".two_keys (id, id2, value) VALUES (:id, :id2, :value)""")
      val bStmt = pStmt.bind(1: java.lang.Integer, "one", "first row")
      session.execute(bStmt)
      val row = session.execute(s"""SELECT TOKEN(id, id2) FROM "$ks".two_keys WHERE id = 1 AND id2 = 'one'""").one()
      val readTokenStr = CassandraRow.fromJavaDriverRow(row, Array("token(id,id2)")).getString(0)
      val rk = rkg.apply(bStmt)
      val rkToken = cp.getToken(rk)
      rkToken.getTokenValue.toString should be(readTokenStr)
    }
  }
}
| Stratio/spark-cassandra-connector | spark-cassandra-connector/src/it/scala/com/datastax/spark/connector/writer/RoutingKeyGeneratorSpec.scala | Scala | apache-2.0 | 3,251 |
/*
* Copyright 2016 Codnos Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codnos.dbgp.internal.commands
import com.codnos.dbgp.api.StatusChangeHandler
import com.codnos.dbgp.internal.arguments.ArgumentConfiguration.Builder._
import com.codnos.dbgp.internal.arguments.ArgumentFormat._
import com.codnos.dbgp.internal.commands.step.{StepOverCommand, StepOverCommandHandler}
import com.codnos.dbgp.internal.impl.StatusChangeHandlerFactory
import org.mockito.Matchers.any
import org.mockito.Mockito.verify
/** Spec for the DBGP "step_over" command: checks the wire-message format produced by
  * [[StepOverCommand]] and that [[StepOverCommandHandler]] registers a status-change
  * handler before delegating to the engine's stepOver().
  */
class StepOverSpec extends CommandSpec {
  val argumentConfiguration = configuration.withCommand("step_over", numeric("i")).build
  "Command" should "have message constructed from the parameters" in {
    val command = new StepOverCommand("456")
    command should have(
      'name ("step_over"),
      'message ("step_over -i 456"),
      'handlerKey ("status:456")
    )
  }
  "CommandHandler" should "register status change handler and step over" in {
    val handler = new StepOverCommandHandler(engine, new StatusChangeHandlerFactory, argumentConfiguration)
    handler.channelRead(ctx, "step_over -i 456")
    // the handler must hook status changes and then actually perform the step
    verify(engine).registerStatusChangeHandler(any(classOf[StatusChangeHandler]))
    verify(engine).stepOver()
  }
}
| Codnos/dbgp-interfaces | src/test/scala/com/codnos/dbgp/internal/commands/StepOverSpec.scala | Scala | apache-2.0 | 1,771 |
/*
* Copyright (C) 2017 Vincibean <Andre Bessi>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.vincibean.hex.decoder
import scala.util.Try
import cats.syntax.either._
import scala.collection.immutable.Seq
/** Helper functions used while decoding hexadecimal-encoded payloads. */
object Utils {

  /**
    * Converts a string holding a hexadecimal number (in a form accepted by
    * `Integer.decode`, e.g. "0x1F") into its binary-digit representation.
    * @param s the string to convert: a number in hexadecimal base
    * @return the binary representation, or a [[HexToBinaryError]] if decoding fails
    */
  def hexToBinary(s: String): Either[ParseError, String] =
    Try(Integer.decode(s).toInt.toBinaryString)
      .toEither
      .left.map(err => HexToBinaryError(s, err))

  /**
    * Left-pads the given string.
    * @param s the string to pad
    * @param format the format to apply (default "%31s": the result is 31 characters wide)
    * @param toPad the padding character (default '0')
    * @return the padded string, or a [[PaddingError]] if formatting fails
    */
  def addPadding(s: String,
                 format: String = """%31s""",
                 toPad: Char = '0'): Either[ParseError, String] =
    Try(format.format(s).replace(' ', toPad))
      .toEither
      .left.map(err => PaddingError(s, err))

  /**
    * Splits a string at each of the given positions.
    * @param str the string to split
    * @param pos the split positions (applied right-to-left, so increasing absolute indices
    *            yield consecutive slices)
    * @return the resulting slices, or Nil if splitting throws
    */
  def splitsAt(str: String, pos: Seq[Int]): Seq[String] = {
    Try {
      // fold from the right: peel off the suffix at each cut index, carrying the prefix forward
      val (leftover, pieces) = pos.foldRight((str, List.empty[String])) {
        case (cutIndex, (remaining, collected)) =>
          val (prefix, suffix) = remaining.splitAt(cutIndex)
          (prefix, suffix :: collected)
      }
      leftover :: pieces
    }.getOrElse(Nil)
  }

  /**
    * Converts a string holding a binary number into its decimal value.
    * @param s the string to convert: a number in binary base
    * @return the decimal value, or a [[BinaryToDecimalError]] if parsing fails
    */
  def binaryToDecimal(s: String): Either[ParseError, Int] =
    Try(Integer.parseInt(s, 2))
      .toEither
      .left.map(err => BinaryToDecimalError(s, err))
}
| Vincibean/HexadecimalMatchDecoder | src/main/scala/org/vincibean/hex/decoder/Utils.scala | Scala | gpl-3.0 | 2,927 |
package com.monsanto.arch.cloudformation.model.resource
import com.monsanto.arch.cloudformation.model.ResourceRef
import org.scalatest.{FunSpec, Matchers}
import spray.json._
/** Unit tests for the EFS CloudFormation resource wrappers: JSON serialization of
  * AWS::EFS::FileSystem (including the Encrypted/KmsKeyId invariant) and of
  * AWS::EFS::MountTarget.
  */
class EFS_UT extends FunSpec with Matchers {
  describe("AWS::EFS::FileSystem") {
    val resource = `AWS::EFS::FileSystem`(
      "test",
      FileSystemTags = Some(
        List(AmazonTag("Foo", "Bar"))
      ),
      Encrypted = Some(true),
      KmsKeyId = Some(`AWS::KMS::Key`(
        name = "test",
        KeyPolicy = PolicyDocument(
          Statement = List(
            PolicyStatement(
              Effect = "Allow",
              Action = List("dynamodb:*")
            )
          )
        )
      )),
      PerformanceMode = PerformanceMode.generalPurpose
    )
    it("should serialize to JSON") {
      resource.toJson shouldBe """{
                                 |  "name": "test",
                                 |  "KmsKeyId": {
                                 |    "Ref": "test"
                                 |  },
                                 |  "Encrypted": true,
                                 |  "PerformanceMode": "generalPurpose",
                                 |  "FileSystemTags": [{
                                 |    "Key": "Foo",
                                 |    "Value": "Bar"
                                 |  }]
                                 |}""".stripMargin.parseJson
    }
    // a KmsKeyId only makes sense on an encrypted filesystem
    it("throws an exception when KmsKeyId is set but Encrypted is false") {
      an [IllegalArgumentException] should be thrownBy resource.copy(Encrypted = None)
      an [IllegalArgumentException] should be thrownBy resource.copy(Encrypted = Some(false))
    }
  }
  describe("AWS::EFS::MountTarget") {
    // minimal VPC/subnet/security-group fixtures referenced by the mount target
    val vpc = `AWS::EC2::VPC`(
      "vpc",
      CidrBlock(198,51,100,0,24),
      List()
    )
    val subnet = `AWS::EC2::Subnet`(
      "test",
      VpcId = ResourceRef(vpc),
      CidrBlock = CidrBlock(198,51,100,129,25),
      Tags = List()
    )
    val sg = `AWS::EC2::SecurityGroup`(
      "test",
      GroupDescription = "Test",
      VpcId = ResourceRef(vpc),
      None,
      None,
      List()
    )
    val resource = `AWS::EFS::MountTarget`(
      "test",
      FileSystemId = ResourceRef(`AWS::EFS::FileSystem`("test")),
      IpAddress = Some("198.51.100.1"),
      SecurityGroups = List(ResourceRef(sg)),
      SubnetId = ResourceRef(subnet)
    )
    it("should serialize to JSON") {
      resource.toJson shouldBe """{
                                 |  "name": "test",
                                 |  "SecurityGroups": [{
                                 |    "Ref": "test"
                                 |  }],
                                 |  "IpAddress": "198.51.100.1",
                                 |  "FileSystemId": {
                                 |    "Ref": "test"
                                 |  },
                                 |  "SubnetId": {
                                 |    "Ref": "test"
                                 |  }
                                 |}""".stripMargin.parseJson
    }
  }
}
| MonsantoCo/cloudformation-template-generator | src/test/scala/com/monsanto/arch/cloudformation/model/resource/EFS_UT.scala | Scala | bsd-3-clause | 3,371 |
package satisfaction
package hadoop
package hive.ms
import org.specs2.mutable._
import scala.concurrent.duration._
import org.joda.time.DateTime
import org.specs2.runner.JUnitRunner
import org.junit.runner.RunWith
import org.apache.log4j.Logger
import org.apache.log4j.Level
import hadoop.hive.ms._
import satisfaction.fs.FileSystem
import satisfaction.hadoop.hdfs.Hdfs
import satisfaction.engine.Satisfaction
import satisfaction.fs.LocalFileSystem
import satisfaction.fs.Path
@RunWith(classOf[JUnitRunner])
/** Integration spec: drives a PartitionExists goal against a live Hive metastore (thrift
  * URI below) and HDFS, checking the goal can be satisfied for a concrete witness.
  * NOTE(review): depends on external infrastructure (dhdp2jump01:9083) — not runnable as a
  * plain unit test.
  */
class PartitionExistsSpec extends Specification {
  val hour = new Variable[String]("hour", classOf[String])
  val runDate = new Variable[String]("dt", classOf[String])
  ///implicit val ms : MetaStore = MetaStore.default
  implicit val ms : MetaStore = MetaStore( new java.net.URI("thrift://dhdp2jump01:9083"))
  implicit val hdfs : FileSystem = Hdfs.default
  implicit val track : Track = Track.localTrack( "PartitionExistsTrack",
      LocalFileSystem.relativePath( new Path(
         "src/test/resources/track/PartitionExists")))
  "PartitionExistsSpec" should {
    "Create a single partition" in {
      val vars: List[Variable[_]] = List(hour, runDate)
      val partExist = PartitionExists( HiveTable("ramblas", "page_view_event"))
      val witness = Witness((runDate -> "20140522"), ( hour -> "03"))
      val checkExists = partExist.evidenceForWitness(witness).head.exists(witness)
      println(s" DOES EXIST ALREADY ?? $checkExists")
      val goalResult = Satisfaction.satisfyGoal( partExist, witness)
      println(" YYY Goal Result is " + goalResult)
      println(" YYY Goal ExecResult is " + goalResult.execResult)
      println(" YYY Goal State is " + goalResult.state)
      if( goalResult.execResult.stackTrace != null ) {
        println( " YYY " + goalResult.execResult.errorMessage)
        goalResult.execResult.stackTrace.foreach( st => println( " YYY " + st))
      }
      goalResult.state == GoalState.Success
    }
    /**
    "Create a FanOut set of Partitions" in {
      val vars: List[Variable[_]] = List(hour, runDate)
      val partExist = PartitionExists( HiveTable("ramblas", "page_view_event"))
      val allHours = ( 01 to 23 ) map ( hr => {
        val nuum = new java.text.DecimalFormat("00").format(hr)
        println(" HOUR = " + nuum);
        nuum} )
      val fanOutParts = FanOutGoal( partExist, hour, allHours)
      val witness = Witness((runDate -> "20140522"))
      val goalResult = Satisfaction.satisfyGoal( fanOutParts, witness)
      println(" YYY Goal Result is " + goalResult)
      println(" YYY Goal ExecResult is " + goalResult.execResult)
      println(" YYY Goal State is " + goalResult.state)
      if( goalResult.execResult.stackTrace != null ) {
        println( " YYY " + goalResult.execResult.errorMessage)
        goalResult.execResult.stackTrace.foreach( st => println( " YYY " + st))
      }
      goalResult.state == GoalState.Success
    }
    *
    */
    /**
    "Run a Hive goal" in {
      val vars: List[Variable[_]] = List(hour, runDate)
      val partExist = PartitionExists( HiveTable("sqoop_test", "page_view_log"))
      val fanOut = FanOutGoal( partExist, hour, ( 0 to 23 ).map( _.toString ) )
      val witness = Witness((runDate -> "20140522"))
      val goalResult = Satisfaction.satisfyGoal( fanOut, witness)
      goalResult.state == GoalState.Success
    }
    *
    */
  }
}
} | jeromebanks/satisfaction | modules/hive-ms/src/test/scala/satisfaction/hadoop/hive/ms/PartitionExistsSpec.scala | Scala | apache-2.0 | 3,903 |
package org.tuubes.core.tasks
import scala.language.implicitConversions
/**
* A function T1,T2 => R that declares throwing a Throwable. The benefit over the standard
* [[scala.Function2]] is that `ThrowableFunction2` can be used from java without try-catch and
* is a type different from [[scala.Function2]], so we can have two methods.
*
* @author TheElectronWill
*/
/**
 * A function T1,T2 => R that declares throwing a Throwable. The benefit over the standard
 * [[scala.Function2]] is that `ThrowableFunction2` can be used from java without try-catch and
 * is a type different from [[scala.Function2]], so we can have two methods.
 *
 * @author TheElectronWill
 */
trait ThrowableFunction2[-T1, -T2, +R] {
  /**
   * Applies this function to the given arguments.
   *
   * @throws Throwable if an error occurs
   */
  @throws[Throwable]
  def apply(v1: T1, v2: T2): R
}

object ThrowableFunction2 {
  /** Wraps a plain [[scala.Function2]] as a [[ThrowableFunction2]]. */
  implicit def fromFunction2[T1, T2, R](f: (T1, T2) => R): ThrowableFunction2[T1, T2, R] =
    new ThrowableFunction2[T1, T2, R] {
      override def apply(v1: T1, v2: T2): R = f(v1, v2)
    }

  /** Views a [[ThrowableFunction2]] as a plain [[scala.Function2]]. */
  implicit def toFunction2[T1, T2, R](tf: ThrowableFunction2[T1, T2, R]): (T1, T2) => R =
    (v1, v2) => tf(v1, v2)
}
| mcphoton/Photon-Server | core/src/main/scala/org/tuubes/core/tasks/ThrowableFunction2.scala | Scala | lgpl-3.0 | 755 |
package ua.kata
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
/** Bowling-game kata spec: each test starts from a fresh [[Game]] (see beforeEach) and
  * scores a full 20-roll game, checking gutter, all-ones, spare-bonus, strike-bonus and
  * perfect-game totals.
  */
class GameTest extends FlatSpec with Matchers with BeforeAndAfterEach {
  private var game: Game = _
  override def beforeEach(): Unit = game = new Game
  // pimps Game with the bulk-roll helpers defined on TestGame below
  private implicit def toTestGame(game: Game): TestGame = new TestGame(game)
  "Gutter game" should "have score 0" in {
    game.rollMany(20, 0)
    game.score() should be (0)
  }
  "All ones game" should "have score 20" in {
    game.rollMany(20, 1)
    game.score() should be (20)
  }
  "Game with one spare" should "have bonus equals to the next roll after spare frame" in {
    game.rollSpare()
    game.roll(3)
    game.rollMany(17 ,0)
    // frame 1: 10 + 3 (bonus), frame 2: 3 => 16
    game.score() should be (16)
  }
  "Game with one strike" should "have bonus equals to the two next rolls after strike frame" in {
    game.rollStrike()
    game.roll(4)
    game.roll(3)
    game.rollMany(16, 0)
    // frame 1: 10 + 4 + 3 (bonus), frame 2: 7 => 24
    game.score() should be (24)
  }
  "Perfect game" should "have score 300" in {
    game.rollMany(12, 10)
    game.score() should be (300)
  }
}
/** Enrichment of [[Game]] with bulk-roll helpers used by the spec above. */
private class TestGame(game: Game) {
  /** Rolls `times` times, knocking down `pin` pins on each roll. */
  def rollMany(times: Int, pin: Int): Unit =
    (1 to times).foreach(_ => game.roll(pin))
  /** Rolls a spare (4 then 6). */
  def rollSpare(): Unit = {
    game.roll(4)
    game.roll(6)
  }
  /** Rolls a strike (all 10 pins). */
  def rollStrike(): Unit = game.roll(10)
} | Alex-Diez/Scala-TDD-Katas | bowling_game_kata/bowling_game_day_7/src/test/scala/ua/kata/GameTest.scala | Scala | mit | 1,287 |
package de.alog
package load
import Messages._
import Internal._
import parser.Messages._
import util.Helpers
import akka.actor._
import akka.dispatch._
import akka.pattern._
import scala.io._
import scala.collection._
import scala.concurrent._
import duration._
import scala.util._
import java.security.MessageDigest
import java.io.IOException
import java.util.concurrent.Executors
import java.net.URI
import com.jcraft.jsch._
import akka.util.Timeout
/** FSM worker that reads one log file per work item. While Idle it pulls LogRequests from
  * the work source; while Busy it reads the file (locally or via scp through `loaderSvc`),
  * drops lines already seen (tracked by a per-line rolling hash "read mark"), forwards new
  * lines to `rcv` as a RawMessage, and reports completion back to the requestor. A timer
  * bounds each work item; on timeout or failure the request is completed as failed.
  */
class AppLogLoader(rcv:ActorRef, loaderSvc:ActorRef) extends Actor with FSM[LogLoaderState, LogLoaderData] with ActorLogging{
  startWith(Idle, Uninitialized)
  when(Idle) {
    case Event(WorkAvailable, _) => stay replying GetWork
    case Event(item:LogRequest, _) => startWork(item)
    case Event(Completed(_), _) => stay // late completion after returning to Idle; ignore
  }
  when(Busy) {
    case Event(m @ Completed(LogRequest(_,_,_,rs)), WorkData(_,requestor)) =>
      log.debug(s"received completion notification with state ${rs.map(_.state).getOrElse("<unknown>")}, returning to idle")
      requestor ! m
      requestor ! GetWork
      goto(Idle) using Uninitialized
    case Event(WorkAvailable, _) => stay
    case Event(l:LogRequest, _) => stay replying Rejected(l) // already busy; bounce the request
    case Event(WorkTimeout, WorkData(le, _)) =>
      log.warning("processing timed out")
      // self-complete with a failed read state so the Busy->Idle path runs normally
      self ! Completed(le update ReadStateHelpers.failed("processing timed out")_)
      stay
  }
  // arm/disarm the per-item watchdog on state transitions
  onTransition {
    case Idle -> Busy =>
      setTimer("workTimeout", WorkTimeout, AppLogLoader.processTimeout)
    case Busy -> Idle =>
      cancelTimer("workTimeout")
  }
  /** Kicks off asynchronous processing of `l` and transitions to Busy. */
  def startWork(l:LogRequest) = {
    import AppLogLoader._
    implicit val ec = context.system.dispatchers.defaultGlobalDispatcher
    Future firstCompletedOf (
      (
        readLines(l.file, loaderSvc).map { full =>
          log.debug(s"about to read ${l.file}, known state is ${l.recentState}")
          // resume after the stored read mark; if the mark is gone (file rotated), re-read everything
          val res = l.recentState match {
            case Some(ReadState(Some(readMark),_,_,_,_)) =>
              full dropWhile(e => !(e._2 sameElements readMark)) match {
                case Nil => full
                case some => some drop 1
              }
            case _ => full
          }
          (res.lastOption.map(_._2).orElse(l.recentState.flatMap(_.readMark)).orElse(Some(Array[Byte](0))), res.map(_._1))
        }(ioExecutor) map {
          case (readmark:Option[Array[Byte]], msg:Seq[String]) =>
            val newLogState = l update ReadStateHelpers.succeeded(readmark)_
            val rm = parser.Messages.RawMessage(l.labels, msg, Some(self.actorRef, Completed(newLogState)))
            rcv ! rm
            log.debug(s"Sent ${rm.msgs.length} new messages over the wire")
        }
      ) ::
      // race the read against a timeout so a stuck read cannot block completion handling
      after(processTimeout, context.system.scheduler) {
        Future failed TimeoutException()
      } :: Nil
    ) andThen {
      case Failure(e) =>
        log.error(e, s"uncompleted work ${l}: ${e.getMessage}")
        self ! Completed(l update ReadStateHelpers.failed(e.getMessage)_)
    }
    goto(Busy) using(WorkData(l, sender))
  }
  initialize
}
/** Companion: tuning constants, the dedicated blocking-IO executor and the file reader. */
object AppLogLoader extends Helpers {
  // watchdog deadline per work item
  val processTimeout = 60 seconds
  // how many timestamps to keep in a ReadState history
  val historyLogsize = 8
  // fixed pool for blocking file/scp reads, kept off the actor dispatcher
  val ioExecutor:ExecutionContextExecutor =
    ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(4))
  // NOTE(review): this value appears unused — candidate for removal; verify before deleting
  val x = ThreadPoolConfig(corePoolSize=1, maxPoolSize=4)
  def props(rcv:ActorRef, loaderSvc:ActorRef): Props = Props(new AppLogLoader(rcv, loaderSvc))
  implicit val codec = Codec.ISO8859
  /** Reads the log file (via the scp loader service for scp:// URIs, otherwise from the local
    * filesystem) and pairs each line with a rolling hash over all lines so far (the read mark).
    */
  private def readLines(logFile:String, loaderSvc:ActorRef): Future[Seq[(String, Array[Byte])]] = {
    def hashAlong(b:(String, Array[Byte]), s:String) = (s, hash(b._2, s.getBytes))
    implicit val ec = ioExecutor
    val source = Try(new URI(logFile)).filter(_.getScheme()=="scp").map { u =>
      implicit val to = Timeout(60 seconds)
      loaderSvc ? LoadRequest(u, codec) collect { case LoadResult(s) => s }
    } getOrElse Future { Source.fromFile(logFile).getLines.toSeq }
    // drop the seed element introduced by scanLeft
    source.map(
      _.scanLeft("", Array[Byte](0))(hashAlong).drop(1))
  }
}
/** Private FSM vocabulary for [[AppLogLoader]]: states, state data, and internal messages. */
object Internal {
  sealed trait LogLoaderState
  case object Idle extends LogLoaderState
  case object Busy extends LogLoaderState
  sealed trait LogLoaderData
  case object Uninitialized extends LogLoaderData
  // the work item currently being processed and who to notify on completion
  case class WorkData(logRequest:LogRequest, receiver:ActorRef) extends LogLoaderData
  case class TimeoutException() extends Exception
  // self-message fired by the watchdog timer
  case object WorkTimeout
}
/** Builders for the next ReadState after a successful or failed read attempt. */
object ReadStateHelpers {
  /** Successful read: state is "unchanged" (no history entry) when the new read mark equals
    * the previous one, otherwise "OK" with a new history timestamp.
    */
  def succeeded(mark:Option[Array[Byte]])(s:Option[ReadState]) = {
    val (msg, hist) = (mark, s) match {
      case (Some(m1), Some(ReadState(Some(m2), _,_,_,_))) if m1.sameElements(m2) => ("unchanged", false)
      case _ =>
        ("OK", true)
    }
    update(mark, false, msg, hist) (s)
  }
  /** Failed read: keep the previous read mark, record the failure message, no history entry. */
  def failed(m:String)(s:Option[ReadState]) = update(s.flatMap(_.readMark), true, m, false)(s)
  /** Builds the new ReadState; `occurences` counts consecutive identical (failed, state)
    * outcomes, and the bounded history optionally gains a current timestamp.
    */
  private def update(newReadMark:Option[Array[Byte]], newFailed:Boolean, newState:String, historize:Boolean)(s:Option[ReadState]): ReadState = {
    ReadState(
      readMark=newReadMark.orElse(s.flatMap(_.readMark)).orElse(None),
      failed=newFailed,
      state=newState,
      occurences=s match {
        case Some(ReadState(_,oldFailed,oldState,i,_)) if oldFailed == newFailed && oldState == newState => i + 1
        case _ => 1
      },
      ((if (historize) List(System.currentTimeMillis) else Nil) ++ s.map(_.history).getOrElse(Nil)).take(AppLogLoader.historyLogsize)
    )
  }
} | eweinell/alog | alog/src/main/scala/de/alog/load/AppLogLoader.scala | Scala | apache-2.0 | 5,454 |
package cc.factorie.directed
import cc.factorie.util.FastLogging
import cc.factorie.variable._
import org.junit.Assert._
import org.junit.Test
import org.scalatest.junit.JUnitSuite
/**
 * Sanity test for sampling a sequence of discrete variables (PlatedDiscrete)
 * from a fixed proportions distribution.
 */
class TestPlatedDiscrete extends JUnitSuite with FastLogging {
  // Suppose we have a bunch of coins, we flip each coin and check the results
  object CoinDomain extends DiscreteDomain(2)
  object CoinSeqDomain extends DiscreteSeqDomain { def elementDomain = CoinDomain }
  class CoinSeq(num: Int) extends DiscreteSeqVariable(num) { def domain = CoinSeqDomain }

  @Test
  def testPlatedDiscrete(): Unit = {
    // 0 is tail, 1 is head
    // all coins have p(tail) = 0.6, p(head) = 0.4
    val p = new ProportionsVariable(new DenseProportions1(Array(0.6, 0.4)))
    // construct the directed model and flip coins from the given distribution
    implicit val model = DirectedModel()
    // fixed seed keeps the test deterministic
    implicit val random = new scala.util.Random(0)
    val cs = new CoinSeq(1000) :~ PlatedDiscrete(p)
    // check the generated sequence: empirical tail frequency should be close to 0.6
    val numTails = cs.intValues.filter(_ == 0).length
    assertEquals(0.6, numTails.toDouble / 1000, 0.01)
  }
}
| hlin117/factorie | src/test/scala/cc/factorie/directed/TestPlatedDiscrete.scala | Scala | apache-2.0 | 1,120 |
package com.sksamuel.elastic4s
import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionBuilder
import org.elasticsearch.index.query.functionscore.{ DecayFunctionBuilder, ScoreFunctionBuilder }
import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionBuilder
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder
import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionBuilder
import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionBuilder
import org.elasticsearch.index.query.functionscore.factor.FactorBuilder
import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionBuilder
import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction
/** @author Stephen Samuel */
/**
 * DSL entry points for building Elasticsearch function-score definitions.
 * Each method returns a definition wrapping the corresponding ScoreFunctionBuilder.
 */
trait ScoreDsl {
  // Pseudo-random score derived from the given seed.
  def randomScore(seed: Long) = new RandomScoreDefinition(seed)

  // Score computed by a custom script.
  def scriptScore(script: String) = new ScriptScoreDefinition(script)

  // Decay scores on `field`, centred at `origin`, shaped by `scale`.
  def gaussianScore(field: String, origin: String, scale: String) = new GaussianDecayScoreDefinition(field, origin, scale)

  def linearScore(field: String, origin: String, scale: String) = new LinearDecayScoreDefinition(field, origin, scale)

  def exponentialScore(field: String, origin: String, scale: String) = new ExponentialDecayScoreDefinition(field, origin, scale)

  // Multiplies the score by a constant factor.
  def factorScore(boost: Double) = new FactorScoreDefinition(boost)

  // Uses a document field's numeric value as a score factor.
  def fieldFactorScore(fieldName: String) = new FieldValueFactorDefinition(fieldName)
}
/** Multiplies the document score by a constant boost factor. */
class FactorScoreDefinition(boost: Double) extends ScoreDefinition[FactorScoreDefinition] {
  val builder = new FactorBuilder().boostFactor(boost.toFloat)
}

/**
 * Common behaviour of all score-function definitions: each wraps an Elasticsearch
 * ScoreFunctionBuilder plus an optional filter restricting which documents the
 * function applies to. The type parameter T is the concrete subtype, so `filter`
 * can return it for fluent chaining.
 */
trait ScoreDefinition[T] {
  val builder: ScoreFunctionBuilder
  var _filter: Option[FilterDefinition] = None

  def filter(filter: FilterDefinition): T = {
    this._filter = Option(filter)
    // By convention T is always the concrete subtype, so this cast is safe.
    this.asInstanceOf[T]
  }
}
/** Scores documents by a numeric field's value, optionally scaled and transformed. */
class FieldValueFactorDefinition(fieldName: String) extends ScoreDefinition[FieldValueFactorDefinition] {
  // (the `: String` ascription is redundant but harmless)
  override val builder = new FieldValueFactorFunctionBuilder(fieldName: String)

  /** Multiplier applied to the field value (Elasticsearch takes a float). */
  def factor(f: Double): this.type = {
    builder.factor(f.toFloat)
    this
  }

  /** Mathematical modifier applied to the field value. */
  def modifier(m: FieldValueFactorFunction.Modifier): this.type = {
    builder.modifier(m)
    this
  }
}

/** Reproducible pseudo-random score derived from the given seed. */
class RandomScoreDefinition(seed: Long) extends ScoreDefinition[RandomScoreDefinition] {
  val builder = new RandomScoreFunctionBuilder().seed(seed)
}
/** Scores documents with a custom script; parameters and language are forwarded to the builder. */
class ScriptScoreDefinition(script: String) extends ScoreDefinition[ScriptScoreDefinition] {

  val builder = new ScriptScoreFunctionBuilder().script(script)

  /** Registers one named script parameter; returns this definition for chaining. */
  def param(key: String, value: String): ScriptScoreDefinition = {
    builder.param(key, value)
    this
  }

  /** Registers every entry of `map` as a script parameter. */
  def params(map: Map[String, String]): ScriptScoreDefinition = {
    for ((key, value) <- map) param(key, value)
    this
  }

  /** Sets the scripting language used to interpret the script. */
  def lang(lang: String): ScriptScoreDefinition = {
    builder.lang(lang)
    this
  }
}
/**
 * Base for decay-based score definitions: shared `offset` and `decay`
 * configuration forwarded to the underlying DecayFunctionBuilder.
 */
abstract class DecayScoreDefinition[T] extends ScoreDefinition[T] {
  val builder: DecayFunctionBuilder

  /** Distance from the origin within which no decay is applied. */
  def offset(offset: Any): T = {
    builder.setOffset(offset.toString)
    this.asInstanceOf[T]
  }

  /** Decay factor applied at distance `scale` from the origin. */
  def decay(decay: Double): T = {
    builder.setDecay(decay)
    this.asInstanceOf[T]
  }
}

/** Gaussian decay centred at `origin`. */
class GaussianDecayScoreDefinition(field: String, origin: String, scale: String)
    extends DecayScoreDefinition[GaussianDecayScoreDefinition] {
  val builder = new GaussDecayFunctionBuilder(field, origin, scale)
}

/** Linear decay centred at `origin`. */
class LinearDecayScoreDefinition(field: String, origin: String, scale: String)
    extends DecayScoreDefinition[LinearDecayScoreDefinition] {
  val builder = new LinearDecayFunctionBuilder(field, origin, scale)
}

/** Exponential decay centred at `origin`. */
class ExponentialDecayScoreDefinition(field: String, origin: String, scale: String)
    extends DecayScoreDefinition[ExponentialDecayScoreDefinition] {
  val builder = new ExponentialDecayFunctionBuilder(field, origin, scale)
}
| maxcom/elastic4s | src/main/scala/com/sksamuel/elastic4s/scorers.scala | Scala | apache-2.0 | 3,871 |
package effectful.examples
import effectful.{CaptureTransform, _}
import effectful.examples.pure.uuid.UUIDs
package object pure {
  /**
   * Lifts a UUIDs service from effect type F to effect type G.
   * Only `gen()` is effectful and is routed through the CaptureTransform;
   * the remaining conversion methods are delegated unchanged.
   */
  implicit object LiftService_UUIDService extends LiftService[UUIDs] {
    override def apply[F[_], G[_]](
      s: UUIDs[F]
    )(implicit
      X: CaptureTransform[F,G]
    ) = {
      import UUIDs._
      new UUIDs[G] {
        // The only effectful operation: transform F[UUID] into G[UUID].
        override def gen() =
          X(s.gen())
        // Pure conversions: no effect to transform, delegate directly.
        override def fromBase64(str: String) =
          s.fromBase64(str)
        override def toBase64(uuid: UUID) =
          s.toBase64(uuid)
        override def fromString(str: String) =
          s.fromString(str)
        override def toString(uuid: UUID) =
          s.toString(uuid)
      }
    }
  }
}
| S-Mach/effectful | src/test/scala/effectful/examples/pure/package.scala | Scala | mit | 718 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.bg
import java.util.concurrent.TimeUnit
import akka.actor.{Actor, ActorRef, OneForOneStrategy, PoisonPill, Props}
import akka.kafka.{ConsumerSettings, KafkaConsumerActor}
import akka.stream.{ActorMaterializer, Supervision}
import ch.qos.logback.classic.LoggerContext
import cmwell.bg.Runner.logger
import cmwell.fts.FTSServiceNew
import cmwell.irw.IRWService
import cmwell.common.OffsetsService
import cmwell.common.ExitWithError
import cmwell.zstore.ZStore
import com.codahale.metrics.JmxReporter
import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging
import nl.grons.metrics4.scala.DefaultInstrumented
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.ByteArrayDeserializer
import org.elasticsearch.metrics.ElasticsearchReporter
import cmwell.common.exception._
import org.slf4j.LoggerFactory
import scala.concurrent.Await
import scala.concurrent.duration._
object CMWellBGActor {
  val name = "CMWellBGActor"

  /** Props factory wiring one BG actor to the given partition and its backing services. */
  def props(partition: Int,
            config: Config,
            irwService: IRWService,
            ftsService: FTSServiceNew,
            zStore: ZStore,
            offsetsService: OffsetsService) =
    Props(new CMWellBGActor(partition, config, irwService, ftsService, zStore, offsetsService))
}
/**
* Created by israel on 15/06/2016.
*/
/**
 * Supervises the two background processing streams (Imp and Indexer) for a
 * single partition: starts/stops them on demand, restarts a stream after it
 * reports failure, and backs off for `waitAfter503` seconds (state503) when
 * both streams report service unavailable.
 *
 * Fix: the "scheduling resume" debug message previously logged the literal
 * text "waitAfter503" because the `$` interpolation sigil was missing.
 */
class CMWellBGActor(partition: Int,
                    config: Config,
                    irwService: IRWService,
                    ftsService: FTSServiceNew,
                    zStore: ZStore,
                    offsetsService: OffsetsService)
    extends Actor
    with LazyLogging
    with DefaultInstrumented {

  // Streams are created lazily by startImp/startIndexer; null means "not running".
  var impStream: ImpStream = null
  var indexerStream: IndexerStream = null

  val waitAfter503 = config.getInt("cmwell.bg.waitAfter503")
  val impOn = config.getBoolean("cmwell.bg.ImpOn")
  val indexerOn = config.getBoolean("cmwell.bg.IndexerOn")

  // Metrics
  val bgMetrics = new BGMetrics
  val jmxReporter = JmxReporter.forRegistry(bgMetrics.metricRegistry).build()
  jmxReporter.start()

  val reportMetricsToES = config.getBoolean("cmwell.common.reportMetricsToES")
  logger.debug(s"report to es set to $reportMetricsToES")
  val esReporterOpt: Option[ElasticsearchReporter] = if (reportMetricsToES) {
    logger.debug(s"available ES nodes: ${ftsService.nodesHttpAddresses().mkString(",")}")
    Some(ElasticsearchReporter.forRegistry(metricRegistry).hosts(ftsService.nodesHttpAddresses(): _*).build())
  } else None

  esReporterOpt.foreach { esReporter =>
    logger.info("starting metrics ES Reporter")
    esReporter.start(10, TimeUnit.SECONDS)
  }

  override def preStart(): Unit = {
    logger.info(s"CMwellBGActor-$partition starting")
    super.preStart()
    // Kick off both streams as soon as the actor is up.
    self ! Start
  }

  override def postStop(): Unit = {
    logger.info(s"CMWellBGActor-$partition stopping")
    esReporterOpt.foreach(_.close())
    stopAll
    super.postStop()
  }

  // Child failures are logged and the child is resumed; stream-level restarts are
  // handled explicitly via the Imp503/Indexer503 messages below.
  override def supervisorStrategy = OneForOneStrategy() {
    case t: Throwable =>
      logger.error("Exception caught in supervisor. resuming children actors", t)
      akka.actor.SupervisorStrategy.Resume
  }

  implicit val system = context.system
  implicit val ec = context.dispatcher
  implicit val materializer = ActorMaterializer()

  override def receive: Receive = {
    case Start =>
      logger.info("requested to start all streams")
      startAll
      sender() ! Started
    case StartImp =>
      logger.info("requested to start Imp Stream")
      startImp
      sender() ! Started
    case StartIndexer =>
      logger.info("requested to start Indexer Stream")
      startIndexer
      sender() ! Started
    case Stop =>
      logger.info("requested to stop all streams")
      stopAll
      sender() ! Stopped
    case StopImp =>
      logger.info("requested to stop Imp Stream")
      stopImp
      sender() ! Stopped
    case StopIndexer =>
      logger.info("requested to stop Indexer Stream")
      stopIndexer
      sender() ! Stopped
    case ShutDown =>
      logger.info("requested to shutdown")
      stopAll
      logger.info("stopped all streams. taking the last pill....")
      self ! PoisonPill
    case All503 =>
      // Both streams hit service-unavailable: stop everything and retry later.
      logger.info("Got all503 message. becoming state503")
      context.become(state503)
      logger.debug("stopping all streams")
      stopAll
      // FIXED: was "[waitAfter503]" (missing $), which logged the variable name literally.
      logger.debug(s"became state503. scheduling resume in [$waitAfter503] seconds")
      context.system.scheduler.scheduleOnce(waitAfter503.seconds, self, Resume)
    case Indexer503 =>
      logger.error("Indexer Stopped with Exception. check indexer log for details. Restarting indexer.")
      stopIndexer
      startIndexer
    case Imp503 =>
      logger.error("Imp stopped with exception. check imp log for details. Restarting imp.")
      stopImp
      startImp
    case ExitWithError =>
      logger.error(s"Requested to exit with error by ${sender()}")
      System.exit(1)
  }

  // Back-off state: ignore everything except resume/shutdown/exit requests.
  def state503: Receive = {
    case Resume =>
      logger.info("accepted Resume message")
      context.become(receive)
      logger.info(s"became normal and sending Start message to myself")
      self ! Start
    case ResumeIndexer =>
      self ! StartIndexer
      context.become(receive)
    case ShutDown =>
      logger.info("requested to shutdown")
      stopAll
      logger.info("stopped all streams. taking the last pill....")
      self ! PoisonPill
    case ExitWithError =>
      logger.error(s"Requested to exit with error by ${sender()}")
      System.exit(1)
    case x => logger.debug(s"got $x in state503 state, ignoring!!!!")
  }

  // NOTE(review): unlike stopAll, this does not null-check the streams and will NPE
  // when called before both streams were started.
  def shutdown = {
    indexerStream.shutdown
    impStream.shutdown
  }

  /** Starts the Imp stream if enabled and not already running. */
  private def startImp = {
    if (impOn) {
      if (impStream == null) {
        logger.info("starting ImpStream")
        impStream = new ImpStream(partition, config, irwService, zStore, ftsService, offsetsService, self, bgMetrics)
      } else
        logger.warn("requested to start Imp Stream but it is already running. doing nothing.")
    }
  }

  /** Starts the Indexer stream if enabled and not already running. */
  private def startIndexer = {
    if (indexerOn) {
      if (indexerStream == null) {
        logger.info("starting IndexerStream")
        indexerStream = new IndexerStream(partition, config, irwService, ftsService, offsetsService, self)
      } else
        logger.warn("requested to start Indexer Stream but it is already running. doing nothing.")
    }
  }

  private def startAll = {
    startImp
    startIndexer
  }

  /**
   * Stop the Imp Stream. If already stopped, will do nothing.
   */
  private def stopImp = {
    if (impStream != null) {
      impStream.shutdown
      logger.info("stopped imp stream")
    } else
      logger.info("Imp Stream was already stopped")
    impStream = null
  }

  /** Stop the Indexer Stream. If already stopped, will do nothing. */
  private def stopIndexer = {
    if (indexerStream != null) {
      indexerStream.shutdown
      logger.info("stopped Indexer Stream")
    } else
      logger.info("Indexer Stream was already stopped")
    indexerStream = null
  }

  private def stopAll = {
    stopIndexer
    stopImp
  }
}
// Control messages understood by CMWellBGActor, and their acknowledgements.

// Lifecycle commands and replies.
case object Start
case object Started
case object StartImp
case object ImpStarted
case object StartIndexer
case object IndexerStarted
case object Stop
case object Stopped
case object StopImp
case object ImpStopped
case object StopIndexer
case object IndexerStopped
case object ShutDown

// Service-unavailable notifications from the streams.
case object All503
case object Indexer503
case object Imp503
case object State503

// Back-off handling.
case object Resume
case object ResumeIndexer
case object Suspend
/** Maps infoton UUIDs to the Elasticsearch indices they are stored in. */
trait ESIndicesMapping {

  /**
   * gets relevant indices for given Infoton's UUID (empty when unknown)
   */
  def indicesForUuid(uuid: String): Iterable[String]
}

/** Immutable, map-backed [[ESIndicesMapping]]; unknown UUIDs map to no indices. */
class SimpleESIndicesMapping(mapping: Map[String, Iterable[String]]) extends ESIndicesMapping {

  /**
   * gets relevant indices for given Infoton's UUID (empty when unknown)
   */
  override def indicesForUuid(uuid: String): Iterable[String] =
    // Map.getOrElse replaces the more roundabout mapping.get(uuid).getOrElse(...)
    mapping.getOrElse(uuid, Iterable.empty)
}
/** Well-known matchers for exceptions raised by the storage backends. */
object BGIdentifiedException {
  /** Matches Cassandra driver exceptions (IRW layer). */
  val IRWRelated = BGIdentifiedException("com.datastax.driver.core.exceptions")
  /** Matches Elasticsearch exceptions (FTS layer). */
  val FTSRelated = BGIdentifiedException("org.elasticsearch")
}

/** Extractor recognising a Throwable by the package prefix of its class name. */
case class BGIdentifiedException(prefix: String) {
  def unapply(t: Throwable): Boolean = {
    val className = t.getClass.getName
    className.startsWith(prefix)
  }
}
| hochgi/CM-Well | server/cmwell-bg/src/main/scala/cmwell/bg/CMWellBGActor.scala | Scala | apache-2.0 | 8,791 |
package com.datastax.spark.connector.writer
import org.junit.Assert._
import org.junit.Test
/** Unit tests for PropertyExtractor: reading properties of an object by name. */
class PropertyExtractorTest {

  // Simple class exposing two readable properties.
  class TestClass(val field1: String, val field2: Int)

  @Test
  def testSimpleExtraction() {
    val testObject = new TestClass("a", 1)
    val propertyExtractor = new PropertyExtractor(classOf[TestClass], Seq("field1", "field2"))
    val result = propertyExtractor.extract(testObject)
    // values come back in the order the property names were given
    assertEquals(2, result.size)
    assertEquals("a", result(0))
    assertEquals(1, result(1))
  }

  @Test
  def testAvailableProperties() {
    // only names that actually exist on the class are returned
    val triedProperties = Seq("field1", "foo", "bar")
    val availableProperties = PropertyExtractor.availablePropertyNames(classOf[TestClass], triedProperties)
    assertEquals(Seq("field1"), availableProperties)
  }

  @Test(expected = classOf[NoSuchMethodException])
  def testWrongPropertyName() {
    // extracting an unknown property name must fail with NoSuchMethodException
    val testObject = new TestClass("a", 1)
    val propertyExtractor = new PropertyExtractor(classOf[TestClass], Seq("foo"))
    propertyExtractor.extract(testObject)
  }
}
| Stratio/spark-cassandra-connector | spark-cassandra-connector/src/test/scala/com/datastax/spark/connector/writer/PropertyExtractorTest.scala | Scala | apache-2.0 | 1,034 |
package org.orbeon.oxf.xml
import org.orbeon.saxon.model.BuiltInAtomicType
import org.orbeon.saxon.om._
import org.orbeon.saxon.trans.XPathException
import org.orbeon.saxon.value._
import java.math.{BigDecimal, BigInteger}
object SaxonUtilsDependsOnXPath extends SaxonUtilsDependsOnXPathTrait {

  // Converts a supported Java/Scala value into a Saxon Item.
  val anyToItem: Any => Item = convertToItem

  // Same conversion, but values that already are Items pass through untouched.
  val anyToItemIfNeeded: Any => Item = {
    case i: Item => i
    case a => anyToItem(a)
  }

  // Custom conversion for now XXX FIXME: we only care about types we use
  private def convertToItem(value: Any): Item =
    value match {
      case v: Boolean => BooleanValue.get(v)
      case v: Byte => new Int64Value(v.toLong, BuiltInAtomicType.BYTE, false)
      case v: Float => new FloatValue(v)
      case v: Double => new DoubleValue(v)
      case v: Integer => new Int64Value(v.toLong, BuiltInAtomicType.INT, false)
      case v: Long => new Int64Value(v, BuiltInAtomicType.LONG, false)
      case v: Short => new Int64Value(v.toLong, BuiltInAtomicType.SHORT, false)
      case v: String => StringValue.makeStringValue(v)
      case v: BigDecimal => new BigDecimalValue(v)
      case v: BigInteger => new BigIntegerValue(v)
      case v: Array[Byte] => new HexBinaryValue(v)
      // any other type is unsupported and rejected
      case _ => throw new XPathException("Java object cannot be converted to an XQuery value")
    }
}
| orbeon/orbeon-forms | core-cross-platform/js/src/main/scala/org/orbeon/oxf/xml/SaxonUtilsDependsOnXPath.scala | Scala | lgpl-2.1 | 1,395 |
/*
* StarFactory.scala
* Description needed
*
* Created By: Glenn Takata (gtakata@cra.com)
* Creation Date: Dec 15, 2014
*
* Copyright 2014 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.algorithm.factored.factors.factory
import com.cra.figaro.language._
import com.cra.figaro.algorithm.factored.factors._
import com.cra.figaro.algorithm.lazyfactored._
/**
* A Sub-Factory to make Star Factors from arbitrary elements
*/
object StarFactory {

  /**
   * Make a Star factor from an Element.
   *
   * The element's value range must consist solely of Star (the unexpanded lazy
   * value); the resulting factor has exactly one entry, whose probability is 1.0.
   */
  def makeStarFactor[T](elem: Element[T]): List[Factor[Double]] = {
    val elemVar = Variable(elem)
    require(elemVar.range.size == 1 && elemVar.range(0) == Star[T], "Trying to create a star factor from a value set that is not only star")
    // A factor over just this variable, with its single (Star) entry set to 1.0.
    val factor = new BasicFactor[Double](List(), List(elemVar))
    factor.set(List(0), 1.0)
    List(factor)
  }
} | agarbuno/figaro | Figaro/src/main/scala/com/cra/figaro/algorithm/factored/factors/factory/StarFactory.scala | Scala | bsd-3-clause | 1,125 |
/*
* Copyright 2014 Kate von Roeder (katevonroder at gmail dot com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.itsdamiya.legendary.models
/** Associates an authenticated [[User]] with the auth token issued for the session. */
class UserSession(val user: User, val authToken: String) extends Serializable {
}
| Damiya/legendary | Legendary-Core/app/com/itsdamiya/legendary/models/UserSession.scala | Scala | apache-2.0 | 755 |
package models
import java.util.regex.Pattern
import java.util.Locale
/**
 * A single ORDER BY clause: a column name plus a sort direction.
 * The column name may only contain letters, digits, '.', '_' and spaces.
 */
case class OrderBy(columnName: String, order: Order) {
  require(OrderBy.OrderByPattern.matcher(columnName).matches)
  require(order != null)

  /** Same column, opposite sort direction. */
  def invert = copy(order = order.invert)

  override def toString = s"$columnName $order"
}

object OrderBy {
  /** Characters permitted in a column name. */
  val OrderByPattern = Pattern.compile("[a-zA-Z0-9._ ]+")

  /**
   * Parses a specification such as "name desc" or "name"; a missing direction
   * defaults to ascending. Note the (historical) asymmetry: the column name is
   * lower-cased only when an explicit direction is present.
   */
  def apply(columnNameSpec: String): OrderBy =
    columnNameSpec.split("[ ]+") match {
      case Array(column, direction) => OrderBy(column.toLowerCase(Locale.ROOT), Order(direction))
      case parts => OrderBy(parts(0), Asc)
    }
}

/** Sort direction of an ORDER BY clause. */
sealed abstract class Order {
  def invert: Order
}

object Order {
  /** "ASC" (any case, surrounding blanks ignored) parses as ascending; anything else as descending. */
  def apply(s: String): Order =
    if (s.trim.toUpperCase == Asc.toString) Asc else Desc
}

case object Asc extends Order {
  override def invert = Desc
  override def toString = "ASC"
}

case object Desc extends Order {
  override def invert = Asc
  override def toString = "DESC"
}
| ruimo/store2 | app/models/OrderBy.scala | Scala | apache-2.0 | 978 |
package org.datacleaner.visualization
import javax.inject.Inject
import javax.inject.Named
import org.datacleaner.api.Analyzer
import org.datacleaner.api.Configured
import org.datacleaner.api.Provided
import org.datacleaner.api.InputColumn
import org.datacleaner.api.InputRow
import org.datacleaner.api.Configured
import org.datacleaner.api.Provided
import org.datacleaner.api.Description
import org.datacleaner.api.Categorized
import org.datacleaner.storage.RowAnnotationFactory
import scala.collection.mutable.Map
import org.datacleaner.util.LabelUtils
object ScatterAnalyzer {
  // Configured-property names, shared with the configuration/UI layer.
  final val PROPERTY_VARIABLE1 = "Variable1"
  final val PROPERTY_VARIABLE2 = "Variable2"
  final val PROPERTY_GROUP_COLUMN = "Group column"
}

@Named("Scatter plot")
@Description("Plots the occurences of two number variables in a scatter plot chart. A useful visualization for identifying outliers in numeric data relationships.")
@Categorized(Array(classOf[VisualizationCategory]))
class ScatterAnalyzer extends Analyzer[ScatterAnalyzerResult] {

  @Inject
  @Configured(value = ScatterAnalyzer.PROPERTY_VARIABLE1)
  @Description("The field with the first variable. Will be plotted on the horizontal X-axis.")
  var variable1: InputColumn[Number] = null;

  @Inject
  @Configured(value = ScatterAnalyzer.PROPERTY_VARIABLE2)
  @Description("The field with the second variable. Will be plotted on the vertical Y-axis.")
  var variable2: InputColumn[Number] = null;

  // Optional: when set, points are grouped into one series per distinct value.
  @Inject
  @Configured(value = ScatterAnalyzer.PROPERTY_GROUP_COLUMN, required = false)
  var groupColumn: InputColumn[_] = null;

  @Inject
  @Provided
  var rowAnnotationFactory: RowAnnotationFactory = null;

  // Groups keyed by display label. The default function both creates the group on
  // first access AND registers it in the (mutable) map — a self-filling default.
  val groups: Map[String, ScatterGroup] = Map[String, ScatterGroup]().withDefault(
    groupName => {
      val group = new ScatterGroup(groupName, rowAnnotationFactory)
      groups.put(groupName, group)
      group
    });

  // Records one (x, y) observation per row; rows with a null coordinate are skipped.
  override def run(row: InputRow, distinctCount: Int) = {
    val value1 = row.getValue(variable1);
    val value2 = row.getValue(variable2);
    if (value1 != null && value2 != null) {
      // Without a group column all points land in the single "Observations" series.
      val groupNameValue = if (groupColumn == null) "Observations" else row.getValue(groupColumn)
      val groupName = LabelUtils.getValueLabel(groupNameValue)
      val point = (value1, value2);
      val group = groups(groupName);
      group.register(point, row, distinctCount);
    }
  }

  override def getResult: ScatterAnalyzerResult = {
    val groupList = groups.values.toList;
    new ScatterAnalyzerResult(groupList, variable1, variable2, groupColumn);
  }
} | anandswarupv/DataCleaner | components/visualization/src/main/scala/org/datacleaner/visualization/ScatterAnalyzer.scala | Scala | lgpl-3.0 | 2,590 |
package name.abhijitsarkar.scauth
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import name.abhijitsarkar.scauth.model.OAuthCredentials
import name.abhijitsarkar.scauth.service.TwitterSearchService
import name.abhijitsarkar.scauth.util.ActorPlumbing
import scala.concurrent.ExecutionContext
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Success
/** Command-line demo: searches Twitter with the supplied OAuth consumer credentials. */
object TwitterApp extends App {
  implicit val system = ActorSystem("twitter")
  implicit val materializer = ActorMaterializer()
  // Resolves to the globally imported ExecutionContext.Implicits.global.
  implicit val executionContext: ExecutionContext = { implicitly }

  require(args.size == 2, "Usage: TwitterApp <consumerKey> <consumerSecret>")

  private val consumerKey = args(0).trim
  private val consumerSecret = args(1).trim

  val oAuthCredentials = OAuthCredentials(consumerKey, consumerSecret)

  implicit val actorPlumbing: ActorPlumbing = ActorPlumbing()

  val twitterService = new TwitterSearchService(oAuthCredentials)

  // Fire the search and print the outcome; failures are reported, not rethrown.
  // NOTE(review): the actor system is never terminated, so the JVM stays alive.
  val searchResults = twitterService.search("@narendramodi")

  searchResults.onComplete {
    _ match {
      case Success(results) => println(results)
      case _ => println("Bad Twitter!")
    }
  }
} | asarkar/akka | scauth/src/test/scala/name/abhijitsarkar/scauth/TwitterApp.scala | Scala | gpl-3.0 | 1,166 |
import scala.quoted.*
// Negative compilation test: each splice below inserts a value that is not an
// Expr into a quote, so every `${...}` line must be rejected by the compiler
// (the inline `// error` markers are load-bearing for the test harness).
class Foo {
  def test(using Quotes) = '{
    ${3} // error
    ${new Object} // error
    ${"abc"} // error
    ${()} // error
    ${new Foo} // error
  }

  // NOTE(review): presumably verifies that a member named `unary_$` does not
  // interfere with splice syntax — confirm against the test's expectations.
  def unary_$ : Int = 9
}
| dotty-staging/dotty | tests/neg-macros/splice-non-expr.scala | Scala | apache-2.0 | 205 |
/**
* Copyright 2013 Gianluca Amato
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty ofa
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.ui
/**
 * One selectable instance of an enumerative parameter.
 *
 * @tparam V the type of the instance
 * @param value the instance itself
 * @param name name of the value
 * @param description description of the value
 */
case class ParameterValue[+V](value: V, name: String, description: String)
| rubino22/JDBeta | core/src/main/scala/it/unich/jandom/ui/ParameterValue.scala | Scala | lgpl-3.0 | 1,071 |
/*
* Copyright (c) 2018. Fengguo Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0
* which accompanies this distribution, and is available at
* https://www.apache.org/licenses/LICENSE-2.0
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.jawa.core.compiler.compile.io
import java.io.IOException
/** Helpers for converting thrown exceptions into values or re-labelled exceptions. */
object ErrorHandling
{
  /** Runs `f`, rethrowing any IOException/Exception wrapped with the contextual message `msg`. */
  def translate[T](msg: => String)(f: => T): T =
    try { f }
    catch {
      case e: IOException => throw new TranslatedIOException(msg + e.toString, e)
      case e: Exception => throw new TranslatedException(msg + e.toString, e)
    }

  /**
   * Runs `f`, capturing most failures as Left. Clause order matters:
   * StackOverflowError is deliberately captured (matched before the fatal cases),
   * while other ThreadDeath/VirtualMachineError failures are rethrown.
   */
  def wideConvert[T](f: => T): Either[Throwable, T] =
    try { Right(f) }
    catch
    {
      case ex @ (_: Exception | _: StackOverflowError) => Left(ex)
      case err @ (_: ThreadDeath | _: VirtualMachineError) => throw err
      case x: Throwable => Left(x)
    }

  /** Runs `f`, capturing only Exceptions as Left; Errors still propagate. */
  def convert[T](f: => T): Either[Exception, T] =
    try { Right(f) }
    catch { case e: Exception => Left(e) }

  /**
   * Compact rendering of a Throwable: a plain RuntimeException (exact class only)
   * shows just its message when present; everything else uses the default toString.
   */
  def reducedToString(e: Throwable): String =
    if(e.getClass == classOf[RuntimeException])
    {
      val msg = e.getMessage
      if(msg == null || msg.isEmpty) e.toString else msg
    }
    else
      e.toString
}

/** Exception whose toString is just the contextual message (cause preserved for stack traces). */
sealed class TranslatedException private[io](msg: String, cause: Throwable) extends RuntimeException(msg, cause) {
  override def toString: String = msg
}

/** Variant of TranslatedException carrying an IOException cause. */
final class TranslatedIOException private[io](msg: String, cause: IOException) extends TranslatedException(msg, cause)
| arguslab/Argus-SAF | jawa/src/main/scala/org/argus/jawa/core/compiler/compile/io/ErrorHandling.scala | Scala | apache-2.0 | 1,579 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.server
import akka.util.ByteString
import org.ensime.api._
import org.ensime.server.protocol.swank._
import org.ensime.sexp._
/**
 * SWANK wire protocol: length-framed strings containing s-expressions,
 * (de)serialised with the SwankFormats marshallers.
 */
class SwankProtocol extends FramedStringProtocol {
  import SwankFormats._

  // Decodes at most one framed message, returning the unconsumed remainder.
  override def decode(bytes: ByteString): (Option[RpcRequestEnvelope], ByteString) = {
    tryReadString(bytes) match {
      case (Some(message), remainder) =>
        val parsedMessage = message.parseSexp.convertTo[RpcRequestEnvelope]
        (Some(parsedMessage), remainder)
      case (None, remainder) =>
        // not enough bytes for a complete frame yet
        (None, remainder)
    }
  }

  // Encodes a response as a pretty-printed s-expression inside a frame.
  override def encode(resp: RpcResponseEnvelope): ByteString = writeString(resp.toSexp.prettyPrint)
}
| d1egoaz/ensime-sbt | src/sbt-test/sbt-ensime/ensime-server/server/src/main/scala/org/ensime/server/SwankProtocol.scala | Scala | apache-2.0 | 802 |
package collins.provisioning
import play.api.Logger
import collins.guava.GuavaCacheFactory
import collins.models.Asset
import collins.shell.Command
import collins.shell.CommandResult
/** Interface to the machine-provisioning subsystem. */
trait Provisioner {
  protected[this] val logger = Logger(getClass)

  /** All known provisioning profiles. */
  def profiles: Set[ProvisionerProfile]

  /** Whether the asset's status and type permit provisioning. */
  def canProvision(asset: Asset): Boolean

  /** Executes the provisioning command for the request. */
  def provision(request: ProvisionerRequest): CommandResult

  /** Executes the (optional) pre-flight check command for the request. */
  def test(request: ProvisionerRequest): CommandResult

  /** Looks a profile up by its identifier. */
  def profile(id: String): Option[ProvisionerProfile] = {
    profiles.find(_.identifier == id)
  }

  /** Builds a request for the given profile id, or None when the id is unknown. */
  def makeRequest(token: String, id: String, notification: Option[String] = None, suffix: Option[String] = None): Option[ProvisionerRequest] = {
    profile(id).map { p =>
      ProvisionerRequest(token, p, notification, suffix)
    }
  }
}
object Provisioner extends Provisioner {
  // Profiles are loaded from the configured file and cached per the Guava cache spec.
  protected[this] val profileCache =
    GuavaCacheFactory.create(ProvisionerConfig.cacheSpecification, ProfileLoader())

  // overrides ProvisionerInterface.profiles
  override def profiles: Set[ProvisionerProfile] = {
    profileCache.get(ProvisionerConfig.profilesFile)
  }

  // overrides ProvisionerInterface.canProvision
  override def canProvision(asset: Asset): Boolean = {
    ProvisionerConfig.allowedStatus(asset.statusId) && ProvisionerConfig.allowedType(asset.assetTypeId)
  }

  // overrides ProvisionerInterface.provision
  override def provision(request: ProvisionerRequest): CommandResult = {
    val result = runCommand(command(request, ProvisionerConfig.command))
    if (result.exitCode != 0) {
      logger.warn("Command executed: %s".format(command(request, ProvisionerConfig.command)))
      logger.warn("Command code: %d, output %s".format(result.exitCode, result.stdout))
    }
    result
  }

  override def test(request: ProvisionerRequest): CommandResult = {
    // NOTE(review): catching Throwable (plus `return` inside the catch) treats ANY
    // failure of `command(...)` as "no check command specified" — consider a
    // narrower catch (e.g. NonFatal) so real errors are not masked.
    val cmd = try command(request, ProvisionerConfig.checkCommand) catch {
      case _: Throwable => return CommandResult(0,"No check command specified")
    }
    val result = runCommand(cmd)
    if (result.exitCode != 0) {
      logger.warn("Command code: %d, output %s".format(result.exitCode, result.stdout))
    }
    result
  }

  protected def runCommand(cmd: String): CommandResult = {
    Command(Seq(cmd), logger).run()
  }

  // Expands the placeholder tokens of the configured command template.
  protected def command(request: ProvisionerRequest, cmdString: Option[String]): String = {
    cmdString.map { cmd =>
      cmd.replace("<tag>", request.token)
        .replace("<profile-id>", request.profile.identifier)
        .replace("<notify>", request.notification.getOrElse(""))
        .replace("<suffix>", request.suffix.filter(_ => request.profile.allow_suffix).getOrElse(""))
        .replace("<logfile>", getLogLocation(request))
    }.getOrElse {
      throw new Exception("provisioner.command must be specified")
    }
  }

  // Log file lives in the system temp dir, named after the sanitised tag and the profile.
  private def getLogLocation(request: ProvisionerRequest): String = {
    val tmpDir = System.getProperty("java.io.tmpdir", "/tmp").stripSuffix("/")
    val filename = request.token.replaceAll("[^a-zA-Z0-9\\\\-]", "") + '-' + request.profile.identifier
    tmpDir + "/" + filename + ".log"
  }
} | byxorna/collins | app/collins/provisioning/Provisioner.scala | Scala | apache-2.0 | 3,091 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import com.google.inject.Inject
import controllers.SimpleValidator._
import fetch.{FetchController, SardineWrapper}
import ingest.{IngestController, IngestControllerHelper}
import play.api.mvc.{Action, AnyContent}
import services.exec.{Continuer, WorkQueue}
import services.model.{StateModel, StatusLogger}
import uk.gov.hmrc.address.services.es.IndexMetadata
import uk.gov.hmrc.address.services.writers.{Algorithm, WriterSettings}
import uk.gov.hmrc.play.microservice.controller.BaseController
/** Products whose latest drops are searched for in automatic mode. */
object KnownProducts {
  val OSGB = List("abi", "abp")
}

/**
 * Orchestrates the fetch -> ingest -> switchover pipeline, either for an
 * explicitly named product/epoch/variant or automatically for the latest drop
 * of each known product found on the remote server.
 */
class GoController @Inject()(logger: StatusLogger,
                             worker: WorkQueue,
                             sardine: SardineWrapper,
                             fetchController: FetchController,
                             ingestController: IngestController,
                             esSwitchoverController: SwitchoverController,
                             esIndexController: IndexController,
                             indexMetadata: IndexMetadata) extends BaseController {

  // Finds the latest drop of every known product and pipelines each in turn.
  def doGoAuto(target: String,
               bulkSize: Option[Int], loopDelay: Option[Int]): Action[AnyContent] = Action {
    require(IngestControllerHelper.allowedTargets.contains(target))
    val settings = IngestControllerHelper.settings(bulkSize, loopDelay, Algorithm.default)
    worker.push(s"automatically searching and loading to $target", {
      continuer =>
        val tree = sardine.exploreRemoteTree
        // stop early if the work queue is no longer busy (e.g. cancelled)
        for (product <- KnownProducts.OSGB
             if continuer.isBusy) {
          val found = tree.findLatestFor(product)
          if (found.isDefined) {
            val model = StateModel(found.get)
            pipeline(target, model, settings, continuer)
          }
        }
        if (continuer.isBusy) {
          target match {
            case "es" => esIndexController.cleanup()
            case _ => // no action
          }
          fetchController.cleanup()
        }
    })
    Accepted
  }

  // Pipelines one explicitly identified product/epoch/variant.
  def doGo(target: String, product: String, epoch: Int, variant: String,
           bulkSize: Option[Int], loopDelay: Option[Int],
           forceChange: Option[Boolean]): Action[AnyContent] = Action {
    request =>
      require(IngestControllerHelper.allowedTargets.contains(target))
      require(isAlphaNumeric(product))
      require(isAlphaNumeric(variant))
      val settings = IngestControllerHelper.settings(bulkSize, loopDelay, Algorithm.default)
      val model = new StateModel(product, Some(epoch), Some(variant), forceChange = forceChange getOrElse false)
      worker.push(s"automatically loading to $target ${model.pathSegment}${model.forceChangeString}", {
        continuer =>
          pipeline(target, model, settings, continuer)
      })
      Accepted
  }

  // fetch -> ingest -> (es only) switchover; skipped entirely when nothing to do.
  private def pipeline(target: String, model1: StateModel, settings: WriterSettings, continuer: Continuer) {
    if (shouldIngest(model1, continuer)) {
      val model2 = fetchController.fetch(model1, continuer)
      val model3 = ingestController.ingestIfOK(model2, logger, settings, target, continuer)
      target match {
        case "es" => esSwitchoverController.switchIfOK(model3)
        case _ => // no further action
      }
    }
  }

  // Ingest only while still busy AND (no index exists for the product/epoch OR forced).
  def shouldIngest(model1: StateModel, continuer: Continuer): Boolean = {
    val prod = model1.productName
    val epoch = model1.epoch
    val exists = indexExists(prod, epoch)
    val force = model1.forceChange
    val should = continuer.isBusy && (!exists || force)
    if (!should) {
      // NOTE(review): this message is also logged when skipping because the worker is
      // no longer busy, in which case "index already exists" may be inaccurate.
      logger.info(s"Skipping ingest: index already exists in ES for $prod ${epoch.getOrElse(-1)}")
    }
    should
  }

  // True when an in-use index exists for exactly this product and epoch.
  def indexExists(product: String, epoch: Option[Int]): Boolean = {
    (product, epoch) match {
      case (prod, Some(ep)) => indexMetadata.getIndexNameInUseFor(prod).exists(_.epoch == Some(ep))
      case _ => false
    }
  }
}
| andywhardy/address-reputation-ingester | app/controllers/GoController.scala | Scala | apache-2.0 | 4,489 |
#!/usr/bin/env scala
// Highest index the program will compute; larger requests are clamped to this.
// NOTE(review): Fibonacci(47) already overflows Int; values beyond wrap around,
// matching the original's Int arithmetic.
val maximum_index = 100

/**
 * Returns the index-th Fibonacci number (F(0) = 0, F(1) = 1).
 *
 * Tail-recursive implementation: the original naive double recursion is
 * O(phi^n) and effectively never terminates for indices near maximum_index.
 * Because Int addition is associative modulo 2^32, the wrap-around results
 * are identical to the original for every index.
 */
def getNthTerm(index: Int): Int = {
  @scala.annotation.tailrec
  def loop(remaining: Int, prev: Int, curr: Int): Int =
    if (remaining == 0) prev
    else loop(remaining - 1, curr, prev + curr)
  loop(index, 0, 1)
}
// Entry point: reads the term index from the first CLI argument, clamps it to
// maximum_index, and prints the corresponding Fibonacci term.
def main(args: Array[String]) {
  if(args.length < 1) {
    Console.err.println("Not enough arguments");
    return;
  }
  // Integer.parseInt throws NumberFormatException on non-numeric input.
  val index = scala.math.min(Integer.parseInt(args(0)), maximum_index);
  val term = getNthTerm(index);
  // NOTE(review): "\\n" prints a literal backslash-n, not a newline — confirm intent.
  println("The value of the term at index " + index + " of the Fibbonacci sequence is\\n" +
    term);
}
// Script-style invocation: runs immediately with the script's arguments.
main(args)
| alewang/Projects | scala/numbers/fibb.scala | Scala | mit | 540 |
// Copyright (c) 2011-2015 ScalaMock Contributors (https://github.com/paulbutcher/ScalaMock/graphs/contributors)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package org.scalamock.test.scalatest
import org.scalamock.scalatest.MockFactory
import org.scalamock.test.mockable.TestTrait
import org.scalatest._
import org.scalatest.exceptions.TestFailedException
import scala.language.postfixOps
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
/**
* Tests that errors are reported correctly in ScalaTest suites
*/
/** Verifies that ScalaMock failures surface through ScalaTest with the expected messages. */
class ErrorReportingTest extends AnyFlatSpec with Matchers with TestSuiteRunner {

  // A call with no expectation set should fail the test with "Unexpected call".
  "ScalaTest suite" should "report unexpected call correctly" in {
    class TestedSuite extends AnyFunSuite with MockFactory {
      test("execute block of code") {
        val mockedTrait = mock[TestTrait]
        mockedTrait.oneParamMethod(3)
      }
    }

    val outcome = runTestCase[TestedSuite](new TestedSuite)
    val errorMessage = getErrorMessage[TestFailedException](outcome)
    errorMessage should startWith("Unexpected call")
  }

  // A call with *different* arguments than expected should also be "Unexpected call".
  it should "report unexpected call correctly when expectations are set" in {
    class TestedSuite extends AnyFunSuite with MockFactory {
      test("execute block of code") {
        val mockedTrait = mock[TestTrait]
        (mockedTrait.oneParamMethod _).expects(1).returning("one")
        mockedTrait.oneParamMethod(3)
      }
    }

    // Unexpected call should be reported by ScalaTest
    val outcome = runTestCase[TestedSuite](new TestedSuite)
    val errorMessage = getErrorMessage[TestFailedException](outcome)
    errorMessage should startWith("Unexpected call")
  }

  // An exception thrown by the test body must not be masked by mock verification.
  it should "not hide NullPointerException" in {
    class TestedSuite extends AnyFunSuite with MockFactory {
      test("execute block of code") {
        val mockedTrait = mock[TestTrait]
        (mockedTrait.oneParamMethod _).expects(1).returning("one")
        throw new NullPointerException;
      }
    }

    val outcome = runTestCase[TestedSuite](new TestedSuite)
    // NullPointerException should be reported by ScalaTest
    getThrowable[NullPointerException](outcome) shouldBe a[NullPointerException]
  }

  // Mocks without explicit names are numbered <mock-1>, <mock-2>, ... in reports.
  it should "report default mock names" in {
    class TestedSuite extends AnyFunSuite with MockFactory {
      test("execute block of code") {
        val mockA = mock[TestTrait]
        val mockB = mock[TestTrait]
        (mockA.oneParamMethod _).expects(3)
        mockB.oneParamMethod(3)
      }
    }

    val outcome = runTestCase[TestedSuite](new TestedSuite)
    val errorMessage = getErrorMessage[TestFailedException](outcome)
    errorMessage shouldBe
      """|Unexpected call: <mock-2> TestTrait.oneParamMethod(3)
         |
         |Expected:
         |inAnyOrder {
         |  <mock-1> TestTrait.oneParamMethod(3) once (never called - UNSATISFIED)
         |}
         |
         |Actual:
         |  <mock-2> TestTrait.oneParamMethod(3)
      """.stripMargin.trim
  }

  // Named suite-scope mocks and per-test mocks are both listed, with call counts.
  it should "report unexpected calls in readable manner" in {
    class TestedSuite extends AnyFunSuite with MockFactory {
      val suiteScopeMock = mock[TestTrait]("suite mock")
      (() => suiteScopeMock.noParamMethod()).expects().returning("two").twice()

      test("execute block of code") {
        val mockedTrait = mock[TestTrait]
        (mockedTrait.polymorphicMethod _).expects(List(1)).returning("one")
        suiteScopeMock.noParamMethod()
        mockedTrait.oneParamMethod(3)
      }
    }

    val outcome = runTestCase[TestedSuite](new TestedSuite)
    val errorMessage = getErrorMessage[TestFailedException](outcome)
    errorMessage shouldBe
      """|Unexpected call: <mock-1> TestTrait.oneParamMethod(3)
         |
         |Expected:
         |inAnyOrder {
         |  <suite mock> TestTrait.noParamMethod() twice (called once - UNSATISFIED)
         |  <mock-1> TestTrait.polymorphicMethod[T](List(1)) once (never called - UNSATISFIED)
         |}
         |
         |Actual:
         |  <suite mock> TestTrait.noParamMethod()
         |  <mock-1> TestTrait.oneParamMethod(3)
      """.stripMargin.trim
  }
}
| paulbutcher/ScalaMock | shared/src/test/scala/org/scalamock/test/scalatest/ErrorReportingTest.scala | Scala | mit | 5,176 |
package utils
import java.awt.image.{BufferedImage, DataBufferByte}
import java.io.InputStream
import javax.imageio.ImageIO
import pipelines._
/**
* A wrapper trait for images that might be stored in various ways. Be warned
* that using this wrapper probably introduces some inefficiency. Also, images
* are currently treated as immutable, which may introduce a serious
* performance problem; in the future we may need to add a set() method.
*
* If you have a choice and performance matters to you, use
* ChannelMajorArrayVectorizedImage, as it is likely to be the most efficient
* implementation.
*/
trait Image {
  /** Storage dimensions (xDim = height, yDim = width) and channel count. */
  val metadata: ImageMetadata

  /**
   * Get the pixel value at (x, y, channelIdx). Channels are indexed as
   * follows:
   * - If the image is RGB, 0 => blue, 1 => green, 2 => red.
   * - If the image is RGB+alpha, 0 => blue, 1 => green, 2 => red, and
   *   3 => alpha.
   * - Other channel schemes are unsupported; the only reason this matters
   *   is that input converters (e.g. from BufferedImage to Image) need to
   *   handle channels consistently.
   */
  def get(x: Int, y: Int, channelIdx: Int): Double

  /** Put a pixel value at (x, y, channelIdx). */
  def put(x: Int, y: Int, channelIdx: Int, newVal: Double)

  /**
   * Returns a flat version of the image, represented as a single array.
   * It is indexed as follows: the pixel value for (x, y, channelIdx)
   * is at channelIdx + x*numChannels + y*numChannels*xDim.
   *
   * This implementation works for arbitrary image formats but it is
   * inefficient because it goes through the virtual `get` for every pixel.
   */
  def toArray: Array[Double] = {
    val flat = new Array[Double](this.flatSize)
    var y = 0
    while (y < this.metadata.yDim) {
      val runningOffsetY = y*this.metadata.numChannels*this.metadata.xDim
      var x = 0
      while (x < this.metadata.xDim) {
        val runningOffsetX = runningOffsetY + x*this.metadata.numChannels
        var channelIdx = 0
        while (channelIdx < this.metadata.numChannels) {
          flat(channelIdx + runningOffsetX) = get(x, y, channelIdx)
          channelIdx += 1
        }
        x += 1
      }
      y += 1
    }
    flat
  }

  /**
   * Flattens a single-channel image to an Array[Int], iterating x in the
   * outer loop. Values below 1 are treated as normalized intensities and
   * rescaled by 255; other values are rounded to the nearest integer.
   *
   * @throws RuntimeException if the image has more than one channel.
   */
  def getSingleChannelAsIntArray(): Array[Int] = {
    if (this.metadata.numChannels > 1) {
      throw new RuntimeException(
        "Cannot call getSingleChannelAsIntArray on an image with more than one channel.")
    }
    // while-loops replace the original side-effecting Range.map, which built
    // and discarded an IndexedSeq[Unit] per row.
    val flat = new Array[Int](this.metadata.xDim * this.metadata.yDim)
    var index = 0
    var x = 0
    while (x < metadata.xDim) {
      var y = 0
      while (y < metadata.yDim) {
        val px = get(x, y, 0)
        flat(index) = if (px < 1) (255 * px).toInt else math.round(px).toInt
        index += 1
        y += 1
      }
      x += 1
    }
    flat
  }

  /**
   * Flattens a single-channel image to an Array[Float], iterating y in the
   * outer loop (note: traversal order intentionally differs from the Int
   * variant, matching the original behavior).
   *
   * @throws RuntimeException if the image has more than one channel.
   */
  def getSingleChannelAsFloatArray(): Array[Float] = {
    if (this.metadata.numChannels > 1) {
      throw new RuntimeException(
        "Cannot call getSingleChannelAsFloatArray on an image with more than one channel.")
    }
    val flat = new Array[Float](this.metadata.xDim * this.metadata.yDim)
    var index = 0
    var y = 0
    while (y < metadata.yDim) {
      var x = 0
      while (x < metadata.xDim) {
        flat(index) = get(x, y, 0).toFloat
        index += 1
        x += 1
      }
      y += 1
    }
    flat
  }

  /** Total number of values in the flattened representation. */
  def flatSize: Int =
    metadata.numChannels*metadata.xDim*metadata.yDim

  /**
   * An inefficient implementation of equals(). Subclasses should override
   * this if they can implement it more cheaply and anyone cares about such
   * things.
   */
  override def equals(o: Any): Boolean = {
    if (o == null || !o.isInstanceOf[Image]) {
      false
    } else {
      val other = o.asInstanceOf[Image]
      if (!this.metadata.equals(other.metadata)) {
        false
      } else {
        for (xIdx <- (0 until metadata.xDim);
             yIdx <- (0 until metadata.yDim);
             channelIdx <- (0 until metadata.numChannels)) {
          if (this.get(xIdx, yIdx, channelIdx) != other.get(xIdx, yIdx, channelIdx)) {
            return false
          }
        }
        true
      }
    }
  }
}
/**
 * Contains metadata about the storage format of an image.
 *
 * @param xDim is the height of the image(!) — note the unconventional axis naming
 * @param yDim is the width of the image
 * @param numChannels is the number of color channels in the image
 */
case class ImageMetadata(xDim: Int, yDim: Int, numChannels: Int)
/**
 * Image backed by a raw byte array in which each byte is one channel value,
 * the layout produced by Java's JPEG decoder. The vector index for
 * (x, y, channelIdx) is channelIdx + y*numChannels + x*numChannels*yDim.
 * Bytes are read back as unsigned values in [0, 255]; writing is unsupported.
 */
case class ByteArrayVectorizedImage(
    vectorizedImage: Array[Byte],
    override val metadata: ImageMetadata) extends VectorizedImage {

  override def imageToVectorCoords(x: Int, y: Int, channelIdx: Int): Int =
    channelIdx + metadata.numChannels * (y + metadata.yDim * x)

  // Masking with 0xFF converts the signed JVM byte to its unsigned value,
  // equivalent to the "add 256 when negative" formulation.
  override def getInVector(vectorIdx: Int) = vectorizedImage(vectorIdx) & 0xFF

  // Backing store is read-only by design.
  override def putInVector(vectorIdx: Int, newVal: Double) = ???
}
/**
 * Image backed by a Double array in channel-major order: the value for
 * (x, y, channelIdx) lives at channelIdx + x*numChannels + y*numChannels*xDim.
 */
case class ChannelMajorArrayVectorizedImage(
    vectorizedImage: Array[Double],
    override val metadata: ImageMetadata) extends VectorizedImage {

  override def imageToVectorCoords(x: Int, y: Int, channelIdx: Int): Int =
    channelIdx + metadata.numChannels * (x + metadata.xDim * y)

  override def getInVector(vectorIdx: Int) = vectorizedImage(vectorIdx)

  override def putInVector(vectorIdx: Int, newVal: Double) = {
    vectorizedImage(vectorIdx) = newVal
  }

  // The backing array already uses the canonical flat layout, so expose it
  // directly instead of copying element by element.
  override def toArray = vectorizedImage
}
/**
 * Image backed by a Double array in column-major order: the value for
 * (x, y, channelIdx) lives at y + x*yDim + channelIdx*yDim*xDim.
 */
case class ColumnMajorArrayVectorizedImage(
    vectorizedImage: Array[Double],
    override val metadata: ImageMetadata) extends VectorizedImage {

  override def imageToVectorCoords(x: Int, y: Int, channelIdx: Int): Int =
    y + metadata.yDim * (x + metadata.xDim * channelIdx)

  override def getInVector(vectorIdx: Int) =
    vectorizedImage(vectorIdx)

  override def putInVector(vectorIdx: Int, newVal: Double) = {
    vectorizedImage(vectorIdx) = newVal
  }
}
/**
 * Image backed by a Double array in row-major order: the value for
 * (x, y, channelIdx) lives at x + y*xDim + channelIdx*xDim*yDim.
 */
case class RowMajorArrayVectorizedImage(
    vectorizedImage: Array[Double],
    override val metadata: ImageMetadata) extends VectorizedImage {

  override def imageToVectorCoords(x: Int, y: Int, channelIdx: Int): Int =
    x + metadata.xDim * (y + metadata.yDim * channelIdx)

  override def getInVector(vectorIdx: Int) = vectorizedImage(vectorIdx)

  override def putInVector(vectorIdx: Int, newVal: Double) = {
    vectorizedImage(vectorIdx) = newVal
  }
}
/**
 * Helper trait for implementing Images that wrap vectorized (flat array)
 * representations of images. Concrete subclasses only have to supply the
 * coordinate mapping and the flat-index accessors.
 */
trait VectorizedImage extends Image {
  /** Maps (x, y, channel) image coordinates to an index into the flat vector. */
  def imageToVectorCoords(x: Int, y: Int, channelIdx: Int): Int

  /** Reads the value stored at the given flat-vector index. */
  def getInVector(vectorIdx: Int): Double

  /** Writes a value at the given flat-vector index. */
  def putInVector(vectorIdx: Int, newVal: Double): Unit

  override def get(x: Int, y: Int, channelIdx: Int) = {
    getInVector(imageToVectorCoords(x, y, channelIdx))
  }

  override def put(x: Int, y: Int, channelIdx: Int, newVal: Double) = {
    putInVector(imageToVectorCoords(x, y, channelIdx), newVal)
  }
}
/**
 * Wraps a byte array (the original comment incorrectly said "double array" —
 * the backing store is Array[Byte], read back as unsigned values in [0, 255]).
 *
 * @param vectorizedImage is indexed as follows: the pixel value for (x, y, channelIdx)
 *        is at y + x*metadata.yDim + channelIdx*metadata.yDim*metadata.xDim
 * @param metadata Image metadata.
 */
case class RowColumnMajorByteArrayVectorizedImage(
    vectorizedImage: Array[Byte],
    override val metadata: ImageMetadata) extends VectorizedImage {
  override def imageToVectorCoords(x: Int, y: Int, channelIdx: Int): Int = {
    val cidx = channelIdx
    y + x*metadata.yDim + cidx*metadata.yDim*metadata.xDim
  }

  // FIXME: This is correct but inefficient - every time we access the image we
  // use several method calls (which are hopefully inlined) and a conversion
  // from byte to double (which hopefully at least does not involve any
  // boxing).
  override def getInVector(vectorIdx: Int) = {
    val signedValue = vectorizedImage(vectorIdx)
    // Recover the unsigned byte value: JVM bytes are signed, so negative
    // values represent 128..255.
    if (signedValue < 0) {
      signedValue + 256
    } else {
      signedValue
    }
  }

  // Writing back into the byte array is intentionally unimplemented.
  override def putInVector(vectorIdx: Int, newVal: Double) = ???
}
/**
 * Represents a labeled image.
 * @tparam L Type of the label.
 */
trait AbstractLabeledImage[L] {
  /** The underlying image data. */
  def image: Image
  /** The label attached to this image. */
  def label: L
  /** Optional source filename; useful for debugging. */
  def filename: Option[String]
}
/**
 * A labeled image. Commonly used in image classification.
 *
 * @param image An Image.
 * @param label A label. Should be in [0 .. K] where K is some number of unique labels.
 * @param filename Optional filename where this image was found; useful for debugging.
 */
case class LabeledImage(image: Image, label: Int, filename: Option[String] = None)
  extends AbstractLabeledImage[Int]
/**
 * A multilabeled image. Commonly used in image classification.
 *
 * @param image An Image.
 * @param label A set of labels. Should be an array with all elements in [0 .. K]
 *              where K is some number of unique labels.
 *
 * @param filename A filename where this image was found. Useful for debugging.
 */
case class MultiLabeledImage(image: Image, label: Array[Int], filename: Option[String] = None)
  extends AbstractLabeledImage[Array[Int]]
| o0neup/keystone | src/main/scala/utils/images/Image.scala | Scala | apache-2.0 | 9,956 |
import scala.collection.mutable.ArrayBuffer
/**
 * A mutable map from isomorphism classes of graphs to variable names.
 * Membership testing is a linear scan using the graph-isomorphism
 * operator `~`, so lookups are O(size).
 */
class VarMap private (private val _keys: ArrayBuffer[Digraph]) {

  /** The number of entries in this map. */
  def size: Int = _keys.size

  /** The keys of this map. */
  def keys: Seq[Digraph] = _keys

  /**
   * Lookup the variable name associated with the class '[g]' to
   * which 'g' belongs, or extend the map with a new variable name if
   * no such class exists.
   *
   * Returns the variable together with a flag that is true iff a new
   * class was added by this call.
   */
  def apply(g: Digraph): (Var, Boolean) =
    (0 until size).find(i => g ~ _keys(i)) match {
      case Some(i) => (mkName(i), false)
      case None =>
        val i = size
        _keys += g
        (mkName(i), true)
    }

  // Variable names are generated deterministically from the slot index.
  private def mkName(i: Int): Var = Var("x", i)
}
object VarMap {
  /** Creates an empty VarMap (the class constructor is private). */
  def empty = new VarMap(new ArrayBuffer[Digraph])
}
| sstucki/pa-ode-gen | src/VarMap.scala | Scala | mit | 848 |
package com.pawelmandera.duplicates
import scala.util.Try
import org.specs2.mutable._
import com.pawelmandera.hash.ElementHashes
/** Specs for candidate-pair generation based on shared sketch members. */
class SharedMemberCandidatesSpec extends Specification {
  val smc = new SharedMemberCandidates {}

  // Type-class instance so Set[Symbol] can be hashed element-wise for tests.
  implicit object SymbolSetElementHashes extends ElementHashes[Set[Symbol]] {
    def hashes(x: Set[Symbol]) = Try {
      val xs = x.toSeq map { _.toString }
      val hs = xs map { _.hashCode.toLong }
      hs.toSet
    }
  }

  "The SharedMemberCandidates" should {
    // Self-comparison: a pair sharing hashes with itself yields one candidate.
    "generate candidate duplicates based on overlap in sketches for one set" in {
      val elemsA = Map(
        'a -> Vector(1L, 2L, 3L),
        'b -> Vector(1L, 2L, 4L))

      val candidates = smc.candidates(elemsA, elemsA).toList
      candidates.length must_== 1
    }

    // Cross-comparison: every pair with at least one shared hash is a candidate.
    "generate candidate duplicates based on overlap in sketches for two sets" in {
      val elemsA = Map(
        'a -> Vector(1L, 2L, 3L),
        'b -> Vector(1L, 2L, 4L),
        'c -> Vector(4L, 5L, 6L))

      val elemsB = Map(
        'x -> Vector(2L, 5L, 3L),
        'y -> Vector(1L, 2L, 8L))

      val candidates = smc.candidates(elemsA, elemsB).toList
      candidates must containTheSameElementsAs(
        Seq(Set('a, 'x), Set('a, 'y), Set('b, 'y), Set('c, 'x))
      )
    }
  }
}
| pmandera/duometer | src/test/scala/com/pawelmandera/duplicates/SharedMemberCandidatesSpec.scala | Scala | apache-2.0 | 1,274 |
/**
 * Project Euler 84 (Monopoly odds): estimates the long-run occupation
 * probability of each Monopoly square by repeated multiplication of the
 * one-turn Markov transition matrix, then prints the k most likely squares.
 */
object Solution {
  // NOTE(review): this import appears unused in the visible code.
  import scala.collection.mutable.HashMap

  /**
   * Builds the 40x40 transition matrix for two `dice`-sided dice (including
   * Chance / Community Chest / Go-To-Jail redirection) and iterates it
   * `iterator` times to approximate the stationary distribution. Returns the
   * names of the `k` most-visited squares, most likely first.
   */
  def markovChainSolver(dice: Int,
                        k: Int,
                        iterator:Int = 150): Vector[String] = {
    val names = Vector("GO", "A1", "CC1", "A2", "T1", "R1", "B1", "CH1", "B2",
                       "B3", "JAIL", "C1", "U1", "C2", "C3", "R2", "D1", "CC2",
                       "D2", "D3", "FP", "E1", "CH2", "E2", "E3", "R3", "F1",
                       "F2", "U2", "F3", "G2J", "G1", "G2", "CC3", "G3", "R4",
                       "CH3", "H1", "T2", "H2")
    val squares: Map[String, Int] = names.zipWithIndex.toMap

    // For each board position, records the index of the next square whose
    // index appears in `indices` (wrapping past GO when necessary).
    def nextHelper(acc: Map[String, Int],
                   indices: List[Int],
                   start: Int): Map[String, Int] = indices match {
      case h :: t =>
        val newAcc =
          if (start <= h)
            (start until h).map(names(_) -> h).toMap ++ acc
          else{
            assert(start < names.size)
            (start until h + names.size)
              .map(x => names(x % names.size) -> h).toMap ++ acc
          }
        nextHelper(newAcc, t, h)
      case Nil => acc
    }
    // Square name -> index of the next railway (R*) square.
    val nextRSquares: Map[String, Int] = {
      val rSquaresIndex =
        squares.filter(x => x._1.startsWith("R")).map(_._2).toList.sorted
      nextHelper(Map.empty[String, Int], rSquaresIndex, rSquaresIndex.last)
    }
    // Square name -> index of the next utility (U*) square.
    val nextUSquares: Map[String, Int] = {
      val uSquareIndex =
        squares.filter(x => x._1.startsWith("U")).map(_._2).toList.sorted
      nextHelper(Map.empty[String, Int], uSquareIndex, uSquareIndex.last)
    }
    // Wraps a (possibly negative) square number onto the board.
    def limit(squareNumber: Int): Int =
      (squareNumber + names.size) % names.size
    // Probability distribution over destination squares after landing on
    // square + roll, accounting for card redirections. Each entry is divided
    // by dice*dice, the number of equally likely two-dice outcomes.
    def nextSquare(square: Int, roll: Int): Vector[Double] = {
      val newSquare: Int = (square + roll) % names.size
      val nexts: Array[Double] = {
        val result = Array.fill(names.size)(0D)
        val p = 0.0625D // 1 / 16
        names(newSquare) match {
          case "G2J" =>
            result(squares("JAIL")) = 1D
          case "CC1" | "CC2" | "CC3" =>
            result(squares("JAIL")) = p
            result(squares("GO")) = p
            result(newSquare) = p * 14
          // CH3 go back 3 quares will be CC3, need to be dealed differently.
          case "CH1" | "CH2" =>
            val squareList =
              List(squares("GO"), squares("JAIL"), squares("C1"), squares("E3"),
                   squares("H2"), squares("R1"), nextRSquares(names(newSquare)),
                   nextRSquares(names(newSquare)), nextUSquares(names(newSquare)),
                   limit(newSquare - 3))
            for (index <- squareList) {
              result(index) += p
            }
            result(newSquare) += 6 * p
          case "CH3" =>
            val squareList =
              List(squares("GO"), squares("JAIL"), squares("C1"), squares("E3"),
                   squares("H2"), squares("R1"), nextRSquares(names(newSquare)),
                   nextRSquares(names(newSquare)), nextUSquares(names(newSquare))
              )
            for (index <- squareList) {
              result(index) += p
            }
            result(newSquare) += p * 6
            // "Go back 3" from CH3 lands on CC3, so the Community Chest
            // redirection probabilities compound here.
            result(squares("GO")) += p * p
            result(squares("JAIL")) += p * p
            result(limit(newSquare - 3)) += p * 14 * p
          case _ =>
            result(newSquare) = 1D
        }
        result
      }
      names.indices.toVector.map( next =>
        nexts(next) / (dice * dice)
      )
    }
    // P is the Markov matrix
    val P: Vector[Vector[Double]] = {
      val zeroVector = Vector.fill(names.size)(0D)
      val rolls = for {
        dice1 <- 1 to dice
        dice2 <- 1 to dice
      } yield dice1 + dice2
      def row(i: Int): Vector[Double] =
        rolls.toList.foldLeft(zeroVector) { (vec, roll) =>
          (vec, nextSquare(i, roll)).zipped.map(_ + _)
        }
      names.indices.toVector.map(row(_))
    }
    // Naive O(n^3) dense matrix product; n = 40 so this is cheap.
    def matrixMul(m1: Vector[Vector[Double]],
                  m2: Vector[Vector[Double]]) = {
      val size = m1.size
      val result = Array.fill(size)(Array.fill(size)(0D))
      for {
        i <- 0 until size
        j <- 0 until size
        k <- 0 until size
      } result(i)(j) += m1(i)(k) * m2(k)(j)
      result.map(_.toVector).toVector
    }
    // as k approaches infinity, all rows of P^k approach the stationary probability vector.
    // trial and error says k=150 gives accuracy to at least 3 decimal places
    val stationaryProbabilityVector =
      Iterator.iterate(P)(matrixMul(_, P)).drop(iterator).next.head.map(_.toDouble)
    val probabs = (stationaryProbabilityVector
      .zipWithIndex.sortBy(-_._1).map(x => names(x._2) -> x._1))
    // println(probabs, probabs.map(_._2).sum)
    stationaryProbabilityVector
      .zipWithIndex.sortBy(-_._1).take(k).map(x => names(x._2))
  }

  // Reads "n k" from stdin: n-sided dice, top k squares to report.
  def main(args: Array[String]) {
    val Array(n, k) = readLine.split(" ").map(_.toInt)
    // 50 iteration is enough to get the precision we want.
    val topNames = markovChainSolver(n, k, 50)
    println(topNames.mkString(" "))
  }
}
| advancedxy/hackerrank | project-euler/problem-84/MonopolyOdds.scala | Scala | mit | 5,037 |
package io.hydrosphere.mist.worker.runners
import java.io.File
import java.net.URL
import io.hydrosphere.mist.core.CommonData.RunJobRequest
import io.hydrosphere.mist.job.FunctionInstanceLoader
import io.hydrosphere.mist.utils.EitherOps._
import io.hydrosphere.mist.utils.{Err, Succ}
import io.hydrosphere.mist.worker.{MistScContext, SparkArtifact}
import mist.api.{FnContext, RuntimeJobInfo}
import mist.api.data.JsData
import org.apache.spark.util.SparkClassLoader
/**
 * Executes a Scala/JVM function artifact inside an existing Spark context:
 * registers the jar, loads the function instance through a patched
 * classloader, and invokes it with the request arguments.
 */
class ScalaRunner(artifact: SparkArtifact) extends JobRunner {

  override def run(
    request: RunJobRequest,
    context: MistScContext):Either[Throwable, JsData] = {

    val params = request.params
    import params._

    // Make the artifact visible to Spark executors as well as the driver.
    context.addJar(artifact)
    val loader = prepareClassloader(artifact.local)
    val jobsLoader = new FunctionInstanceLoader(loader)
    // Convert the project-specific Succ/Err result into a standard Either.
    val instance = jobsLoader.loadFnInstance(className, action) match {
      case Succ(i) => Right(i)
      case Err(ex) => Left(ex)
    }
    for {
      inst   <- instance
      ctx     = FnContext(context.sc, params.arguments, context.streamingDuration, RuntimeJobInfo(request.id, context.namespace))
      result <- inst.run(ctx)
    } yield result
  }

  // see #204, #220
  // Installs a classloader that can see the artifact's classes and sets it as
  // the thread context classloader so reflective lookups resolve correctly.
  private def prepareClassloader(file: File): ClassLoader = {
    val existing = this.getClass.getClassLoader
    val url = file.toURI.toURL
    val patched = SparkClassLoader.withURLs(existing, url)
    Thread.currentThread().setContextClassLoader(patched)
    patched
  }
}
| Hydrospheredata/mist | mist/worker/src/main/scala/io/hydrosphere/mist/worker/runners/ScalaRunner.scala | Scala | apache-2.0 | 1,497 |
package at.bioinform.tools.indexer
import java.io.File
import java.net.URI
/** CLI configuration for the FASTA indexer tool. */
case class Config(
  // NOTE(review): "~" is not expanded by the JVM, so this default points at a
  // literal "./~/test.fa" — confirm it is always overridden before use.
  fastaFile: File = new File("~/test.fa"),
  // Base URL of the cluster the index is sent to.
  clusterUrl: URI = new URI("http://localhost:9000"))
| peri4n/bIO | tools/src/main/scala/at/bioinform/tools/indexer/Config.scala | Scala | apache-2.0 | 227 |
package com.seanshubin.utility.json

// Test fixture: exercises JSON (un)marshalling of a sequence of Long values.
case class SampleForMarshallingArrayOfLong(values: Seq[Long])
| SeanShubin/utility | json/src/test/scala/com/seanshubin/utility/json/SampleForMarshallingArrayOfLong.scala | Scala | unlicense | 99 |
package ignition.core.jobs
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.joda.time.{DateTime, DateTimeZone}
import org.slf4j.{Logger, LoggerFactory}
import scala.concurrent.Future
/**
 * Bootstraps a named Spark job setup: parses CLI options, builds the
 * SparkSession, propagates logging context to driver and workers, runs the
 * selected setup function, and finally forces JVM exit.
 */
object CoreJobRunner {

  val logger: Logger = LoggerFactory.getLogger(getClass)

  // Everything a job setup function needs at run time.
  case class RunnerContext(sparkContext: SparkContext,
                           sparkSession: SparkSession,
                           config: RunnerConfig)


  // Used to provide contextual logging
  def setLoggingContextValues(config: RunnerConfig): Unit = {
    try { // yes, this may fail but we don't want everything to shut down
      org.slf4j.MDC.put("setupName", config.setupName)
      org.slf4j.MDC.put("tag", config.tag)
      org.slf4j.MDC.put("user", config.user)
    } catch {
      case e: Throwable =>
      // cry
    }
  }

  // Defaults correspond to a local, untagged, anonymous run.
  case class RunnerConfig(setupName: String = "nosetup",
                          date: DateTime = DateTime.now.withZone(DateTimeZone.UTC),
                          tag: String = "notag",
                          user: String = "nouser",
                          master: String = "local[*]",
                          executorMemory: String = "2G",
                          extraArgs: Map[String, String] = Map.empty)

  /**
   * Parses args, looks up the requested setup in `jobsSetups`
   * (setup name -> (job function, extra Spark conf)), and runs it.
   * Exits the JVM with status 1 on job failure, 0 otherwise.
   */
  def runJobSetup(args: Array[String], jobsSetups: Map[String, (CoreJobRunner.RunnerContext => Unit, Map[String, String])], defaultSparkConfMap: Map[String, String]) {
    val parser = new scopt.OptionParser[RunnerConfig]("Runner") {
      help("help") text("prints this usage text")
      arg[String]("<setup-name>") required() action { (x, c) =>
        c.copy(setupName = x)
      } text(s"one of ${jobsSetups.keySet}")
      // Note: we use runner-option name because when passing args to spark-submit we need to avoid name conflicts
      opt[String]('d', "runner-date") action { (x, c) =>
        c.copy(date = new DateTime(x))
      }
      opt[String]('t', "runner-tag") action { (x, c) =>
        c.copy(tag = x)
      }
      opt[String]('u', "runner-user") action { (x, c) =>
        c.copy(user = x)
      }
      opt[String]('m', "runner-master") action { (x, c) =>
        c.copy(master = x)
      }
      opt[String]('e', "runner-executor-memory") action { (x, c) =>
        c.copy(executorMemory = x)
      }
      opt[(String, String)]('w', "runner-extra") unbounded() action { (x, c) =>
        c.copy(extraArgs = c.extraArgs ++ Map(x))
      }
    }

    parser.parse(args, RunnerConfig()) map { config =>
      val setup = jobsSetups.get(config.setupName)

      require(setup.isDefined,
        s"Invalid job setup ${config.setupName}, available jobs setups: ${jobsSetups.keySet}")

      val Some((jobSetup, jobConf)) = setup

      val appName = s"${config.setupName}.${config.tag}"

      // Conf precedence: job-specific conf overrides the defaults.
      val builder = SparkSession.builder
      builder.config("spark.executor.memory", config.executorMemory)
      builder.config("spark.eventLog.dir", "file:///media/tmp/spark-events")
      builder.master(config.master)
      builder.appName(appName)
      builder.config("spark.hadoop.mapred.output.committer.class", classOf[DirectOutputCommitter].getName())

      defaultSparkConfMap.foreach { case (k, v) => builder.config(k, v) }
      jobConf.foreach { case (k, v) => builder.config(k, v) }

      // Add logging context to driver
      setLoggingContextValues(config)

      try {
        builder.enableHiveSupport()
      } catch {
        case t: Throwable => logger.warn("Failed to enable HIVE support", t)
      }

      val session = builder.getOrCreate()
      val sc = session.sparkContext

      // Also try to propagate logging context to workers
      // TODO: find a more efficient and bullet-proof way
      val configBroadCast = sc.broadcast(config)

      sc.parallelize(Range(1, 2000), numSlices = 2000).foreachPartition(_ => setLoggingContextValues(configBroadCast.value))

      val context = RunnerContext(sc, session, config)

      try {
        jobSetup.apply(context)
      } catch {
        case t: Throwable =>
          t.printStackTrace()
          System.exit(1) // force exit of all threads
      }
      import scala.concurrent.ExecutionContext.Implicits.global
      Future {
        // If everything is fine, the system will shut down without the help of this thread and YARN will report success
        // But sometimes it gets stuck, then it's necessary to use the force, but this may finish the job as failed on YARN
        Thread.sleep(30 * 1000)
        System.exit(0) // force exit of all threads
      }
    }
  }
}
| chaordic/ignition-core | src/main/scala/ignition/core/jobs/CoreJobRunner.scala | Scala | mit | 4,558 |
import sbt._
import Keys._
// sbt build definition adding a custom "fun" test configuration that runs
// integration specs, while the default Test config runs only unit specs.
// NOTE(review): the Build trait is deprecated in sbt 0.13.12+ — consider
// migrating to build.sbt if the sbt version is ever upgraded.
object B extends Build {
  lazy val root =
    Project("root", file("."))
      .configs(FunTest)
      .settings(inConfig(FunTest)(Defaults.testTasks) : _*)
      .settings(
        libraryDependencies += specs,
        // Test -> unit specs only; FunTest -> integration specs only.
        testOptions in Test := Seq(Tests.Filter(unitFilter)),
        testOptions in FunTest := Seq(Tests.Filter(itFilter))
      )

  // Integration tests are identified purely by class-name suffix.
  def itFilter(name: String): Boolean = (name endsWith "IntegrationSpec")
  def unitFilter(name: String): Boolean = (name endsWith "Spec") && !itFilter(name)

  lazy val FunTest = config("fun") extend(Test)

  lazy val specs = "org.specs2" %% "specs2" % "2.0" % "test"
}
| fernandoacorreia/so23160453 | project/Build.scala | Scala | mit | 644 |
// Compiler negative-test fixture (tests/neg): `foo` is deliberately defined
// twice with an identical signature, so this file is EXPECTED to fail
// compilation. Do not "fix" the duplicate definition.
object test {
  def foo[A] = 0
  def foo[A] = foo[A]
}
| yusuke2255/dotty | tests/untried/neg/t649.scala | Scala | bsd-3-clause | 55 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.optimize
import cc.factorie.model.{WeightsMap, WeightsSet}
import cc.factorie.util.FastLogging
/**
* A conjugate gradient optimizer. Should not be used unless you know you want it because LBFGS is often better.
* @param initialStepSize The initial step size. Not too important because line search is performed.
* @author Andrew McCallum, Alexandre Passos
*/
class ConjugateGradient(val initialStepSize: Double = 1.0) extends GradientOptimizer with FastLogging {
  private var _isConverged = false
  def isConverged = _isConverged
  // Relative change in objective value below which we declare convergence.
  var tolerance = 0.0001
  // Gradient norm below which we declare convergence.
  var gradientTolerance = 0.001
  var maxIterations = 1000
  val eps = 1.0e-10 // a small number to rectify the special case of converging to exactly zero function value

  // The state of a conjugate gradient search
  //var fp = 0.0
  var oldValue = 0.0
  var gg = 0.0
  var gam = 0.0
  var dgg = 0.0
  var stepSize = 0.0
  // xi: current search direction; g: previous gradient; h: conjugate direction.
  var xi: WeightsMap = null
  var g: WeightsMap = null
  var h: WeightsMap = null
  var iterations = 0
  var lineOptimizer: BackTrackLineOptimizer = null

  // Clears search state so the optimizer can be reused from scratch.
  def reset(): Unit = {
    xi = null
    _isConverged = false
  }
  def initializeWeights(weights: WeightsSet): Unit = { }
  def finalizeWeights(weights: WeightsSet): Unit = { }

  // One optimization step: advances the inner line search along the current
  // direction; once the line search converges, checks termination criteria
  // and computes the next conjugate direction (Polak-Ribiere update).
  def step(weights:WeightsSet, gradient:WeightsMap, value:Double): Unit = {
    if (_isConverged) return
    // If this is our first time in, then initialize
    if (xi eq null) {
      xi = gradient.copy
      g = xi.copy
      h = xi.copy
      stepSize = initialStepSize
    }
    // Take a step in the current search direction, xi
    if (lineOptimizer eq null) lineOptimizer = new BackTrackLineOptimizer(gradient, xi.copy, stepSize)
    lineOptimizer.step(weights, xi, value)
    // If the lineOptimizer has not yet converged, then don't yet do any of the ConjugateGradient-specific things below
    if (lineOptimizer.isConverged){
      lineOptimizer = null // So we create a new one next time around
      xi = gradient.copy
      // This termination provided by "Numeric Recipes in C".
      if (2.0 * math.abs(value - oldValue) <= tolerance * (math.abs(value) + math.abs(oldValue) + eps)) {
        logger.info("ConjugateGradient converged: old value="+oldValue+" new value="+value+" tolerance="+tolerance)
        _isConverged = true
        return
      }
      // This termination provided by McCallum
      if (xi.twoNorm < gradientTolerance) {
        logger.info("ConjugateGradient converged: maximum gradient component: "+xi.twoNorm+" less than "+tolerance)
        _isConverged = true
        return
      }
      oldValue = value

      // compute gamma, new g and new h
      {
        dgg = 0.0
        gg = 0.0
        val xia = xi.toArray
        val ga = g.toArray
        var i = 0
        while (i < ga.length) {
          gg += ga(i) * ga(i) // previous gradient
          dgg += xia(i) * (xia(i) - ga(i)) // current gradient
          i += 1
        }
        gam = dgg / gg
        g.keys.foreach(k => g(k) := xi(k))
        h.keys.foreach(k => h(k) *= gam)
        h += g
        assert(!h.containsNaN())
      }
      /* gdruck: If using the BackTrackLineSearch, then the search stops whenever
         a step is found that increases the value significantly (according
         to a threshold from Numerical Recipes). ConjugateGradient
         assumes that line maximization finds something close
         to the maximum in that direction. In tests, sometimes the
         direction suggested by CG points downhill. Consequently, here I am
         setting the search direction to the gradient if the slope is
         negative or 0. */
      // TODO Implement GradientBracketLineMaximizer (used in Numerical Recipes) which should avoid this problem!
      if (xi.dot(h) > 0) xi := h else h := xi

      iterations += 1

      lineOptimizer = new BackTrackLineOptimizer(gradient, xi.copy, stepSize)
      lineOptimizer.step(weights, xi, value)
    }
  }
}
case class Atbash() {
  /** Encodes `s` with the Atbash cipher: letters are mirrored in the alphabet
   *  (a<->z, b<->y, ...), digits pass through unchanged, and every other
   *  character is dropped. The ciphertext is emitted in groups of five
   *  characters separated by single spaces.
   *
   *  Uses `flatMap` (builder-backed, O(n)) instead of the previous
   *  `foldLeft` with string concatenation, which was O(n^2).
   */
  def encode(s: String): String =
    s.flatMap(substitute).grouped(5).mkString(" ")

  /** Cipher output for one character: the digit itself, the mirrored
   *  lowercase letter, or the empty string for anything else.
   */
  private def substitute(c: Char): String =
    if (c.isDigit) c.toString
    else if (c.isLetter) ('a' + ('z' - c.toLower)).toChar.toString
    else ""
}
| nlochschmidt/xscala | atbash-cipher/example.scala | Scala | mit | 281 |
/*
* Copyright 2014 Frugal Mechanic (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.lazyseq
/**
 * The canonical empty [[LazySeq]]: it contains no elements, so every
 * traversal is a no-op.
 */
private object EmptyLazySeq extends LazySeq[Nothing] {
  /** Does nothing: there are no elements to apply `f` to.
   *  (Explicit `: Unit = ()` replaces the deprecated procedure syntax `{}`.)
   */
  final def foreach[U](f: Nothing => U): Unit = ()
} | frugalmechanic/fm-lazyseq | src/main/scala/fm/lazyseq/EmptyLazySeq.scala | Scala | apache-2.0 | 782
package org.scalajs.jsenv
import scala.annotation.tailrec
import org.scalajs.core.tools.io._
import java.io.File
/** A helper class to temporarily store virtual files to the filesystem.
*
* Can be used with tools that require real files.
* @param singleDir if true, forces files to be copied into
* [[cacheDir]]. Useful to setup include directories for
* example.
*/
final class VirtualFileMaterializer(singleDir: Boolean = false) {
  import VirtualFileMaterializer._

  /** Freshly-created temporary directory holding all materialized copies.
   *  Registered with deleteOnExit (note: the JVM only removes it on exit if
   *  it is empty by then; individual files are registered too via trgFile).
   */
  val cacheDir = {
    val dir = createTempDir()
    dir.deleteOnExit()
    dir
  }

  /** Create a target file to write/copy to. Will also call
   *  deleteOnExit on the file.
   */
  private def trgFile(name: String): File = {
    val f = new File(cacheDir, name)
    f.deleteOnExit()
    f
  }

  /** Returns a real file with the contents of `vf`. A [[FileVirtualFile]] is
   *  returned as-is unless `singleDir` forces a copy into [[cacheDir]].
   */
  def materialize(vf: VirtualTextFile): File = vf match {
    case vf: FileVirtualFile if !singleDir =>
      vf.file
    case _ =>
      val trg = trgFile(vf.name)
      IO.copyTo(vf, WritableFileVirtualTextFile(trg))
      trg
  }

  /** Binary counterpart of the text overload: same caching/copying rules. */
  def materialize(vf: VirtualBinaryFile): File = vf match {
    case vf: FileVirtualFile if !singleDir =>
      vf.file
    case _ =>
      val trg = trgFile(vf.name)
      IO.copyTo(vf, WritableFileVirtualBinaryFile(trg))
      trg
  }

  /** Removes the cache directory. Any operation on this
   *  VirtualFileMaterializer is invalid after [[close]] has been
   *  called.
   */
  def close(): Unit = {
    // File.listFiles() returns null once cacheDir no longer exists (e.g. a
    // second close() call); guard with Option so close() is idempotent
    // instead of throwing a NullPointerException.
    Option(cacheDir.listFiles()).foreach(_.foreach(_.delete()))
    cacheDir.delete()
  }

  // scalastyle:off line.size.limit
  /* Taken from Guava:
   * https://github.com/google/guava/blob/1c285fc8d289c43b46aa55e7f90ec0359be5b69a/guava/src/com/google/common/io/Files.java#L413-L426
   */
  // scalastyle:on line.size.limit
  /** Creates a unique temp directory under java.io.tmpdir, probing
   *  timestamp-based names until mkdir succeeds (up to TempDirAttempts tries).
   */
  private def createTempDir(): File = {
    val baseDir = new File(System.getProperty("java.io.tmpdir"))
    val baseName = System.currentTimeMillis() + "-"

    @tailrec
    def loop(tries: Int): File = {
      val tempDir = new File(baseDir, baseName + tries)
      if (tempDir.mkdir())
        tempDir
      else if (tries < TempDirAttempts)
        loop(tries + 1)
      else {
        throw new IllegalStateException("Failed to create directory within " +
            s"$TempDirAttempts attempts (tried ${baseName}0 to " +
            s"${baseName}${TempDirAttempts - 1})")
      }
    }

    loop(0)
  }
}
object VirtualFileMaterializer {
  // Upper bound on directory-name probes in createTempDir before giving up.
  private final val TempDirAttempts = 10000
}
| jmnarloch/scala-js | js-envs/src/main/scala/org/scalajs/jsenv/VirtualFileMaterializer.scala | Scala | bsd-3-clause | 2,426 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and returns a sample of code snippets matching the selected criteria, giving a quick overview of the dataset's contents without deeper analysis.