| code (string, length 5-1M) | repo_name (string, length 5-109) | path (string, length 6-208) | language (1 class) | license (15 classes) | size (int64, 5-1M) |
|---|---|---|---|---|---|
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pagespecs
import langswitch.Languages
import testsupport.ItSpec
import testsupport.stubs._
import testsupport.testdata.TdAll
import timetopaytaxpayer.cor.model.SaUtr
import uk.gov.hmrc.auth.core.Enrolment
class YouNeedToRequestAccessToSelfAssessmentPageSpec extends ItSpec {
def begin(
utr: Option[SaUtr] = Some(TdAll.saUtr),
allEnrolments: Option[Set[Enrolment]] = Some(Set(TdAll.saEnrolment))
): Unit = {
startPage.open()
startPage.assertPageIsDisplayed()
AuthStub.authorise(utr, allEnrolments)
()
}
private case class Scenario(
allEnrolments: Option[Set[Enrolment]],
maybeSaUtr: Option[SaUtr],
caseName: String = ""
)
def begin(): Unit = {
val s = requestSaScenarios.head
begin(s.maybeSaUtr, s.allEnrolments)
}
def startNowAndAssertRequestToSA(): Unit = {
startPage.clickOnStartNowButton()
youNeedToRequestAccessToSelfAssessment.assertPageIsDisplayed()
}
private val requestSaScenarios = List(
Scenario(TdAll.saEnrolment, None, "no UTR found"),
Scenario(None, TdAll.saUtr, "no SA enrolment"),
Scenario(None, None, "no SA enrolment nor UTR"),
Scenario(TdAll.unactivatedSaEnrolment, TdAll.saUtr, "no active SA enrolment")
)
"language" in {
begin()
startNowAndAssertRequestToSA()
youNeedToRequestAccessToSelfAssessment.clickOnWelshLink()
youNeedToRequestAccessToSelfAssessment.assertPageIsDisplayed(Languages.Welsh)
youNeedToRequestAccessToSelfAssessment.clickOnEnglishLink()
youNeedToRequestAccessToSelfAssessment.assertPageIsDisplayed(Languages.English)
}
"back button" in {
begin()
startNowAndAssertRequestToSA()
youNeedToRequestAccessToSelfAssessment.backButtonHref.value shouldBe s"${baseUrl.value}${startPage.path}"
}
"take the user to request page" in {
requestSaScenarios.foreach { s =>
begin(s.maybeSaUtr, s.allEnrolments)
startNowAndAssertRequestToSA()
}
}
"click on the call to action and navigate to PTA" in {
requestSaScenarios.foreach { s =>
begin(s.maybeSaUtr, s.allEnrolments)
startNowAndAssertRequestToSA()
AddTaxesFeStub.enrolForSaStub(s.maybeSaUtr)
AddTaxesFeStub.enrolForSaStubbedPage()
youNeedToRequestAccessToSelfAssessment.clickTheButton()
enrolForSaPage.assertPageIsDisplayed()
}
}
"click on the call to action and navigate call us page if auth sends no credentials/providerId" in {
startPage.open()
startPage.assertPageIsDisplayed()
AuthStub.authorise(allEnrolments = None, credentials = None)
startPage.clickOnStartNowButton()
youNeedToRequestAccessToSelfAssessment.assertPageIsDisplayed()
youNeedToRequestAccessToSelfAssessment.clickTheButton()
notEnrolledPage.assertPageIsDisplayed()
}
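// Implicit conversions that let the Scenario values above pass bare arguments
// (e.g. TdAll.saEnrolment) where Option, Set or Option[Set] parameters are expected.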
private implicit def toOption[T](t: T): Option[T] = Some(t)
private implicit def toSet[T](t: T): Set[T] = Set(t)
private implicit def toOptionSet[T](t: T): Option[Set[T]] = Some(Set(t))
}
| hmrc/self-service-time-to-pay-frontend | test/pagespecs/YouNeedToRequestAccessToSelfAssessmentPageSpec.scala | Scala | apache-2.0 | 3,632 |
import scala.io.Source
import scala.util.control.Breaks._
object IODemo{
def main(args: Array[String]){
val fileName="./test.txt"
readAndPrint(fileName) // read the file
/* process the file one character at a time
val source = Source.fromFile(fileName)
for(c <- source)
println(c)
*/
/* read from a URL
val source1 = Source.fromURL("http://www.baidu.com")
val lineIterator1 = source1.getLines()
for(l <- lineIterator1){
println(l)
}
source1.close()
*/
/*
val source2 = Source.fromString("Hello, World!")
val lineIterator2 = source2.getLines()
for(l <- lineIterator2){
println(l)
}
source2.close()
*/
val source3 = Source.stdin // read from standard input
breakable{
while(source3.hasNext){ // examine one character at a time
val s3in = source3.next
if(s3in == 'q'){
break;
}else{
println(s3in)
}
}
}
}
def readAndPrint(fileName: String){
val source = Source.fromFile(fileName)
val lineIterator = source.getLines()
for(l <- lineIterator){
println(l)
}
source.close()
}
}
| PengLiangWang/Scala | IO/IODemo.scala | Scala | gpl-3.0 | 1,406 |
package im.actor.server.stickers
import im.actor.serialization.ActorSerializer
import im.actor.server.sticker.{ Sticker, StickerImage }
object StickerMessages {
def register(): Unit =
ActorSerializer.register(
100001 → classOf[Sticker],
100002 → classOf[StickerImage]
)
}
| EaglesoftZJ/actor-platform | actor-server/actor-core/src/main/scala/im/actor/server/stickers/StickerMessages.scala | Scala | agpl-3.0 | 300 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.crypto
import java.security.SecureRandom
import org.apache.commons.codec.binary.Base64
import org.scalatest.{Matchers, OptionValues, WordSpecLike}
import play.api.test.FakeApplication
import play.api.test.Helpers._
class CryptoGCMWithKeysFromConfigSpec extends WordSpecLike with Matchers with OptionValues {
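// Three random 32-byte (256-bit) keys; they are base64-encoded below for the test configuration.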
private val keybytes = new Array[Byte](16 * 2)
private val previousKeybytes1 = new Array[Byte](16 * 2)
private val previousKeybytes2 = new Array[Byte](16 * 2)
val rand = new SecureRandom()
rand.nextBytes(keybytes)
rand.nextBytes(previousKeybytes1)
rand.nextBytes(previousKeybytes2)
private val baseConfigKey = "crypto.spec"
private object CurrentKey {
val configKey = baseConfigKey + ".key"
val encryptionKey = Base64.encodeBase64String(keybytes)
val plainMessage = PlainText("this is my message")
val plainByteMessage = PlainBytes("this is a bunch of bytes".getBytes)
val plainByteMessageResponse = PlainText("this is a bunch of bytes")
}
private object PreviousKey1 {
val encryptionKey = Base64.encodeBase64String(previousKeybytes1)
val plainMessage = PlainText("this is the first plain message")
val plainByteMessage = PlainBytes("this is the first bunch of bytes".getBytes)
val plainByteMessageResponse = PlainText("this is the first bunch of bytes")
}
private object PreviousKey2 {
val encryptionKey = Base64.encodeBase64String(previousKeybytes2)
val plainMessage = PlainText("this is the second plain message")
val plainByteMessage = PlainBytes("this is the second bunch of bytes".getBytes)
val plainByteMessageResponse = PlainText("this is the second bunch of bytes")
}
private object PreviousKeys {
val configKey = baseConfigKey + ".previousKeys"
val encryptionKeys = Seq(PreviousKey1.encryptionKey, PreviousKey2.encryptionKey)
}
"Constructing a CompositeCryptoWithKeysFromConfig with a current key, but no previous keys configured" should {
val fakeApplicationWithCurrentKeyOnly = FakeApplication(additionalConfiguration = Map(
CurrentKey.configKey -> CurrentKey.encryptionKey
))
"return a properly initialised, functional AuthenticatedEncryption object that works with the current key only" in running(fakeApplicationWithCurrentKeyOnly) {
val crypto = CryptoGCMWithKeysFromConfig(baseConfigKey)
crypto.decrypt(crypto.encrypt(CurrentKey.plainMessage)) shouldBe CurrentKey.plainMessage
crypto.decrypt(crypto.encrypt(CurrentKey.plainByteMessage)) shouldBe CurrentKey.plainByteMessageResponse
val previousKey1Crypto = CompositeSymmetricCrypto.aesGCM(PreviousKey1.encryptionKey, Seq.empty)
val encryptedWithPreviousKey1 = crypto.encrypt(PreviousKey1.plainMessage, previousKey1Crypto)
intercept[SecurityException] {
crypto.decrypt(encryptedWithPreviousKey1)
}
}
}
"Constructing a CryptoGCMWithKeysFromConfig with a current key and empty previous keys" should {
val fakeApplicationWithEmptyPreviousKeys = FakeApplication(additionalConfiguration = Map(
CurrentKey.configKey -> CurrentKey.encryptionKey,
PreviousKeys.configKey -> List.empty)
)
"return a properly initialised, functional AuthenticatedEncryption object that works with the current key only" in running(fakeApplicationWithEmptyPreviousKeys) {
val crypto = CryptoGCMWithKeysFromConfig(baseConfigKey)
crypto.decrypt(crypto.encrypt(CurrentKey.plainMessage)) shouldBe CurrentKey.plainMessage
crypto.decrypt(crypto.encrypt(CurrentKey.plainByteMessage)) shouldBe CurrentKey.plainByteMessageResponse
val previousKey1Crypto = CompositeSymmetricCrypto.aesGCM(PreviousKey1.encryptionKey, Seq.empty)
val encryptedWithPreviousKey1 = crypto.encrypt(PreviousKey1.plainMessage, previousKey1Crypto)
intercept[SecurityException] {
crypto.decrypt(encryptedWithPreviousKey1)
}
}
}
"Constructing a CompositeCryptoWithKeysFromConfig with both current and previous keys" should {
val fakeApplicationWithCurrentAndPreviousKeys = FakeApplication(additionalConfiguration = Map(
CurrentKey.configKey -> CurrentKey.encryptionKey,
PreviousKeys.configKey -> PreviousKeys.encryptionKeys)
)
"allows decrypting payloads that were encrypted using previous keys" in running(fakeApplicationWithCurrentAndPreviousKeys) {
val crypto = CryptoGCMWithKeysFromConfig(baseConfigKey)
val previousKey1Crypto = CompositeSymmetricCrypto.aesGCM(PreviousKey1.encryptionKey, Seq.empty)
val encryptedWithPreviousKey1 = crypto.encrypt(PreviousKey1.plainMessage, previousKey1Crypto)
val encryptedBytesWithPreviousKey1 = crypto.encrypt(PreviousKey1.plainByteMessage, previousKey1Crypto)
crypto.decrypt(encryptedWithPreviousKey1) shouldBe PreviousKey1.plainMessage
crypto.decrypt(encryptedBytesWithPreviousKey1) shouldBe PreviousKey1.plainByteMessageResponse
val previousKey2Crypto = CompositeSymmetricCrypto.aesGCM(PreviousKey2.encryptionKey, Seq.empty)
val encryptedWithPreviousKey2 = crypto.encrypt(PreviousKey2.plainMessage, previousKey2Crypto)
val encryptedBytesWithPreviousKey2 = crypto.encrypt(PreviousKey2.plainByteMessage, previousKey2Crypto)
crypto.decrypt(encryptedWithPreviousKey2) shouldBe PreviousKey2.plainMessage
crypto.decrypt(encryptedBytesWithPreviousKey2) shouldBe PreviousKey2.plainByteMessageResponse
}
}
"Constructing a CompositeCryptoWithKeysFromConfig without current or previous keys" should {
val fakeApplicationWithoutAnyKeys = FakeApplication()
"throw a SecurityException on construction" in running(fakeApplicationWithoutAnyKeys) {
intercept[SecurityException]{
CryptoGCMWithKeysFromConfig(baseConfigKey)
}
}
}
"Constructing a CompositeCryptoWithKeysFromConfig without a current key, but with previous keys" should {
val fakeApplicationWithPreviousKeysOnly = FakeApplication(additionalConfiguration = Map(
PreviousKeys.configKey -> PreviousKeys.encryptionKeys
))
"throw a SecurityException on construction" in running(fakeApplicationWithPreviousKeysOnly) {
intercept[SecurityException]{
CryptoGCMWithKeysFromConfig(baseConfigKey)
}
}
}
"Constructing a CryptoGCMWithKeysFromConfig with an invalid key" should {
val keyWithInvalidNumberOfBits = "ZGVmZ2hpamtsbW4K"
val keyWithInvalidKeySize = "defgh£jklmn"
val fakeApplicationWithShortCurrentKey = FakeApplication(additionalConfiguration = Map(
CurrentKey.configKey -> keyWithInvalidNumberOfBits,
PreviousKeys.configKey -> PreviousKeys.encryptionKeys
))
val fakeApplicationWithInvalidKeySize = FakeApplication(additionalConfiguration = Map(
CurrentKey.configKey -> keyWithInvalidKeySize,
PreviousKeys.configKey -> PreviousKeys.encryptionKeys
))
val fakeApplicationWithShortFirstPreviousKey = FakeApplication(additionalConfiguration = Map(
CurrentKey.configKey -> CurrentKey.encryptionKey,
PreviousKeys.configKey -> Seq(keyWithInvalidNumberOfBits, PreviousKey1.encryptionKey, PreviousKey2.encryptionKey)
))
val fakeApplicationWithInvalidBase64FirstPreviousKey = FakeApplication(additionalConfiguration = Map(
CurrentKey.configKey -> CurrentKey.encryptionKey,
PreviousKeys.configKey -> Seq(keyWithInvalidKeySize, PreviousKey1.encryptionKey, PreviousKey2.encryptionKey)
))
val fakeApplicationWithShortOtherPreviousKey = FakeApplication(additionalConfiguration = Map(
CurrentKey.configKey -> CurrentKey.encryptionKey,
PreviousKeys.configKey -> Seq(PreviousKey1.encryptionKey, keyWithInvalidNumberOfBits, PreviousKey2.encryptionKey)
))
val fakeApplicationWithInvalidBase64OtherPreviousKey = FakeApplication(additionalConfiguration = Map(
CurrentKey.configKey -> CurrentKey.encryptionKey,
PreviousKeys.configKey -> Seq(PreviousKey1.encryptionKey, keyWithInvalidKeySize, PreviousKey2.encryptionKey)
))
"throw a SecurityException if the current key is too short" in running(fakeApplicationWithShortCurrentKey) {
intercept[SecurityException]{
CryptoGCMWithKeysFromConfig(baseConfigKey)
}
}
"throw a SecurityException if the current key length is not 128 bits" in running(fakeApplicationWithInvalidKeySize) {
intercept[SecurityException]{
CryptoGCMWithKeysFromConfig(baseConfigKey)
}
}
"throw a SecurityException if the first previous key is too short" in running(fakeApplicationWithShortFirstPreviousKey) {
intercept[SecurityException]{
CryptoWithKeysFromConfig(baseConfigKey)
}
}
"throw a SecurityException if the first previous key cannot be base 64 decoded" in running(fakeApplicationWithInvalidBase64FirstPreviousKey) {
intercept[SecurityException]{
CryptoWithKeysFromConfig(baseConfigKey)
}
}
"throw a SecurityException if the other previous key is too short" in running(fakeApplicationWithShortOtherPreviousKey) {
intercept[SecurityException]{
CryptoWithKeysFromConfig(baseConfigKey)
}
}
"throw a SecurityException if the other previous key cannot be base 64 decoded" in running(fakeApplicationWithInvalidBase64OtherPreviousKey) {
intercept[SecurityException]{
CryptoWithKeysFromConfig(baseConfigKey)
}
}
}
}
| scottcutts/crypto | src/test/scala/uk/gov/hmrc/crypto/CryptoGCMWithKeysFromConfigSpec.scala | Scala | apache-2.0 | 10,011 |
package spire
package std
import spire.algebra.{EuclideanRing, IsIntegral, NRoot, Order, Signed}
import spire.math.BitString
trait ByteIsEuclideanRing extends EuclideanRing[Byte] {
override def minus(a:Byte, b:Byte): Byte = (a - b).toByte
def negate(a:Byte): Byte = (-a).toByte
def one: Byte = 1.toByte
def plus(a:Byte, b:Byte): Byte = (a + b).toByte
override def pow(a: Byte, b:Int): Byte = Math.pow(a, b).toByte
override def times(a:Byte, b:Byte): Byte = (a * b).toByte
def zero: Byte = 0.toByte
override def fromInt(n: Int): Byte = n.toByte
def quot(a: Byte, b: Byte): Byte = (a / b).toByte
def mod(a: Byte, b: Byte): Byte = (a % b).toByte
def gcd(a: Byte, b: Byte): Byte = spire.math.gcd(a, b).toByte
}
// Not included in Instances trait.
trait ByteIsNRoot extends NRoot[Byte] {
def nroot(x: Byte, n: Int): Byte = {
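// Bitwise search for the n-th root: propose each bit from high to low and keep it
// only while the candidate's n-th power does not overshoot x.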
def findnroot(prev: Int, add: Int): Byte = {
val next = prev | add
val e = Math.pow(next, n)
if (e == x || add == 0) {
next.toByte
} else if (e <= 0 || e > x) {
findnroot(prev, add >> 1)
} else {
findnroot(next, add >> 1)
}
}
findnroot(0, 1 << ((33 - n) / n))
}
def log(a: Byte): Byte = Math.log(a.toDouble).toByte
def fpow(a: Byte, b: Byte): Byte = Math.pow(a, b).toByte
}
trait ByteIsSigned extends Signed[Byte] {
def signum(a: Byte): Int = a
def abs(a: Byte): Byte = (if (a < 0) -a else a).toByte
}
trait ByteOrder extends Order[Byte] {
override def eqv(x:Byte, y:Byte): Boolean = x == y
override def neqv(x:Byte, y:Byte): Boolean = x != y
override def gt(x: Byte, y: Byte): Boolean = x > y
override def gteqv(x: Byte, y: Byte): Boolean = x >= y
override def lt(x: Byte, y: Byte): Boolean = x < y
override def lteqv(x: Byte, y: Byte): Boolean = x <= y
def compare(x: Byte, y: Byte): Int = java.lang.Integer.signum((x: Int) - (y: Int))
}
trait ByteIsReal extends IsIntegral[Byte] with ByteOrder with ByteIsSigned {
def toDouble(n: Byte): Double = n.toDouble
def toBigInt(n: Byte): BigInt = BigInt(n)
}
@SerialVersionUID(0L)
class ByteIsBitString extends BitString[Byte] with Serializable {
def one: Byte = (-1: Byte)
def zero: Byte = (0: Byte)
def and(a: Byte, b: Byte): Byte = (a & b).toByte
def or(a: Byte, b: Byte): Byte = (a | b).toByte
def complement(a: Byte): Byte = (~a).toByte
override def xor(a: Byte, b: Byte): Byte = (a ^ b).toByte
def signed: Boolean = true
def width: Int = 8
def toHexString(n: Byte): String = Integer.toHexString(n & 0xff)
def bitCount(n: Byte): Int = Integer.bitCount(n & 0xff)
def highestOneBit(n: Byte): Byte = (Integer.highestOneBit(n & 0xff) & 0xff).toByte
def lowestOneBit(n: Byte): Byte = (Integer.lowestOneBit(n & 0xff) & 0xff).toByte
def numberOfLeadingZeros(n: Byte): Int = Integer.numberOfLeadingZeros(n & 0xff) - 24
def numberOfTrailingZeros(n: Byte): Int = if (n == 0) 8 else Integer.numberOfTrailingZeros(n & 0xff)
def leftShift(n: Byte, i: Int): Byte = (((n & 0xff) << (i & 7)) & 0xff).toByte
def rightShift(n: Byte, i: Int): Byte = (((n & 0xff) >>> (i & 7)) & 0xff).toByte
def signedRightShift(n: Byte, i: Int): Byte = ((n >> (i & 7)) & 0xff).toByte
def rotateLeft(n: Byte, i: Int): Byte = {
val j = i & 7
((((n & 0xff) << j) | ((n & 0xff) >>> (8 - j))) & 0xff).toByte
}
def rotateRight(n: Byte, i: Int): Byte = {
val j = i & 7
((((n & 0xff) >>> j) | ((n & 0xff) << (8 - j))) & 0xff).toByte
}
}
@SerialVersionUID(0L)
class ByteAlgebra extends ByteIsEuclideanRing with ByteIsReal with Serializable
trait ByteInstances {
implicit final val ByteBitString = new ByteIsBitString
implicit final val ByteAlgebra = new ByteAlgebra
import spire.math.NumberTag._
implicit final val ByteTag = new BuiltinIntTag[Byte](0, Byte.MinValue, Byte.MaxValue)
}
| tixxit/spire | core/shared/src/main/scala/spire/std/byte.scala | Scala | mit | 3,817 |
/*
* Copyright 2013 Maurício Linhares
*
* Maurício Linhares licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.github.mauricio.async.db.exceptions
class UnknownLengthException ( length : Int )
extends DatabaseException( "Can't handle the length %d".format(length) )
| outbrain/postgresql-async | db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/UnknownLengthException.scala | Scala | apache-2.0 | 807 |
package mesosphere.marathon.api.v2.json
import java.lang.{ Double => JDouble, Integer => JInt }
import com.fasterxml.jackson.annotation.{ JsonIgnoreProperties, JsonProperty }
import mesosphere.marathon.Protos.Constraint
import mesosphere.marathon.api.validation.FieldConstraints._
import mesosphere.marathon.api.validation.{ PortIndices, ValidV2AppDefinition }
import mesosphere.marathon.health.{ HealthCheck, HealthCounts }
import mesosphere.marathon.state._
import mesosphere.marathon.upgrade.DeploymentPlan
import org.apache.mesos.{ Protos => mesos }
import scala.collection.immutable.Seq
import scala.concurrent.duration._
@PortIndices
@JsonIgnoreProperties(ignoreUnknown = true)
@ValidV2AppDefinition
case class V2AppDefinition(
id: PathId = AppDefinition.DefaultId,
cmd: Option[String] = AppDefinition.DefaultCmd,
args: Option[Seq[String]] = AppDefinition.DefaultArgs,
user: Option[String] = AppDefinition.DefaultUser,
env: Map[String, String] = AppDefinition.DefaultEnv,
@FieldMin(0) instances: JInt = AppDefinition.DefaultInstances,
cpus: JDouble = AppDefinition.DefaultCpus,
mem: JDouble = AppDefinition.DefaultMem,
disk: JDouble = AppDefinition.DefaultDisk,
@FieldPattern(regexp = "^(//cmd)|(/?[^/]+(/[^/]+)*)|$") executor: String = AppDefinition.DefaultExecutor,
constraints: Set[Constraint] = AppDefinition.DefaultConstraints,
uris: Seq[String] = AppDefinition.DefaultUris,
storeUrls: Seq[String] = AppDefinition.DefaultStoreUrls,
@FieldPortsArray ports: Seq[JInt] = AppDefinition.DefaultPorts,
requirePorts: Boolean = AppDefinition.DefaultRequirePorts,
@FieldJsonProperty("backoffSeconds") backoff: FiniteDuration = AppDefinition.DefaultBackoff,
backoffFactor: JDouble = AppDefinition.DefaultBackoffFactor,
@FieldJsonProperty("maxLaunchDelaySeconds") maxLaunchDelay: FiniteDuration = AppDefinition.DefaultMaxLaunchDelay,
container: Option[Container] = AppDefinition.DefaultContainer,
healthChecks: Set[HealthCheck] = AppDefinition.DefaultHealthChecks,
dependencies: Set[PathId] = AppDefinition.DefaultDependencies,
upgradeStrategy: UpgradeStrategy = AppDefinition.DefaultUpgradeStrategy,
labels: Map[String, String] = AppDefinition.DefaultLabels,
acceptedResourceRoles: Option[Set[String]] = None,
version: Timestamp = Timestamp.now()) extends Timestamped {
assert(
portIndicesAreValid(),
"Health check port indices must address an element of the ports array or container port mappings."
)
/**
* Returns true if all health check port index values are in the range
* of this app's ports array, or if defined, the array of container
* port mappings.
*/
def portIndicesAreValid(): Boolean =
this.toAppDefinition.portIndicesAreValid()
/**
* Returns the canonical internal representation of this API-specific
* application definition.
*/
def toAppDefinition: AppDefinition =
AppDefinition(
id, cmd, args, user, env, instances, cpus,
mem, disk, executor, constraints, uris,
storeUrls, ports, requirePorts, backoff,
backoffFactor, maxLaunchDelay, container,
healthChecks, dependencies, upgradeStrategy,
labels, acceptedResourceRoles, version)
def withTaskCountsAndDeployments(
appTasks: Seq[EnrichedTask], healthCounts: HealthCounts,
runningDeployments: Seq[DeploymentPlan]): V2AppDefinition.WithTaskCountsAndDeployments = {
new V2AppDefinition.WithTaskCountsAndDeployments(appTasks, healthCounts, runningDeployments, this)
}
def withTasksAndDeployments(
appTasks: Seq[EnrichedTask], healthCounts: HealthCounts,
runningDeployments: Seq[DeploymentPlan]): V2AppDefinition.WithTasksAndDeployments =
new V2AppDefinition.WithTasksAndDeployments(appTasks, healthCounts, runningDeployments, this)
def withTasksAndDeploymentsAndFailures(
appTasks: Seq[EnrichedTask], healthCounts: HealthCounts,
runningDeployments: Seq[DeploymentPlan],
taskFailure: Option[TaskFailure]): V2AppDefinition.WithTasksAndDeploymentsAndTaskFailures =
new V2AppDefinition.WithTasksAndDeploymentsAndTaskFailures(
appTasks, healthCounts,
runningDeployments, taskFailure, this
)
def withCanonizedIds(base: PathId = PathId.empty): V2AppDefinition = {
val baseId = id.canonicalPath(base)
copy(id = baseId, dependencies = dependencies.map(_.canonicalPath(baseId)))
}
}
object V2AppDefinition {
def apply(app: AppDefinition): V2AppDefinition =
V2AppDefinition(
app.id, app.cmd, app.args, app.user, app.env, app.instances, app.cpus,
app.mem, app.disk, app.executor, app.constraints, app.uris,
app.storeUrls, app.ports, app.requirePorts, app.backoff,
app.backoffFactor, app.maxLaunchDelay, app.container,
app.healthChecks, app.dependencies, app.upgradeStrategy,
app.labels, app.acceptedResourceRoles, app.version)
protected[marathon] class WithTaskCountsAndDeployments(
appTasks: Seq[EnrichedTask],
healthCounts: HealthCounts,
runningDeployments: Seq[DeploymentPlan],
private val app: V2AppDefinition)
extends V2AppDefinition(
app.id, app.cmd, app.args, app.user, app.env, app.instances, app.cpus,
app.mem, app.disk, app.executor, app.constraints, app.uris,
app.storeUrls, app.ports, app.requirePorts, app.backoff,
app.backoffFactor, app.maxLaunchDelay, app.container,
app.healthChecks, app.dependencies, app.upgradeStrategy,
app.labels, app.acceptedResourceRoles, app.version) {
/**
* Snapshot of the number of staged (but not running) tasks
* for this app
*/
@JsonProperty
val tasksStaged: Int = appTasks.count { eTask =>
eTask.task.getStagedAt != 0 && eTask.task.getStartedAt == 0
}
/**
* Snapshot of the number of running tasks for this app
*/
@JsonProperty
val tasksRunning: Int = appTasks.count { eTask =>
eTask.task.hasStatus &&
eTask.task.getStatus.getState == mesos.TaskState.TASK_RUNNING
}
/**
* Snapshot of the number of healthy tasks for this app
*/
@JsonProperty
val tasksHealthy: Int = healthCounts.healthy
/**
* Snapshot of the number of unhealthy tasks for this app
*/
@JsonProperty
val tasksUnhealthy: Int = healthCounts.unhealthy
/**
* Snapshot of the running deployments that affect this app
*/
@JsonProperty
def deployments: Seq[Identifiable] = {
runningDeployments.collect {
case plan: DeploymentPlan if plan.affectedApplicationIds contains app.id => Identifiable(plan.id)
}
}
}
protected[marathon] class WithTasksAndDeployments(
appTasks: Seq[EnrichedTask], healthCounts: HealthCounts,
runningDeployments: Seq[DeploymentPlan],
private val app: V2AppDefinition)
extends WithTaskCountsAndDeployments(appTasks, healthCounts, runningDeployments, app) {
@JsonProperty
def tasks: Seq[EnrichedTask] = appTasks
}
protected[marathon] class WithTasksAndDeploymentsAndTaskFailures(
appTasks: Seq[EnrichedTask], healthCounts: HealthCounts,
runningDeployments: Seq[DeploymentPlan],
taskFailure: Option[TaskFailure],
private val app: V2AppDefinition)
extends WithTasksAndDeployments(appTasks, healthCounts, runningDeployments, app) {
@JsonProperty
def lastTaskFailure: Option[TaskFailure] = taskFailure
}
}
| HardikDR/marathon | src/main/scala/mesosphere/marathon/api/v2/json/V2AppDefinition.scala | Scala | apache-2.0 | 7,452 |
package core.formatter.geo
import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.databind.{ JsonSerializer, SerializerProvider }
import com.lvxingpai.model.marketplace.seller.Seller
import core.model.misc.GeoCommodity
import scala.collection.JavaConversions._
/**
* Created by pengyt on 2015/11/19.
*/
class GeoCommoditySerializer extends JsonSerializer[GeoCommodity] {
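// Serializes a GeoCommodity as { "geoId": ..., "sellers": [ ... ] }, delegating each
// Seller in the array to the serializer registered for the Seller type.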
override def serialize(geo: GeoCommodity, gen: JsonGenerator, serializers: SerializerProvider): Unit = {
gen.writeStartObject()
gen.writeStringField("geoId", Option(geo.geoId).map(_.toString) getOrElse "")
gen.writeFieldName("sellers")
gen.writeStartArray()
val orderAct = serializers.findValueSerializer(classOf[Seller], null)
Option(geo.sellers) map (_.toSeq) getOrElse Seq() foreach (orderAct.serialize(_, gen, serializers))
gen.writeEndArray()
gen.writeEndObject()
}
}
| Lvxingpai/Hanse | app/core/formatter/geo/GeoCommoditySerializer.scala | Scala | apache-2.0 | 903 |
/*
* This software is licensed under the GNU Affero General Public License, quoted below.
*
* This file is a part of PowerAPI.
*
* Copyright (C) 2011-2016 Inria, University of Lille 1.
*
* PowerAPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* PowerAPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PowerAPI.
*
* If not, please consult http://www.gnu.org/licenses/agpl-3.0.html.
*/
package org.powerapi.module.libpfm
import java.util.UUID
import scala.collection.BitSet
import scala.concurrent.duration.DurationInt
import scala.concurrent.Await
import akka.actor.Props
import akka.pattern.gracefulStop
import akka.testkit.{EventFilter, TestActorRef}
import akka.util.Timeout
import org.powerapi.UnitTest
import org.powerapi.core.MonitorChannel.publishMonitorTick
import org.powerapi.core.target.{All, Process}
import org.powerapi.core.{MessageBus, Tick}
import org.powerapi.module.SensorChannel.{startSensor, stopSensor}
import org.powerapi.module.Sensors
import org.powerapi.module.libpfm.PerformanceCounterChannel.{PCReport, subscribePCReport}
import org.scalamock.scalatest.MockFactory
class LibpfmCoreSensorSuite extends UnitTest with MockFactory {
val timeout = Timeout(20.seconds)
val topology = Map(0 -> Set(0, 1), 1 -> Set(2, 3))
val events = Set("event", "event1")
override def afterAll() = {
system.terminate()
}
trait Bus {
val eventBus = new MessageBus
}
"A LibpfmCoreSensor" should "handle MonitorTick messages and sense HW counter values for the All target" in new Bus {
val configuration = BitSet()
val helper = mock[LibpfmHelper]
val muid = UUID.randomUUID()
val target = All
val tick1 = new Tick {
val topic = "test"
val timestamp = System.currentTimeMillis()
}
val tick2 = new Tick {
val topic = "test"
val timestamp = System.currentTimeMillis() + 1000
}
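// One fake perf-counter file descriptor per (core, event) pair expected by the mocked helper.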
val fds = (0 until topology.values.flatten.size * events.size).iterator
val sensors = TestActorRef(Props(classOf[Sensors], eventBus), "sensors")
helper.disablePC _ expects * anyNumberOfTimes() returning true
helper.closePC _ expects * anyNumberOfTimes() returning true
helper.resetPC _ expects * repeat topology.values.flatten.size * events.size returning true
helper.enablePC _ expects * repeat topology.values.flatten.size * events.size returning true
for {
core: Int <- topology.keys
index: Int <- topology(core)
event: String <- events
} helper.configurePC _ expects(CID(index), configuration, event) returning Some(fds.next)
helper.readPC _ expects * repeat topology.values.flatten.size * events.size returning Array(1l, 1l, 1l)
EventFilter.info(occurrences = 1, start = s"sensor is started, class: ${classOf[LibpfmCoreSensor].getName}").intercept({
startSensor(muid, target, classOf[LibpfmCoreSensor], Seq(eventBus, muid, target, helper, timeout, topology, configuration, events))(eventBus)
})
subscribePCReport(muid, target)(eventBus)(testActor)
for (i <- 0 until topology.values.flatten.size * events.size) {
helper.readPC _ expects i returning Array[Long](i + 5, 2, 2)
helper.scale _ expects where {
(now: Array[Long], old: Array[Long]) => now.deep == Array[Long](i + 5, 2, 2).deep && old.deep == Array[Long](1, 1, 1).deep
} returning Some(i + 5 - 1)
}
val results = Map[(Int, String), Long]((0, "event") -> 10, (0, "event1") -> 12, (1, "event") -> 18, (1, "event1") -> 20)
publishMonitorTick(muid, All, tick1)(eventBus)
expectMsgClass(classOf[PCReport]) match {
case PCReport(_, _, All, values, _) =>
values.size should equal(topology.size)
for (value <- values) {
for ((event, counters) <- value._2) {
counters.map(_.value).sum should equal(results((value._1, event)))
}
}
case _ =>
{}
}
EventFilter.info(occurrences = 1, start = s"sensor is stopped, class: ${classOf[LibpfmCoreSensor].getName}").intercept({
stopSensor(muid)(eventBus)
})
publishMonitorTick(muid, All, tick2)(eventBus)
expectNoMsg()
EventFilter.info(occurrences = 1, start = s"sensor is started, class: ${classOf[LibpfmCoreSensor].getName}").intercept({
startSensor(muid, Process(1), classOf[LibpfmCoreSensor], Seq(eventBus, muid, Process(1), helper, timeout, topology, configuration, events))(eventBus)
})
subscribePCReport(muid, Process(1))(eventBus)(testActor)
publishMonitorTick(muid, Process(1), tick1)(eventBus)
expectNoMsg()
Await.result(gracefulStop(sensors, timeout.duration), timeout.duration)
}
}
| Spirals-Team/powerapi | powerapi-core/src/test/scala/org/powerapi/module/libpfm/LibpfmCoreSensorSuite.scala | Scala | agpl-3.0 | 5,110 |
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt
import sbt.util.Level
import sbt.internal.util.complete.HistoryCommands
import scala.annotation.tailrec
import java.io.File
import sbt.io.Path._
object BasicCommandStrings {
val HelpCommand = "help"
val CompletionsCommand = "completions"
val Exit = "exit"
val Quit = "quit"
/** The command name to terminate the program.*/
val TerminateAction: String = Exit
def helpBrief = (HelpCommand, s"Displays this help message or prints detailed help on requested commands (run '$HelpCommand <command>').")
def helpDetailed = HelpCommand + """
Prints a help summary.
""" + HelpCommand + """ <command>
Prints detailed help for command <command>.
""" + HelpCommand + """ <regular expression>
Searches the help according to the provided regular expression.
"""
def CompletionsDetailed = "Displays a list of completions for the given argument string (run 'completions <string>')."
def CompletionsBrief = (CompletionsCommand, CompletionsDetailed)
def HistoryHelpBrief = (HistoryCommands.Start -> "History command help. Lists and describes all history commands.")
def historyHelp = Help(Nil, (HistoryHelpBrief +: HistoryCommands.descriptions).toMap, Set(HistoryCommands.Start))
def exitBrief = "Terminates the build."
def logLevelHelp =
{
val levels = Level.values.toSeq
val levelList = levels.mkString(", ")
val brief = ("<log-level>", "Sets the logging level to 'log-level'. Valid levels: " + levelList)
val detailed = levels.map(l => (l.toString, logLevelDetail(l))).toMap
Help(brief, detailed)
}
private[this] def logLevelDetail(level: Level.Value): String =
s"""$level
Sets the global logging level to $level.
This will be used as the default level for logging from commands, settings, and tasks.
Any explicit `logLevel` configuration in a project overrides this setting.
${runEarly(level.toString)}
Sets the global logging level as described above, but does so before any other commands are executed on startup, including project loading.
This is useful as a startup option:
* it takes effect before any logging occurs
* if no other commands are passed, interactive mode is still entered
"""
def runEarly(command: String) = {
val sep = if (command.isEmpty || Character.isLetter(command.charAt(0))) "" else " "
s"$EarlyCommand$sep$command"
}
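/** True for `--<command>` strings, excluding the deprecated `--` and `---` compat commands. */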
private[sbt] def isEarlyCommand(s: String): Boolean = {
s.startsWith(EarlyCommand) && s != Compat.FailureWall && s != Compat.ClearOnFailure
}
val EarlyCommand = "--"
val EarlyCommandBrief = (s"$EarlyCommand<command>", "Schedules a command to run before other commands on startup.")
val EarlyCommandDetailed =
s"""$EarlyCommand<command>
Schedules an early command, which will be run before other commands on the command line.
The order is preserved between all early commands, so `sbt --a --b` executes `a` and `b` in order.
"""
def ReadCommand = "<"
def ReadFiles = " file1 file2 ..."
def ReadDetailed =
ReadCommand + ReadFiles + """
Reads the lines from the given files and inserts them as commands.
All empty lines and lines that start with '#' are ignored.
If a file does not exist or is not readable, this command fails.
All the lines from all the files are read before any of the commands
are executed. Thus, if any file is not readable, none of the commands
from any of the files (even the existing ones) will be run.
You probably need to escape this command if entering it at your shell."""
def ApplyCommand = "apply"
def ApplyDetailed =
ApplyCommand + """ [-cp|-classpath <classpath>] <module-name>*
Transforms the current State by calling <module-name>.apply(currentState) for each listed module name.
Here, currentState is of type sbt.State.
If a classpath is provided, modules are loaded from a new class loader for this classpath.
"""
def RebootCommand = "reboot"
def RebootDetailed =
RebootCommand + """ [full]
This command is equivalent to exiting sbt, restarting, and running the
remaining commands with the exception that the JVM is not shut down.
If 'full' is specified, the boot directory (`~/.sbt/boot` by default)
is deleted before restarting. This forces an update of sbt and Scala
and is useful when working with development versions of sbt or Scala."""
def Multi = ";"
def MultiBrief = (Multi + " <command> (" + Multi + " <command>)*", "Runs the provided semicolon-separated commands.")
def MultiDetailed =
Multi + " command1 " + Multi + """ command2 ...
Runs the specified commands."""
def AppendCommand = "append"
def AppendLastDetailed =
AppendCommand + """ <command>
Appends 'command' to list of commands to run.
"""
val AliasCommand = "alias"
def AliasDetailed =
AliasCommand + """
Prints a list of defined aliases.
""" +
AliasCommand + """ name
Prints the alias defined for `name`.
""" +
AliasCommand + """ name=value
Sets the alias `name` to `value`, replacing any existing alias with that name.
Whenever `name` is entered, the corresponding `value` is run.
If any argument is provided to `name`, it is appended as argument to `value`.
""" +
AliasCommand + """ name=
Removes the alias for `name`."""
def Shell = "shell"
def ShellDetailed = "Provides an interactive prompt from which commands can be run."
def StashOnFailure = "sbtStashOnFailure"
def PopOnFailure = "sbtPopOnFailure"
// commands with poor choices for names since they clash with the usual conventions for command line options
// these are not documented and are mainly internal commands and can be removed without a full deprecation cycle
object Compat {
def OnFailure = "-"
def ClearOnFailure = "--"
def FailureWall = "---"
def OnFailureDeprecated = deprecatedAlias(OnFailure, BasicCommandStrings.OnFailure)
def ClearOnFailureDeprecated = deprecatedAlias(ClearOnFailure, BasicCommandStrings.ClearOnFailure)
def FailureWallDeprecated = deprecatedAlias(FailureWall, BasicCommandStrings.FailureWall)
private[this] def deprecatedAlias(oldName: String, newName: String): String =
s"The `$oldName` command is deprecated in favor of `$newName` and will be removed in 0.14.0"
}
def FailureWall = "resumeFromFailure"
def ClearOnFailure = "sbtClearOnFailure"
def OnFailure = "onFailure"
def OnFailureDetailed =
OnFailure + """ command
Registers 'command' to run when a command fails to complete normally.
Only one failure command may be registered at a time, so this command
replaces the previous command if there is one.
The failure command resets when it runs once, so it must be added
again if desired."""
def IfLast = "iflast"
def IfLastCommon = "If there are no more commands after this one, 'command' is run."
def IfLastDetailed =
IfLast + """ <command>
""" + IfLastCommon
val ContinuousExecutePrefix = "~"
def continuousDetail = "Executes the specified command whenever source files change."
def continuousBriefHelp = (ContinuousExecutePrefix + " <command>", continuousDetail)
}
| mdedetrich/sbt | main/command/src/main/scala/sbt/BasicCommandStrings.scala | Scala | bsd-3-clause | 7,139 |
package se.lu.nateko.cp.meta.services.sparql.magic.fusion
import org.eclipse.rdf4j.model.IRI
import org.eclipse.rdf4j.model.Literal
import org.eclipse.rdf4j.model.Value
import org.eclipse.rdf4j.query.algebra.BindingSetAssignment
import org.eclipse.rdf4j.query.algebra.Exists
import org.eclipse.rdf4j.query.algebra.Extension
import org.eclipse.rdf4j.query.algebra.Not
import org.eclipse.rdf4j.query.algebra.QueryModelNode
import org.eclipse.rdf4j.query.algebra.SingletonSet
import org.eclipse.rdf4j.query.algebra.StatementPattern
import org.eclipse.rdf4j.query.algebra.TupleExpr
import org.eclipse.rdf4j.query.algebra.Union
import org.eclipse.rdf4j.query.algebra.ValueExpr
import se.lu.nateko.cp.meta.services.CpmetaVocab
import se.lu.nateko.cp.meta.services.sparql.index
import se.lu.nateko.cp.meta.services.sparql.index.{Exists => _, _}
import se.lu.nateko.cp.meta.services.sparql.magic.fusion.StatsFetchPatternSearch.GroupPattern
import se.lu.nateko.cp.meta.utils.rdf4j._
import DofPatternFusion._
sealed trait FusionPattern
case class DobjStatFusion(exprToFuse: Extension, node: StatsFetchNode) extends FusionPattern
case class DobjListFusion(
fetch: DataObjectFetch,
exprsToFuse: Seq[TupleExpr],
propVars: Map[NamedVar, Property],
nonMagicQMNodes: Seq[QueryModelNode]
) extends FusionPattern{
def essentiallyEqual(other: DobjListFusion): Boolean =
this.fetch == other.fetch &&
this.propVars == other.propVars &&
this.nonMagicNodeIds == other.nonMagicNodeIds
def isPureCpIndexQuery: Boolean = nonMagicQMNodes.isEmpty
def nonMagicNodeIds = nonMagicQMNodes.map(System.identityHashCode).toSet
}
class DofPatternFusion(meta: CpmetaVocab){
def findFusions(patt: DofPattern): Seq[FusionPattern] = patt match{
case DofPattern.Empty => Nil
case pdp @ ProjectionDofPattern(_, _, _, _, Some(outer)) =>
findFusions(pdp.copy(outer = None)) ++ findFusions(outer)
case pdp @ ProjectionDofPattern(lj: LeftJoinDofPattern, _, Some(groupBy), _, _) =>
findStatsFusion(groupBy, lj).fold(findFusions(pdp.copy(groupBy = None)))(Seq(_))
case pdp: ProjectionDofPattern => findFusions(pdp.inner) match{
case Seq(singleResult: DobjListFusion) => Seq(addOrderByAndOffset(pdp, singleResult))
case any => any
}
case lj: LeftJoinDofPattern => findFusions(lj.left) ++ lj.optionals.flatMap(findFusions)
case union: DofPatternUnion =>
val subSeqs = union.subs.map(findFusions)
val subs = subSeqs.flatten.collect{case dlf: DobjListFusion => dlf}
def allMergable: Boolean = subs.distinctBy{sub =>
val nonMagicNodeIds = sub.nonMagicQMNodes.map(System.identityHashCode).toSet
(sub.fetch.sort, sub.fetch.offset, nonMagicNodeIds)
}.size == 1
val oneListFusionPerSubPatt: Boolean = subSeqs.forall(_.size == 1) && subs.size == subSeqs.size
if(oneListFusionPerSubPatt && allMergable){
val newExprsToFuse = subs.flatMap(_.exprsToFuse).distinctBy(System.identityHashCode) :+ union.union
val allSame = subs.sliding(2,1).forall(s => s(0) essentiallyEqual s(1))
if(allSame)
Seq(subs.head.copy(exprsToFuse = newExprsToFuse))
else {
unionVarProps(subs.map(_.propVars)).fold(Seq.empty[FusionPattern]){propVars =>
val sampleFetch = subs.head.fetch
Seq(DobjListFusion(
fetch = DataObjectFetch(Or(subs.map(_.fetch.filter)).flatten, sampleFetch.sort, sampleFetch.offset),
exprsToFuse = newExprsToFuse,
propVars = propVars,
nonMagicQMNodes = subs.head.nonMagicQMNodes
))
}
}
}
else subSeqs.flatten
case plain: PlainDofPattern => findPlainFusion(plain).toSeq
}
def addOrderByAndOffset(pdp: ProjectionDofPattern, inner: DobjListFusion): DobjListFusion = {
val sortBy = pdp.orderBy.map(op => op -> inner.propVars.get(op.sortVar)).collect{
case (op, Some(cp: ContProp)) => SortBy(cp, op.descending)
}
val offset = pdp.offset.filter(_ => inner.isPureCpIndexQuery)
val exprs = inner.exprsToFuse ++ sortBy.flatMap(_ => pdp.orderBy.map(_.expr)) ++ offset.map(_.slice)
inner.copy(
fetch = inner.fetch.copy(sort = sortBy, offset = offset.fold(0)(_.offset)),
exprsToFuse = exprs
)
}
def findPlainFusion(patt: PlainDofPattern): Option[DobjListFusion] = patt.dobjVar.collect{
//if dobj is pre-specified, then there is no need for SPARQL magic
case dobjVar if(patt.varValues.get(dobjVar).flatMap(_.vals).isEmpty) =>
val varProps = getVarPropLookup(patt)
val andOrFilterParser = new FilterPatternSearch(varProps, meta)
val filtsAndExprs = patt.filters.flatMap{fexp =>
andOrFilterParser.parseFilterExpr(fexp).map(_ -> fexp.getParentNode)
}
val filts = filtsAndExprs.map(_._1)
val filtExprs = filtsAndExprs.collect{case (_, te: TupleExpr) => te}
val categFiltsAndExprs = varProps.toSeq.flatMap{
case (v, prop: CategProp) => getCategFilter(v, prop, patt.varValues)
case _ => None
}
val categFilts: Seq[Filter] = categFiltsAndExprs.map(_._1)
val categExprs = categFiltsAndExprs.flatMap(_._2)
val reqProps = varProps.valuesIterator.collect{
case cp: ContProp => cp
case optp: OptUriProperty => optp
}.distinct.toSeq
val allFilts = And(categFilts ++ filts ++ reqProps.map(index.Exists(_))).optimize
val namedVarProps = varProps.collect{
case (nv: NamedVar, prop) => nv -> prop
}
val engagedVars = namedVarProps.keySet.toSet[QVar]
val statPattExprs = patt.propPaths.values.flatten.collect{
//filenames are not in the index, need to leave this pattern in the query
case sp2 @ StatementPattern2(pred, sp) if pred != meta.hasName && engagedVars.contains(sp2.targetVar) => sp
}
val assignmentExprs = patt.varValues.collect{
case (v, vif) if varProps.contains(v) => vif.providers
}.flatten
val allExprs = filtExprs ++ categExprs ++ statPattExprs ++ assignmentExprs
val nonMagicFilterExprs = patt.filters.map(_.getParentNode).filter(f => !filtExprs.contains(f))
val nonMagicStatPatts = patt.propPaths.flatMap(_._2).filterNot{sp2 =>
val objVar = sp2.sp.getObjectVar
varProps.contains(sp2.targetVar) || (objVar.isAnonymous && !objVar.hasValue)
}.map(_.sp)
val nonMagicQMNodes = nonMagicFilterExprs ++ nonMagicStatPatts
DobjListFusion(DataObjectFetch(allFilts, None, 0), allExprs, namedVarProps, nonMagicQMNodes)
}
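// Maps each SPARQL query variable to the data-object Property it stands for, found by
// walking the configured predicate paths from the dobj variable through the pattern's propPaths.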
def getVarPropLookup(patt: PlainDofPattern): VarPropLookup = {
def endVar(steps: IRI*): Iterable[QVar] = steps.reverse.toList match{
case Nil => patt.dobjVar
case head :: tail => for(
prev <- endVar(tail:_*);
statPatts <- patt.propPaths.get(prev).toSeq;
statPat <- statPatts.filter(_.pred === head)
) yield statPat.targetVar
}
def propVar(prop: Property, steps: IRI*) = endVar(steps:_*).map(_ -> prop)
//TODO This approach disregards the possibility of duplicate entries (all but one get discarded)
Seq(
propVar(DobjUri),
propVar(Spec , meta.hasObjectSpec ),
propVar(VariableName , meta.hasVariableName),
propVar(Keyword , meta.hasKeyword ),
propVar(FileName , meta.hasName ),
propVar(FileSize , meta.hasSizeInBytes),
propVar(Submitter , meta.wasSubmittedBy , meta.prov.wasAssociatedWith),
propVar(SubmissionStart, meta.wasSubmittedBy , meta.prov.startedAtTime ),
propVar(SubmissionEnd , meta.wasSubmittedBy , meta.prov.endedAtTime ),
propVar(Station , meta.wasAcquiredBy , meta.prov.wasAssociatedWith),
propVar(Site , meta.wasAcquiredBy , meta.wasPerformedAt ),
propVar(DataStart , meta.hasStartTime ),
propVar(DataStart , meta.wasAcquiredBy , meta.prov.startedAtTime ),
propVar(DataEnd , meta.hasEndTime ),
propVar(DataEnd , meta.wasAcquiredBy , meta.prov.endedAtTime ),
propVar(SamplingHeight , meta.wasAcquiredBy , meta.hasSamplingHeight ),
).flatten.toMap
}
def findStatsFusion(groupBy: StatGroupByPattern, inner: LeftJoinDofPattern): Option[DobjStatFusion] = findFusions(inner.left) match{
case Seq(DobjListFusion(DataObjectFetch(filter, None, 0), _, propVars, nonMagics))
if nonMagics.isEmpty && propVars.get(NamedVar(groupBy.dobjVar)).contains(DobjUri) =>
val optionals = inner.optionals.collect{
case pdp @ PlainDofPattern(None, _, _, Nil) =>
findPlainFusion(pdp.copy(dobjVar = Some(NamedVar(groupBy.dobjVar))))
}.flatten
if(optionals.size != inner.optionals.size || optionals.isEmpty) None else {
val lookup = (propVars ++ optionals.flatMap(_.propVars)).map(_.swap)
for(
specVar <- lookup.get(Spec);
submVar <- lookup.get(Submitter);
stationVar <- lookup.get(Station);
siteVarOpt = lookup.get(Site);
if (Seq(specVar, submVar, stationVar) ++ siteVarOpt).map(_.name).toSet == groupBy.groupVars
) yield{
val gp = GroupPattern(filter, submVar.name, stationVar.name, specVar.name, siteVarOpt.map(_.name))
val node = new StatsFetchNode(groupBy.countVar, gp)
DobjStatFusion(groupBy.expr, node)
}
}
case _ => None
}
}
object DofPatternFusion{
type PlainFusionRes = (Filter, Set[TupleExpr])
type VarPropLookup = Map[QVar, Property]
type NamedVarPropLookup = Map[NamedVar, Property]
def unionVarProps(varProps: Seq[NamedVarPropLookup]): Option[NamedVarPropLookup] = varProps match{
case Nil => Some(Map.empty[NamedVar, Property])
case Seq(single) => Some(single)
case Seq(vp1, rest @ _*) => unionVarProps(rest).flatMap{vp2 =>
val keys = vp1.keySet.intersect(vp2.keySet)
if(keys.forall(v => vp1(v) eq vp2(v))) Some(
vp1.filter{
case (v, _) => keys.contains(v)
}
) else None
}
}
def getCategFilter(v: QVar, cp: CategProp, vvals: Map[QVar, ValueInfoPattern]): Option[(Filter, Set[TupleExpr])] = {
val valsExprsOpt: Option[(Seq[Value], Set[TupleExpr])] = vvals.get(v).flatMap{vip =>
vip.vals.map(_.toSeq -> vip.providers.toSet)
}
valsExprsOpt.map{
case (vals, exprs) =>
val filter: Filter = if(vals.isEmpty) Nothing else {
val iris = vals.collect{case iri: IRI => iri}
cp match{
case uriProp: UriProperty => CategFilter(uriProp, iris)
case optUri: OptUriProperty => CategFilter(optUri, iris.map(Some(_)))
case strProp: StringCategProp => CategFilter(
strProp,
vals.collect{case lit: Literal => asString(lit)}.flatten
)
}
}
filter -> exprs
}
}
}
| ICOS-Carbon-Portal/meta | src/main/scala/se/lu/nateko/cp/meta/services/sparql/magic/fusion/DofPatternFusion.scala | Scala | gpl-3.0 | 10,337 |
package wdl
import scala.util.{Failure, Success, Try}
class NamespaceSpec extends WdlTest {
"WdlNamespace" should {
"enforce optional output types" in {
val namespace = Try(loadWdl("type_checks.wdl"))
namespace match {
case Failure(f) => f.getMessage should startWith("ERROR: oopsNotOptionalArray is declared as a Array[Int] but the expression evaluates to a Array[Int?]")
case Success(_) => fail("Should have failed to load namespace")
}
}
}
}
| ohsu-comp-bio/cromwell | wdl/src/test/scala/wdl/NamespaceSpec.scala | Scala | bsd-3-clause | 495 |
// Copyright (c) 2013, Johns Hopkins University. All rights reserved.
// This software is released under the 2-clause BSD license.
// See /LICENSE.txt
// Travis Wolfe, twolfe18@gmail.com, 30 July 2013
package edu.jhu.hlt.parma.inference.topics
import org.apache.commons.math3.special.Gamma
import scala.collection.mutable.ArrayBuffer
import scala.math._
class LDATrainer(numTopics: Int, vocabSize: Int, @transient docs: ArrayBuffer[Document]) extends Serializable {
// Trainer settings
val NUM_INIT = 1
var VAR_MAX_ITER = 20 // If EM fails, this will be increased.
val VAR_CONVERGED = 1e-6
var EM_MAX_ITER = 100
val EM_CONVERGED = 1e-4
val ESTIMATE_ALPHA = false
// val rnd = new scala.util.Random(12345)
// val docs = corpus
val model = new LDA(numTopics, vocabSize)
val inferencer = new LDAInferencer(VAR_CONVERGED, VAR_MAX_ITER)
val stats = new LDAStats(numTopics, vocabSize)
def maxCorpusLength(docs: ArrayBuffer[Document]) : Int = {
var max = 0
for(doc <- docs) {
if(doc.length > max) max = doc.length
}
max
}
def setMaxEMIter(iter: Int) {
EM_MAX_ITER = iter
}
def zeroInitialize {
var k = 0
var w = 0
while(k < model.num_topics) {
stats.class_total(k) = 0.0
w = 0
while(w < vocabSize) {
stats.class_word(k)(w) = 0.0
w += 1
}
k += 1
}
}
// Infer a particular document's
def getDocumentTopicDist(doc: Document) : Array[Double] = {
val gamma = Array.ofDim[Double](numTopics)
val l = doc.length
val phi = Array.ofDim[Double](l, numTopics)
inferencer.infer(doc, model, gamma, phi)
gamma
}
def getTopicWordProb(topic: Int, word: Int) : Double = {
stats.class_word(topic)(word)
}
// Java <=> Scala
def getNumTopics : Int = {
numTopics
}
def runEM {
// 1. Initialize variational parameters
val var_gamma = Array.ofDim[Double](docs.size, model.num_topics)
val max_length = maxCorpusLength(docs)
println("max length = " + max_length)
val phi = Array.ofDim[Double](max_length, model.num_topics)
// 2. Initialize the model
init
maximize
println("model alpha = " + model.alpha)
// 3. Run EM
var iter = 0
var likelihood = 0.0
var likelihood_old = 0.0
var converged = 1.0
while (((converged < 0) || (converged > EM_CONVERGED) || (iter <= 2)) && (iter <= EM_MAX_ITER)) {
iter += 1
println("**** em iteration " + iter + " ****\\n")
likelihood = 0.0
zeroInitialize
// E-Step
println("e-step...")
var d = 0
while(d < docs.size) {
likelihood += docEStep(docs(d), var_gamma(d), phi)
d += 1
}
println("likelihood: " + likelihood)
// M-Step
println("m-step...")
maximize
// Check for convergence
converged = (likelihood_old - likelihood) / (likelihood_old)
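// A negative value means the likelihood decreased, so allow variational inference more iterations.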
if(converged < 0) VAR_MAX_ITER = VAR_MAX_ITER * 2
likelihood_old = likelihood
}
}
def init {
var k = 0
var i = 0
var n = 0
while(k < model.num_topics) {
i = 0
while(i < NUM_INIT) {
// val d = floor(rnd.nextDouble * docs.size)
val d = k
//println("initialized with document " + d)
val doc = docs(d)
n = 0
while(n < doc.length) {
stats.class_word(k)(doc.words(n)) += doc.counts(n)
n += 1
}
i += 1
}
n = 0
while(n < vocabSize) {
stats.class_word(k)(n) += 1
stats.class_total(k) += stats.class_word(k)(n)
n += 1
}
k += 1
}
// XXX DEBUG print class total
// k = 0
// while(k < model.num_topics) {
// println("class_total["+k+"]="+stats.class_total(k))
// k += 1
// }
}
def docEStep(doc: Document, gamma: Array[Double], phi: Array[Array[Double]]) : Double = {
// Posterior inference
val likelihood = inferencer.infer(doc, model, gamma, phi)
// Update sufficient statistics
var gamma_sum = 0.0
var k = 0
while(k < model.num_topics) {
gamma_sum += gamma(k)
stats.alpha_suffstats += Gamma.digamma(gamma(k))
k += 1
}
stats.alpha_suffstats -= model.num_topics * Gamma.digamma(gamma_sum)
var n = 0
while(n < doc.length) {
var k = 0
while(k < model.num_topics) {
stats.class_word(k)(doc.words(n)) += doc.counts(n) * phi(n)(k)
stats.class_total(k) += doc.counts(n) * phi(n)(k)
k += 1
}
n += 1
}
stats.num_docs += 1
likelihood
}
// Compute MLE LDA model from sufficient stats
def maximize {
var k = 0
while(k < model.num_topics) {
var w = 0
while(w < model.vocab_size) {
if(stats.class_word(k)(w) > 0) {
model.log_prob_w(k)(w) = Math.log(stats.class_word(k)(w)) -
Math.log(stats.class_total(k));
} else {
model.log_prob_w(k)(w) = -100
}
w += 1
}
k += 1
}
if(ESTIMATE_ALPHA) {
// TODO
}
}
}
| hltcoe/parma | src/main/scala/edu/jhu/hlt/parma/inference/topics/LDATrainer.scala | Scala | bsd-2-clause | 5,064 |
///////////////////////////////////////////////////////////////////////////////
// Copyright (C) 2010 Travis Brown, The University of Texas at Austin
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
///////////////////////////////////////////////////////////////////////////////
package opennlp.fieldspring.topo
import org.specs._
import org.specs.runner._
class CoordinateTest extends JUnit4(CoordinateSpec)
object CoordinateSpec extends Specification {
"A degree-constructed coordinate" should {
val coordinate = Coordinate.fromDegrees(45, -45)
"have the correct radian value for latitude" in {
coordinate.getLat must_== math.Pi / 4
}
"have the correct radian value for longitude" in {
coordinate.getLng must_== -math.Pi / 4
}
"be equal to its radian-constructed equivalent" in {
coordinate must_== Coordinate.fromRadians(math.Pi / 4, -math.Pi / 4)
}
}
"A coordinate at the origin" should {
val coordinate = Coordinate.fromDegrees(0, 0)
"have the correct angular distance from a coordinate 1 radian away horizontally" in {
coordinate.distance(Coordinate.fromRadians(0, 1)) must_== 1
}
"have the correct distance from a coordinate 1 radian away vertically" in {
coordinate.distance(Coordinate.fromRadians(1, 0)) must_== 1
}
}
}
| utcompling/fieldspring | src/test/scala/opennlp/fieldspring/topo/Coordinate.scala | Scala | apache-2.0 | 1,842 |
/* Copyright 2009-2011 Jay Conrod
*
* This file is part of Tungsten.
*
* Tungsten is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2 of
* the License, or (at your option) any later version.
*
* Tungsten is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Tungsten. If not, see
* <http://www.gnu.org/licenses/>.
*/
/* The purpose of the PhiConversion pass is to convert the live-in/live-out notation
* from Tungsten style to LLVM style. In Tungsten, live-in variables are described by
* parameters for each block. Live-out variables are described by explicit arguments
* for each branch. LLVM uses the more standard Phi instructions. If a block receives
* two (or more) different values for the same variable from different predecessors,
* a Phi instruction is written showing which value is received from each block.
*
* PhiAnalysis is a data flow analysis which determines which values actually require
* Phi instructions. PhiConversion actually inserts the Phi instructions and removes
* block parameters and arguments.
*/
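/* A minimal sketch of the rewrite, using hypothetical IR rather than code from this repository:
 * a Tungsten block declares live-in values as parameters and predecessors pass them as branch
 * arguments,
 *
 *   block loop(i: int64):
 *     branch loop(next)
 *
 * while the LLVM form drops the parameter and selects the incoming value with a phi:
 *
 *   loop:
 *     %i = phi i64 [ %next, %loop ], [ 0, %entry ]
 */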
package tungsten.llvm
import tungsten.{DataFlow, Graph, Symbol}
import tungsten.Utilities._
class PhiAnalysis(module: tungsten.Module)
extends DataFlow
{
/** Each node is a block name in the CFG for a function */
type Node = Symbol
/** The data on each edge is the argument list passed along that branch */
type Data = List[tungsten.Value]
/** The initial data set for each branch is the argument list from the original program */
def bottom(u: Node, v: Node): Data = {
val predecessor = module.getBlock(u)
predecessor.liveOutBindings(module)(v)
}
/** This function looks at the argument lists incoming from all predecessors to a block. If
* a parameter is constant (has the same argument from every predecessor), references to
* it in outgoing argument lists are replaced by the constant value.
*/
def flow(graph: Graph[Node], node: Node, inData: Map[Node, Data]): Map[Node, Data] = {
val block = module.getBlock(node)
/* First, we get a list of PHI bindings. These are in the same format as for a PHI
* instruction. We have a set of bindings for each parameter.
*/
val phiBindings = PhiConversion.phiBindingsFromArgumentMap(inData)
/* Next, we determine which parameters are constant. A parameter is constant if
* all the corresponding arguments from the predecessors are equal. The map contains
* an entry for each constant parameter, mapping the parameter name to the constant value.
*/
val liveInConstants = PhiConversion.constantMapFromPhiBindings(block.parameters, phiBindings)
/* Finally, we generate the output by updating the arguments to the successors. Any
* reference to a constant parameter is replaced by the constant value.
*/
val liveOutBindings = block.liveOutBindings(module)
(Map[Node, Data]() /: liveOutBindings) { (outData, kv) =>
val (blockName, arguments) = kv
val updatedArgs = arguments.map { v =>
v.mapValues(PhiConversion.replaceConstants(_, liveInConstants))
}
outData + (blockName -> updatedArgs)
}
}
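/* Worked example (a sketch, using made-up names): if block b has a single parameter x
 * and inData is Map(a1 -> List(v), a2 -> List(v)) -- both predecessors pass the same
 * value v -- then the phi bindings for x are List((v, a1), (v, a2)), x is constant with
 * value v, and every reference to x in b's outgoing argument lists is rewritten to v
 * in the returned map.
 */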
}
object PhiConversion
extends Function1[tungsten.Module, tungsten.Module]
{
def apply(module: tungsten.Module): tungsten.Module = {
val functions = module.definitions.valuesIterator.collect { case f: tungsten.Function => f }
(module /: functions) { (module, function) =>
val blocks = module.getBlocks(function.blocks)
val graph = function.controlFlowGraphWithCatchBlocks(module)
val analysis = new PhiAnalysis(module)
val phiData = analysis(graph, function.blocks.headOption)
(module /: blocks) { (module, block) =>
val argumentMap = argumentMapFromData(block.name, graph, phiData)
val phiBindings = phiBindingsFromArgumentMap(argumentMap)
val constantMap = constantMapFromPhiBindings(block.parameters, phiBindings)
rewrite(block, phiBindings, constantMap, module)
}
}
}
def isConstant(bindings: List[(tungsten.Value, Symbol)]): Boolean = {
bindings.map(_._1) match {
case Nil => false
case h :: t => t.forall(_ == h)
}
}
/** Replaces defined values with the corresponding value in the constant map (if it
* exists). Use mapValues with this function if you want it to work recursively in
* aggregate values.
*/
def replaceConstants(value: tungsten.Value,
constants: Map[Symbol, tungsten.Value]): tungsten.Value =
{
value match {
case tungsten.DefinedValue(name, ty) => constants.getOrElse(name, value)
case _ => value
}
}
def argumentMapFromData(blockName: Symbol,
graph: Graph[Symbol],
phiData: Map[(Symbol, Symbol), List[tungsten.Value]]): Map[Symbol, List[tungsten.Value]] =
{
val predecessorNames = graph.incident(blockName)
val emptyMap = Map[Symbol, List[tungsten.Value]]()
(emptyMap /: predecessorNames) { (argumentMap, predecessorName) =>
val arguments = phiData((predecessorName, blockName))
argumentMap + (predecessorName -> arguments)
}
}
/** Returns bindings in the same format as Phi instructions require.
* @param argumentMap map from predecessor name to a list of live-in variables
* @return a list of bindings for each parameter
*/
def phiBindingsFromArgumentMap(argumentMap: Map[Symbol, List[tungsten.Value]]): List[List[(tungsten.Value, Symbol)]] =
{
if (argumentMap.isEmpty)
Nil
else {
val numParameters = argumentMap.values.head.size
val emptyPhiBindings = List.fill(numParameters)(List[(tungsten.Value, Symbol)]())
(emptyPhiBindings /: argumentMap) { (phiBindings, kv) =>
val (blockName, arguments) = kv
assert(arguments.size == numParameters)
(phiBindings zip arguments).map { pair =>
val (bindings, argument) = pair
(argument, blockName) :: bindings
}
}.map(_.reverse)
}
}
/** Returns a partial map of parameter names to constant values. A parameter has a constant
* value if it receives the same value from all predecessors. The returned map will only
* contain entries for parameters with constant values.
* @param phiBindings list of bindings as returned by phiBindingsFromArgumentMap
*/
def constantMapFromPhiBindings(parameterNames: List[Symbol],
phiBindings: List[List[(tungsten.Value, Symbol)]]): Map[Symbol, tungsten.Value] = {
assert(parameterNames.size == phiBindings.size)
(Map[Symbol, tungsten.Value]() /: (parameterNames zip phiBindings)) { (constantMap, pair) =>
val (parameterName, bindings) = pair
if (isConstant(bindings)) {
val constantValue = bindings.head._1
constantMap + (parameterName -> constantValue)
} else
constantMap
}
}
def rewrite(block: tungsten.Block,
phiBindings: List[List[(tungsten.Value, Symbol)]],
constantMap: Map[Symbol, tungsten.Value],
module: tungsten.Module): tungsten.Module =
{
assert(block.parameters.size == phiBindings.size)
val phiNodes = (block.parameters zip phiBindings).collect {
case (name, bindings) if !isConstant(bindings) => {
val ty = module.getParameter(name).ty
TungstenPhiInstruction(name, ty, bindings)
}
}
val instructions = module.getInstructions(block.instructions)
val rewrittenInstructions = phiNodes ++ instructions.map { instruction =>
val rewritten = instruction.mapValues(replaceConstants(_, constantMap))
rewritten match {
case branch: tungsten.BranchInstruction => branch.copyWith("arguments" -> Nil)
case cond: tungsten.ConditionalBranchInstruction =>
cond.copyWith("trueArguments" -> Nil, "falseArguments" -> Nil)
case _ => rewritten
}
}
val rewrittenBlock = block.copyWith("parameters" -> Nil,
"instructions" -> rewrittenInstructions.map(_.name))
module.remove(block.parameters).replace((rewrittenBlock :: rewrittenInstructions): _*)
}
}
|
jayconrod/tungsten
|
llvm/src/main/scala/tungsten/llvm/PhiAnalysis.scala
|
Scala
|
gpl-2.0
| 8,581
|
package com.meteorcode.pathway
import scala.concurrent.Future
/**
* ==Pathway Graphics==
*
* Created by hawk on 9/21/15.
*/
package object graphics {
/**
* Creates a new [[GraphicsContext]]
* @return a [[Future]] on a [[GraphicsContext]]
*/
def createGraphicsContext: Future[GraphicsContext]
= ??? // TODO: Implement me
}
|
MeteorCode/Pathway
|
src/main/scala/com/meteorcode/pathway/graphics/package.scala
|
Scala
|
mit
| 346
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.admin
import kafka.log.Log
import kafka.zk.ZooKeeperTestHarness
import kafka.utils.TestUtils
import kafka.server.{KafkaConfig, KafkaServer}
import org.junit.Assert._
import org.junit.{After, Test}
import java.util.Properties
import kafka.common.TopicAlreadyMarkedForDeletionException
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException
class DeleteTopicTest extends ZooKeeperTestHarness {
var servers: Seq[KafkaServer] = Seq()
val expectedReplicaAssignment = Map(0 -> List(0, 1, 2))
@After
override def tearDown() {
TestUtils.shutdownServers(servers)
super.tearDown()
}
@Test
def testDeleteTopicWithAllAliveReplicas() {
val topic = "test"
servers = createTestTopicAndCluster(topic)
// start topic deletion
adminZkClient.deleteTopic(topic)
TestUtils.verifyTopicDeletion(zkClient, topic, 1, servers)
}
@Test
def testResumeDeleteTopicWithRecoveredFollower() {
val topicPartition = new TopicPartition("test", 0)
val topic = topicPartition.topic
servers = createTestTopicAndCluster(topic)
// shut down one follower replica
val leaderIdOpt = zkClient.getLeaderForPartition(new TopicPartition(topic, 0))
assertTrue("Leader should exist for partition [test,0]", leaderIdOpt.isDefined)
val follower = servers.filter(s => s.config.brokerId != leaderIdOpt.get).last
follower.shutdown()
// start topic deletion
adminZkClient.deleteTopic(topic)
// check if all replicas but the one that is shut down have deleted the log
TestUtils.waitUntilTrue(() =>
servers.filter(s => s.config.brokerId != follower.config.brokerId)
.forall(_.getLogManager().getLog(topicPartition).isEmpty), "Replicas 0,1 have not deleted log.")
// ensure topic deletion is halted
TestUtils.waitUntilTrue(() => zkClient.isTopicMarkedForDeletion(topic),
"Admin path /admin/delete_topic/test path deleted even when a follower replica is down")
// restart follower replica
follower.startup()
TestUtils.verifyTopicDeletion(zkClient, topic, 1, servers)
}
@Test
def testResumeDeleteTopicOnControllerFailover() {
val topicPartition = new TopicPartition("test", 0)
val topic = topicPartition.topic
servers = createTestTopicAndCluster(topic)
val controllerId = zkClient.getControllerId.getOrElse(fail("Controller doesn't exist"))
val controller = servers.filter(s => s.config.brokerId == controllerId).head
val leaderIdOpt = zkClient.getLeaderForPartition(new TopicPartition(topic, 0))
val follower = servers.filter(s => s.config.brokerId != leaderIdOpt.get && s.config.brokerId != controllerId).last
follower.shutdown()
// start topic deletion
adminZkClient.deleteTopic(topic)
// shut down the controller to trigger controller failover during delete topic
controller.shutdown()
// ensure topic deletion is halted
TestUtils.waitUntilTrue(() => zkClient.isTopicMarkedForDeletion(topic),
"Admin path /admin/delete_topic/test path deleted even when a replica is down")
controller.startup()
follower.startup()
TestUtils.verifyTopicDeletion(zkClient, topic, 1, servers)
}
@Test
def testPartitionReassignmentDuringDeleteTopic() {
val expectedReplicaAssignment = Map(0 -> List(0, 1, 2))
val topic = "test"
val topicPartition = new TopicPartition(topic, 0)
val brokerConfigs = TestUtils.createBrokerConfigs(4, zkConnect, false)
brokerConfigs.foreach(p => p.setProperty("delete.topic.enable", "true"))
// create brokers
val allServers = brokerConfigs.map(b => TestUtils.createServer(KafkaConfig.fromProps(b)))
this.servers = allServers
val servers = allServers.filter(s => expectedReplicaAssignment(0).contains(s.config.brokerId))
// create the topic
adminZkClient.createOrUpdateTopicPartitionAssignmentPathInZK(topic, expectedReplicaAssignment)
// wait until replica log is created on every broker
TestUtils.waitUntilTrue(() => servers.forall(_.getLogManager().getLog(topicPartition).isDefined),
"Replicas for topic test not created.")
val leaderIdOpt = zkClient.getLeaderForPartition(new TopicPartition(topic, 0))
assertTrue("Leader should exist for partition [test,0]", leaderIdOpt.isDefined)
val follower = servers.filter(s => s.config.brokerId != leaderIdOpt.get).last
follower.shutdown()
// start topic deletion
adminZkClient.deleteTopic(topic)
// start partition reassignment right after the delete topic request; in this case reassignment will fail since
// the topic is being deleted
// reassign partition 0
val oldAssignedReplicas = zkClient.getReplicasForPartition(new TopicPartition(topic, 0))
val newReplicas = Seq(1, 2, 3)
val reassignPartitionsCommand = new ReassignPartitionsCommand(zkClient, None,
Map(topicPartition -> newReplicas), adminZkClient = adminZkClient)
assertTrue("Partition reassignment should fail for [test,0]", reassignPartitionsCommand.reassignPartitions())
// wait until reassignment is completed
TestUtils.waitUntilTrue(() => {
val partitionsBeingReassigned = zkClient.getPartitionReassignment
ReassignPartitionsCommand.checkIfPartitionReassignmentSucceeded(zkClient, topicPartition,
Map(topicPartition -> newReplicas), partitionsBeingReassigned) == ReassignmentFailed
}, "Partition reassignment shouldn't complete.")
val controllerId = zkClient.getControllerId.getOrElse(fail("Controller doesn't exist"))
val controller = servers.filter(s => s.config.brokerId == controllerId).head
assertFalse("Partition reassignment should fail",
controller.kafkaController.controllerContext.partitionsBeingReassigned.contains(topicPartition))
val assignedReplicas = zkClient.getReplicasForPartition(new TopicPartition(topic, 0))
assertEquals("Partition should not be reassigned to 0, 1, 2", oldAssignedReplicas, assignedReplicas)
follower.startup()
TestUtils.verifyTopicDeletion(zkClient, topic, 1, servers)
}
@Test
def testDeleteTopicDuringAddPartition() {
val topic = "test"
servers = createTestTopicAndCluster(topic)
val leaderIdOpt = zkClient.getLeaderForPartition(new TopicPartition(topic, 0))
assertTrue("Leader should exist for partition [test,0]", leaderIdOpt.isDefined)
val follower = servers.filter(_.config.brokerId != leaderIdOpt.get).last
val newPartition = new TopicPartition(topic, 1)
// capture the brokers before we shutdown so that we don't fail validation in `addPartitions`
val brokers = adminZkClient.getBrokerMetadatas()
follower.shutdown()
// wait until the broker has been removed from ZK to reduce non-determinism
TestUtils.waitUntilTrue(() => zkClient.getBroker(follower.config.brokerId).isEmpty,
s"Follower ${follower.config.brokerId} was not removed from ZK")
// add partitions to topic
adminZkClient.addPartitions(topic, expectedReplicaAssignment, brokers, 2,
Some(Map(1 -> Seq(0, 1, 2), 2 -> Seq(0, 1, 2))))
// start topic deletion
adminZkClient.deleteTopic(topic)
follower.startup()
// test if topic deletion is resumed
TestUtils.verifyTopicDeletion(zkClient, topic, 1, servers)
// verify that new partition doesn't exist on any broker either
TestUtils.waitUntilTrue(() =>
servers.forall(_.getLogManager().getLog(newPartition).isEmpty),
"Replica logs not for new partition [test,1] not deleted after delete topic is complete.")
}
@Test
def testAddPartitionDuringDeleteTopic() {
zkClient.createTopLevelPaths()
val topic = "test"
servers = createTestTopicAndCluster(topic)
val brokers = adminZkClient.getBrokerMetadatas()
// start topic deletion
adminZkClient.deleteTopic(topic)
// add partitions to topic
val newPartition = new TopicPartition(topic, 1)
adminZkClient.addPartitions(topic, expectedReplicaAssignment, brokers, 2,
Some(Map(1 -> Seq(0, 1, 2), 2 -> Seq(0, 1, 2))))
TestUtils.verifyTopicDeletion(zkClient, topic, 1, servers)
// verify that new partition doesn't exist on any broker either
assertTrue("Replica logs not deleted after delete topic is complete",
servers.forall(_.getLogManager().getLog(newPartition).isEmpty))
}
@Test
def testRecreateTopicAfterDeletion() {
val expectedReplicaAssignment = Map(0 -> List(0, 1, 2))
val topic = "test"
val topicPartition = new TopicPartition(topic, 0)
servers = createTestTopicAndCluster(topic)
// start topic deletion
adminZkClient.deleteTopic(topic)
TestUtils.verifyTopicDeletion(zkClient, topic, 1, servers)
// re-create topic on same replicas
adminZkClient.createOrUpdateTopicPartitionAssignmentPathInZK(topic, expectedReplicaAssignment)
// wait until leader is elected
TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0, 1000)
// check if all replica logs are created
TestUtils.waitUntilTrue(() => servers.forall(_.getLogManager().getLog(topicPartition).isDefined),
"Replicas for topic test not created.")
}
@Test
def testDeleteNonExistingTopic() {
val topicPartition = new TopicPartition("test", 0)
val topic = topicPartition.topic
servers = createTestTopicAndCluster(topic)
// start topic deletion
try {
adminZkClient.deleteTopic("test2")
fail("Expected UnknownTopicOrPartitionException")
} catch {
case _: UnknownTopicOrPartitionException => // expected exception
}
// verify delete topic path for test2 is removed from ZooKeeper
TestUtils.verifyTopicDeletion(zkClient, "test2", 1, servers)
// verify that topic test is untouched
TestUtils.waitUntilTrue(() => servers.forall(_.getLogManager().getLog(topicPartition).isDefined),
"Replicas for topic test not created")
// test the topic path exists
assertTrue("Topic test mistakenly deleted", zkClient.topicExists(topic))
// topic test should have a leader
TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0, 1000)
}
@Test
def testDeleteTopicWithCleaner() {
val topicName = "test"
val topicPartition = new TopicPartition(topicName, 0)
val topic = topicPartition.topic
val brokerConfigs = TestUtils.createBrokerConfigs(3, zkConnect, false)
brokerConfigs.head.setProperty("delete.topic.enable", "true")
brokerConfigs.head.setProperty("log.cleaner.enable","true")
brokerConfigs.head.setProperty("log.cleanup.policy","compact")
brokerConfigs.head.setProperty("log.segment.bytes","100")
brokerConfigs.head.setProperty("log.cleaner.dedupe.buffer.size","1048577")
servers = createTestTopicAndCluster(topic,brokerConfigs)
// for simplicity, we are validating cleaner offsets on a single broker
val server = servers.head
val log = server.logManager.getLog(topicPartition).get
// write to the topic to activate cleaner
writeDups(numKeys = 100, numDups = 3, log)
// wait for cleaner to clean
server.logManager.cleaner.awaitCleaned(new TopicPartition(topicName, 0), 0)
// delete topic
adminZkClient.deleteTopic("test")
TestUtils.verifyTopicDeletion(zkClient, "test", 1, servers)
}
@Test
def testDeleteTopicAlreadyMarkedAsDeleted() {
val topicPartition = new TopicPartition("test", 0)
val topic = topicPartition.topic
servers = createTestTopicAndCluster(topic)
try {
// start topic deletion
adminZkClient.deleteTopic(topic)
// try to delete topic marked as deleted
adminZkClient.deleteTopic(topic)
fail("Expected TopicAlreadyMarkedForDeletionException")
}
catch {
case _: TopicAlreadyMarkedForDeletionException => // expected exception
}
TestUtils.verifyTopicDeletion(zkClient, topic, 1, servers)
}
private def createTestTopicAndCluster(topic: String, deleteTopicEnabled: Boolean = true): Seq[KafkaServer] = {
val brokerConfigs = TestUtils.createBrokerConfigs(3, zkConnect, enableControlledShutdown = false)
brokerConfigs.foreach(_.setProperty("delete.topic.enable", deleteTopicEnabled.toString))
createTestTopicAndCluster(topic, brokerConfigs)
}
private def createTestTopicAndCluster(topic: String, brokerConfigs: Seq[Properties]): Seq[KafkaServer] = {
val topicPartition = new TopicPartition(topic, 0)
// create brokers
val servers = brokerConfigs.map(b => TestUtils.createServer(KafkaConfig.fromProps(b)))
// create the topic
adminZkClient.createOrUpdateTopicPartitionAssignmentPathInZK(topic, expectedReplicaAssignment)
// wait until replica log is created on every broker
TestUtils.waitUntilTrue(() => servers.forall(_.getLogManager().getLog(topicPartition).isDefined),
"Replicas for topic test not created")
servers
}
private def writeDups(numKeys: Int, numDups: Int, log: Log): Seq[(Int, Int)] = {
var counter = 0
for (_ <- 0 until numDups; key <- 0 until numKeys) yield {
val count = counter
log.appendAsLeader(TestUtils.singletonRecords(value = counter.toString.getBytes, key = key.toString.getBytes), leaderEpoch = 0)
counter += 1
(key, count)
}
}
@Test
def testDisableDeleteTopic() {
val topicPartition = new TopicPartition("test", 0)
val topic = topicPartition.topic
servers = createTestTopicAndCluster(topic, deleteTopicEnabled = false)
// mark the topic for deletion
adminZkClient.deleteTopic("test")
TestUtils.waitUntilTrue(() => !zkClient.isTopicMarkedForDeletion(topic),
"Admin path /admin/delete_topic/%s path not deleted even if deleteTopic is disabled".format(topic))
// verify that topic test is untouched
assertTrue(servers.forall(_.getLogManager().getLog(topicPartition).isDefined))
// test the topic path exists
assertTrue("Topic path disappeared", zkClient.topicExists(topic))
// topic test should have a leader
val leaderIdOpt = zkClient.getLeaderForPartition(new TopicPartition(topic, 0))
assertTrue("Leader should exist for topic test", leaderIdOpt.isDefined)
}
}
|
MyPureCloud/kafka
|
core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala
|
Scala
|
apache-2.0
| 14,875
|
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.eval.Task
import monix.execution.Ack
import monix.execution.Ack.Stop
import monix.execution.atomic.Atomic
import monix.reactive.Observable.Operator
import monix.reactive.observers.Subscriber
import scala.concurrent.Future
import scala.util.control.NonFatal
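// Usage sketch (assuming this operator backs the public Observable#doOnNextAck combinator;
// check the Observable API for the exact name and signature):
//   Observable(1, 2, 3).doOnNextAck((elem, ack) => Task(println(s"$elem acked with $ack")))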
private[reactive] final class DoOnNextAckOperator[A](cb: (A, Ack) => Task[Unit]) extends Operator[A, A] {
def apply(out: Subscriber[A]): Subscriber[A] =
new Subscriber[A] { self =>
implicit val scheduler = out.scheduler
private[this] val isActive = Atomic(true)
def onNext(elem: A): Future[Ack] = {
// We are calling out.onNext directly, meaning that in onComplete/onError
// we don't have to do anything special to ensure that the last `onNext`
// has been sent (like we are doing in mapTask); we only need to apply
// back-pressure for the following onNext events
val f = out.onNext(elem)
val task = Task.fromFuture(f).flatMap { ack =>
val r =
try cb(elem, ack)
catch { case ex if NonFatal(ex) => Task.raiseError(ex) }
r.map(_ => ack).onErrorHandle { ex =>
onError(ex); Stop
}
}
// Execution might be immediate
task.runToFuture.syncTryFlatten
}
def onComplete(): Unit = {
if (isActive.getAndSet(false))
out.onComplete()
}
def onError(ex: Throwable): Unit = {
if (isActive.getAndSet(false))
out.onError(ex)
else
scheduler.reportFailure(ex)
}
}
}
|
alexandru/monifu
|
monix-reactive/shared/src/main/scala/monix/reactive/internal/operators/DoOnNextAckOperator.scala
|
Scala
|
apache-2.0
| 2,292
|
package jp.co.bizreach.kinesisfirehose.action
import com.amazonaws.services.kinesisfirehose.model.ServiceUnavailableException
import jp.co.bizreach.kinesisfirehose._
import org.scalatest._
class PutRecordActionSpec extends FunSuite {
private def fixture = new {
var retryCount = 0
} with PutRecordAction {
override protected def sleepDuration(retry: Int, retryLimit: Int): Long = {
retryCount += 1
0
}
}
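// The fixture mixes a mutable retryCount into PutRecordAction and makes sleepDuration
// return 0, so retry behaviour can be asserted below without introducing real delays.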
private val batchData = Seq("data1".getBytes, "data2".getBytes)
test("putRecord on success. returns Right"){
val action = fixture
val result = action.withPutRetry(){
PutRecordResult(recordId = "CGojNMJq3ms")
}
assert(result.isRight)
assert(action.retryCount == 0)
}
test("putRecord on failure. does perform retry and returns Left"){
val action = fixture
val result = action.withPutRetry(){
throw new ServiceUnavailableException("Error!!")
}
assert(result.isLeft)
assert(action.retryCount == 3)
}
test("putRecordBatch on success. returns Right"){
val action = fixture
val result = action.withPutBatchRetry(batchData){ _ =>
PutRecordBatchResult(failedPutCount = 0, records = Seq(
PutRecordBatchResponseEntry(recordId = "AJJBALlfiFN", errorCode = null, errorMessage = null),
PutRecordBatchResponseEntry(recordId = "goGaFS919Mm", errorCode = null, errorMessage = null)
))
}
assert(result.size == 2)
assert(result(0).right.exists(_.recordId == "AJJBALlfiFN"))
assert(result(1).right.exists(_.recordId == "goGaFS919Mm"))
assert(action.retryCount == 0)
}
test("putRecordBatch on failure. does perform retry and returns at least one Left"){
val action = fixture
val result = action.withPutBatchRetry(batchData){ entries =>
PutRecordBatchResult(failedPutCount = 1, records = Seq(
PutRecordBatchResponseEntry(recordId = null, errorCode = "ServiceUnavailable", errorMessage = "error"),
PutRecordBatchResponseEntry(recordId = "goGaFS919Mm", errorCode = null, errorMessage = null)
).take(entries.size))
}
assert(result.size == 2)
assert(result(0).left.exists(_.errorCode == "ServiceUnavailable"))
assert(result(1).right.exists(_.recordId == "goGaFS919Mm"))
assert(action.retryCount == 3)
}
}
|
bizreach/aws-kinesis-scala
|
core/src/test/scala/jp/co/bizreach/kinesisfirehose/action/PutRecordActionSpec.scala
|
Scala
|
apache-2.0
| 2,308
|
package repositories.onlinetesting
import java.util.UUID
import connectors.launchpadgateway.exchangeobjects.in.{ SetupProcessCallbackRequest, ViewPracticeQuestionCallbackRequest }
import model.ProgressStatuses.{ PHASE3_TESTS_PASSED_WITH_AMBER, _ }
import model.persisted.phase3tests.{ LaunchpadTest, LaunchpadTestCallbacks, Phase3TestGroup }
import model._
import org.joda.time.{ DateTime, DateTimeZone, LocalDate }
import reactivemongo.bson.BSONDocument
import testkit.MongoRepositorySpec
class Phase3TestRepositorySpec extends MongoRepositorySpec with ApplicationDataFixture {
val Now = DateTime.now(DateTimeZone.UTC)
val DatePlus7Days = Now.plusDays(7)
val Token = newToken
def newToken = UUID.randomUUID.toString
val phase3Test = LaunchpadTest(
interviewId = 123,
usedForResults = true,
token = Token,
testUrl = "test.com",
invitationDate = Now,
candidateId = "CND_123456",
customCandidateId = "FSCND_123",
startedDateTime = None,
completedDateTime = None,
callbacks = LaunchpadTestCallbacks()
)
val callbackToAppend = SetupProcessCallbackRequest(
DateTime.now(),
UUID.randomUUID().toString,
"FSCND-1234",
12345,
None,
"FSINV-456",
LocalDate.parse("2016-11-09")
)
val TestGroup = Phase3TestGroup(expirationDate = DatePlus7Days, tests = List(phase3Test))
def multiTestGroup(interviewOffset: Int = 0): Phase3TestGroup = TestGroup.copy(
tests = List(
phase3Test.copy(
interviewId = interviewOffset + 123,
token = newToken
),
phase3Test.copy(
usedForResults = false,
interviewId = interviewOffset + 456,
token = newToken
),
phase3Test.copy(
usedForResults = false,
interviewId = interviewOffset + 789,
token = newToken
)
)
)
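// multiTestGroup builds one group containing a single usedForResults test plus two
// inactive copies; the interviewOffset keeps interview ids distinct when several
// groups are inserted in the same test.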
val progressStatusesToResetInPhase3 = List(PHASE3_TESTS_EXPIRED, PHASE3_TESTS_STARTED, PHASE3_TESTS_FIRST_REMINDER,
PHASE3_TESTS_SECOND_REMINDER, PHASE3_TESTS_COMPLETED, PHASE3_TESTS_RESULTS_RECEIVED, PHASE3_TESTS_FAILED,
PHASE3_TESTS_FAILED_NOTIFIED, PHASE3_TESTS_PASSED, PHASE3_TESTS_PASSED_WITH_AMBER)
"Get online test" should {
"return None if there is no test for the specific user id" in {
val result = phase3TestRepo.getTestGroup("userId").futureValue
result mustBe None
}
"return an online test for the specific user id" in {
insertApplication("appId", "userId")
phase3TestRepo.insertOrUpdateTestGroup("appId", TestGroup).futureValue
val result = phase3TestRepo.getTestGroup("appId").futureValue
result mustBe Some(TestGroup)
}
}
"Append callbacks" should {
"create a one callback array when the key is not set" in new CallbackFixture {
insertApplication("appId", "userId")
phase3TestRepo.insertOrUpdateTestGroup("appId", TestGroup).futureValue
val token = TestGroup.tests.head.token
phase3TestRepo.appendCallback(token, SetupProcessCallbackRequest.key, callbackToAppend).futureValue
val testWithCallback = phase3TestRepo.getTestGroup("appId").futureValue.get
val test = testWithCallback.tests.find(t => t.token == token).get
test.callbacks.setupProcess.length mustBe 1
inside(test.callbacks.setupProcess.head) { case SetupProcessCallbackRequest(received, candidateId, customCandidateId,
interviewId, customInterviewId, customInviteId, deadline) =>
received.getMillis mustBe callbackToAppend.received.getMillis
candidateId mustBe callbackToAppend.candidateId
customCandidateId mustBe callbackToAppend.customCandidateId
interviewId mustBe callbackToAppend.interviewId
customInterviewId mustBe callbackToAppend.customInterviewId
customInviteId mustBe callbackToAppend.customInviteId
deadline mustBe callbackToAppend.deadline
}
}
"Append a callback when at least one is already set" in new CallbackFixture {
insertApplication("appId", "userId")
phase3TestRepo.insertOrUpdateTestGroup("appId", TestGroup).futureValue
val token = TestGroup.tests.head.token
phase3TestRepo.appendCallback(token, SetupProcessCallbackRequest.key, callbackToAppend).futureValue
phase3TestRepo.appendCallback(token, SetupProcessCallbackRequest.key, callbackToAppend).futureValue
phase3TestRepo.appendCallback(token, ViewPracticeQuestionCallbackRequest.key, callbackToAppend).futureValue
val testWithCallback = phase3TestRepo.getTestGroup("appId").futureValue.get
val test = testWithCallback.tests.find(t => t.token == token).get
assertCallbacks(test, 2, 1)
}
"Append callbacks to multiple tests in the same application" in new CallbackFixture {
insertApplication("appId", "userId")
val testGroup = multiTestGroup()
phase3TestRepo.insertOrUpdateTestGroup("appId", testGroup).futureValue
val token1 = testGroup.tests(0).token
val token2 = testGroup.tests(1).token
val token3 = testGroup.tests(2).token
phase3TestRepo.appendCallback(token1, SetupProcessCallbackRequest.key, callbackToAppend).futureValue
phase3TestRepo.appendCallback(token1, SetupProcessCallbackRequest.key, callbackToAppend).futureValue
phase3TestRepo.appendCallback(token2, SetupProcessCallbackRequest.key, callbackToAppend).futureValue
phase3TestRepo.appendCallback(token3, ViewPracticeQuestionCallbackRequest.key, callbackToAppend).futureValue
val testWithCallback = phase3TestRepo.getTestGroup("appId").futureValue.get
val test1 = testWithCallback.tests.find(t => t.token == token1).get
assertCallbacks(test1, 2)
val test2 = testWithCallback.tests.find(t => t.token == token2).get
assertCallbacks(test2, 1)
val test3 = testWithCallback.tests.find(t => t.token == token3).get
assertCallbacks(test3, 0, 1)
}
"Append callbacks to multiple tests in multiple applications" in new CallbackFixture {
insertApplication("appId", "userId")
insertApplication("appId2", "userId2")
insertApplication("appId3", "userId3")
val testGroup1 = multiTestGroup(1)
val testGroup2 = multiTestGroup(2)
val testGroup3 = multiTestGroup(3)
phase3TestRepo.insertOrUpdateTestGroup("appId", testGroup1).futureValue
phase3TestRepo.insertOrUpdateTestGroup("appId2", testGroup2).futureValue
phase3TestRepo.insertOrUpdateTestGroup("appId3", testGroup3).futureValue
val token1 = testGroup1.tests.head.token
val token2 = testGroup2.tests.head.token
val token3 = testGroup3.tests.head.token
phase3TestRepo.appendCallback(token1, SetupProcessCallbackRequest.key, callbackToAppend).futureValue
phase3TestRepo.appendCallback(token1, SetupProcessCallbackRequest.key, callbackToAppend).futureValue
phase3TestRepo.appendCallback(token2, SetupProcessCallbackRequest.key, callbackToAppend).futureValue
phase3TestRepo.appendCallback(token3, ViewPracticeQuestionCallbackRequest.key, callbackToAppend).futureValue
val testWithCallback1 = phase3TestRepo.getTestGroup("appId").futureValue.get
val testWithCallback2 = phase3TestRepo.getTestGroup("appId2").futureValue.get
val testWithCallback3 = phase3TestRepo.getTestGroup("appId3").futureValue.get
val test1 = testWithCallback1.tests.find(t => t.token == token1).get
assertCallbacks(test1, 2)
val test2 = testWithCallback2.tests.find(t => t.token == token2).get
assertCallbacks(test2, 1)
val test3 = testWithCallback3.tests.find(t => t.token == token3).get
assertCallbacks(test3, 0, 1)
}
}
"Next application ready for online testing" should {
"exclude applications with SDIP or EDIP application routes" in {
createApplicationWithAllFields("userId0", "appId0","testAccountId", "frameworkId", "PHASE2_TESTS_PASSED",
additionalProgressStatuses = List((PHASE2_TESTS_PASSED, true)), applicationRoute = "Sdip").futureValue
createApplicationWithAllFields("userId1", "appId1","testAccountId", "frameworkId", "PHASE2_TESTS_PASSED",
additionalProgressStatuses = List((PHASE2_TESTS_PASSED, true)), applicationRoute = "Edip").futureValue
createApplicationWithAllFields("userId2", "appId2", "testAccountId","frameworkId", "PHASE2_TESTS_PASSED",
additionalProgressStatuses = List((PHASE2_TESTS_PASSED, true))).futureValue
val results = phase3TestRepo.nextApplicationsReadyForOnlineTesting(1).futureValue
results.length mustBe 1
results.head.applicationId mustBe "appId2"
results.head.userId mustBe "userId2"
}
"return one application if there is only one" in {
createApplicationWithAllFields("userId", "appId", "testAccountId", "frameworkId", "PHASE2_TESTS_PASSED",
additionalProgressStatuses = List((model.ProgressStatuses.PHASE2_TESTS_PASSED, true))
).futureValue
val result = phase3TestRepo.nextApplicationsReadyForOnlineTesting(1).futureValue
result.size mustBe 1
result.head.applicationId mustBe "appId"
result.head.userId mustBe "userId"
}
}
"Insert a phase 3 test" should {
"correctly insert a test" in {
createApplicationWithAllFields("userId", "appId", "testAccountId", "frameworkId", "PHASE2_TESTS_PASSED").futureValue
phase3TestRepo.insertOrUpdateTestGroup("appId", TestGroup).futureValue
val result = phase3TestRepo.getTestGroup("appId").futureValue
result.isDefined mustBe true
result.get.expirationDate mustBe TestGroup.expirationDate
result.get.tests mustBe TestGroup.tests
}
}
"Remove a phase 3 test" should {
"remove test when requested" in {
createApplicationWithAllFields("userId", "appId", "testAccountId", "frameworkId", "PHASE3",
additionalProgressStatuses = List((ProgressStatuses.PHASE3_TESTS_INVITED, true))
).futureValue
phase3TestRepo.insertOrUpdateTestGroup("appId", TestGroup).futureValue
val result1 = phase3TestRepo.getTestGroup("appId").futureValue
result1.isDefined mustBe true
phase3TestRepo.removeTestGroup("appId").futureValue
val result2 = phase3TestRepo.getTestGroup("appId").futureValue
result2.isDefined mustBe false
}
}
"nextTestForReminder" should {
"return one result" when {
"there is an application in PHASE3_TESTS and is about to expire in the next 72 hours" in {
val date = DateTime.now().plusHours(Phase3FirstReminder.hoursBeforeReminder - 1).plusMinutes(55)
val testGroup = Phase3TestGroup(expirationDate = date, tests = List(phase3Test))
createApplicationWithAllFields(UserId, AppId, TestAccountId,"frameworkId", "SUBMITTED").futureValue
phase3TestRepo.insertOrUpdateTestGroup(AppId, testGroup).futureValue
val notification = phase3TestRepo.nextTestForReminder(Phase3FirstReminder).futureValue
notification.isDefined mustBe true
notification.get.applicationId mustBe AppId
notification.get.userId mustBe UserId
notification.get.preferredName mustBe "Georgy"
notification.get.expiryDate.getMillis mustBe date.getMillis
// Because we are far away from the 24h reminder's window
phase3TestRepo.nextTestForReminder(Phase3SecondReminder).futureValue mustBe None
}
"there is an application in PHASE3_TESTS and is about to expire in the next 24 hours" in {
val date = DateTime.now().plusHours(Phase3SecondReminder.hoursBeforeReminder - 1).plusMinutes(55)
val testGroup = Phase3TestGroup(expirationDate = date, tests = List(phase3Test))
createApplicationWithAllFields(UserId, AppId, TestAccountId, "frameworkId", "SUBMITTED").futureValue
phase3TestRepo.insertOrUpdateTestGroup(AppId, testGroup).futureValue
val notification = phase3TestRepo.nextTestForReminder(Phase3SecondReminder).futureValue
notification.isDefined mustBe true
notification.get.applicationId mustBe AppId
notification.get.userId mustBe UserId
notification.get.preferredName mustBe "Georgy"
notification.get.expiryDate.getMillis mustBe date.getMillis
}
}
"return no results" when {
val date = DateTime.now().plusHours(22)
val testProfile = Phase3TestGroup(expirationDate = date, tests = List(phase3Test))
"there are no applications in PHASE3_TESTS" in {
createApplicationWithAllFields(UserId, AppId, TestAccountId,"frameworkId", "SUBMITTED").futureValue
phase3TestRepo.insertOrUpdateTestGroup(AppId, testProfile).futureValue
updateApplication(BSONDocument("applicationStatus" -> ApplicationStatus.IN_PROGRESS), AppId).futureValue
phase3TestRepo.nextTestForReminder(Phase3FirstReminder).futureValue mustBe None
}
"the expiration date is in 26h but we send the second reminder only after 24h" in {
createApplicationWithAllFields(UserId, AppId, TestAccountId, "frameworkId", "SUBMITTED").futureValue
phase3TestRepo.insertOrUpdateTestGroup(
AppId,
Phase3TestGroup(expirationDate = new DateTime().plusHours(30), tests = List(phase3Test))).futureValue
phase3TestRepo.nextTestForReminder(Phase3SecondReminder).futureValue mustBe None
}
"the test is expired" in {
import repositories.BSONDateTimeHandler
createApplicationWithAllFields(UserId, AppId, TestAccountId, "frameworkId", "SUBMITTED").futureValue
phase3TestRepo.insertOrUpdateTestGroup(AppId, testProfile).futureValue
updateApplication(BSONDocument("$set" -> BSONDocument(
"applicationStatus" -> PHASE3_TESTS_EXPIRED.applicationStatus,
s"progress-status.$PHASE3_TESTS_EXPIRED" -> true,
s"progress-status-timestamp.$PHASE3_TESTS_EXPIRED" -> DateTime.now()
)), AppId).futureValue
phase3TestRepo.nextTestForReminder(Phase3SecondReminder).futureValue mustBe None
}
"the test is completed" in {
import repositories.BSONDateTimeHandler
createApplicationWithAllFields(UserId, AppId, TestAccountId,"frameworkId", "SUBMITTED").futureValue
phase3TestRepo.insertOrUpdateTestGroup(AppId, testProfile).futureValue
updateApplication(BSONDocument("$set" -> BSONDocument(
"applicationStatus" -> PHASE3_TESTS_COMPLETED.applicationStatus,
s"progress-status.$PHASE3_TESTS_COMPLETED" -> true,
s"progress-status-timestamp.$PHASE3_TESTS_COMPLETED" -> DateTime.now()
)), AppId).futureValue
phase3TestRepo.nextTestForReminder(Phase3SecondReminder).futureValue mustBe None
}
"we already sent a second reminder" in {
createApplicationWithAllFields(UserId, AppId, TestAccountId,"frameworkId", "SUBMITTED").futureValue
phase3TestRepo.insertOrUpdateTestGroup(AppId, testProfile).futureValue
updateApplication(BSONDocument("$set" -> BSONDocument(
s"progress-status.$PHASE3_TESTS_SECOND_REMINDER" -> true
)), AppId).futureValue
phase3TestRepo.nextTestForReminder(Phase3SecondReminder).futureValue mustBe None
}
}
}
"reset progress statuses" should {
"reset PHASE3_TESTS status for an application at PHASE3_TESTS_RESULTS_RECEIVED" in {
createApplicationWithAllFields("userId", "appId", "testAccountId", appStatus = ApplicationStatus.PHASE3_TESTS,
additionalProgressStatuses = List(
ProgressStatuses.PHASE3_TESTS_INVITED -> true,
ProgressStatuses.PHASE3_TESTS_STARTED -> true,
ProgressStatuses.PHASE3_TESTS_COMPLETED -> true,
ProgressStatuses.PHASE3_TESTS_RESULTS_RECEIVED -> true
)).futureValue
phase3TestRepo.resetTestProfileProgresses("appId", progressStatusesToResetInPhase3).futureValue
val app = helperRepo.findByUserId("userId", "frameworkId").futureValue
assertResetPhase3ApplicationAndProgressStatus(app)
}
"reset PHASE3_TESTS_PASSED status" in {
createApplicationWithAllFields("userId", "appId", "testAccountId", appStatus = ApplicationStatus.PHASE3_TESTS_PASSED,
additionalProgressStatuses = List(
ProgressStatuses.PHASE3_TESTS_INVITED -> true,
ProgressStatuses.PHASE3_TESTS_STARTED -> true,
ProgressStatuses.PHASE3_TESTS_COMPLETED -> true,
ProgressStatuses.PHASE3_TESTS_RESULTS_RECEIVED -> true,
ProgressStatuses.PHASE3_TESTS_PASSED -> true
)).futureValue
phase3TestRepo.resetTestProfileProgresses("appId", progressStatusesToResetInPhase3).futureValue
val app = helperRepo.findByUserId("userId", "frameworkId").futureValue
assertResetPhase3ApplicationAndProgressStatus(app)
}
"reset PHASE3_TESTS_FAILED status at PHASE3_TESTS_FAILED_NOTIFIED" in {
createApplicationWithAllFields("userId", "appId", "testAccountId", appStatus = ApplicationStatus.PHASE3_TESTS_FAILED,
additionalProgressStatuses = List(
ProgressStatuses.PHASE3_TESTS_INVITED -> true,
ProgressStatuses.PHASE3_TESTS_STARTED -> true,
ProgressStatuses.PHASE3_TESTS_COMPLETED -> true,
ProgressStatuses.PHASE3_TESTS_RESULTS_RECEIVED -> true,
ProgressStatuses.PHASE3_TESTS_FAILED -> true,
ProgressStatuses.PHASE3_TESTS_FAILED_NOTIFIED -> true
)).futureValue
phase3TestRepo.resetTestProfileProgresses("appId", progressStatusesToResetInPhase3).futureValue
val app = helperRepo.findByUserId("userId", "frameworkId").futureValue
assertResetPhase3ApplicationAndProgressStatus(app)
}
"reset PHASE3_TESTS_PASSED_WITH_AMBER status" in {
createApplicationWithAllFields("userId", "appId", "testAccountId", appStatus = ApplicationStatus.PHASE3_TESTS_PASSED_WITH_AMBER,
additionalProgressStatuses = List(
ProgressStatuses.PHASE3_TESTS_INVITED -> true,
ProgressStatuses.PHASE3_TESTS_STARTED -> true,
ProgressStatuses.PHASE3_TESTS_COMPLETED -> true,
ProgressStatuses.PHASE3_TESTS_RESULTS_RECEIVED -> true,
ProgressStatuses.PHASE3_TESTS_PASSED_WITH_AMBER -> true
)).futureValue
phase3TestRepo.resetTestProfileProgresses("appId", progressStatusesToResetInPhase3).futureValue
val app = helperRepo.findByUserId("userId", "frameworkId").futureValue
assertResetPhase3ApplicationAndProgressStatus(app)
}
}
trait CallbackFixture {
def assertCallbacks(test: LaunchpadTest, setupProcesses: Int = 0, viewPracticeQuestions: Int = 0,
finalCallbacks: Int = 0, finished: Int = 0, viewBrandedVideo: Int = 0, questions: Int = 0) = {
test.callbacks.setupProcess.length mustBe setupProcesses
test.callbacks.viewPracticeQuestion.length mustBe viewPracticeQuestions
test.callbacks.finalCallback.length mustBe finalCallbacks
test.callbacks.finished.length mustBe finished
test.callbacks.viewBrandedVideo.length mustBe viewBrandedVideo
test.callbacks.question.length mustBe questions
}
}
private def assertResetPhase3ApplicationAndProgressStatus(app: ApplicationResponse) = {
app.applicationStatus mustBe ApplicationStatus.PHASE3_TESTS.toString
app.progressResponse.phase3ProgressResponse.phase3TestsInvited mustBe true // a reset always implies a re-invite
app.progressResponse.phase3ProgressResponse.phase3TestsStarted mustBe false
app.progressResponse.phase3ProgressResponse.phase3TestsCompleted mustBe false
app.progressResponse.phase3ProgressResponse.phase3TestsResultsReceived mustBe false
app.progressResponse.phase3ProgressResponse.phase3TestsPassed mustBe false
}
}
|
hmrc/fset-faststream
|
it/repositories/onlinetesting/Phase3TestRepositorySpec.scala
|
Scala
|
apache-2.0
| 19,600
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.api.r
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}
import scala.collection.mutable.HashMap
import scala.language.existentials
import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel.{ChannelHandlerContext, SimpleChannelInboundHandler}
import org.apache.spark.Logging
import org.apache.spark.api.r.SerDe._
import org.apache.spark.util.Utils
/**
* Handler for RBackend
* TODO: This is marked as sharable to get a handle to RBackend. Is it safe to re-use
* this across connections?
*/
@Sharable
private[r] class RBackendHandler(server: RBackend)
extends SimpleChannelInboundHandler[Array[Byte]] with Logging {
override def channelRead0(ctx: ChannelHandlerContext, msg: Array[Byte]): Unit = {
val bis = new ByteArrayInputStream(msg)
val dis = new DataInputStream(bis)
val bos = new ByteArrayOutputStream()
val dos = new DataOutputStream(bos)
// First bit is isStatic
val isStatic = readBoolean(dis)
val objId = readString(dis)
val methodName = readString(dis)
val numArgs = readInt(dis)
if (objId == "SparkRHandler") {
methodName match {
// This function is for test purposes only
case "echo" =>
val args = readArgs(numArgs, dis)
assert(numArgs == 1)
writeInt(dos, 0)
writeObject(dos, args(0))
case "stopBackend" =>
writeInt(dos, 0)
writeType(dos, "void")
server.close()
case "rm" =>
try {
val t = readObjectType(dis)
assert(t == 'c')
val objToRemove = readString(dis)
JVMObjectTracker.remove(objToRemove)
writeInt(dos, 0)
writeObject(dos, null)
} catch {
case e: Exception =>
logError(s"Removing $objId failed", e)
writeInt(dos, -1)
writeString(dos, s"Removing $objId failed: ${e.getMessage}")
}
case _ =>
dos.writeInt(-1)
writeString(dos, s"Error: unknown method $methodName")
}
} else {
handleMethodCall(isStatic, objId, methodName, numArgs, dis, dos)
}
val reply = bos.toByteArray
ctx.write(reply)
}
override def channelReadComplete(ctx: ChannelHandlerContext): Unit = {
ctx.flush()
}
override def exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable): Unit = {
// Close the connection when an exception is raised.
cause.printStackTrace()
ctx.close()
}
def handleMethodCall(
isStatic: Boolean,
objId: String,
methodName: String,
numArgs: Int,
dis: DataInputStream,
dos: DataOutputStream): Unit = {
var obj: Object = null
try {
val cls = if (isStatic) {
Utils.classForName(objId)
} else {
JVMObjectTracker.get(objId) match {
case None => throw new IllegalArgumentException("Object not found " + objId)
case Some(o) =>
obj = o
o.getClass
}
}
val args = readArgs(numArgs, dis)
val methods = cls.getMethods
val selectedMethods = methods.filter(m => m.getName == methodName)
if (selectedMethods.length > 0) {
val index = findMatchedSignature(
selectedMethods.map(_.getParameterTypes),
args)
if (index.isEmpty) {
logWarning(s"cannot find matching method ${cls}.$methodName. "
+ s"Candidates are:")
selectedMethods.foreach { method =>
logWarning(s"$methodName(${method.getParameterTypes.mkString(",")})")
}
throw new Exception(s"No matched method found for $cls.$methodName")
}
val ret = selectedMethods(index.get).invoke(obj, args : _*)
// Write status bit
writeInt(dos, 0)
writeObject(dos, ret.asInstanceOf[AnyRef])
} else if (methodName == "<init>") {
// methodName should be "<init>" for constructor
val ctors = cls.getConstructors
val index = findMatchedSignature(
ctors.map(_.getParameterTypes),
args)
if (index.isEmpty) {
logWarning(s"cannot find matching constructor for ${cls}. "
+ s"Candidates are:")
ctors.foreach { ctor =>
logWarning(s"$cls(${ctor.getParameterTypes.mkString(",")})")
}
throw new Exception(s"No matched constructor found for $cls")
}
val obj = ctors(index.get).newInstance(args : _*)
writeInt(dos, 0)
writeObject(dos, obj.asInstanceOf[AnyRef])
} else {
throw new IllegalArgumentException("invalid method " + methodName + " for object " + objId)
}
} catch {
case e: Exception =>
logError(s"$methodName on $objId failed")
writeInt(dos, -1)
// Write the error message of the exception's cause; it will be returned
// to the user in the R process.
writeString(dos, Utils.exceptionString(e.getCause))
}
}
// Read a number of arguments from the data input stream
def readArgs(numArgs: Int, dis: DataInputStream): Array[java.lang.Object] = {
(0 until numArgs).map { _ =>
readObject(dis)
}.toArray
}
// Find a matching method signature in an array of signatures of constructors
// or methods of the same name according to the passed arguments. Arguments
// may be converted in order to match a signature.
//
// Note that in Java reflection, constructors and normal methods are of different
// classes, and share no parent class that provides methods for reflection uses.
// There is no unified way to handle them in this function. So an array of signatures
// is passed in instead of an array of candidate constructors or methods.
//
// Returns an Option[Int] which is the index of the matched signature in the array.
def findMatchedSignature(
parameterTypesOfMethods: Array[Array[Class[_]]],
args: Array[Object]): Option[Int] = {
val numArgs = args.length
for (index <- 0 until parameterTypesOfMethods.length) {
val parameterTypes = parameterTypesOfMethods(index)
if (parameterTypes.length == numArgs) {
var argMatched = true
var i = 0
while (i < numArgs && argMatched) {
val parameterType = parameterTypes(i)
if (parameterType == classOf[Seq[Any]] && args(i).getClass.isArray) {
// The case that the parameter type is a Scala Seq and the argument
// is a Java array is considered matching. The array will be converted
// to a Seq later if this method is matched.
} else {
var parameterWrapperType = parameterType
// Convert native parameters to Object types as args is Array[Object] here
if (parameterType.isPrimitive) {
parameterWrapperType = parameterType match {
case java.lang.Integer.TYPE => classOf[java.lang.Integer]
case java.lang.Long.TYPE => classOf[java.lang.Integer]
case java.lang.Double.TYPE => classOf[java.lang.Double]
case java.lang.Boolean.TYPE => classOf[java.lang.Boolean]
case _ => parameterType
}
}
if (!parameterWrapperType.isInstance(args(i))) {
argMatched = false
}
}
i = i + 1
}
if (argMatched) {
// For now, we return the first matching method.
// TODO: find best method in matching methods.
// Convert args if needed
val parameterTypes = parameterTypesOfMethods(index)
(0 until numArgs).map { i =>
if (parameterTypes(i) == classOf[Seq[Any]] && args(i).getClass.isArray) {
// Convert a Java array to scala Seq
args(i) = args(i).asInstanceOf[Array[_]].toSeq
}
}
return Some(index)
}
}
}
None
}
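// Example (a sketch): with parameterTypesOfMethods = Array(Array(classOf[String],
// java.lang.Integer.TYPE)) and args = Array("id", Int.box(1)), both parameters match
// (the primitive int is checked against java.lang.Integer), so Some(0) is returned.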
}
/**
* Helper singleton that tracks JVM objects returned to R.
* This is useful for referencing these objects in RPC calls.
*/
private[r] object JVMObjectTracker {
// TODO: This map should be thread-safe if we want to support multiple
// connections at the same time
private[this] val objMap = new HashMap[String, Object]
// TODO: We support only one connection now, so an integer is fine.
// Investigate using an atomic integer in the future.
private[this] var objCounter: Int = 0
def getObject(id: String): Object = {
objMap(id)
}
def get(id: String): Option[Object] = {
objMap.get(id)
}
def put(obj: Object): String = {
val objId = objCounter.toString
objCounter = objCounter + 1
objMap.put(objId, obj)
objId
}
def remove(id: String): Option[Object] = {
objMap.remove(id)
}
}
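// Usage sketch: JVMObjectTracker.put(obj) returns an id that is handed to the R side;
// later calls resolve it with get(id)/getObject(id) until remove(id) drops the reference.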
|
pronix/spark
|
core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
|
Scala
|
apache-2.0
| 9,684
|
import leon.lang._
import leon.annotation._
object PropositionalLogic {
sealed abstract class Formula
case class And(lhs: Formula, rhs: Formula) extends Formula
case class Or(lhs: Formula, rhs: Formula) extends Formula
case class Implies(lhs: Formula, rhs: Formula) extends Formula
case class Not(f: Formula) extends Formula
case class Literal(id: BigInt) extends Formula
def simplify(f: Formula): Formula = (f match {
case And(lhs, rhs) => And(simplify(lhs), simplify(rhs))
case Or(lhs, rhs) => Or(simplify(lhs), simplify(rhs))
case Implies(lhs, rhs) => Or(Not(simplify(lhs)), simplify(rhs))
case Not(f) => Not(simplify(f))
case Literal(_) => f
}) ensuring(isSimplified(_))
def isSimplified(f: Formula): Boolean = f match {
case And(lhs, rhs) => isSimplified(lhs) && isSimplified(rhs)
case Or(lhs, rhs) => isSimplified(lhs) && isSimplified(rhs)
case Implies(_,_) => false
case Not(f) => isSimplified(f)
case Literal(_) => true
}
def nnf(formula: Formula): Formula = (formula match {
case And(lhs, rhs) => And(nnf(lhs), nnf(rhs))
case Or(lhs, rhs) => Or(nnf(lhs), nnf(rhs))
case Implies(lhs, rhs) => nnf(Or(Not(lhs), rhs))
case Not(And(lhs, rhs)) => Or(nnf(Not(lhs)), nnf(Not(rhs)))
case Not(Or(lhs, rhs)) => And(nnf(Not(lhs)), nnf(Not(rhs)))
case Not(Implies(lhs, rhs)) => And(nnf(lhs), nnf(Not(rhs)))
case Not(Not(f)) => nnf(f)
case Not(Literal(_)) => formula
case Literal(_) => formula
}) ensuring(isNNF(_))
def isNNF(f: Formula): Boolean = f match {
case And(lhs, rhs) => isNNF(lhs) && isNNF(rhs)
case Or(lhs, rhs) => isNNF(lhs) && isNNF(rhs)
case Implies(lhs, rhs) => false
case Not(Literal(_)) => true
case Not(_) => false
case Literal(_) => true
}
def evalLit(id : BigInt) : Boolean = (id == 42) // could be any function
def eval(f: Formula) : Boolean = f match {
case And(lhs, rhs) => eval(lhs) && eval(rhs)
case Or(lhs, rhs) => eval(lhs) || eval(rhs)
case Implies(lhs, rhs) => !eval(lhs) || eval(rhs)
case Not(f) => !eval(f)
case Literal(id) => evalLit(id)
}
@induct
def simplifySemantics(f: Formula) : Boolean = {
eval(f) == eval(simplify(f))
} holds
// Note that matching is exhaustive due to precondition.
def vars(f: Formula): Set[BigInt] = {
require(isNNF(f))
f match {
case And(lhs, rhs) => vars(lhs) ++ vars(rhs)
case Or(lhs, rhs) => vars(lhs) ++ vars(rhs)
case Not(Literal(i)) => Set[BigInt](i)
case Literal(i) => Set[BigInt](i)
}
}
def fv(f : Formula) = { vars(nnf(f)) }
@induct
def wrongCommutative(f: Formula) : Boolean = {
nnf(simplify(f)) == simplify(nnf(f))
} holds
@induct
def simplifyPreservesNNF(f: Formula) : Boolean = {
require(isNNF(f))
isNNF(simplify(f))
} holds
@induct
def nnfIsStable(f: Formula) : Boolean = {
require(isNNF(f))
nnf(f) == f
} holds
@induct
def simplifyIsStable(f: Formula) : Boolean = {
require(isSimplified(f))
simplify(f) == f
} holds
}
|
ericpony/scala-examples
|
testcases/web/verification/05_Propositional_Logic.scala
|
Scala
|
mit
| 3,062
|
package view
import util.StringUtil
import util.ControlUtil._
import util.Directory._
import org.parboiled.common.StringUtils
import org.pegdown._
import org.pegdown.ast._
import org.pegdown.LinkRenderer.Rendering
import java.text.Normalizer
import java.util.Locale
import scala.collection.JavaConverters._
import service.{RequestCache, WikiService}
object Markdown {
/**
* Converts Markdown of Wiki pages to HTML.
*/
def toHtml(markdown: String, repository: service.RepositoryService.RepositoryInfo,
enableWikiLink: Boolean, enableRefsLink: Boolean)(implicit context: app.Context): String = {
// escape issue id
val source = if(enableRefsLink){
markdown.replaceAll("(?<=(\\\\W|^))#(\\\\d+)(?=(\\\\W|$))", "issue:$2")
} else markdown
val rootNode = new PegDownProcessor(
Extensions.AUTOLINKS | Extensions.WIKILINKS | Extensions.FENCED_CODE_BLOCKS | Extensions.TABLES | Extensions.HARDWRAPS
).parseMarkdown(source.toCharArray)
new GitBucketHtmlSerializer(markdown, repository, enableWikiLink, enableRefsLink).toHtml(rootNode)
}
}
class GitBucketLinkRender(context: app.Context, repository: service.RepositoryService.RepositoryInfo,
enableWikiLink: Boolean) extends LinkRenderer with WikiService {
override def render(node: WikiLinkNode): Rendering = {
if(enableWikiLink){
try {
val text = node.getText
val (label, page) = if(text.contains('|')){
val i = text.indexOf('|')
(text.substring(0, i), text.substring(i + 1))
} else {
(text, text)
}
val url = repository.httpUrl.replaceFirst("/git/", "/").stripSuffix(".git") + "/wiki/" + StringUtil.urlEncode(page)
if(getWikiPage(repository.owner, repository.name, page).isDefined){
new Rendering(url, label)
} else {
new Rendering(url, label).withAttribute("class", "absent")
}
} catch {
case e: java.io.UnsupportedEncodingException => throw new IllegalStateException
}
} else {
super.render(node)
}
}
}
class GitBucketVerbatimSerializer extends VerbatimSerializer {
def serialize(node: VerbatimNode, printer: Printer): Unit = {
printer.println.print("<pre")
if (!StringUtils.isEmpty(node.getType)) {
printer.print(" class=").print('"').print("prettyprint ").print(node.getType).print('"')
}
printer.print(">")
var text: String = node.getText
while (text.charAt(0) == '\n') {
printer.print("<br/>")
text = text.substring(1)
}
printer.printEncoded(text)
printer.print("</pre>")
}
}
class GitBucketHtmlSerializer(
markdown: String,
repository: service.RepositoryService.RepositoryInfo,
enableWikiLink: Boolean,
enableRefsLink: Boolean
)(implicit val context: app.Context) extends ToHtmlSerializer(
new GitBucketLinkRender(context, repository, enableWikiLink),
Map[String, VerbatimSerializer](VerbatimSerializer.DEFAULT -> new GitBucketVerbatimSerializer).asJava
) with LinkConverter with RequestCache {
override protected def printImageTag(imageNode: SuperNode, url: String): Unit =
printer.print("<a target=\\"_blank\\" href=\\"").print(fixUrl(url)).print("\\">")
.print("<img src=\\"").print(fixUrl(url)).print("\\" alt=\\"").printEncoded(printChildrenToString(imageNode)).print("\\"/></a>")
override protected def printLink(rendering: LinkRenderer.Rendering): Unit = {
printer.print('<').print('a')
printAttribute("href", fixUrl(rendering.href))
for (attr <- rendering.attributes.asScala) {
printAttribute(attr.name, attr.value)
}
printer.print('>').print(rendering.text).print("</a>")
}
private def fixUrl(url: String): String = {
if(!enableWikiLink || url.startsWith("http://") || url.startsWith("https://") || url.startsWith("#")){
url
} else {
repository.httpUrl.replaceFirst("/git/", "/").stripSuffix(".git") + "/wiki/_blob/" + url
}
}
private def printAttribute(name: String, value: String): Unit = {
printer.print(' ').print(name).print('=').print('"').print(value).print('"')
}
private def printHeaderTag(node: HeaderNode): Unit = {
val tag = s"h${node.getLevel}"
val headerTextString = printChildrenToString(node)
val anchorName = GitBucketHtmlSerializer.generateAnchorName(headerTextString)
printer.print(s"""<$tag class="markdown-head">""")
printer.print(s"""<a class="markdown-anchor-link" href="#$anchorName"></a>""")
printer.print(s"""<a class="markdown-anchor" name="$anchorName"></a>""")
visitChildren(node)
printer.print(s"</$tag>")
}
override def visit(node: HeaderNode): Unit = {
printHeaderTag(node)
}
override def visit(node: TextNode): Unit = {
// convert commit id and username to link.
val text = if(enableRefsLink) convertRefsLinks(node.getText, repository, "issue:") else node.getText
if (abbreviations.isEmpty) {
printer.print(text)
} else {
printWithAbbreviations(text)
}
}
}
object GitBucketHtmlSerializer {
  private val Whitespace = "[\\s]".r
def generateAnchorName(text: String): String = {
val noWhitespace = Whitespace.replaceAllIn(text, "-")
val normalized = Normalizer.normalize(noWhitespace, Normalizer.Form.NFD)
val noSpecialChars = StringUtil.urlEncode(normalized)
noSpecialChars.toLowerCase(Locale.ENGLISH)
}
}
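// Illustrative sketch (not part of the original file): generateAnchorName above replaces
// whitespace with "-", applies Unicode NFD normalization, URL-encodes and lower-cases the
// result. Assuming StringUtil.urlEncode is a plain UTF-8 java.net.URLEncoder wrapper, a header
// such as "Getting Started" would yield the anchor "getting-started".
object AnchorNameExample {
  def main(args: Array[String]): Unit =
    println(GitBucketHtmlSerializer.generateAnchorName("Getting Started"))
}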
|
flyh2004/gitbucket
|
src/main/scala/view/Markdown.scala
|
Scala
|
apache-2.0
| 5,422
|
package dawn.flow.trajectory
import dawn.flow.spatial._
import dawn.flow._
import breeze.linalg.{norm, DenseVector}
import spire.math.Quaternion
object MiniFilterIndoor extends FlowApp[Trajectory, TrajInit] {
//****** Model ******
val dtIMU = 0.1
val dtVicon = (dtIMU * 5)
val N = 100
//filter parameter
val covAcc2d = 0.1
val covGPS = 0.001
val clockIMU = new TrajectoryClock(dtIMU).stop(dtIMU*101)
val clockGPS = new TrajectoryClock(dtVicon).stop(dtIMU*101)
/* ****** Gen mini particle filter data **** */
val points2d = clockIMU.map(LambdaWithModel((t: Time, traj: Trajectory) => {val p = traj.getPoint(t); Vec2(p.p(0), p.p(1))}), "toPoints")
val vel2d = clockIMU.map(LambdaWithModel((t: Time, traj: Trajectory) => {val p = traj.getPoint(t); p.v(0)}), "toVel0")
val acc2d = clockIMU.map(Accelerometer2D(eye(2) * covAcc2d))
val gps2d = clockGPS
.map(PositionSensor(eye(3) * covGPS))
.map(x => Vec2(x(0), x(1)))
val mini = SpatialMiniParticleFilter(acc2d, gps2d, dtIMU, N, covAcc2d, covGPS)
// acc2d.map(_(0)).debug
// vel2d.debug
// points2d.debug
// mini.debug
Plot(points2d, mini)
drawExpandedGraph()
val trajs = TrajFactory.generate(1)
trajs.foreach(traj => {
run(traj, traj.trajInit)
})
System.exit(0)
}
|
rubenfiszel/scala-flow
|
drone/src/main/scala/MiniParticleFilter.scala
|
Scala
|
mit
| 1,299
|
package mesosphere.marathon.metrics
import java.util.concurrent.TimeUnit
import com.codahale.metrics.{ ExponentiallyDecayingReservoir, MetricRegistry }
import com.google.inject.{ AbstractModule, Guice }
import com.google.inject.matcher.{ AbstractMatcher, Matchers }
import mesosphere.marathon.core.task.tracker.TaskTracker
import mesosphere.marathon.metrics.Metrics._
import mesosphere.marathon.test.MarathonSpec
import org.aopalliance.intercept.{ MethodInterceptor, MethodInvocation }
import org.mockito.ArgumentCaptor
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
class FooBar {
def dummy(): Unit = {}
}
class MetricsTest
extends MarathonSpec
with MockitoSugar {
private var metrics: Metrics = _
class TestModule extends AbstractModule {
class DummyBehavior extends MethodInterceptor {
override def invoke(invocation: MethodInvocation): AnyRef = {
invocation.proceed()
}
}
object MarathonMatcher extends AbstractMatcher[Class[_]] {
override def matches(t: Class[_]): Boolean = t == classOf[FooBar]
}
override def configure(): Unit = {
bindInterceptor(Matchers.any(), Matchers.any(), new DummyBehavior())
}
}
before {
metrics = new Metrics(new MetricRegistry())
}
test("Metrics#className should strip 'EnhancerByGuice' from the metric names") {
val instance = Guice.createInjector(new TestModule).getInstance(classOf[FooBar])
assert(instance.getClass.getName.contains("EnhancerByGuice"))
assert(metrics.className(instance.getClass) == "mesosphere.marathon.metrics.FooBar")
}
test("Metrics#name should replace $ with .") {
val instance = new Serializable {}
assert(instance.getClass.getName.contains('$'))
assert(metrics.name("test$prefix", instance.getClass, "test$method") ==
"test.prefix.mesosphere.marathon.metrics.MetricsTest.anonfun.3.anon.1.test.method")
}
test("Metrics caches the class names") {
val metricsSpy = spy(metrics)
metricsSpy.name("prefix", classOf[FooBar], "method1")
metricsSpy.name("prefix", classOf[FooBar], "method2")
metricsSpy.name("prefix", classOf[MetricsTest], "method1")
verify(metricsSpy, times(1)).stripGuiceMarksFromClassName(classOf[FooBar])
verify(metricsSpy, times(2)).stripGuiceMarksFromClassName(any())
}
test("Metrics#name should use a dot to separate the class name and the method name") {
val expectedName = "service.mesosphere.marathon.core.task.tracker.TaskTracker.write-request-time"
val actualName = metrics.name("service", classOf[TaskTracker], "write-request-time")
assert(expectedName.equals(actualName))
}
test("The Histogram wrapper should properly proxy updates") {
val origHistogram = new com.codahale.metrics.Histogram(new ExponentiallyDecayingReservoir())
val histogram = new Histogram(origHistogram)
histogram.update(10L)
histogram.update(1)
assert(origHistogram.getSnapshot.getMax == 10)
assert(origHistogram.getSnapshot.getMin == 1)
}
test("The Meter wrapper should properly proxy marks") {
val origMeter = new com.codahale.metrics.Meter
val meter = new Meter(origMeter)
meter.mark()
meter.mark(10)
assert(origMeter.getCount == 11)
}
test("The Timer wrapper should properly time method calls and proxy the updates") {
val origTimer = mock[com.codahale.metrics.Timer]
val timer = new Timer(origTimer)
timer {}
val durationCaptor = ArgumentCaptor.forClass(classOf[Long])
verify(origTimer).update(durationCaptor.capture(), org.mockito.Matchers.eq(TimeUnit.NANOSECONDS))
assert(durationCaptor.getValue > 0)
}
}
|
timcharper/marathon
|
src/test/scala/mesosphere/marathon/metrics/MetricsTest.scala
|
Scala
|
apache-2.0
| 3,691
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.producer
import java.net.SocketTimeoutException
import java.util.Properties
import junit.framework.Assert
import kafka.admin.CreateTopicCommand
import kafka.integration.KafkaServerTestHarness
import kafka.message._
import kafka.server.KafkaConfig
import kafka.utils._
import org.junit.Test
import org.scalatest.junit.JUnit3Suite
import kafka.api.ProducerResponseStatus
import kafka.common.{TopicAndPartition, ErrorMapping}
class SyncProducerTest extends JUnit3Suite with KafkaServerTestHarness {
  private val messageBytes = new Array[Byte](2)
val configs = List(new KafkaConfig(TestUtils.createBrokerConfigs(1).head))
val zookeeperConnect = TestZKUtils.zookeeperConnect
@Test
def testReachableServer() {
val server = servers.head
val props = new Properties()
props.put("host", "localhost")
props.put("port", server.socketServer.port.toString)
props.put("buffer.size", "102400")
props.put("connect.timeout.ms", "500")
props.put("reconnect.interval", "1000")
val producer = new SyncProducer(new SyncProducerConfig(props))
val firstStart = SystemTime.milliseconds
try {
val response = producer.send(TestUtils.produceRequest("test", 0, new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = new Message(messageBytes))))
Assert.assertNotNull(response)
} catch {
case e: Exception => Assert.fail("Unexpected failure sending message to broker. " + e.getMessage)
}
val firstEnd = SystemTime.milliseconds
Assert.assertTrue((firstEnd-firstStart) < 500)
val secondStart = SystemTime.milliseconds
try {
val response = producer.send(TestUtils.produceRequest("test", 0, new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = new Message(messageBytes))))
Assert.assertNotNull(response)
} catch {
case e: Exception => Assert.fail("Unexpected failure sending message to broker. " + e.getMessage)
}
val secondEnd = SystemTime.milliseconds
Assert.assertTrue((secondEnd-secondStart) < 500)
try {
val response = producer.send(TestUtils.produceRequest("test", 0, new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = new Message(messageBytes))))
Assert.assertNotNull(response)
} catch {
case e: Exception => Assert.fail("Unexpected failure sending message to broker. " + e.getMessage)
}
}
@Test
def testEmptyProduceRequest() {
val server = servers.head
val props = new Properties()
props.put("host", "localhost")
props.put("port", server.socketServer.port.toString)
props.put("buffer.size", "102400")
props.put("connect.timeout.ms", "300")
props.put("reconnect.interval", "500")
props.put("max.message.size", "100")
val correlationId = 0
val clientId = SyncProducerConfig.DefaultClientId
val ackTimeoutMs = SyncProducerConfig.DefaultAckTimeoutMs
val ack = SyncProducerConfig.DefaultRequiredAcks
val emptyRequest = new kafka.api.ProducerRequest(correlationId, clientId, ack, ackTimeoutMs, Map[TopicAndPartition, ByteBufferMessageSet]())
val producer = new SyncProducer(new SyncProducerConfig(props))
val response = producer.send(emptyRequest)
Assert.assertTrue(!response.hasError && response.status.size == 0)
}
@Test
def testMessageSizeTooLarge() {
val server = servers.head
val props = new Properties()
props.put("host", "localhost")
props.put("port", server.socketServer.port.toString)
props.put("max.message.size", 50000.toString)
val producer = new SyncProducer(new SyncProducerConfig(props))
CreateTopicCommand.createTopic(zkClient, "test", 1, 1)
TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, "test", 0, 500)
val message1 = new Message(new Array[Byte](configs(0).maxMessageSize + 1))
val messageSet1 = new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = message1)
val response1 = producer.send(TestUtils.produceRequest("test", 0, messageSet1))
Assert.assertEquals(1, response1.status.count(_._2.error != ErrorMapping.NoError))
Assert.assertEquals(ErrorMapping.MessageSizeTooLargeCode, response1.status(TopicAndPartition("test", 0)).error)
Assert.assertEquals(-1L, response1.status(TopicAndPartition("test", 0)).offset)
val safeSize = configs(0).maxMessageSize - Message.MessageOverhead - MessageSet.LogOverhead - 1
val message2 = new Message(new Array[Byte](safeSize))
val messageSet2 = new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = message2)
val response2 = producer.send(TestUtils.produceRequest("test", 0, messageSet2))
Assert.assertEquals(1, response1.status.count(_._2.error != ErrorMapping.NoError))
Assert.assertEquals(ErrorMapping.NoError, response2.status(TopicAndPartition("test", 0)).error)
Assert.assertEquals(0, response2.status(TopicAndPartition("test", 0)).offset)
}
@Test
def testProduceCorrectlyReceivesResponse() {
val server = servers.head
val props = new Properties()
props.put("host", "localhost")
props.put("port", server.socketServer.port.toString)
props.put("buffer.size", "102400")
props.put("connect.timeout.ms", "300")
props.put("reconnect.interval", "500")
props.put("max.message.size", "100")
val producer = new SyncProducer(new SyncProducerConfig(props))
val messages = new ByteBufferMessageSet(NoCompressionCodec, new Message(messageBytes))
// #1 - test that we get an error when partition does not belong to broker in response
val request = TestUtils.produceRequestWithAcks(Array("topic1", "topic2", "topic3"), Array(0), messages, 1)
val response = producer.send(request)
Assert.assertNotNull(response)
Assert.assertEquals(request.correlationId, response.correlationId)
Assert.assertEquals(3, response.status.size)
response.status.values.foreach {
case ProducerResponseStatus(error, nextOffset) =>
Assert.assertEquals(ErrorMapping.UnknownTopicOrPartitionCode.toShort, error)
Assert.assertEquals(-1L, nextOffset)
}
// #2 - test that we get correct offsets when partition is owned by broker
CreateTopicCommand.createTopic(zkClient, "topic1", 1, 1)
TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, "topic1", 0, 500)
CreateTopicCommand.createTopic(zkClient, "topic3", 1, 1)
TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, "topic3", 0, 500)
val response2 = producer.send(request)
Assert.assertNotNull(response2)
Assert.assertEquals(request.correlationId, response2.correlationId)
Assert.assertEquals(3, response2.status.size)
// the first and last message should have been accepted by broker
Assert.assertEquals(ErrorMapping.NoError, response2.status(TopicAndPartition("topic1", 0)).error)
Assert.assertEquals(ErrorMapping.NoError, response2.status(TopicAndPartition("topic3", 0)).error)
Assert.assertEquals(0, response2.status(TopicAndPartition("topic1", 0)).offset)
Assert.assertEquals(0, response2.status(TopicAndPartition("topic3", 0)).offset)
// the middle message should have been rejected because broker doesn't lead partition
Assert.assertEquals(ErrorMapping.UnknownTopicOrPartitionCode.toShort,
response2.status(TopicAndPartition("topic2", 0)).error)
Assert.assertEquals(-1, response2.status(TopicAndPartition("topic2", 0)).offset)
}
@Test
def testProducerCanTimeout() {
val timeoutMs = 500
val server = servers.head
val props = new Properties()
props.put("host", "localhost")
props.put("port", server.socketServer.port.toString)
props.put("buffer.size", "102400")
props.put("producer.request.timeout.ms", String.valueOf(timeoutMs))
val producer = new SyncProducer(new SyncProducerConfig(props))
val messages = new ByteBufferMessageSet(NoCompressionCodec, new Message(messageBytes))
val request = TestUtils.produceRequest("topic1", 0, messages)
// stop IO threads and request handling, but leave networking operational
// any requests should be accepted and queue up, but not handled
server.requestHandlerPool.shutdown()
val t1 = SystemTime.milliseconds
try {
producer.send(request)
Assert.fail("Should have received timeout exception since request handling is stopped.")
} catch {
case e: SocketTimeoutException => /* success */
case e => Assert.fail("Unexpected exception when expecting timeout: " + e)
}
val t2 = SystemTime.milliseconds
// make sure we don't wait fewer than timeoutMs for a response
Assert.assertTrue((t2-t1) >= timeoutMs)
}
}
|
dchenbecker/kafka-sbt
|
core/src/test/scala/unit/kafka/producer/SyncProducerTest.scala
|
Scala
|
apache-2.0
| 9,460
|
package scalpel
import org.scalameter._
import org.scalameter.utils._
object SMRunner {
def computeClasspath = this.getClass.getClassLoader match {
case urlcl: java.net.URLClassLoader => extractClasspath(urlcl)
case cl => sys.props("java.class.path")
}
def extractClasspath(urlclassloader: java.net.URLClassLoader): String = {
val fileResource = "file:(.*)".r
val files = urlclassloader.getURLs.map(_.toString) collect {
case fileResource(file) => file
}
files.mkString(":")
}
def run(perfTest:PerformanceTest) = {
val args = Array[String]()
val testcp = computeClasspath
for {
// _ <- dyn.log.using(complog)
// _ <- dyn.events.using(tievents)
_ <- dyn.initialContext.using(initialContext ++ Seq((Key.verbose,false))
++ Main.Configuration.fromCommandLineArgs(args).context + (Key.classpath -> testcp)
++ Seq((Key.dsl.scope,List("WHAT IS THIS","THING"))))
} {
val datestart = new java.util.Date
DSL.setupzipper.value = Tree.Zipper.root[Setup[_]]
LoopBenchmark.testbody.value.apply()
val setuptree = DSL.setupzipper.value.result
val resulttree = perfTest.executor.run(setuptree.asInstanceOf[Tree[Setup[LoopBenchmark.SameType]]], perfTest.reporter, perfTest.persistor)
// Print results
// for(curve <- resulttree) {
// for(measurement <- curve.measurements) {
// println(s"Scalameter time data: ${measurement.time}")
// }
// }
val dateend = new java.util.Date
val datedtree = resulttree.copy(context = resulttree.context + (Key.reports.startDate -> datestart) + (Key.reports.endDate -> dateend))
perfTest.reporter.report(datedtree, perfTest.persistor)
}
}
}
|
lossyrob/scalpel
|
src/main/scala/scalpel/SMRunner.scala
|
Scala
|
bsd-3-clause
| 1,886
|
package dotty.tools
package dotc
package parsing
import org.junit.Test
import org.junit.Assert._
import ast.Trees.mods
import ast.untpd._
import ast.{ Trees => d }
import Parsers.Parser
import util.SourceFile
import core.Contexts._
import core.Flags
object ModifiersParsingTest {
given Context = (new ContextBase).initialCtx
def parse(code: String): Tree = {
val (_, stats) = new Parser(SourceFile.virtual("<meta>", code)).templateStatSeq()
stats match { case List(stat) => stat; case stats => Thicket(stats) }
}
extension (code: Tree) {
def firstConstrValDef: ValDef = code match {
case d.TypeDef(_, d.Template(constr, _, _, _)) =>
constr.termParamss.head.head
}
def firstTypeParam: TypeDef = code match {
case d.TypeDef(_, d.Template(constr, _, _, _)) =>
constr.leadingTypeParams.head
}
def defParam(i: Int): ValDef = code match {
case code @ d.DefDef(_, _, _, _) =>
code.termParamss.head.toArray.apply(i)
}
def defParam(i: Int, j: Int): ValDef = code match {
case code @ d.DefDef(_, _, _, _) =>
code.termParamss.toArray.apply(i).toArray.apply(j)
}
def funParam(i: Int): Tree = code match {
case Function(params, _) =>
params.toArray.apply(i)
}
def field(i: Int): Tree = code match {
case d.TypeDef(_, t: Template) =>
t.body.toArray.apply(i)
}
def field(name: String): Tree = code match {
case d.TypeDef(_, t: Template) =>
t.body.find({
case m: MemberDef => m.name.show == name
case _ => false
}).get
}
def stat(i: Int): Tree = code match {
case d.Block(stats, expr) =>
if (i < stats.length) stats.toArray.apply(i)
else expr
}
def modifiers: List[Mod] = code match {
case t: MemberDef => t.mods.mods
}
}
}
class ModifiersParsingTest {
import ModifiersParsingTest.{_, given}
@Test def valDef = {
var source = parse("class A(var a: Int)")
assert(source.firstConstrValDef.modifiers == List(Mod.Var()))
source = parse("class A(val a: Int)")
assert(source.firstConstrValDef.modifiers == List())
source = parse("class A(private val a: Int)")
assert(source.firstConstrValDef.modifiers == List(Mod.Private()))
source = parse("class A(protected var a: Int)")
assert(source.firstConstrValDef.modifiers == List(Mod.Protected(), Mod.Var()))
source = parse("class A(protected implicit val a: Int)")
assert(source.firstConstrValDef.modifiers == List(Mod.Protected(), Mod.Implicit()))
source = parse("class A[T]")
assert(source.firstTypeParam.modifiers == List())
}
@Test def typeDef = {
var source = parse("class A")
assert(source.modifiers == List())
source = parse("sealed class A")
assert(source.modifiers == List(Mod.Sealed()))
source = parse("implicit class A")
assert(source.modifiers == List(Mod.Implicit()))
source = parse("abstract sealed class A")
assert(source.modifiers == List(Mod.Abstract(), Mod.Sealed()))
}
@Test def fieldDef = {
val source =
parse("""
| class A {
| lazy var a = ???
| lazy private val b = ???
| final val c = ???
|
| abstract override def f: Boolean
| inline def g(n: Int) = ???
| }
""".stripMargin)
assert(source.field("a").modifiers == List(Mod.Lazy(), Mod.Var()))
assert(source.field("b").modifiers == List(Mod.Lazy(), Mod.Private()))
assert(source.field("c").modifiers == List(Mod.Final()))
assert(source.field("f").modifiers == List(Mod.Abstract(), Mod.Override()))
assert(source.field("g").modifiers == List(Mod.Inline()))
}
@Test def paramDef = {
var source: Tree = parse("def f(inline a: Int) = ???")
assert(source.defParam(0).modifiers == List(Mod.Inline()))
source = parse("def f(implicit a: Int, b: Int) = ???")
assert(source.defParam(0).modifiers == List(Mod.Implicit()))
assert(source.defParam(1).modifiers == List())
source = parse("def f(x: Int, y: Int)(implicit a: Int, b: Int) = ???")
assert(source.defParam(0, 0).modifiers == List())
assert(source.defParam(1, 0).modifiers == List(Mod.Implicit()))
}
@Test def blockDef = {
var source: Tree = parse("implicit val x : A = ???")
assert(source.modifiers == List(Mod.Implicit()))
source = parse("implicit var x : A = ???")
assert(source.modifiers == List(Mod.Implicit(), Mod.Var()))
source = parse("{ implicit var x : A = ??? }")
assert(source.stat(0).modifiers == List(Mod.Implicit(), Mod.Var()))
source = parse("{ implicit x => x * x }")
assert(source.stat(0).funParam(0).modifiers == List(Mod.Implicit()))
}
}
|
dotty-staging/dotty
|
compiler/test/dotty/tools/dotc/parsing/ModifiersParsingTest.scala
|
Scala
|
apache-2.0
| 4,753
|
package com.github.ldaniels528.trifecta.io.zookeeper
import com.github.ldaniels528.trifecta.messages.codec.MessageDecoder
import com.github.ldaniels528.trifecta.messages.codec.avro.AvroDecoder
import com.github.ldaniels528.trifecta.messages.{KeyAndMessage, MessageOutputSource}
import scala.concurrent.ExecutionContext
import scala.util.{Failure, Success}
/**
* Zookeeper Output Source
* @author lawrence.daniels@gmail.com
*/
class ZookeeperMessageOutputSource(zk: ZKProxy, rootPath: String) extends MessageOutputSource {
/**
* Returns the binary encoding
* @return the binary encoding
*/
val encoding: String = "UTF8"
override def open(): Unit = ()
override def write(data: KeyAndMessage, decoder: Option[MessageDecoder[_]])(implicit ec: ExecutionContext) {
decoder match {
case Some(av: AvroDecoder) =>
av.decode(data.message) match {
case Success(record) =>
val path = s"$rootPath/${new String(data.key, encoding)}"
zk.create(path, data.message)
()
case Failure(e) =>
throw new IllegalStateException(e.getMessage, e)
}
case Some(unhandled) =>
throw new IllegalStateException(s"Unhandled decoder '$unhandled'")
case None =>
val path = s"$rootPath/${new String(data.key, encoding)}"
zk.create(path, data.message)
()
}
}
override def close(): Unit = ()
}
|
ldaniels528/trifecta
|
src/main/scala/com/github/ldaniels528/trifecta/io/zookeeper/ZookeeperMessageOutputSource.scala
|
Scala
|
apache-2.0
| 1,429
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.test
import java.io.File
import java.net.URI
import java.util.{Set => JavaSet}
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.apache.hadoop.hive.ql.exec.FunctionRegistry
import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config
import org.apache.spark.internal.config.UI._
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession, SQLContext}
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
import org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener
import org.apache.spark.sql.catalyst.expressions.CodegenObjectFactoryMode
import org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation}
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
import org.apache.spark.sql.execution.{QueryExecution, SQLExecution}
import org.apache.spark.sql.execution.command.CacheTableCommand
import org.apache.spark.sql.hive._
import org.apache.spark.sql.hive.client.HiveClient
import org.apache.spark.sql.internal.{SessionState, SharedState, SQLConf, WithTestConf}
import org.apache.spark.sql.internal.StaticSQLConf.{CATALOG_IMPLEMENTATION, WAREHOUSE_PATH}
import org.apache.spark.util.{ShutdownHookManager, Utils}
// SPARK-3729: Test key required to check for initialization errors with config.
object TestHive
extends TestHiveContext(
new SparkContext(
System.getProperty("spark.sql.test.master", "local[1]"),
"TestSQLContext",
new SparkConf()
.set("spark.sql.test", "")
.set(SQLConf.CODEGEN_FALLBACK.key, "false")
.set(SQLConf.CODEGEN_FACTORY_MODE.key, CodegenObjectFactoryMode.CODEGEN_ONLY.toString)
.set(HiveUtils.HIVE_METASTORE_BARRIER_PREFIXES.key,
"org.apache.spark.sql.hive.execution.PairSerDe")
.set(WAREHOUSE_PATH.key, TestHiveContext.makeWarehouseDir().toURI.getPath)
// SPARK-8910
.set(UI_ENABLED, false)
.set(config.UNSAFE_EXCEPTION_ON_MEMORY_LEAK, true)
// Hive changed the default of hive.metastore.disallow.incompatible.col.type.changes
// from false to true. For details, see the JIRA HIVE-12320 and HIVE-17764.
.set("spark.hadoop.hive.metastore.disallow.incompatible.col.type.changes", "false")
// Disable ConvertToLocalRelation for better test coverage. Test cases built on
// LocalRelation will exercise the optimization rules better by disabling it as
// this rule may potentially block testing of other optimization rules such as
// ConstantPropagation etc.
.set(SQLConf.OPTIMIZER_EXCLUDED_RULES.key, ConvertToLocalRelation.ruleName)))
case class TestHiveVersion(hiveClient: HiveClient)
extends TestHiveContext(TestHive.sparkContext, hiveClient)
private[hive] class TestHiveExternalCatalog(
conf: SparkConf,
hadoopConf: Configuration,
hiveClient: Option[HiveClient] = None)
extends HiveExternalCatalog(conf, hadoopConf) with Logging {
override lazy val client: HiveClient =
hiveClient.getOrElse {
HiveUtils.newClientForMetadata(conf, hadoopConf)
}
}
private[hive] class TestHiveSharedState(
sc: SparkContext,
hiveClient: Option[HiveClient] = None)
extends SharedState(sc, initialConfigs = Map.empty[String, String]) {
// The set of loaded tables should be kept in shared state, since there may be multiple sessions
// created that want to use the same tables.
val loadedTables = new collection.mutable.HashSet[String]
override lazy val externalCatalog: ExternalCatalogWithListener = {
new ExternalCatalogWithListener(new TestHiveExternalCatalog(
sc.conf,
sc.hadoopConfiguration,
hiveClient))
}
}
/**
* A locally running test instance of Spark's Hive execution engine.
*
* Data from [[testTables]] will be automatically loaded whenever a query is run over those tables.
* Calling [[reset]] will delete all tables and other state in the database, leaving the database
* in a "clean" state.
*
 * TestHive is the singleton object version of this class, because instantiating multiple copies
 * of the Hive metastore seems to lead to weird non-deterministic failures. Therefore, the execution of
* test cases that rely on TestHive must be serialized.
*/
class TestHiveContext(
@transient override val sparkSession: TestHiveSparkSession)
extends SQLContext(sparkSession) {
/**
* If loadTestTables is false, no test tables are loaded. Note that this flag can only be true
* when running in the JVM, i.e. it needs to be false when calling from Python.
*/
def this(sc: SparkContext, loadTestTables: Boolean = true) = {
this(new TestHiveSparkSession(HiveUtils.withHiveExternalCatalog(sc), loadTestTables))
}
def this(sc: SparkContext, hiveClient: HiveClient) = {
this(new TestHiveSparkSession(HiveUtils.withHiveExternalCatalog(sc),
hiveClient,
loadTestTables = false))
}
override def newSession(): TestHiveContext = {
new TestHiveContext(sparkSession.newSession())
}
def setCacheTables(c: Boolean): Unit = {
sparkSession.setCacheTables(c)
}
def getHiveFile(path: String): File = {
sparkSession.getHiveFile(path)
}
def loadTestTable(name: String): Unit = {
sparkSession.loadTestTable(name)
}
def reset(): Unit = {
sparkSession.reset()
}
}
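// Illustrative sketch (not part of the original file): typical use of the TestHive singleton
// from a Hive test suite. Querying a bundled test table such as "src" triggers its lazy
// creation and data load (see loadTestTable below); reset() drops everything again.
object TestHiveUsageExample {
  def main(args: Array[String]): Unit = {
    val rowCount = TestHive.sql("SELECT COUNT(*) FROM src").collect().head.getLong(0)
    println(s"src has $rowCount rows")
    TestHive.reset()
  }
}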
/**
* A [[SparkSession]] used in [[TestHiveContext]].
*
* @param sc SparkContext
* @param existingSharedState optional [[SharedState]]
* @param parentSessionState optional parent [[SessionState]]
* @param loadTestTables if true, load the test tables. They can only be loaded when running
 * in the JVM, i.e. when calling from Python this flag has to be false.
*/
private[hive] class TestHiveSparkSession(
@transient private val sc: SparkContext,
@transient private val existingSharedState: Option[TestHiveSharedState],
@transient private val parentSessionState: Option[SessionState],
private val loadTestTables: Boolean)
extends SparkSession(sc) with Logging { self =>
def this(sc: SparkContext, loadTestTables: Boolean) = {
this(
sc,
existingSharedState = None,
parentSessionState = None,
loadTestTables)
}
def this(sc: SparkContext, hiveClient: HiveClient, loadTestTables: Boolean) = {
this(
sc,
existingSharedState = Some(new TestHiveSharedState(sc, Some(hiveClient))),
parentSessionState = None,
loadTestTables)
}
SparkSession.setDefaultSession(this)
SparkSession.setActiveSession(this)
{ // set the metastore temporary configuration
val metastoreTempConf = HiveUtils.newTemporaryConfiguration(useInMemoryDerby = false) ++ Map(
ConfVars.METASTORE_INTEGER_JDO_PUSHDOWN.varname -> "true",
// scratch directory used by Hive's metastore client
ConfVars.SCRATCHDIR.varname -> TestHiveContext.makeScratchDir().toURI.toString,
ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY.varname -> "1") ++
// After session cloning, the JDBC connect string for a JDBC metastore should not be changed.
existingSharedState.map { state =>
val connKey =
state.sparkContext.hadoopConfiguration.get(ConfVars.METASTORECONNECTURLKEY.varname)
ConfVars.METASTORECONNECTURLKEY.varname -> connKey
}
metastoreTempConf.foreach { case (k, v) =>
sc.hadoopConfiguration.set(k, v)
}
}
assert(sc.conf.get(CATALOG_IMPLEMENTATION) == "hive")
@transient
override lazy val sharedState: TestHiveSharedState = {
existingSharedState.getOrElse(new TestHiveSharedState(sc))
}
@transient
override lazy val sessionState: SessionState = {
new TestHiveSessionStateBuilder(this, parentSessionState, Map.empty).build()
}
lazy val metadataHive: HiveClient = {
sharedState.externalCatalog.unwrapped.asInstanceOf[HiveExternalCatalog].client.newSession()
}
/**
* This is a temporary hack to override SparkSession.sql so we can still use the version of
* Dataset.ofRows that creates a TestHiveQueryExecution (rather than a normal QueryExecution
* which wouldn't load all the test tables).
*/
override def sql(sqlText: String): DataFrame = withActive {
val plan = sessionState.sqlParser.parsePlan(sqlText)
Dataset.ofRows(self, plan)
}
override def newSession(): TestHiveSparkSession = withActive {
new TestHiveSparkSession(sc, Some(sharedState), None, loadTestTables)
}
override def cloneSession(): SparkSession = withActive {
val result = new TestHiveSparkSession(
sparkContext,
Some(sharedState),
Some(sessionState),
loadTestTables)
result.sessionState // force copy of SessionState
result
}
private var cacheTables: Boolean = false
def setCacheTables(c: Boolean): Unit = {
cacheTables = c
}
// By clearing the port we force Spark to pick a new one. This allows us to rerun tests
// without restarting the JVM.
System.clearProperty("spark.hostPort")
  // For some hive test cases which contain ${system:test.tmp.dir}
// Make sure it is not called again when cloning sessions.
if (parentSessionState.isEmpty) {
System.setProperty("test.tmp.dir", Utils.createTempDir().toURI.getPath)
}
/** The location of the compiled hive distribution */
lazy val hiveHome = envVarToFile("HIVE_HOME")
/** The location of the hive source code. */
lazy val hiveDevHome = envVarToFile("HIVE_DEV_HOME")
/**
* Returns the value of specified environmental variable as a [[java.io.File]] after checking
* to ensure it exists
*/
private def envVarToFile(envVar: String): Option[File] = {
Option(System.getenv(envVar)).map(new File(_))
}
val hiveFilesTemp = File.createTempFile("catalystHiveFiles", "")
hiveFilesTemp.delete()
hiveFilesTemp.mkdir()
ShutdownHookManager.registerShutdownDeleteDir(hiveFilesTemp)
def getHiveFile(path: String): File = {
new File(Thread.currentThread().getContextClassLoader.getResource(path).getFile)
}
private def quoteHiveFile(path : String) = if (Utils.isWindows) {
getHiveFile(path).getPath.replace('\\', '/')
} else {
getHiveFile(path).getPath
}
def getWarehousePath(): String = {
val tempConf = new SQLConf
sc.conf.getAll.foreach { case (k, v) => tempConf.setConfString(k, v) }
tempConf.warehousePath
}
val describedTable = "DESCRIBE (\\w+)".r
case class TestTable(name: String, commands: (() => Unit)*)
protected[hive] implicit class SqlCmd(sql: String) {
def cmd: () => Unit = {
() => new TestHiveQueryExecution(sql).executedPlan.executeCollect(): Unit
}
}
/**
* A list of test tables and the DDL required to initialize them. A test table is loaded on
 * demand when a query is run against it.
*/
@transient
lazy val testTables = new mutable.HashMap[String, TestTable]()
def registerTestTable(testTable: TestTable): Unit = {
testTables += (testTable.name -> testTable)
}
if (loadTestTables) {
// The test tables that are defined in the Hive QTestUtil.
// /itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
// https://github.com/apache/hive/blob/branch-0.13/data/scripts/q_test_init.sql
@transient
val hiveQTestUtilTables: Seq[TestTable] = Seq(
TestTable("src",
"CREATE TABLE src (key INT, value STRING) STORED AS TEXTFILE".cmd,
s"LOAD DATA LOCAL INPATH '${quoteHiveFile("data/files/kv1.txt")}' INTO TABLE src".cmd),
TestTable("src1",
"CREATE TABLE src1 (key INT, value STRING) STORED AS TEXTFILE".cmd,
s"LOAD DATA LOCAL INPATH '${quoteHiveFile("data/files/kv3.txt")}' INTO TABLE src1".cmd),
TestTable("srcpart", () => {
"CREATE TABLE srcpart (key INT, value STRING) PARTITIONED BY (ds STRING, hr STRING)"
.cmd.apply()
for (ds <- Seq("2008-04-08", "2008-04-09"); hr <- Seq("11", "12")) {
s"""
|LOAD DATA LOCAL INPATH '${quoteHiveFile("data/files/kv1.txt")}'
|OVERWRITE INTO TABLE srcpart PARTITION (ds='$ds',hr='$hr')
""".stripMargin.cmd.apply()
}
}),
TestTable("srcpart1", () => {
"CREATE TABLE srcpart1 (key INT, value STRING) PARTITIONED BY (ds STRING, hr INT)"
.cmd.apply()
for (ds <- Seq("2008-04-08", "2008-04-09"); hr <- 11 to 12) {
s"""
|LOAD DATA LOCAL INPATH '${quoteHiveFile("data/files/kv1.txt")}'
|OVERWRITE INTO TABLE srcpart1 PARTITION (ds='$ds',hr='$hr')
""".stripMargin.cmd.apply()
}
}),
TestTable("src_thrift", () => {
import org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer
import org.apache.hadoop.mapred.{SequenceFileInputFormat, SequenceFileOutputFormat}
import org.apache.thrift.protocol.TBinaryProtocol
s"""
|CREATE TABLE src_thrift(fake INT)
|ROW FORMAT SERDE '${classOf[ThriftDeserializer].getName}'
|WITH SERDEPROPERTIES(
| 'serialization.class'='org.apache.spark.sql.hive.test.Complex',
| 'serialization.format'='${classOf[TBinaryProtocol].getName}'
|)
|STORED AS
|INPUTFORMAT '${classOf[SequenceFileInputFormat[_, _]].getName}'
|OUTPUTFORMAT '${classOf[SequenceFileOutputFormat[_, _]].getName}'
""".stripMargin.cmd.apply()
s"""
|LOAD DATA LOCAL INPATH '${quoteHiveFile("data/files/complex.seq")}'
|INTO TABLE src_thrift
""".stripMargin.cmd.apply()
}),
TestTable("serdeins",
s"""CREATE TABLE serdeins (key INT, value STRING)
|ROW FORMAT SERDE '${classOf[LazySimpleSerDe].getCanonicalName}'
|WITH SERDEPROPERTIES ('field.delim'='\\t')
""".stripMargin.cmd,
"INSERT OVERWRITE TABLE serdeins SELECT * FROM src".cmd),
TestTable("episodes",
s"""CREATE TABLE episodes (title STRING, air_date STRING, doctor INT)
|STORED AS avro
|TBLPROPERTIES (
| 'avro.schema.literal'='{
| "type": "record",
| "name": "episodes",
| "namespace": "testing.hive.avro.serde",
| "fields": [
| {
| "name": "title",
| "type": "string",
| "doc": "episode title"
| },
| {
| "name": "air_date",
| "type": "string",
| "doc": "initial date"
| },
| {
| "name": "doctor",
| "type": "int",
| "doc": "main actor playing the Doctor in episode"
| }
| ]
| }'
|)
""".stripMargin.cmd,
s"""
|LOAD DATA LOCAL INPATH '${quoteHiveFile("data/files/episodes.avro")}'
|INTO TABLE episodes
""".stripMargin.cmd
),
// THIS TABLE IS NOT THE SAME AS THE HIVE TEST TABLE episodes_partitioned AS DYNAMIC
// PARTITIONING IS NOT YET SUPPORTED
TestTable("episodes_part",
s"""CREATE TABLE episodes_part (title STRING, air_date STRING, doctor INT)
|PARTITIONED BY (doctor_pt INT)
|STORED AS avro
|TBLPROPERTIES (
| 'avro.schema.literal'='{
| "type": "record",
| "name": "episodes",
| "namespace": "testing.hive.avro.serde",
| "fields": [
| {
| "name": "title",
| "type": "string",
| "doc": "episode title"
| },
| {
| "name": "air_date",
| "type": "string",
| "doc": "initial date"
| },
| {
| "name": "doctor",
| "type": "int",
| "doc": "main actor playing the Doctor in episode"
| }
| ]
| }'
|)
""".stripMargin.cmd,
// WORKAROUND: Required to pass schema to SerDe for partitioned tables.
// TODO: Pass this automatically from the table to partitions.
s"""
|ALTER TABLE episodes_part SET SERDEPROPERTIES (
| 'avro.schema.literal'='{
| "type": "record",
| "name": "episodes",
| "namespace": "testing.hive.avro.serde",
| "fields": [
| {
| "name": "title",
| "type": "string",
| "doc": "episode title"
| },
| {
| "name": "air_date",
| "type": "string",
| "doc": "initial date"
| },
| {
| "name": "doctor",
| "type": "int",
| "doc": "main actor playing the Doctor in episode"
| }
| ]
| }'
|)
""".stripMargin.cmd,
s"""
INSERT OVERWRITE TABLE episodes_part PARTITION (doctor_pt=1)
SELECT title, air_date, doctor FROM episodes
""".cmd
),
TestTable("src_json",
s"""CREATE TABLE src_json (json STRING) STORED AS TEXTFILE
""".stripMargin.cmd,
s"LOAD DATA LOCAL INPATH '${quoteHiveFile("data/files/json.txt")}' INTO TABLE src_json".cmd)
)
hiveQTestUtilTables.foreach(registerTestTable)
}
def getLoadedTables: collection.mutable.HashSet[String] = sharedState.loadedTables
def loadTestTable(name: String): Unit = {
// LOAD DATA does not work on temporary views. Since temporary views are resolved first,
// skip loading if there exists a temporary view with the given name.
if (sessionState.catalog.getTempView(name).isEmpty &&
!sharedState.loadedTables.contains(name)) {
// Marks the table as loaded first to prevent infinite mutually recursive table loading.
sharedState.loadedTables += name
logDebug(s"Loading test table $name")
val createCmds =
testTables.get(name).map(_.commands).getOrElse(sys.error(s"Unknown test table $name"))
      // test tables are loaded lazily, so they may be loaded in the middle of a query execution which
// has already set the execution id.
if (sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY) == null) {
// We don't actually have a `QueryExecution` here, use a fake one instead.
SQLExecution.withNewExecutionId(new QueryExecution(this, OneRowRelation())) {
createCmds.foreach(_())
}
} else {
createCmds.foreach(_())
}
if (cacheTables) {
new SQLContext(self).cacheTable(name)
}
}
}
/**
* Records the UDFs present when the server starts, so we can delete ones that are created by
* tests.
*/
protected val originalUDFs: JavaSet[String] = FunctionRegistry.getFunctionNames
/**
* Resets the test instance by deleting any table, view, temp view, and UDF that have been created
*/
def reset(): Unit = {
try {
// HACK: Hive is too noisy by default.
org.apache.log4j.LogManager.getCurrentLoggers.asScala.foreach { log =>
val logger = log.asInstanceOf[org.apache.log4j.Logger]
if (!logger.getName.contains("org.apache.spark")) {
logger.setLevel(org.apache.log4j.Level.WARN)
}
}
// Clean out the Hive warehouse between each suite
val warehouseDir = new File(new URI(sparkContext.conf.get(WAREHOUSE_PATH.key)).getPath)
Utils.deleteRecursively(warehouseDir)
warehouseDir.mkdir()
sharedState.cacheManager.clearCache()
sharedState.loadedTables.clear()
sessionState.catalog.reset()
metadataHive.reset()
// HDFS root scratch dir requires the write all (733) permission. For each connecting user,
// an HDFS scratch dir: ${hive.exec.scratchdir}/<username> is created, with
// ${hive.scratch.dir.permission}. To resolve the permission issue, the simplest way is to
// delete it. Later, it will be re-created with the right permission.
val hadoopConf = sessionState.newHadoopConf()
val location = new Path(hadoopConf.get(ConfVars.SCRATCHDIR.varname))
val fs = location.getFileSystem(hadoopConf)
fs.delete(location, true)
// Some tests corrupt this value on purpose, which breaks the RESET call below.
sessionState.conf.setConfString("fs.defaultFS", new File(".").toURI.toString)
// It is important that we RESET first as broken hooks that might have been set could break
// other sql exec here.
metadataHive.runSqlHive("RESET")
// For some reason, RESET does not reset the following variables...
// https://issues.apache.org/jira/browse/HIVE-9004
metadataHive.runSqlHive("set hive.table.parameters.default=")
// Lots of tests fail if we do not change the partition whitelist from the default.
metadataHive.runSqlHive("set hive.metastore.partition.name.whitelist.pattern=.*")
sessionState.catalog.setCurrentDatabase("default")
} catch {
case e: Exception =>
logError("FATAL ERROR: Failed to reset TestDB state.", e)
}
}
}
private[hive] class TestHiveQueryExecution(
sparkSession: TestHiveSparkSession,
logicalPlan: LogicalPlan)
extends QueryExecution(sparkSession, logicalPlan) with Logging {
def this(sparkSession: TestHiveSparkSession, sql: String) = {
this(sparkSession, sparkSession.sessionState.sqlParser.parsePlan(sql))
}
def this(sql: String) = {
this(TestHive.sparkSession, sql)
}
override lazy val analyzed: LogicalPlan = sparkSession.withActive {
val describedTables = logical match {
case CacheTableCommand(tbl, _, _, _) => tbl :: Nil
case _ => Nil
}
// Make sure any test tables referenced are loaded.
val referencedTables =
describedTables ++
logical.collect { case UnresolvedRelation(ident, _, _) => ident.asTableIdentifier }
val resolver = sparkSession.sessionState.conf.resolver
val referencedTestTables = referencedTables.flatMap { tbl =>
val testTableOpt = sparkSession.testTables.keys.find(resolver(_, tbl.table))
testTableOpt.map(testTable => tbl.copy(table = testTable))
}
logDebug(s"Query references test tables: ${referencedTestTables.map(_.table).mkString(", ")}")
referencedTestTables.foreach { tbl =>
val curDB = sparkSession.catalog.currentDatabase
try {
tbl.database.foreach(db => sparkSession.catalog.setCurrentDatabase(db))
sparkSession.loadTestTable(tbl.table)
} finally {
tbl.database.foreach(_ => sparkSession.catalog.setCurrentDatabase(curDB))
}
}
// Proceed with analysis.
sparkSession.sessionState.analyzer.executeAndCheck(logical, tracker)
}
}
private[hive] object TestHiveContext {
/**
* A map used to store all confs that need to be overridden in sql/hive unit tests.
*/
val overrideConfs: Map[String, String] =
Map(
// Fewer shuffle partitions to speed up testing.
SQLConf.SHUFFLE_PARTITIONS.key -> "5"
)
def makeWarehouseDir(): File = {
val warehouseDir = Utils.createTempDir(namePrefix = "warehouse")
warehouseDir.delete()
warehouseDir
}
def makeScratchDir(): File = {
val scratchDir = Utils.createTempDir(namePrefix = "scratch")
scratchDir.delete()
scratchDir
}
}
private[sql] class TestHiveSessionStateBuilder(
session: SparkSession,
state: Option[SessionState],
options: Map[String, String])
extends HiveSessionStateBuilder(session, state, options)
with WithTestConf {
override def overrideConfs: Map[String, String] = TestHiveContext.overrideConfs
override def createQueryExecution: (LogicalPlan) => QueryExecution = { plan =>
new TestHiveQueryExecution(session.asInstanceOf[TestHiveSparkSession], plan)
}
override protected def newBuilder: NewBuilder = new TestHiveSessionStateBuilder(_, _, Map.empty)
}
private[hive] object HiveTestJars {
private val repository = SQLConf.ADDITIONAL_REMOTE_REPOSITORIES.defaultValueString.split(",")(0)
private val hiveTestJarsDir = Utils.createTempDir()
def getHiveContribJar(version: String = HiveUtils.builtinHiveVersion): File =
getJarFromUrl(s"${repository}org/apache/hive/hive-contrib/" +
s"$version/hive-contrib-$version.jar")
def getHiveHcatalogCoreJar(version: String = HiveUtils.builtinHiveVersion): File =
getJarFromUrl(s"${repository}org/apache/hive/hcatalog/hive-hcatalog-core/" +
s"$version/hive-hcatalog-core-$version.jar")
private def getJarFromUrl(urlString: String): File = {
val fileName = urlString.split("/").last
val targetFile = new File(hiveTestJarsDir, fileName)
if (!targetFile.exists()) {
Utils.doFetchFile(urlString, hiveTestJarsDir, fileName, new SparkConf, null, null)
}
targetFile
}
}
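// Illustrative sketch (not part of the original file): fetching the hive-contrib jar for the
// built-in Hive version and printing where it was cached locally. This needs network access to
// the default remote repository configured above.
object HiveTestJarsExample {
  def main(args: Array[String]): Unit =
    println(HiveTestJars.getHiveContribJar().getAbsolutePath)
}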
|
shuangshuangwang/spark
|
sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
|
Scala
|
apache-2.0
| 26,175
|
package examples
import io.gatling.core.Predef._
import io.gatling.http.Predef._
/**
* @author <a href="mailto:mstrukel@redhat.com">Marko Strukelj</a>
*/
class SimpleExample2 extends Simulation {
// Create two scenarios
// First one called Simple with three steps:
// - first perform an HTTP GET
// - then pause for 10 seconds
// - then perform a different HTTP GET
val scn = scenario("Simple")
.exec(http("Home")
.get("http://localhost:8080")
.check(status is 200))
.pause(10)
.exec(http("Auth Home")
.get("http://localhost:8080/auth")
.check(status is 200))
  // The second scenario, called Account, has only one step:
// - perform an HTTP GET
val scn2 = scenario("Account")
.exec(http("Account")
.get("http://localhost:8080/auth/realms/master/account")
.check(status is 200))
// Run both scenarios:
// - first scenario with 100 parallel users, starting all at the same time
// - second scenario with 50 parallel users, starting all at the same time
setUp(
scn.inject(atOnceUsers(100)),
scn2.inject(atOnceUsers(50))
)
}
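// Illustrative sketch (not part of the original file): the same kind of scenario with a gradual
// ramp instead of injecting every user at once. This assumes the Gatling 3.x injection DSL
// (rampUsers(...).during(...)); the 100 users and 30-second window are arbitrary example values.
class SimpleExample2Ramped extends Simulation {
  import scala.concurrent.duration._
  val rampedScn = scenario("Simple ramped")
    .exec(http("Home")
      .get("http://localhost:8080")
      .check(status is 200))
  setUp(
    rampedScn.inject(rampUsers(100).during(30.seconds))
  )
}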
|
thomasdarimont/keycloak
|
testsuite/performance/tests/src/test/scala/examples/SimpleExample2.scala
|
Scala
|
apache-2.0
| 1,132
|
package ch.ethz.inf.da.tipstersearch
import java.io.{File, FileInputStream, InputStream, PrintWriter}
import ch.ethz.inf.da.tipstersearch.io.{QueryReader, RelevanceReader, ZipIterator}
import ch.ethz.inf.da.tipstersearch.scoring.{RelevanceModel, TfidfModel, LanguageModel}
import ch.ethz.inf.da.tipstersearch.util.Stopwatch
import ch.ethz.inf.da.tipstersearch.metrics.PrecisionRecall
/**
* Defines the command line options
*/
case class Config(
n: Int = 100,
tipsterDirectory: String = "dataset/tipster",
topicsFile: String = "dataset/topics",
qrelsFile: String = "dataset/qrels",
model: String = "tfidf"
)
/**
* Main application object, execution starts here
*/
object Main {
/**
* Entry point of the application
* This parses the command line options and executes the run method
*
* @param args The command line arguments
*/
def main(args:Array[String]) {
val parser = new scopt.OptionParser[Config]("tipstersearch") {
head("TipsterSearch", "0.1")
opt[Int]('n', "n") action { (x, c) => c.copy(n = x) } text("The number of results to return per query (default: 100)")
opt[String]('d', "tipsterDirectory") action { (x, c) => c.copy(tipsterDirectory = x) } text("The directory where the tipster zips are placed (default: 'dataset/tipster')")
opt[String]('t', "topicsFile") action { (x, c) => c.copy(topicsFile = x) } text("The topics file (default: 'dataset/topics')")
opt[String]('q', "qrelsFile") action { (x, c) => c.copy(qrelsFile = x) } text("The qrels file (default: 'dataset/qrels')")
opt[String]('m', "model") action { (x, c) => c.copy(model = x) } validate {
x => if(x == "tfidf" || x == "language") success else failure("Value <model> must be either 'tfidf' or 'language'")
} text("The model to use, valid values: [language|tfidf] (default: 'tfidf')")
}
parser.parse(args, Config()) map (run)
}
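    /**
     * Illustrative sketch (not part of the original file): programmatic equivalent of a
     * command-line run such as `-n 50 -m language`; the argument values are arbitrary examples.
     */
    def exampleInvocation(): Unit =
        main(Array("-n", "50", "-m", "language"))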
/**
* Runs the application with the options specified in the config.
*
* @param config the configuration to use
*/
def run(config:Config) {
// Start a timer, so we will know how much time has passed at the end
val stopwatch = new Stopwatch()
// Read queries and binary relevance truth values from files
val queries:List[Query] = QueryReader.read(config.topicsFile)
RelevanceReader.read(config.qrelsFile, queries)
// Collect statistics about the document collection
println("Computing document collection statistics")
val cs:CollectionStatistics = new CollectionStatistics()
cs.compute(documentIterator(config.tipsterDirectory))
        // Set up the relevance model to use, either TfidfModel or LanguageModel
        val model: RelevanceModel =
            if (config.model == "tfidf") {
                println("Using tfidf model")
                new TfidfModel(cs)
            } else {
                println("Using language model")
                new LanguageModel(cs)
            }
// Create the search engine with the chosen relevance model
val searchEngine:SearchEngine = new SearchEngine(model)
// Run the search, this will take a long time...
println("Running search")
searchEngine.search(queries, documentIterator(config.tipsterDirectory), config.n)
// After the search is complete, open the output file for the rankings
writeResultsToFile(queries, config)
// Display the search performance
displayPerformance(queries)
// Display the total time spent
println("Total time: " + stopwatch)
}
/**
* Returns an iterator over the tipster documents found in given directory
*
* @param directory the directory to search in
* @return an iterator over all documents
*/
def documentIterator(directory:String) : Iterator[Document] = {
new File(directory).listFiles.iterator
.filter(f => f.getName.endsWith(".zip"))
.flatMap(f =>
new ZipIterator(new FileInputStream(f.getAbsolutePath)).map{
case (name:String, is:InputStream) => new Document(is)
}
)
}
/**
* Writes the results of the search to an output file
*
* @param queries the list of queries to write the results for
* @param config the config which contains the model which determines the filename
*/
def writeResultsToFile(queries:List[Query], config:Config) {
        // Open a writer to an appropriate output file
        val outputFile: File =
            if (config.model == "tfidf") new File("ranking-t-rolf-jagerman.run")
            else new File("ranking-l-rolf-jagerman.run")
val output = new PrintWriter(outputFile)
// Write results to file
for(query <- queries) {
var count = 0
for(result <- query.results.ordered) {
count += 1
output.println(query.id + " " + count + " " + result.id.replaceAll("[^a-zA-Z0-9]+", ""))
}
}
// Close output
output.flush()
output.close()
}
/**
* Displays the search performance over the given list of queries
*
* @param queries the list of queries to perform metric over
*/
def displayPerformance(queries:List[Query]) {
var MAP:Double = 0.0
for( query <- queries ) {
val pr = new PrecisionRecall(query)
            MAP += pr.averagePrecision // accumulate per-query average precision for the mean (MAP)
println(query.id + " ('" + query + "')")
println(" Precision: %.3f".format(pr.precision))
println(" Recall: %.3f".format(pr.recall))
println(" Avg Precision: %.3f".format(pr.averagePrecision))
}
// Compute and display the global metric (MAP)
MAP /= queries.size.toDouble
println("MAP: %.3f".format(MAP))
}
}
|
rjagerman/TipsterSearch
|
src/main/scala/Main.scala
|
Scala
|
mit
| 6,256
|
/*
/*
* Copyright 2014 DataGenerator Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finra.datagenerator
import java.io.InputStream
import java.util
import org.finra.datagenerator.distributor.SearchDistributor
import org.finra.datagenerator.engine.{Engine, Frontier}
import scala.io
/**
* Engine Implementation for generating data
*
* Created by Brijesh on 5/26/2015.
*/
class RandomNumberEngine extends Engine {
var totalCount: Int = 0
var frontierList = new util.LinkedList[Frontier]()
/**
   * Loops from 0 to the number of splits, creating a Frontier instance on each iteration and
   * adding it to the frontier list; then calls distribute, which distributes the data to Spark
   * via map and reduce.
*
* @param distributor SearchDistributor
*/
def process(distributor: SearchDistributor): Unit = {
for (i <- 0 to RandomNumberEngine.numSplit) {
val frontierImplementation = new RandomNumberFrontier
frontierList.add(frontierImplementation)
}
distributor.distribute(frontierList)
}
/**
   * Reads two lines from the model input stream and stores them as the total count and the
   * number of splits.
*
* @param inputFileStream the model input stream
*/
def setModelByInputFileStream(inputFileStream : InputStream) : Unit = {
val fileLines = io.Source.fromInputStream(inputFileStream).getLines()
    try {
      totalCount = fileLines.next().toInt
      RandomNumberEngine.numSplit = fileLines.next().toInt
    } catch {
case e: NumberFormatException => throw new RuntimeException("File should have two lines, one int in each.")
}
/*
try { (totalCount, RandomNumberEngine.numSplit) ;
(fileLines.next().toInt, fileLines.next().toInt)
} catch {
case e: NumberFormatException => throw new RuntimeException("File should have two lines, one int in each.")
}
*/
RandomNumberEngine.numberInEachFrontier = totalCount / RandomNumberEngine.numSplit
}
/**
* Set the model with a string
*
* @param model the model text
*/
def setModelByText(model: String) : Unit = {
// TODO set model with a string
???
}
/**
* bootstrapMin setter
*
* @param min set the desired bootstrap min
* @return this
*/
def setBootstrapMin(min: Int) : Engine = {
???
this
}
}
object RandomNumberEngine {
//Declare static variable in Object RNEngine
var numberInEachFrontier: Int = 0
var numSplit: Int = 0
}
*/
|
shraddha-patel/DGWithSpark
|
dg-spark/src/main/code/org/finra/datagenerator/RandomNumberEngine.scala
|
Scala
|
apache-2.0
| 3,001
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.api.python.PythonEvalType
import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules._
import org.apache.spark.sql.types.{IntegerType, StringType}
import org.apache.spark.unsafe.types.CalendarInterval
class FilterPushdownSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("Subqueries", Once,
EliminateSubqueryAliases) ::
Batch("Filter Pushdown", FixedPoint(10),
CombineFilters,
PushPredicateThroughNonJoin,
BooleanSimplification,
PushPredicateThroughJoin,
CollapseProject) ::
Batch("Push extra predicate through join", FixedPoint(10),
PushExtraPredicateThroughJoin,
PushDownPredicates) :: Nil
}
val attrA = 'a.int
val attrB = 'b.int
val attrC = 'c.int
val attrD = 'd.int
val testRelation = LocalRelation(attrA, attrB, attrC)
val testRelation1 = LocalRelation(attrD)
val simpleDisjunctivePredicate =
("x.a".attr > 3) && ("y.a".attr > 13) || ("x.a".attr > 1) && ("y.a".attr > 11)
val expectedPredicatePushDownResult = {
val left = testRelation.where(('a > 3 || 'a > 1)).subquery('x)
val right = testRelation.where('a > 13 || 'a > 11).subquery('y)
left.join(right, condition = Some("x.b".attr === "y.b".attr
&& (("x.a".attr > 3) && ("y.a".attr > 13) || ("x.a".attr > 1) && ("y.a".attr > 11)))).analyze
}
// This test already passes.
test("eliminate subqueries") {
val originalQuery =
testRelation
.subquery('y)
.select('a)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.select('a.attr)
.analyze
comparePlans(optimized, correctAnswer)
}
// Everything after this line is unimplemented.
test("simple push down") {
val originalQuery =
testRelation
.select('a)
.where('a === 1)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.where('a === 1)
.select('a)
.analyze
comparePlans(optimized, correctAnswer)
}
test("combine redundant filters") {
val originalQuery =
testRelation
.where('a === 1 && 'b === 1)
.where('a === 1 && 'c === 1)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.where('a === 1 && 'b === 1 && 'c === 1)
.analyze
comparePlans(optimized, correctAnswer)
}
test("do not combine non-deterministic filters even if they are identical") {
val originalQuery =
testRelation
.where(Rand(0) > 0.1 && 'a === 1)
.where(Rand(0) > 0.1 && 'a === 1).analyze
val optimized = Optimize.execute(originalQuery)
comparePlans(optimized, originalQuery)
}
test("SPARK-16164: Filter pushdown should keep the ordering in the logical plan") {
val originalQuery =
testRelation
.where('a === 1)
.select('a, 'b)
.where('b === 1)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.where('a === 1 && 'b === 1)
.select('a, 'b)
.analyze
// We cannot use comparePlans here because it normalizes the plan.
assert(optimized == correctAnswer)
}
test("SPARK-16994: filter should not be pushed through limit") {
val originalQuery = testRelation.limit(10).where('a === 1).analyze
val optimized = Optimize.execute(originalQuery)
comparePlans(optimized, originalQuery)
}
test("can't push without rewrite") {
val originalQuery =
testRelation
.select('a + 'b as 'e)
.where('e === 1)
.analyze
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.where('a + 'b === 1)
.select('a + 'b as 'e)
.analyze
comparePlans(optimized, correctAnswer)
}
test("nondeterministic: can always push down filter through project with deterministic field") {
val originalQuery = testRelation
.select('a)
.where(Rand(10) > 5 || 'a > 5)
.analyze
val optimized = Optimize.execute(originalQuery)
val correctAnswer = testRelation
.where(Rand(10) > 5 || 'a > 5)
.select('a)
.analyze
comparePlans(optimized, correctAnswer)
}
test("nondeterministic: can't push down filter through project with nondeterministic field") {
val originalQuery = testRelation
.select(Rand(10).as('rand), 'a)
.where('a > 5)
.analyze
val optimized = Optimize.execute(originalQuery)
comparePlans(optimized, originalQuery)
}
test("nondeterministic: can't push down filter through aggregate with nondeterministic field") {
val originalQuery = testRelation
.groupBy('a)('a, Rand(10).as('rand))
.where('a > 5)
.analyze
val optimized = Optimize.execute(originalQuery)
comparePlans(optimized, originalQuery)
}
test("nondeterministic: push down part of filter through aggregate with deterministic field") {
val originalQuery = testRelation
.groupBy('a)('a)
.where('a > 5 && Rand(10) > 5)
.analyze
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.where('a > 5)
.groupBy('a)('a)
.where(Rand(10) > 5)
.analyze
comparePlans(optimized, correctAnswer)
}
test("filters: combines filters") {
val originalQuery = testRelation
.select('a)
.where('a === 1)
.where('a === 2)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer =
testRelation
.where('a === 1 && 'a === 2)
.select('a).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push to either side") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y)
.where("x.b".attr === 1)
.where("y.b".attr === 2)
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 1)
val right = testRelation.where('b === 2)
val correctAnswer =
left.join(right).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push to one side") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y)
.where("x.b".attr === 1)
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 1)
val right = testRelation
val correctAnswer =
left.join(right).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: do not push down non-deterministic filters into join condition") {
val x = testRelation.subquery('x)
val y = testRelation1.subquery('y)
val originalQuery = x.join(y).where(Rand(10) > 5.0).analyze
val optimized = Optimize.execute(originalQuery)
comparePlans(optimized, originalQuery)
}
test("joins: push to one side after transformCondition") {
val x = testRelation.subquery('x)
val y = testRelation1.subquery('y)
val originalQuery = {
x.join(y)
.where(("x.a".attr === 1 && "y.d".attr === "x.b".attr) ||
("x.a".attr === 1 && "y.d".attr === "x.c".attr))
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('a === 1)
val right = testRelation1
val correctAnswer =
left.join(right, condition = Some("d".attr === "b".attr || "d".attr === "c".attr)).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: rewrite filter to push to either side") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y)
.where("x.b".attr === 1 && "y.b".attr === 2)
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 1)
val right = testRelation.where('b === 2)
val correctAnswer =
left.join(right).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down left semi join") {
val x = testRelation.subquery('x)
val y = testRelation1.subquery('y)
val originalQuery = {
x.join(y, LeftSemi, Option("x.a".attr === "y.d".attr && "x.b".attr >= 1 && "y.d".attr >= 2))
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b >= 1)
val right = testRelation1.where('d >= 2)
val correctAnswer =
left.join(right, LeftSemi, Option("a".attr === "d".attr)).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down left outer join #1") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y, LeftOuter)
.where("x.b".attr === 1 && "y.b".attr === 2)
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 1)
val correctAnswer =
left.join(y, LeftOuter).where("y.b".attr === 2).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down right outer join #1") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y, RightOuter)
.where("x.b".attr === 1 && "y.b".attr === 2)
}
val optimized = Optimize.execute(originalQuery.analyze)
val right = testRelation.where('b === 2).subquery('d)
val correctAnswer =
x.join(right, RightOuter).where("x.b".attr === 1).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down left outer join #2") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y, LeftOuter, Some("x.b".attr === 1))
.where("x.b".attr === 2 && "y.b".attr === 2)
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 2).subquery('d)
val correctAnswer =
left.join(y, LeftOuter, Some("d.b".attr === 1)).where("y.b".attr === 2).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down right outer join #2") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y, RightOuter, Some("y.b".attr === 1))
.where("x.b".attr === 2 && "y.b".attr === 2)
}
val optimized = Optimize.execute(originalQuery.analyze)
val right = testRelation.where('b === 2).subquery('d)
val correctAnswer =
x.join(right, RightOuter, Some("d.b".attr === 1)).where("x.b".attr === 2).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down left outer join #3") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y, LeftOuter, Some("y.b".attr === 1))
.where("x.b".attr === 2 && "y.b".attr === 2)
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 2).subquery('l)
val right = testRelation.where('b === 1).subquery('r)
val correctAnswer =
left.join(right, LeftOuter).where("r.b".attr === 2).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down right outer join #3") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y, RightOuter, Some("y.b".attr === 1))
.where("x.b".attr === 2 && "y.b".attr === 2)
}
val optimized = Optimize.execute(originalQuery.analyze)
val right = testRelation.where('b === 2).subquery('r)
val correctAnswer =
x.join(right, RightOuter, Some("r.b".attr === 1)).where("x.b".attr === 2).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down left outer join #4") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y, LeftOuter, Some("y.b".attr === 1))
.where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === "y.c".attr)
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 2).subquery('l)
val right = testRelation.where('b === 1).subquery('r)
val correctAnswer =
left.join(right, LeftOuter).where("r.b".attr === 2 && "l.c".attr === "r.c".attr).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down right outer join #4") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y, RightOuter, Some("y.b".attr === 1))
.where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === "y.c".attr)
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.subquery('l)
val right = testRelation.where('b === 2).subquery('r)
val correctAnswer =
left.join(right, RightOuter, Some("r.b".attr === 1)).
where("l.b".attr === 2 && "l.c".attr === "r.c".attr).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down left outer join #5") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y, LeftOuter, Some("y.b".attr === 1 && "x.a".attr === 3))
.where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === "y.c".attr)
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('b === 2).subquery('l)
val right = testRelation.where('b === 1).subquery('r)
val correctAnswer =
left.join(right, LeftOuter, Some("l.a".attr===3)).
where("r.b".attr === 2 && "l.c".attr === "r.c".attr).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down right outer join #5") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y, RightOuter, Some("y.b".attr === 1 && "x.a".attr === 3))
.where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === "y.c".attr)
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('a === 3).subquery('l)
val right = testRelation.where('b === 2).subquery('r)
val correctAnswer =
left.join(right, RightOuter, Some("r.b".attr === 1)).
where("l.b".attr === 2 && "l.c".attr === "r.c".attr).analyze
comparePlans(optimized, correctAnswer)
}
test("joins: can't push down") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y, condition = Some("x.b".attr === "y.b".attr))
}
val optimized = Optimize.execute(originalQuery.analyze)
comparePlans(originalQuery.analyze, optimized)
}
test("joins: conjunctive predicates") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y)
.where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) && ("y.a".attr === 1))
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('a === 1).subquery('x)
val right = testRelation.where('a === 1).subquery('y)
val correctAnswer =
left.join(right, condition = Some("x.b".attr === "y.b".attr))
.analyze
comparePlans(optimized, correctAnswer)
}
test("joins: conjunctive predicates #2") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = {
x.join(y)
.where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1))
}
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('a === 1).subquery('x)
val right = testRelation.subquery('y)
val correctAnswer =
left.join(right, condition = Some("x.b".attr === "y.b".attr))
.analyze
comparePlans(optimized, correctAnswer)
}
test("joins: conjunctive predicates #3") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val z = testRelation.subquery('z)
val originalQuery = {
z.join(x.join(y))
.where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) &&
("z.a".attr >= 3) && ("z.a".attr === "x.b".attr))
}
val optimized = Optimize.execute(originalQuery.analyze)
val lleft = testRelation.where('a >= 3).subquery('z)
val left = testRelation.where('a === 1).subquery('x)
val right = testRelation.subquery('y)
val correctAnswer =
lleft.join(
left.join(right, condition = Some("x.b".attr === "y.b".attr)),
condition = Some("z.a".attr === "x.b".attr))
.analyze
comparePlans(optimized, correctAnswer)
}
test("joins: push down where clause into left anti join") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery =
x.join(y, LeftAnti, Some("x.b".attr === "y.b".attr))
.where("x.a".attr > 10)
.analyze
val optimized = Optimize.execute(originalQuery)
val correctAnswer =
x.where("x.a".attr > 10)
.join(y, LeftAnti, Some("x.b".attr === "y.b".attr))
.analyze
comparePlans(optimized, correctAnswer)
}
test("joins: only push down join conditions to the right of a left anti join") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery =
x.join(y,
LeftAnti,
Some("x.b".attr === "y.b".attr && "y.a".attr > 10 && "x.a".attr > 10)).analyze
val optimized = Optimize.execute(originalQuery)
val correctAnswer =
x.join(
y.where("y.a".attr > 10),
LeftAnti,
Some("x.b".attr === "y.b".attr && "x.a".attr > 10))
.analyze
comparePlans(optimized, correctAnswer)
}
test("joins: only push down join conditions to the right of an existence join") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val fillerVal = 'val.boolean
val originalQuery =
x.join(y,
ExistenceJoin(fillerVal),
Some("x.a".attr > 1 && "y.b".attr > 2)).analyze
val optimized = Optimize.execute(originalQuery)
val correctAnswer =
x.join(
y.where("y.b".attr > 2),
ExistenceJoin(fillerVal),
Some("x.a".attr > 1))
.analyze
comparePlans(optimized, correctAnswer)
}
val testRelationWithArrayType = LocalRelation('a.int, 'b.int, 'c_arr.array(IntegerType))
test("generate: predicate referenced no generated column") {
val originalQuery = {
testRelationWithArrayType
.generate(Explode('c_arr), alias = Some("arr"))
.where(('b >= 5) && ('a > 6))
}
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = {
testRelationWithArrayType
.where(('b >= 5) && ('a > 6))
.generate(Explode('c_arr), alias = Some("arr")).analyze
}
comparePlans(optimized, correctAnswer)
}
test("generate: non-deterministic predicate referenced no generated column") {
val originalQuery = {
testRelationWithArrayType
.generate(Explode('c_arr), alias = Some("arr"))
.where(('b >= 5) && ('a + Rand(10).as("rnd") > 6) && ('col > 6))
}
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = {
testRelationWithArrayType
.where('b >= 5)
.generate(Explode('c_arr), alias = Some("arr"))
.where('a + Rand(10).as("rnd") > 6 && 'col > 6)
.analyze
}
comparePlans(optimized, correctAnswer)
}
test("generate: part of conjuncts referenced generated column") {
val generator = Explode('c_arr)
val originalQuery = {
testRelationWithArrayType
.generate(generator, alias = Some("arr"), outputNames = Seq("c"))
.where(('b >= 5) && ('c > 6))
}
val optimized = Optimize.execute(originalQuery.analyze)
val referenceResult = {
testRelationWithArrayType
.where('b >= 5)
.generate(generator, alias = Some("arr"), outputNames = Seq("c"))
.where('c > 6).analyze
}
// Since newly generated columns get different ids every time the plan is analyzed,
// e.g. comparePlans(originalQuery.analyze, originalQuery.analyze) fails.
// So we check the operators manually here.
// Filter("c" > 6)
assertResult(classOf[Filter])(optimized.getClass)
assertResult(1)(optimized.asInstanceOf[Filter].condition.references.size)
assertResult("c") {
optimized.asInstanceOf[Filter].condition.references.toSeq(0).name
}
// the rest part
comparePlans(optimized.children(0), referenceResult.children(0))
}
test("generate: all conjuncts referenced generated column") {
val originalQuery = {
testRelationWithArrayType
.generate(Explode('c_arr), alias = Some("arr"))
.where(('col > 6) || ('b > 5)).analyze
}
val optimized = Optimize.execute(originalQuery)
comparePlans(optimized, originalQuery)
}
test("aggregate: push down filter when filter on group by expression") {
val originalQuery = testRelation
.groupBy('a)('a, count('b) as 'c)
.select('a, 'c)
.where('a === 2)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.where('a === 2)
.groupBy('a)('a, count('b) as 'c)
.analyze
comparePlans(optimized, correctAnswer)
}
test("aggregate: don't push down filter when filter not on group by expression") {
val originalQuery = testRelation
.select('a, 'b)
.groupBy('a)('a, count('b) as 'c)
.where('c === 2L)
val optimized = Optimize.execute(originalQuery.analyze)
comparePlans(optimized, originalQuery.analyze)
}
test("aggregate: push down filters partially which are subset of group by expressions") {
val originalQuery = testRelation
.select('a, 'b)
.groupBy('a)('a, count('b) as 'c)
.where('c === 2L && 'a === 3)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.where('a === 3)
.select('a, 'b)
.groupBy('a)('a, count('b) as 'c)
.where('c === 2L)
.analyze
comparePlans(optimized, correctAnswer)
}
test("aggregate: push down filters with alias") {
val originalQuery = testRelation
.select('a, 'b)
.groupBy('a)(('a + 1) as 'aa, count('b) as 'c)
.where(('c === 2L || 'aa > 4) && 'aa < 3)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.where('a + 1 < 3)
.select('a, 'b)
.groupBy('a)(('a + 1) as 'aa, count('b) as 'c)
.where('c === 2L || 'aa > 4)
.analyze
comparePlans(optimized, correctAnswer)
}
test("aggregate: push down filters with literal") {
val originalQuery = testRelation
.select('a, 'b)
.groupBy('a)('a, count('b) as 'c, "s" as 'd)
.where('c === 2L && 'd === "s")
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.where("s" === "s")
.select('a, 'b)
.groupBy('a)('a, count('b) as 'c, "s" as 'd)
.where('c === 2L)
.analyze
comparePlans(optimized, correctAnswer)
}
test("aggregate: don't push down filters that are nondeterministic") {
val originalQuery = testRelation
.select('a, 'b)
.groupBy('a)('a + Rand(10) as 'aa, count('b) as 'c, Rand(11).as("rnd"))
.where('c === 2L && 'aa + Rand(10).as("rnd") === 3 && 'rnd === 5)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.select('a, 'b)
.groupBy('a)('a + Rand(10) as 'aa, count('b) as 'c, Rand(11).as("rnd"))
.where('c === 2L && 'aa + Rand(10).as("rnd") === 3 && 'rnd === 5)
.analyze
comparePlans(optimized, correctAnswer)
}
test("SPARK-17712: aggregate: don't push down filters that are data-independent") {
val originalQuery = LocalRelation.apply(testRelation.output, Seq.empty)
.select('a, 'b)
.groupBy('a)(count('a))
.where(false)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = testRelation
.select('a, 'b)
.groupBy('a)(count('a))
.where(false)
.analyze
comparePlans(optimized, correctAnswer)
}
test("aggregate: don't push filters if the aggregate has no grouping expressions") {
val originalQuery = LocalRelation.apply(testRelation.output, Seq.empty)
.select('a, 'b)
.groupBy()(count(1))
.where(false)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = originalQuery.analyze
comparePlans(optimized, correctAnswer)
}
test("SPARK-32940: aggregate: push filters through first, last and collect") {
Seq(
first(_: Expression),
last(_: Expression),
collectList(_: Expression),
collectSet(_: Expression)
).foreach { agg =>
val originalQuery = testRelation
.groupBy('a)(agg('b))
.where('a > 42)
.analyze
val optimized = Optimize.execute(originalQuery)
val correctAnswer = testRelation
.where('a > 42)
.groupBy('a)(agg('b))
.analyze
comparePlans(optimized, correctAnswer)
}
}
test("union") {
val testRelation2 = LocalRelation('d.int, 'e.int, 'f.int)
val originalQuery = Union(Seq(testRelation, testRelation2))
.where('a === 2L && 'b + Rand(10).as("rnd") === 3 && 'c > 5L)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = Union(Seq(
testRelation.where('a === 2L && 'c > 5L),
testRelation2.where('d === 2L && 'f > 5L)))
.where('b + Rand(10).as("rnd") === 3)
.analyze
comparePlans(optimized, correctAnswer)
}
test("expand") {
val agg = testRelation
.groupBy(Cube(Seq(Seq('a), Seq('b))))('a, 'b, sum('c))
.analyze
.asInstanceOf[Aggregate]
val a = agg.output(0)
val b = agg.output(1)
val query = agg.where(a > 1 && b > 2)
val optimized = Optimize.execute(query)
val correctedAnswer = agg.copy(child = agg.child.where(a > 1 && b > 2)).analyze
comparePlans(optimized, correctedAnswer)
}
test("predicate subquery: push down simple") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val z = LocalRelation('a.int, 'b.int, 'c.int).subquery('z)
val query = x
.join(y, Inner, Option("x.a".attr === "y.a".attr))
.where(Exists(z.where("x.a".attr === "z.a".attr)))
.analyze
val answer = x
.where(Exists(z.where("x.a".attr === "z.a".attr)))
.join(y, Inner, Option("x.a".attr === "y.a".attr))
.analyze
val optimized = Optimize.execute(Optimize.execute(query))
comparePlans(optimized, answer)
}
test("predicate subquery: push down complex") {
val w = testRelation.subquery('w)
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val z = LocalRelation('a.int, 'b.int, 'c.int).subquery('z)
val query = w
.join(x, Inner, Option("w.a".attr === "x.a".attr))
.join(y, LeftOuter, Option("x.a".attr === "y.a".attr))
.where(Exists(z.where("w.a".attr === "z.a".attr)))
.analyze
val answer = w
.where(Exists(z.where("w.a".attr === "z.a".attr)))
.join(x, Inner, Option("w.a".attr === "x.a".attr))
.join(y, LeftOuter, Option("x.a".attr === "y.a".attr))
.analyze
val optimized = Optimize.execute(Optimize.execute(query))
comparePlans(optimized, answer)
}
test("SPARK-20094: don't push predicate with IN subquery into join condition") {
val x = testRelation.subquery('x)
val z = testRelation.subquery('z)
val w = testRelation1.subquery('w)
val queryPlan = x
.join(z)
.where(("x.b".attr === "z.b".attr) &&
("x.a".attr > 1 || "z.c".attr.in(ListQuery(w.select("w.d".attr)))))
.analyze
val expectedPlan = x
.join(z, Inner, Some("x.b".attr === "z.b".attr))
.where("x.a".attr > 1 || "z.c".attr.in(ListQuery(w.select("w.d".attr))))
.analyze
val optimized = Optimize.execute(queryPlan)
comparePlans(optimized, expectedPlan)
}
test("Window: predicate push down -- basic") {
val winExpr = windowExpr(count('b), windowSpec('a :: Nil, 'b.asc :: Nil, UnspecifiedFrame))
val originalQuery = testRelation.select('a, 'b, 'c, winExpr.as('window)).where('a > 1)
val correctAnswer = testRelation
.where('a > 1).select('a, 'b, 'c)
.window(winExpr.as('window) :: Nil, 'a :: Nil, 'b.asc :: Nil)
.select('a, 'b, 'c, 'window).analyze
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer)
}
test("Window: predicate push down -- predicates with compound predicate using only one column") {
val winExpr =
windowExpr(count('b), windowSpec('a.attr :: 'b.attr :: Nil, 'b.asc :: Nil, UnspecifiedFrame))
val originalQuery = testRelation.select('a, 'b, 'c, winExpr.as('window)).where('a * 3 > 15)
val correctAnswer = testRelation
.where('a * 3 > 15).select('a, 'b, 'c)
.window(winExpr.as('window) :: Nil, 'a.attr :: 'b.attr :: Nil, 'b.asc :: Nil)
.select('a, 'b, 'c, 'window).analyze
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer)
}
test("Window: predicate push down -- multi window expressions with the same window spec") {
val winSpec = windowSpec('a.attr :: 'b.attr :: Nil, 'b.asc :: Nil, UnspecifiedFrame)
val winExpr1 = windowExpr(count('b), winSpec)
val winExpr2 = windowExpr(sum('b), winSpec)
val originalQuery = testRelation
.select('a, 'b, 'c, winExpr1.as('window1), winExpr2.as('window2)).where('a > 1)
val correctAnswer = testRelation
.where('a > 1).select('a, 'b, 'c)
.window(winExpr1.as('window1) :: winExpr2.as('window2) :: Nil,
'a.attr :: 'b.attr :: Nil, 'b.asc :: Nil)
.select('a, 'b, 'c, 'window1, 'window2).analyze
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer)
}
test("Window: predicate push down -- multi window specification - 1") {
// order by clauses are different between winSpec1 and winSpec2
val winSpec1 = windowSpec('a.attr :: 'b.attr :: Nil, 'b.asc :: Nil, UnspecifiedFrame)
val winExpr1 = windowExpr(count('b), winSpec1)
val winSpec2 = windowSpec('a.attr :: 'b.attr :: Nil, 'a.asc :: Nil, UnspecifiedFrame)
val winExpr2 = windowExpr(count('b), winSpec2)
val originalQuery = testRelation
.select('a, 'b, 'c, winExpr1.as('window1), winExpr2.as('window2)).where('a > 1)
val correctAnswer1 = testRelation
.where('a > 1).select('a, 'b, 'c)
.window(winExpr1.as('window1) :: Nil, 'a.attr :: 'b.attr :: Nil, 'b.asc :: Nil)
.window(winExpr2.as('window2) :: Nil, 'a.attr :: 'b.attr :: Nil, 'a.asc :: Nil)
.select('a, 'b, 'c, 'window1, 'window2).analyze
val correctAnswer2 = testRelation
.where('a > 1).select('a, 'b, 'c)
.window(winExpr2.as('window2) :: Nil, 'a.attr :: 'b.attr :: Nil, 'a.asc :: Nil)
.window(winExpr1.as('window1) :: Nil, 'a.attr :: 'b.attr :: Nil, 'b.asc :: Nil)
.select('a, 'b, 'c, 'window1, 'window2).analyze
// When the Analyzer adds Window operators after grouping the extracted Window Expressions
// based on their Partition and Order Specs, the order of the Window operators is
// non-deterministic. Thus, there are two correct plans.
val optimizedQuery = Optimize.execute(originalQuery.analyze)
try {
comparePlans(optimizedQuery, correctAnswer1)
} catch {
case ae: Throwable => comparePlans(optimizedQuery, correctAnswer2)
}
}
test("Window: predicate push down -- multi window specification - 2") {
// partitioning clauses are different between winSpec1 and winSpec2
val winSpec1 = windowSpec('a.attr :: Nil, 'b.asc :: Nil, UnspecifiedFrame)
val winExpr1 = windowExpr(count('b), winSpec1)
val winSpec2 = windowSpec('b.attr :: Nil, 'b.asc :: Nil, UnspecifiedFrame)
val winExpr2 = windowExpr(count('a), winSpec2)
val originalQuery = testRelation
.select('a, winExpr1.as('window1), 'b, 'c, winExpr2.as('window2)).where('b > 1)
val correctAnswer1 = testRelation.select('a, 'b, 'c)
.window(winExpr1.as('window1) :: Nil, 'a.attr :: Nil, 'b.asc :: Nil)
.where('b > 1)
.window(winExpr2.as('window2) :: Nil, 'b.attr :: Nil, 'b.asc :: Nil)
.select('a, 'window1, 'b, 'c, 'window2).analyze
val correctAnswer2 = testRelation.select('a, 'b, 'c)
.window(winExpr2.as('window2) :: Nil, 'b.attr :: Nil, 'b.asc :: Nil)
.window(winExpr1.as('window1) :: Nil, 'a.attr :: Nil, 'b.asc :: Nil)
.where('b > 1)
.select('a, 'window1, 'b, 'c, 'window2).analyze
val optimizedQuery = Optimize.execute(originalQuery.analyze)
// When the Analyzer adds Window operators after grouping the extracted Window Expressions
// based on their Partition and Order Specs, the order of the Window operators is
// non-deterministic. Thus, there are two correct plans.
try {
comparePlans(optimizedQuery, correctAnswer1)
} catch {
case ae: Throwable => comparePlans(optimizedQuery, correctAnswer2)
}
}
test("Window: predicate push down -- predicates with multiple partitioning columns") {
val winExpr =
windowExpr(count('b), windowSpec('a.attr :: 'b.attr :: Nil, 'b.asc :: Nil, UnspecifiedFrame))
val originalQuery = testRelation.select('a, 'b, 'c, winExpr.as('window)).where('a + 'b > 1)
val correctAnswer = testRelation
.where('a + 'b > 1).select('a, 'b, 'c)
.window(winExpr.as('window) :: Nil, 'a.attr :: 'b.attr :: Nil, 'b.asc :: Nil)
.select('a, 'b, 'c, 'window).analyze
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer)
}
// complex predicates with the same references and the same expressions
// TODO: to enable this in the Analyzer, we need to convert the expression in the condition
// to the alias that is defined as the same expression
ignore("Window: predicate push down -- complex predicate with the same expressions") {
val winSpec = windowSpec(
partitionSpec = 'a.attr + 'b.attr :: Nil,
orderSpec = 'b.asc :: Nil,
UnspecifiedFrame)
val winExpr = windowExpr(count('b), winSpec)
val winSpecAnalyzed = windowSpec(
partitionSpec = '_w0.attr :: Nil,
orderSpec = 'b.asc :: Nil,
UnspecifiedFrame)
val winExprAnalyzed = windowExpr(count('b), winSpecAnalyzed)
val originalQuery = testRelation.select('a, 'b, 'c, winExpr.as('window)).where('a + 'b > 1)
val correctAnswer = testRelation
.where('a + 'b > 1).select('a, 'b, 'c, ('a + 'b).as("_w0"))
.window(winExprAnalyzed.as('window) :: Nil, '_w0 :: Nil, 'b.asc :: Nil)
.select('a, 'b, 'c, 'window).analyze
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer)
}
test("Window: no predicate push down -- predicates are not from partitioning keys") {
val winSpec = windowSpec(
partitionSpec = 'a.attr :: 'b.attr :: Nil,
orderSpec = 'b.asc :: Nil,
UnspecifiedFrame)
val winExpr = windowExpr(count('b), winSpec)
// No push down: the predicate is c > 1, but the partitioning key is (a, b).
val originalQuery = testRelation.select('a, 'b, 'c, winExpr.as('window)).where('c > 1)
val correctAnswer = testRelation.select('a, 'b, 'c)
.window(winExpr.as('window) :: Nil, 'a.attr :: 'b.attr :: Nil, 'b.asc :: Nil)
.where('c > 1).select('a, 'b, 'c, 'window).analyze
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer)
}
test("Window: no predicate push down -- partial compound partition key") {
val winSpec = windowSpec(
partitionSpec = 'a.attr + 'b.attr :: 'b.attr :: Nil,
orderSpec = 'b.asc :: Nil,
UnspecifiedFrame)
val winExpr = windowExpr(count('b), winSpec)
// No push down: the predicate is a > 1, but the partitioning key is (a + b, b)
val originalQuery = testRelation.select('a, 'b, 'c, winExpr.as('window)).where('a > 1)
val winSpecAnalyzed = windowSpec(
partitionSpec = '_w0.attr :: 'b.attr :: Nil,
orderSpec = 'b.asc :: Nil,
UnspecifiedFrame)
val winExprAnalyzed = windowExpr(count('b), winSpecAnalyzed)
val correctAnswer = testRelation.select('a, 'b, 'c, ('a + 'b).as("_w0"))
.window(winExprAnalyzed.as('window) :: Nil, '_w0 :: 'b.attr :: Nil, 'b.asc :: Nil)
.where('a > 1).select('a, 'b, 'c, 'window).analyze
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer)
}
test("Window: no predicate push down -- complex predicates containing non partitioning columns") {
val winSpec =
windowSpec(partitionSpec = 'b.attr :: Nil, orderSpec = 'b.asc :: Nil, UnspecifiedFrame)
val winExpr = windowExpr(count('b), winSpec)
// No push down: the predicate is a + b > 1, but the partitioning key is b.
val originalQuery = testRelation.select('a, 'b, 'c, winExpr.as('window)).where('a + 'b > 1)
val correctAnswer = testRelation
.select('a, 'b, 'c)
.window(winExpr.as('window) :: Nil, 'b.attr :: Nil, 'b.asc :: Nil)
.where('a + 'b > 1).select('a, 'b, 'c, 'window).analyze
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer)
}
// complex predicates with the same references but different expressions
test("Window: no predicate push down -- complex predicate with different expressions") {
val winSpec = windowSpec(
partitionSpec = 'a.attr + 'b.attr :: Nil,
orderSpec = 'b.asc :: Nil,
UnspecifiedFrame)
val winExpr = windowExpr(count('b), winSpec)
val winSpecAnalyzed = windowSpec(
partitionSpec = '_w0.attr :: Nil,
orderSpec = 'b.asc :: Nil,
UnspecifiedFrame)
val winExprAnalyzed = windowExpr(count('b), winSpecAnalyzed)
// No push down: the predicate is a - b > 1, but the partitioning key is a + b.
val originalQuery = testRelation.select('a, 'b, 'c, winExpr.as('window)).where('a - 'b > 1)
val correctAnswer = testRelation.select('a, 'b, 'c, ('a + 'b).as("_w0"))
.window(winExprAnalyzed.as('window) :: Nil, '_w0 :: Nil, 'b.asc :: Nil)
.where('a - 'b > 1).select('a, 'b, 'c, 'window).analyze
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer)
}
test("watermark pushdown: no pushdown on watermark attribute #1") {
val interval = new CalendarInterval(2, 2, 2000L)
val relation = LocalRelation(attrA, 'b.timestamp, attrC)
// Verify that all conditions except the one touching the watermark attribute
// are pushed below the watermark by the optimizer.
val originalQuery = EventTimeWatermark('b, interval, relation)
.where('a === 5 && 'b === new java.sql.Timestamp(0) && 'c === 5)
val correctAnswer = EventTimeWatermark(
'b, interval, relation.where('a === 5 && 'c === 5))
.where('b === new java.sql.Timestamp(0))
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer.analyze)
}
test("watermark pushdown: no pushdown for nondeterministic filter") {
val interval = new CalendarInterval(2, 2, 2000L)
val relation = LocalRelation(attrA, attrB, 'c.timestamp)
// Verify that only the deterministic condition not touching the watermark attribute
// is pushed below the watermark by the optimizer.
val originalQuery = EventTimeWatermark('c, interval, relation)
.where('a === 5 && 'b === Rand(10) && 'c === new java.sql.Timestamp(0))
val correctAnswer = EventTimeWatermark(
'c, interval, relation.where('a === 5))
.where('b === Rand(10) && 'c === new java.sql.Timestamp(0))
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer.analyze,
checkAnalysis = false)
}
test("watermark pushdown: full pushdown") {
val interval = new CalendarInterval(2, 2, 2000L)
val relation = LocalRelation(attrA, attrB, 'c.timestamp)
// Verify that all conditions are pushed below the watermark, since none of them
// touch the watermark attribute.
val originalQuery = EventTimeWatermark('c, interval, relation)
.where('a === 5 && 'b === 10)
val correctAnswer = EventTimeWatermark(
'c, interval, relation.where('a === 5 && 'b === 10))
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer.analyze,
checkAnalysis = false)
}
test("watermark pushdown: no pushdown on watermark attribute #2") {
val interval = new CalendarInterval(2, 2, 2000L)
val relation = LocalRelation('a.timestamp, attrB, attrC)
val originalQuery = EventTimeWatermark('a, interval, relation)
.where('a === new java.sql.Timestamp(0) && 'b === 10)
val correctAnswer = EventTimeWatermark(
'a, interval, relation.where('b === 10)).where('a === new java.sql.Timestamp(0))
comparePlans(Optimize.execute(originalQuery.analyze), correctAnswer.analyze,
checkAnalysis = false)
}
test("push down predicate through expand") {
val query =
Filter('a > 1,
Expand(
Seq(
Seq('a, 'b, 'c, Literal.create(null, StringType), 1),
Seq('a, 'b, 'c, 'a, 2)),
Seq('a, 'b, 'c),
testRelation)).analyze
val optimized = Optimize.execute(query)
val expected =
Expand(
Seq(
Seq('a, 'b, 'c, Literal.create(null, StringType), 1),
Seq('a, 'b, 'c, 'a, 2)),
Seq('a, 'b, 'c),
Filter('a > 1, testRelation)).analyze
comparePlans(optimized, expected)
}
test("SPARK-28345: PythonUDF predicate should be able to pushdown to join") {
val pythonUDFJoinCond = {
val pythonUDF = PythonUDF("pythonUDF", null,
IntegerType,
Seq(attrA),
PythonEvalType.SQL_BATCHED_UDF,
udfDeterministic = true)
pythonUDF === attrD
}
val query = testRelation.join(
testRelation1,
joinType = Cross).where(pythonUDFJoinCond)
val expected = testRelation.join(
testRelation1,
joinType = Cross,
condition = Some(pythonUDFJoinCond)).analyze
comparePlans(Optimize.execute(query.analyze), expected)
}
test("push down filter predicates through inner join") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery = x.join(y).where(("x.b".attr === "y.b".attr) && (simpleDisjunctivePredicate))
val optimized = Optimize.execute(originalQuery.analyze)
comparePlans(optimized, expectedPredicatePushDownResult)
}
test("push down join predicates through inner join") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery =
x.join(y, condition = Some(("x.b".attr === "y.b".attr) && (simpleDisjunctivePredicate)))
val optimized = Optimize.execute(originalQuery.analyze)
comparePlans(optimized, expectedPredicatePushDownResult)
}
test("push down complex predicates through inner join") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val joinCondition = (("x.b".attr === "y.b".attr)
&& ((("x.a".attr === 5) && ("y.a".attr >= 2) && ("y.a".attr <= 3))
|| (("x.a".attr === 2) && ("y.a".attr >= 1) && ("y.a".attr <= 14))
|| (("x.a".attr === 1) && ("y.a".attr >= 9) && ("y.a".attr <= 27))))
val originalQuery = x.join(y, condition = Some(joinCondition))
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where(
('a === 5 || 'a === 2 || 'a === 1)).subquery('x)
val right = testRelation.where(
('a >= 2 && 'a <= 3) || ('a >= 1 && 'a <= 14) || ('a >= 9 && 'a <= 27)).subquery('y)
val correctAnswer = left.join(right, condition = Some(joinCondition)).analyze
comparePlans(optimized, correctAnswer)
}
test("push down predicates(with NOT predicate) through inner join") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery =
x.join(y, condition = Some(("x.b".attr === "y.b".attr)
&& Not(("x.a".attr > 3)
&& ("x.a".attr < 2 || ("y.a".attr > 13)) || ("x.a".attr > 1) && ("y.a".attr > 11))))
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('a <= 3 || 'a >= 2).subquery('x)
val right = testRelation.subquery('y)
val correctAnswer =
left.join(right, condition = Some("x.b".attr === "y.b".attr
&& (("x.a".attr <= 3) || (("x.a".attr >= 2) && ("y.a".attr <= 13)))
&& (("x.a".attr <= 1) || ("y.a".attr <= 11))))
.analyze
comparePlans(optimized, correctAnswer)
}
test("push down predicates through left join") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery =
x.join(y, joinType = LeftOuter, condition = Some(("x.b".attr === "y.b".attr)
&& simpleDisjunctivePredicate))
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.subquery('x)
val right = testRelation.where('a > 13 || 'a > 11).subquery('y)
val correctAnswer =
left.join(right, joinType = LeftOuter, condition = Some("x.b".attr === "y.b".attr
&& (("x.a".attr > 3) && ("y.a".attr > 13) || ("x.a".attr > 1) && ("y.a".attr > 11))))
.analyze
comparePlans(optimized, correctAnswer)
}
test("push down predicates through right join") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery =
x.join(y, joinType = RightOuter, condition = Some(("x.b".attr === "y.b".attr)
&& simpleDisjunctivePredicate))
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.where('a > 3 || 'a > 1).subquery('x)
val right = testRelation.subquery('y)
val correctAnswer =
left.join(right, joinType = RightOuter, condition = Some("x.b".attr === "y.b".attr
&& (("x.a".attr > 3) && ("y.a".attr > 13) || ("x.a".attr > 1) && ("y.a".attr > 11))))
.analyze
comparePlans(optimized, correctAnswer)
}
test("SPARK-32302: avoid generating too many predicates") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val originalQuery =
x.join(y, condition = Some(("x.b".attr === "y.b".attr) && ((("x.a".attr > 3) &&
("x.a".attr < 13) && ("y.c".attr <= 5)) || (("y.a".attr > 2) && ("y.c".attr < 1)))))
val optimized = Optimize.execute(originalQuery.analyze)
val left = testRelation.subquery('x)
val right = testRelation.where('c <= 5 || ('a > 2 && 'c < 1)).subquery('y)
val correctAnswer = left.join(right, condition = Some("x.b".attr === "y.b".attr &&
((("x.a".attr > 3) && ("x.a".attr < 13) && ("y.c".attr <= 5)) ||
(("y.a".attr > 2) && ("y.c".attr < 1))))).analyze
comparePlans(optimized, correctAnswer)
}
test("push down predicate through multiple joins") {
val x = testRelation.subquery('x)
val y = testRelation.subquery('y)
val z = testRelation.subquery('z)
val xJoinY = x.join(y, condition = Some("x.b".attr === "y.b".attr))
val originalQuery = z.join(xJoinY,
condition = Some("x.a".attr === "z.a".attr && simpleDisjunctivePredicate))
val optimized = Optimize.execute(originalQuery.analyze)
val left = x.where('a > 3 || 'a > 1)
val right = y.where('a > 13 || 'a > 11)
val correctAnswer = z.join(left.join(right,
condition = Some("x.b".attr === "y.b".attr && simpleDisjunctivePredicate)),
condition = Some("x.a".attr === "z.a".attr)).analyze
comparePlans(optimized, correctAnswer)
}
test("SPARK-37828: Push down filters through RebalancePartitions") {
val originalQuery = RebalancePartitions(Seq.empty, testRelation).where('a > 3)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = RebalancePartitions(Seq.empty, testRelation.where('a > 3)).analyze
comparePlans(optimized, correctAnswer)
}
}
|
ueshin/apache-spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
|
Scala
|
apache-2.0
| 49,425
|
package org.jetbrains.plugins.scala
package base
import com.intellij.testFramework.fixtures.{CodeInsightFixtureTestCase, CodeInsightTestFixture}
import org.jetbrains.plugins.scala.base.libraryLoaders.{JdkLoader, LibraryLoader, ScalaLibraryLoader}
import org.jetbrains.plugins.scala.debugger.DefaultScalaSdkOwner
/**
* User: Alexander Podkhalyuzin
* Date: 03.08.2009
*/
abstract class ScalaFixtureTestCase
extends CodeInsightFixtureTestCase with DefaultScalaSdkOwner {
protected val includeReflectLibrary: Boolean = false
override def getFixture: CodeInsightTestFixture = myFixture
override def librariesLoaders: Seq[LibraryLoader] = Seq(
ScalaLibraryLoader(includeReflectLibrary),
JdkLoader()
)
override protected def setUp(): Unit = {
super.setUp()
setUpLibraries()
}
override def tearDown(): Unit = {
tearDownLibraries()
super.tearDown()
}
}
|
loskutov/intellij-scala
|
test/org/jetbrains/plugins/scala/base/ScalaFixtureTestCase.scala
|
Scala
|
apache-2.0
| 899
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources.v2
import java.util.{ArrayList, List => JList}
import test.org.apache.spark.sql.sources.v2._
import org.apache.spark.SparkException
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.sources.{Filter, GreaterThan}
import org.apache.spark.sql.sources.v2.reader._
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types.StructType
class DataSourceV2Suite extends QueryTest with SharedSQLContext {
import testImplicits._
test("simplest implementation") {
Seq(classOf[SimpleDataSourceV2], classOf[JavaSimpleDataSourceV2]).foreach { cls =>
withClue(cls.getName) {
val df = spark.read.format(cls.getName).load()
checkAnswer(df, (0 until 10).map(i => Row(i, -i)))
checkAnswer(df.select('j), (0 until 10).map(i => Row(-i)))
checkAnswer(df.filter('i > 5), (6 until 10).map(i => Row(i, -i)))
}
}
}
test("advanced implementation") {
Seq(classOf[AdvancedDataSourceV2], classOf[JavaAdvancedDataSourceV2]).foreach { cls =>
withClue(cls.getName) {
val df = spark.read.format(cls.getName).load()
checkAnswer(df, (0 until 10).map(i => Row(i, -i)))
checkAnswer(df.select('j), (0 until 10).map(i => Row(-i)))
checkAnswer(df.filter('i > 3), (4 until 10).map(i => Row(i, -i)))
checkAnswer(df.select('j).filter('i > 6), (7 until 10).map(i => Row(-i)))
checkAnswer(df.select('i).filter('i > 10), Nil)
}
}
}
test("unsafe row implementation") {
Seq(classOf[UnsafeRowDataSourceV2], classOf[JavaUnsafeRowDataSourceV2]).foreach { cls =>
withClue(cls.getName) {
val df = spark.read.format(cls.getName).load()
checkAnswer(df, (0 until 10).map(i => Row(i, -i)))
checkAnswer(df.select('j), (0 until 10).map(i => Row(-i)))
checkAnswer(df.filter('i > 5), (6 until 10).map(i => Row(i, -i)))
}
}
}
test("schema required data source") {
Seq(classOf[SchemaRequiredDataSource], classOf[JavaSchemaRequiredDataSource]).foreach { cls =>
withClue(cls.getName) {
val e = intercept[AnalysisException](spark.read.format(cls.getName).load())
assert(e.message.contains("A schema needs to be specified"))
val schema = new StructType().add("i", "int").add("s", "string")
val df = spark.read.format(cls.getName).schema(schema).load()
assert(df.schema == schema)
assert(df.collect().isEmpty)
}
}
}
test("simple writable data source") {
// TODO: java implementation.
Seq(classOf[SimpleWritableDataSource]).foreach { cls =>
withTempPath { file =>
val path = file.getCanonicalPath
assert(spark.read.format(cls.getName).option("path", path).load().collect().isEmpty)
spark.range(10).select('id, -'id).write.format(cls.getName)
.option("path", path).save()
checkAnswer(
spark.read.format(cls.getName).option("path", path).load(),
spark.range(10).select('id, -'id))
// test with different save modes
spark.range(10).select('id, -'id).write.format(cls.getName)
.option("path", path).mode("append").save()
checkAnswer(
spark.read.format(cls.getName).option("path", path).load(),
spark.range(10).union(spark.range(10)).select('id, -'id))
spark.range(5).select('id, -'id).write.format(cls.getName)
.option("path", path).mode("overwrite").save()
checkAnswer(
spark.read.format(cls.getName).option("path", path).load(),
spark.range(5).select('id, -'id))
spark.range(5).select('id, -'id).write.format(cls.getName)
.option("path", path).mode("ignore").save()
checkAnswer(
spark.read.format(cls.getName).option("path", path).load(),
spark.range(5).select('id, -'id))
val e = intercept[Exception] {
spark.range(5).select('id, -'id).write.format(cls.getName)
.option("path", path).mode("error").save()
}
assert(e.getMessage.contains("data already exists"))
// test transaction
val failingUdf = org.apache.spark.sql.functions.udf {
var count = 0
(id: Long) => {
if (count > 5) {
throw new RuntimeException("testing error")
}
count += 1
id
}
}
// this input data will fail partway through the read.
val input = spark.range(10).select(failingUdf('id).as('i)).select('i, -'i)
val e2 = intercept[SparkException] {
input.write.format(cls.getName).option("path", path).mode("overwrite").save()
}
assert(e2.getMessage.contains("Writing job aborted"))
// make sure we don't have partial data.
assert(spark.read.format(cls.getName).option("path", path).load().collect().isEmpty)
// test internal row writer
spark.range(5).select('id, -'id).write.format(cls.getName)
.option("path", path).option("internal", "true").mode("overwrite").save()
checkAnswer(
spark.read.format(cls.getName).option("path", path).load(),
spark.range(5).select('id, -'id))
}
}
}
}
class SimpleDataSourceV2 extends DataSourceV2 with ReadSupport {
class Reader extends DataSourceV2Reader {
override def readSchema(): StructType = new StructType().add("i", "int").add("j", "int")
override def createReadTasks(): JList[ReadTask[Row]] = {
java.util.Arrays.asList(new SimpleReadTask(0, 5), new SimpleReadTask(5, 10))
}
}
override def createReader(options: DataSourceV2Options): DataSourceV2Reader = new Reader
}
class SimpleReadTask(start: Int, end: Int) extends ReadTask[Row] with DataReader[Row] {
private var current = start - 1
override def createReader(): DataReader[Row] = new SimpleReadTask(start, end)
override def next(): Boolean = {
current += 1
current < end
}
override def get(): Row = Row(current, -current)
override def close(): Unit = {}
}
class AdvancedDataSourceV2 extends DataSourceV2 with ReadSupport {
class Reader extends DataSourceV2Reader
with SupportsPushDownRequiredColumns with SupportsPushDownFilters {
var requiredSchema = new StructType().add("i", "int").add("j", "int")
var filters = Array.empty[Filter]
override def pruneColumns(requiredSchema: StructType): Unit = {
this.requiredSchema = requiredSchema
}
override def pushFilters(filters: Array[Filter]): Array[Filter] = {
this.filters = filters
Array.empty
}
override def pushedFilters(): Array[Filter] = filters
override def readSchema(): StructType = {
requiredSchema
}
override def createReadTasks(): JList[ReadTask[Row]] = {
val lowerBound = filters.collect {
case GreaterThan("i", v: Int) => v
}.headOption
val res = new ArrayList[ReadTask[Row]]
if (lowerBound.isEmpty) {
res.add(new AdvancedReadTask(0, 5, requiredSchema))
res.add(new AdvancedReadTask(5, 10, requiredSchema))
} else if (lowerBound.get < 4) {
res.add(new AdvancedReadTask(lowerBound.get + 1, 5, requiredSchema))
res.add(new AdvancedReadTask(5, 10, requiredSchema))
} else if (lowerBound.get < 9) {
res.add(new AdvancedReadTask(lowerBound.get + 1, 10, requiredSchema))
}
res
}
}
override def createReader(options: DataSourceV2Options): DataSourceV2Reader = new Reader
}
class AdvancedReadTask(start: Int, end: Int, requiredSchema: StructType)
extends ReadTask[Row] with DataReader[Row] {
private var current = start - 1
override def createReader(): DataReader[Row] = new AdvancedReadTask(start, end, requiredSchema)
override def close(): Unit = {}
override def next(): Boolean = {
current += 1
current < end
}
override def get(): Row = {
val values = requiredSchema.map(_.name).map {
case "i" => current
case "j" => -current
}
Row.fromSeq(values)
}
}
class UnsafeRowDataSourceV2 extends DataSourceV2 with ReadSupport {
class Reader extends DataSourceV2Reader with SupportsScanUnsafeRow {
override def readSchema(): StructType = new StructType().add("i", "int").add("j", "int")
override def createUnsafeRowReadTasks(): JList[ReadTask[UnsafeRow]] = {
java.util.Arrays.asList(new UnsafeRowReadTask(0, 5), new UnsafeRowReadTask(5, 10))
}
}
override def createReader(options: DataSourceV2Options): DataSourceV2Reader = new Reader
}
class UnsafeRowReadTask(start: Int, end: Int)
extends ReadTask[UnsafeRow] with DataReader[UnsafeRow] {
private val row = new UnsafeRow(2)
row.pointTo(new Array[Byte](8 * 3), 8 * 3)
private var current = start - 1
override def createReader(): DataReader[UnsafeRow] = new UnsafeRowReadTask(start, end)
override def next(): Boolean = {
current += 1
current < end
}
override def get(): UnsafeRow = {
row.setInt(0, current)
row.setInt(1, -current)
row
}
override def close(): Unit = {}
}
class SchemaRequiredDataSource extends DataSourceV2 with ReadSupportWithSchema {
class Reader(val readSchema: StructType) extends DataSourceV2Reader {
override def createReadTasks(): JList[ReadTask[Row]] =
java.util.Collections.emptyList()
}
override def createReader(schema: StructType, options: DataSourceV2Options): DataSourceV2Reader =
new Reader(schema)
}
|
akopich/spark
|
sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2Suite.scala
|
Scala
|
apache-2.0
| 10,361
|
/*
* Copyright © 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi.e2e.v1
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.BasicHttpCredentials
import com.typesafe.config.ConfigFactory
import org.knora.webapi._
import org.knora.webapi.messages.store.triplestoremessages._
import org.knora.webapi.testing.tags.E2ETest
object PermissionsHandlingV1E2ESpec {
val config = ConfigFactory.parseString(
"""
akka.loglevel = "DEBUG"
akka.stdout-loglevel = "DEBUG"
""".stripMargin)
}
/**
* End-to-end test specification for testing the handling of permissions.
*/
@E2ETest
class PermissionsHandlingV1E2ESpec extends E2ESpec(PermissionsHandlingV1E2ESpec.config) with TriplestoreJsonProtocol {
private val rootUser = SharedTestDataV1.rootUser
private val rootUserEmail = rootUser.userData.email.get
private val imagesUser = SharedTestDataV1.imagesUser01
private val imagesUserEmail = imagesUser.userData.email.get
private val incunabulaUser = SharedTestDataV1.incunabulaProjectAdminUser
private val incunabulaUserEmail = incunabulaUser.userData.email.get
private val password = "test"
override lazy val rdfDataObjects: List[RdfDataObject] = List(
RdfDataObject(path = "_test_data/all_data/incunabula-data.ttl", name = "http://www.knora.org/data/0803/incunabula"),
RdfDataObject(path = "_test_data/demo_data/images-demo-data.ttl", name = "http://www.knora.org/data/00FF/images"),
RdfDataObject(path = "_test_data/all_data/anything-data.ttl", name = "http://www.knora.org/data/0001/anything")
)
"The Permissions Handling" should {
"allow a project member to create a resource" in {
val params =
"""
|{
| "restype_id": "http://www.knora.org/ontology/00FF/images#person",
| "label": "Testperson",
| "project_id": "http://rdfh.ch/projects/00FF",
| "properties": {
| "http://www.knora.org/ontology/00FF/images#lastname": [{"richtext_value":{"utf8str":"Testname"}}],
| "http://www.knora.org/ontology/00FF/images#firstname": [{"richtext_value":{"utf8str":"Name"}}]
| }
|}
""".stripMargin
val request = Post(baseApiUrl + s"/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(imagesUserEmail, password))
val response: HttpResponse = singleAwaitingRequest(request)
assert(response.status === StatusCodes.OK)
}
"allow a system admin user not in the project to create a resource" in {
val params =
"""
|{
| "restype_id": "http://www.knora.org/ontology/00FF/images#person",
| "label": "Testperson",
| "project_id": "http://rdfh.ch/projects/00FF",
| "properties": {
| "http://www.knora.org/ontology/00FF/images#lastname": [{"richtext_value":{"utf8str":"Testname"}}],
| "http://www.knora.org/ontology/00FF/images#firstname": [{"richtext_value":{"utf8str":"Name"}}]
| }
|}
""".stripMargin
val request = Post(baseApiUrl + s"/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(rootUserEmail, password))
val response: HttpResponse = singleAwaitingRequest(request)
assert(response.status === StatusCodes.OK)
}
"not allow a user from another project to create a resource" in {
val params =
"""
|{
| "restype_id": "http://www.knora.org/ontology/00FF/images#person",
| "label": "Testperson",
| "project_id": "http://rdfh.ch/projects/00FF",
| "properties": {
| "http://www.knora.org/ontology/00FF/images#lastname": [{"richtext_value":{"utf8str":"Testname"}}],
| "http://www.knora.org/ontology/00FF/images#firstname": [{"richtext_value":{"utf8str":"Name"}}]
| }
|}
""".stripMargin
val request = Post(baseApiUrl + s"/v1/resources", HttpEntity(ContentTypes.`application/json`, params)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password))
val response: HttpResponse = singleAwaitingRequest(request)
// The request should be rejected; only check that it did not succeed, since the exact error status depends on the API.
assert(response.status != StatusCodes.OK)
}
}
}
|
musicEnfanthen/Knora
|
webapi/src/test/scala/org/knora/webapi/e2e/v1/PermissionsHandlingV1E2ESpec.scala
|
Scala
|
agpl-3.0
| 5,361
|
/*
* Copyright 2018 Vladimir Konstantinov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.illfaku.korro.dto.ws
import com.github.illfaku.korro.config.HttpInstruction
import com.github.illfaku.korro.dto.{HttpParams, HttpRequest}
import akka.actor.ActorRef
import java.net.URL
/**
* Request for a WebSocket handshake.
*
* @param actor Actor that will process handshake response and WebSocket frames.
* @param uri URI of handshake request.
* @param headers HTTP headers of a handshake request.
*/
case class WsHandshakeRequest(
actor: ActorRef,
uri: HttpRequest.Uri = HttpRequest.Uri(""),
headers: HttpParams = HttpParams.empty
) {
/**
* Creates [[com.github.illfaku.korro.dto.HttpRequest.Outgoing HttpRequest.Outgoing]] command for HTTP client.
* Concatenates path from it with uri from this request.
*/
def to(url: URL, instructions: List[HttpInstruction] = Nil): WsHandshakeRequest.Outgoing = {
val req = copy(uri = uri.withPrefix(url.getPath))
new WsHandshakeRequest.Outgoing(req, url, instructions)
}
}
object WsHandshakeRequest {
/**
* Command for HTTP client created by the `WsHandshakeRequest#to` method.
*/
class Outgoing private[korro] (val req: WsHandshakeRequest, val url: URL, val instructions: List[HttpInstruction])
private[korro] object Outgoing {
def unapply(out: Outgoing): Option[(WsHandshakeRequest, URL, List[HttpInstruction])] = {
Some((out.req, out.url, out.instructions))
}
}
}
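// ---------------------------------------------------------------------------
// Hedged usage sketch (added for illustration; not part of the original file).
// It shows how a handshake request could be built and turned into an outgoing
// command. The actor system name, the "/chat" uri and the example URL are
// hypothetical placeholders, and the handler actor is an empty stand-in.
// ---------------------------------------------------------------------------
object WsHandshakeRequestSketch extends App {
  import akka.actor.{ActorSystem, Props}

  val system = ActorSystem("ws-sketch")
  // Stand-in for an actor that would process the handshake response and frames.
  val handler = system.actorOf(Props.empty, "ws-handler")

  // `to` prefixes the URL's path ("/api") onto the request uri ("/chat").
  val request = WsHandshakeRequest(handler, HttpRequest.Uri("/chat"))
  val outgoing = request.to(new URL("http://example.com/api"))

  println(outgoing.url)          // http://example.com/api
  println(outgoing.instructions) // Nil by default

  system.terminate()
}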
|
oxy-development/korro
|
src/main/scala/com/github/illfaku/korro/dto/ws/WsHandshakeRequest.scala
|
Scala
|
apache-2.0
| 1,999
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.tailhq.dynaml.tensorflow.utils
import io.github.tailhq.dynaml.pipes.{DataPipe, MetaPipe12}
import io.github.tailhq.dynaml.tensorflow.data.AbstractDataSet
import io.github.tailhq.dynaml.tensorflow.Learn
import io.github.tailhq.dynaml.tensorflow.layers._
import org.platanios.tensorflow.api.learn.estimators.Estimator
import org.platanios.tensorflow.api.core.types.{IsFloatOrDouble, IsReal, IsNotQuantized, TF}
import org.platanios.tensorflow.api.{Tensor, Shape, Output, tfi, tf}
import org.platanios.tensorflow.api.implicits.helpers.{OutputToDataType, OutputToShape, OutputToTensor}
import org.platanios.tensorflow.api.learn.layers.{Compose, Concatenate, Layer, Linear, Conv2D, Map => MapTF, MapSeq}
object Utils {
type NNPROP = (Seq[Int], Seq[Shape], Seq[String], Seq[String])
/**
* Convert a real-valued tensor into an iterator over its entries as Doubles.
* */
def toDoubleSeq[D : TF: IsReal](t: Tensor[D]): Iterator[Double] = t.castTo[Double].entriesIterator
def process_scope(s: String): String = if(s.isEmpty) "" else s"$s/"
/**
* Find out the name scope of a layer which is part
* of a larger architecture.
*
* @param architecture A Neural network architecture
* @param layer_name The constituent layer to search for
* @return The name scope for the layer
* */
def get_scope(
architecture: Layer[_, _])(
layer_name: String): String = {
def scope_search(lstack: Seq[Layer[_, _]], scopesAcc: Seq[String]): String = lstack match {
case Seq() => scopesAcc.headOption.getOrElse("")
case Linear(name, _, _, _, _) :: tail =>
if(name == layer_name) s"${scopesAcc.head}" else scope_search(tail, scopesAcc.tail)
case Conv2D(name, _, _, _, _, _, _, _, _) :: tail =>
if(name == layer_name) s"${scopesAcc.head}" else scope_search(tail, scopesAcc.tail)
case FiniteHorizonCTRNN(name, _, _, _, _, _, _, _) :: tail =>
if(name == layer_name) s"${scopesAcc.head}" else scope_search(tail, scopesAcc.tail)
case FiniteHorizonLinear(name, _, _, _, _) :: tail =>
if(name == layer_name) s"${scopesAcc.head}" else scope_search(tail, scopesAcc.tail)
case DynamicTimeStepCTRNN(name, _, _, _, _, _, _) :: tail =>
if(name == layer_name) s"${scopesAcc.head}" else scope_search(tail, scopesAcc.tail)
case Compose(name, l1, l2) :: tail =>
scope_search(Seq(l1, l2) ++ tail, Seq.fill(2)(s"${process_scope(scopesAcc.head)}$name") ++ scopesAcc.tail)
case Concatenate(name, ls) :: tail =>
scope_search(ls ++ tail, Seq.fill(ls.length)(s"${process_scope(scopesAcc.head)}$name") ++ scopesAcc.tail)
case MapTF(name, ls, _) :: tail =>
scope_search(Seq(ls) ++ tail, Seq(s"${process_scope(scopesAcc.head)}$name") ++ scopesAcc.tail)
case MapSeq(name, ls, l) :: tail =>
scope_search((ls :: tail), Seq.fill(2)(s"${process_scope(scopesAcc.head)}$name") ++ scopesAcc.tail)
case SeqLayer(name, ls) :: tail =>
scope_search(ls ++ tail, Seq.fill(ls.length)(s"${process_scope(scopesAcc.head)}$name") ++ scopesAcc.tail)
case ArrayLayer(name, ls) :: tail =>
scope_search(ls ++ tail, Seq.fill(ls.length)(s"${process_scope(scopesAcc.head)}$name") ++ scopesAcc.tail)
case CombinedLayer(name, ls) :: tail =>
scope_search(ls ++ tail, Seq.fill(ls.length)(s"${process_scope(scopesAcc.head)}$name") ++ scopesAcc.tail)
case CombinedArrayLayer(name, ls) :: tail =>
scope_search(ls ++ tail, Seq.fill(ls.length)(s"${process_scope(scopesAcc.head)}$name") ++ scopesAcc.tail)
case MapLayer(name, ls) :: tail =>
scope_search(ls.values.toSeq ++ tail, Seq.fill(ls.size)(s"${process_scope(scopesAcc.head)}$name") ++ scopesAcc.tail)
//TODO: Need to test this part!!
case ScopedMapLayer(name, ls, scopes) :: tail =>
scope_search(ls.values.toSeq ++ tail, scopes ++ scopesAcc.tail)
case BifurcationLayer(name, l1, l2) :: tail =>
scope_search(Seq(l1, l2) ++ tail, Seq.fill(2)(s"${process_scope(scopesAcc.head)}$name") ++ scopesAcc.tail)
case Tuple2Layer(name, l1, l2) :: tail =>
scope_search(Seq(l1, l2) ++ tail, Seq.fill(2)(s"${process_scope(scopesAcc.head)}$name") ++ scopesAcc.tail)
case head :: tail =>
if(head.name == layer_name) s"${scopesAcc.head}" else scope_search(tail, scopesAcc.tail)
}
scope_search(Seq(architecture), Seq(""))
}
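/*
 * Hedged worked example (added for illustration, not in the original source):
 * for architecture = Compose("stack", Linear("Linear_1", ...), Linear("Linear_2", ...)),
 * get_scope(architecture)("Linear_2") descends into the Compose node, prefixes its
 * name onto the accumulated scope and, on matching the second Linear layer,
 * returns "stack". A layer name that never matches falls through to the empty
 * default scope "".
 * */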
/**
* Returns the properties [[NNPROP]] (i.e. layer sizes, shapes, parameter names, & data types)
* of a feed-forward/dense neural stack which consists of layers of unequal size.
*
* @param d The dimensionality of the input (assumed to be a rank 1 tensor).
* @param num_pred_dims The dimensionality of the network output.
* @param layer_sizes The size of each hidden layer.
* @param dType The data type of the layer weights and biases.
* @param starting_index The numeric index of the first layer, defaults to 1.
*
* */
def get_ffstack_properties(
d: Int, num_pred_dims: Int,
layer_sizes: Seq[Int],
dType: String = "FLOAT64",
starting_index: Int = 1): NNPROP = {
val net_layer_sizes = Seq(d) ++ layer_sizes ++ Seq(num_pred_dims)
val layer_shapes = net_layer_sizes.sliding(2).toSeq.map(c => Shape(c.head, c.last))
val size = net_layer_sizes.tail.length
val layer_parameter_names = (starting_index until starting_index + size).map(i => s"Linear_$i/Weights")
val layer_datatypes = Seq.fill(net_layer_sizes.tail.length)(dType)
(net_layer_sizes, layer_shapes, layer_parameter_names, layer_datatypes)
}
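/*
 * Hedged worked example (added for illustration, not in the original source):
 *
 *   val (sizes, shapes, names, dtypes) = get_ffstack_properties(3, 1, Seq(5, 4))
 *   // sizes  == Seq(3, 5, 4, 1)
 *   // shapes == Seq(Shape(3, 5), Shape(5, 4), Shape(4, 1))
 *   // names  == Seq("Linear_1/Weights", "Linear_2/Weights", "Linear_3/Weights")
 *   // dtypes == Seq("FLOAT64", "FLOAT64", "FLOAT64")
 * */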
/**
* Calculate the Kullback-Leibler divergence of
* a probability density from a prior density.
* */
def kl[D : TF: IsNotQuantized](prior: Output[D], p: Output[D]): Output[D] =
prior.divide(p).log.multiply(prior).sum(axes = 1).mean[Int]()
def kl[D : TF: IsNotQuantized](prior: Tensor[D], p: Tensor[D]): Tensor[D] =
prior.divide(p).log.multiply(prior).sum(axes = 1).mean[Int]()
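// In formula form: KL(prior || p) = mean_over_batch( sum_i prior_i * log(prior_i / p_i) ),
// with the sum taken over axis 1 and the mean over the batch axis.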
/**
* Calculate the Jensen-Shannon divergence
* between a probability and a target probability.
* */
def js[D : TF: IsNotQuantized](target_prob: Output[D], prob: Output[D]): Output[D] = {
val m = target_prob.add(prob).divide(Tensor(2.0).toOutput.castTo[D])
kl(target_prob, m).add(kl(prob, m)).multiply(Tensor(0.5).toOutput.castTo[D])
}
def js[D : TF: IsNotQuantized](target_prob: Tensor[D], prob: Tensor[D]): Tensor[D] = {
val two = Tensor(2).castTo[D]
val m = tfi.divide[D](target_prob.add(prob), two)
tfi.divide[D](kl(target_prob, m).add(kl(prob, m)), two)
}
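// In formula form: JS(p, q) = (KL(p || m) + KL(q || m)) / 2 with m = (p + q) / 2,
// which is what both overloads above compute.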
/**
* Calculate the Hellinger distance between two
* probability distributions.
* */
def hellinger[D : TF: IsNotQuantized](target_prob: Output[D], prob: Output[D]): Output[D] =
target_prob.sqrt.subtract(prob.sqrt).square.sum(axes = 1).sqrt.divide(Tensor(math.sqrt(2.0)).toOutput.castTo[D])
def hellinger[D : TF: IsNotQuantized](target_prob: Tensor[D], prob: Tensor[D]): Tensor[D] =
target_prob.sqrt.subtract(prob.sqrt).square.sum(axes = 1).sqrt
def cross_entropy[D : TF: IsNotQuantized](target_prob: Output[D], prob: Output[D]): Output[D] =
target_prob.multiply(prob.log).sum(axes = 1).multiply(Tensor(-1.0).toOutput.castTo[D]).mean[Int]()
/**
* Calculate the cross-entropy of two
* probability distributions.
* */
def cross_entropy[D : TF: IsNotQuantized](target_prob: Tensor[D], prob: Tensor[D]): Output[D] =
target_prob.multiply(prob.log).sum(axes = 1).multiply(Tensor(-1.0).castTo(target_prob.dataType)).mean[Int]()
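// In formula form: CE(target, p) = -mean_over_batch( sum_i target_i * log(p_i) ).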
def buffered_preds[
In, TrainIn, TrainOut, Out,
Loss: TF : IsFloatOrDouble,
IT, ID, IS, ITT, ITD, ITS,
TT, InferIn, InferOut](
predictiveModel: Learn.SupEstimatorTF[In, TrainIn, TrainOut, Out, Loss, (Out, (In, TrainIn))],
workingData: InferIn,
buffer: Int, dataSize: Int)(
implicit
getSplitByIndex: MetaPipe12[InferIn, Int, Int, InferIn],
concatenateSplits: DataPipe[Iterable[InferOut], InferOut],
evOutputToDataTypeIn: OutputToDataType.Aux[In, ID],
evOutputToDataTypeOut: OutputToDataType.Aux[Out, ITD],
evOutputToShapeIn: OutputToShape.Aux[In, IS],
evOutputToShapeOut: OutputToShape.Aux[Out, ITS],
evOutputToTensorIn: OutputToTensor.Aux[In, IT],
evOutputToTensorOut: OutputToTensor.Aux[Out, ITT],
ev: Estimator.SupportedInferInput[In, IT, ITT, InferIn, InferOut],
// This implicit helps the Scala 2.11 compiler.
evOutputToTensorInOut: OutputToTensor.Aux[(In, Out), (IT, ITT)]
): InferOut = {
val get_data_split = getSplitByIndex(workingData)
val preds_splits: Iterable[InferOut] = (0 until dataSize)
.grouped(buffer)
.map(indices => {
val progress = math.round(10*indices.head*buffer*100.0/dataSize)/10d
print("Progress %:\t")
pprint.pprintln(progress)
predictiveModel.infer[IT, ID, IS, ITT, ITD, ITS, InferIn, InferOut](
() => get_data_split(indices.head, indices.last))
}).toIterable
concatenateSplits(preds_splits)
}
def predict_data[
In, TrainIn, TrainOut, Out,
Loss: TF : IsFloatOrDouble,
IT, ID, IS, ITT, ITD, ITS,
TT, InferOut](
predictiveModel: Learn.SupEstimatorTF[In, TrainIn, TrainOut, Out, Loss, (Out, (In, TrainIn))],
data: AbstractDataSet[IT, TT],
pred_flags: (Boolean, Boolean) = (false, true),
buff_size: Int = 400)(
implicit
getSplitByIndex: MetaPipe12[IT, Int, Int, IT],
concatenateSplits: DataPipe[Iterable[InferOut], InferOut],
evOutputToDataTypeIn: OutputToDataType.Aux[In, ID],
evOutputToDataTypeOut: OutputToDataType.Aux[Out, ITD],
evOutputToShapeIn: OutputToShape.Aux[In, IS],
evOutputToShapeOut: OutputToShape.Aux[Out, ITS],
evOutputToTensorIn: OutputToTensor.Aux[In, IT],
evOutputToTensorOut: OutputToTensor.Aux[Out, ITT],
ev: Estimator.SupportedInferInput[In, IT, ITT, IT, InferOut],
// This implicit helps the Scala 2.11 compiler.
evOutputToTensorInOut: OutputToTensor.Aux[(In, Out), (IT, ITT)]): (Option[InferOut], Option[InferOut]) = {
val train_preds: Option[InferOut] =
if (pred_flags._1) {
println("\nGenerating predictions for training data.\n")
val predictions = buffered_preds[
In, TrainIn, TrainOut, Out, Loss,
IT, ID, IS, ITT, ITD, ITS,
TT, IT, InferOut](
predictiveModel,
data.trainData,
buff_size,
data.nTrain)
Some(predictions)
} else None
val test_preds: Option[InferOut] =
if (pred_flags._2) {
println("\nGenerating predictions for test data.\n")
val predictions = buffered_preds[
In, TrainIn, TrainOut, Out, Loss,
IT, ID, IS, ITT, ITD, ITS,
TT, IT, InferOut](
predictiveModel,
data.testData,
buff_size,
data.nTest)
Some(predictions)
} else None
(train_preds, test_preds)
}
}
|
mandar2812/DynaML
|
dynaml-tensorflow/src/main/scala/io/github/tailhq/dynaml/tensorflow/utils/Utils.scala
|
Scala
|
apache-2.0
| 11,783
|
/*
* Copyright 2016 The SIRIS Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The SIRIS Project is a cooperation between Beuth University, Berlin and the
* HCI Group at the University of Würzburg. The project is funded by the German
* Federal Ministry of Education and Research (grant no. 17N4409).
*/
package simx.core.helper
import simx.core.entity.Entity
import simx.core.svaractor.SVarActor
import simx.core.svaractor.unifiedaccess.EntityUpdateHandling
/**
* Created by dwiebusch on 17.01.16.
*/
trait LateContextBinding[ActorContext <: SVarActor]{
private var _context : Option[ActorContext] = None
protected implicit def actorContext : ActorContext =
_context.getOrElse(throw new Exception("actor context was not bound"))
final def bind(implicit context : ActorContext) {
_context = Some(context)
}
}
sealed trait RemoteFunctionBase[ActorContext <: SVarActor] extends LateContextBinding[ActorContext]{
protected def receiver : Option[SVarActor.Ref]
receiver.foreach(_ ! this)
}
abstract class RemoteFunction[ActorContext <: SVarActor, P1, P2,P3,P4, R](val paramCount : Int, protected val receiver : Option[SVarActor.Ref] = None)
extends RemoteFunctionBase[ActorContext]
{
def apply(p1 : P1) : R = throw new NotImplementedError()
def apply(p1 : P1, p2 : P2) : R = throw new NotImplementedError()
def apply(p1 : P1, p2 : P2, p3 : P3) : R = throw new NotImplementedError()
def apply(p1 : P1, p2 : P2, p3 : P3, p4 : P4) : R = throw new NotImplementedError()
def callFunction(p1 : P1)(implicit ctxt : ActorContext) = {
assert(paramCount == 1)
bind(ctxt)
apply(p1)
}
def callFunction(p1 : P1, p2 : P2)(implicit ctxt : ActorContext) = {
assert(paramCount == 2)
bind(ctxt)
apply(p1, p2)
}
def callFunction(p1 : P1, p2 : P2, p3 : P3)(implicit ctxt : ActorContext) = {
assert(paramCount == 3)
bind(ctxt)
apply(p1, p2, p3)
}
def callFunction(p1 : P1, p2 : P2, p3 : P3, p4 : P4)(implicit ctxt : ActorContext) = {
assert(paramCount == 4)
bind(ctxt)
apply(p1, p2, p3, p4)
}
}
abstract class MySpecial2ParamEffect(remote : SVarActor.Ref)
extends RemoteFunction[EntityUpdateHandling, Entity, Entity, Entity, Entity, Unit](2, Some(remote))
{
override def apply(p1: Entity, p2: Entity):Unit = effect(p1, p2)
// either set this or use DSL from below
protected var effect: (Entity, Entity) => Unit =
(_, _) => throw new NotImplementedError()
// DSL
protected def effectFor(e : (Entity, Entity) => Unit) =
effect = e
object For {
def parameters(e : (Entity, Entity) => Unit) = effectFor(e)
}
}
abstract class RemoteFunction1[ActorContext <: SVarActor, P1, R](protected val receiver : Option[SVarActor.Ref] = None)
extends (P1 => R) with RemoteFunctionBase[ActorContext]
{
def callFunction(p1 : P1)(implicit ctxt : ActorContext) = {
bind(ctxt)
apply(p1)
}
}
abstract class RemoteFunction2[ActorContext <: SVarActor, P1, P2, R](protected val receiver : Option[SVarActor.Ref] = None)
extends ((P1, P2) => R) with RemoteFunctionBase[ActorContext]
{
def callFunction(p1 : P1, p2: P2)(implicit ctxt : ActorContext) = {
bind(ctxt)
apply(p1, p2)
}
}
// Example class
abstract class Effect(remote : SVarActor.Ref)
extends RemoteFunction2[EntityUpdateHandling, Entity, Entity, Unit](Some(remote))
{
// rename apply function to effect
final def apply(v1: Entity, v2: Entity): Unit = effect(v1, v2)
// either set this or use DSL from below
protected var effect: (Entity, Entity) => Unit =
(_, _) => throw new NotImplementedError()
// DSL
protected def effectFor(e : (Entity, Entity) => Unit) =
effect = e
object For {
def parameters(e : (Entity, Entity) => Unit) = effectFor(e)
}
}
// ------------ //
// Test Section //
// ------------ //
class ExecutingActor extends SVarActor with EntityUpdateHandling{
override protected def removeFromLocalRep(e: Entity): Unit = {}
addHandler[MySpecial2ParamEffect]{
msg =>
println("received " + msg)
msg.callFunction(new Entity, new Entity)
}
}
class RequestingActor(anotherActor : SVarActor.Ref) extends SVarActor{
protected def someInternalActorFunction()(implicit executingActor : EntityUpdateHandling) = {
println("sender " + actorContext.self.path)
println("executor " + executingActor.self.path)
SVarActor.shutdownSystem()
}
new MySpecial2ParamEffect(anotherActor){
effectFor { (entity1, entity2) =>
someInternalActorFunction()
println("\\tentity1: " + entity1 +"\\n\\tentity2: " + entity2)
}
}
}
object TestRemoteFunction{
def main(args: Array[String]) {
SVarActor.createActor(new RequestingActor(SVarActor.createActor(new ExecutingActor)))
}
}
|
simulator-x/core
|
src/simx/core/helper/LateContextBinding.scala
|
Scala
|
apache-2.0
| 5,305
|
package dsmoq.services
/**
 * Case class representing a dataset attribute.
 *
 * @param name the attribute name
 * @param value the attribute value
*/
case class DataSetAttribute(
name: String,
value: String
)
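// Hedged example (illustrative values only, not part of the original file):
//   val attr = DataSetAttribute(name = "category", value = "geology")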
|
nkawa/dsmoq
|
server/apiServer/src/main/scala/dsmoq/services/DataSetAttribute.scala
|
Scala
|
apache-2.0
| 207
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.examples
import org.apache.spark.streaming.{Seconds, StreamingContext}
import StreamingContext._
import org.apache.spark.SparkContext._
/**
* Calculates popular hashtags (topics) over sliding 10 and 60 second windows from a Twitter
 * stream. The stream is instantiated with credentials and, optionally, with filters supplied
 * by the command-line arguments.
*
*/
object TwitterPopularTags {
def main(args: Array[String]) {
if (args.length < 1) {
System.err.println("Usage: TwitterPopularTags <master>" +
" [filter1] [filter2] ... [filter n]")
System.exit(1)
}
val (master, filters) = (args.head, args.tail)
val ssc = new StreamingContext(master, "TwitterPopularTags", Seconds(2),
System.getenv("SPARK_HOME"), Seq(System.getenv("SPARK_EXAMPLES_JAR")))
val stream = ssc.twitterStream(None, filters)
val hashTags = stream.flatMap(status => status.getText.split(" ").filter(_.startsWith("#")))
val topCounts60 = hashTags.map((_, 1)).reduceByKeyAndWindow(_ + _, Seconds(60))
.map{case (topic, count) => (count, topic)}
.transform(_.sortByKey(false))
val topCounts10 = hashTags.map((_, 1)).reduceByKeyAndWindow(_ + _, Seconds(10))
.map{case (topic, count) => (count, topic)}
.transform(_.sortByKey(false))
// Print popular hashtags
topCounts60.foreach(rdd => {
val topList = rdd.take(5)
println("\nPopular topics in last 60 seconds (%s total):".format(rdd.count()))
topList.foreach{case (count, tag) => println("%s (%s tweets)".format(tag, count))}
})
topCounts10.foreach(rdd => {
val topList = rdd.take(5)
println("\nPopular topics in last 10 seconds (%s total):".format(rdd.count()))
topList.foreach{case (count, tag) => println("%s (%s tweets)".format(tag, count))}
})
ssc.start()
}
}
|
mkolod/incubator-spark
|
examples/src/main/scala/org/apache/spark/streaming/examples/TwitterPopularTags.scala
|
Scala
|
apache-2.0
| 2,734
|
package com.azavea.maml.ast.codec
import com.azavea.maml.ast._
import com.azavea.maml.util.Neighborhood
import geotrellis.raster.TargetCell
import io.circe._
trait MamlCodecInstances extends MamlUtilityCodecs {
implicit def totalDecoder: Decoder[Expression]
implicit def totalEncoder: Encoder[Expression]
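// Orientation note (added; hedged): the codecs below follow a uniform pattern.
// Each Expression node gets a Decoder built with Decoder.forProductN over its
// fields ("args" for nodes with children, "value"/"name" for literals and
// variables) and an Encoder that additionally writes the node's "symbol".
// The abstract totalDecoder/totalEncoder above are assumed to dispatch between
// these per-node codecs (presumably on that "symbol" field), but that wiring is
// defined outside this trait.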
implicit lazy val decodeAddition: Decoder[Addition] =
Decoder.forProduct1("args"){ args: List[Expression] => Addition(args) }
implicit lazy val encodeAddition: Encoder[Addition] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeSubtraction: Decoder[Subtraction] =
Decoder.forProduct1("args"){ args: List[Expression] => Subtraction(args) }
implicit lazy val encodeSubtraction: Encoder[Subtraction] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeMultiplication: Decoder[Multiplication] =
Decoder.forProduct1("args"){ args: List[Expression] => Multiplication(args) }
implicit lazy val encodeMultiplication: Encoder[Multiplication] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeDivision: Decoder[Division] =
Decoder.forProduct1("args"){ args: List[Expression] => Division(args) }
implicit lazy val encodeDivision: Encoder[Division] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeMax: Decoder[Max] =
Decoder.forProduct1("args"){ args: List[Expression] => Max(args) }
implicit lazy val encodeMax: Encoder[Max] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeMin: Decoder[Min] =
Decoder.forProduct1("args"){ args: List[Expression] => Min(args) }
implicit lazy val encodeMin: Encoder[Min] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeMasking: Decoder[Masking] =
Decoder.forProduct1("args"){ args: List[Expression] => Masking(args) }
implicit lazy val encodeMasking: Encoder[Masking] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decoderSleep: Decoder[Sleep] =
Decoder.forProduct2("seconds", "args"){
(seconds: Long, args: List[Expression]) => Sleep(seconds, args)
}
implicit lazy val encoderSleep: Encoder[Sleep] =
Encoder.forProduct2("seconds", "args")(u => (u.seconds, u.children))
implicit lazy val decodePow: Decoder[Pow] =
Decoder.forProduct1("args"){ args: List[Expression] => Pow(args) }
implicit lazy val encodePow: Encoder[Pow] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeClassification: Decoder[Classification] =
Decoder.forProduct2("args", "classifcations")(Classification.apply)
implicit lazy val encodeClassification: Encoder[Classification] =
Encoder.forProduct3("args", "classifications", "symbol")(u => (u.children, u.classMap, u.sym))
implicit lazy val decodeFocalMax: Decoder[FocalMax] =
Decoder.forProduct3[FocalMax, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") {
(args, neighborhood, target) => FocalMax(args, neighborhood, target.getOrElse(TargetCell.All))
}
implicit lazy val encodeFocalMax: Encoder[FocalMax] =
Encoder.forProduct4("args", "neighborhood", "target","symbol")(u => (u.children, u.neighborhood, u.target, u.sym))
implicit lazy val decodeFocalMin: Decoder[FocalMin] =
Decoder.forProduct3[FocalMin, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") {
(args, neighborhood, target) => FocalMin(args, neighborhood, target.getOrElse(TargetCell.All))
}
implicit lazy val encodeFocalMin: Encoder[FocalMin] =
Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym))
implicit lazy val decodeFocalMean: Decoder[FocalMean] =
Decoder.forProduct3[FocalMean, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") {
(args, neighborhood, target) => FocalMean(args, neighborhood, target.getOrElse(TargetCell.All))
}
implicit lazy val encodeFocalMean: Encoder[FocalMean] =
Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym))
implicit lazy val decodeFocalMedian: Decoder[FocalMedian] =
Decoder.forProduct3[FocalMedian, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") {
(args, neighborhood, target) => FocalMedian(args, neighborhood, target.getOrElse(TargetCell.All))
}
implicit lazy val encodeFocalMedian: Encoder[FocalMedian] =
Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym))
implicit lazy val decodeFocalMode: Decoder[FocalMode] =
Decoder.forProduct3[FocalMode, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") {
(args, neighborhood, target) => FocalMode(args, neighborhood, target.getOrElse(TargetCell.All))
}
implicit lazy val encodeFocalMode: Encoder[FocalMode] =
Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym))
implicit lazy val decodeFocalSum: Decoder[FocalSum] =
Decoder.forProduct3[FocalSum, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") {
(args, neighborhood, target) => FocalSum(args, neighborhood, target.getOrElse(TargetCell.All))
}
implicit lazy val encodeFocalSum: Encoder[FocalSum] =
Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym))
implicit lazy val decodeFocalStdDev: Decoder[FocalStdDev] =
Decoder.forProduct3[FocalStdDev, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") {
(args, neighborhood, target) => FocalStdDev(args, neighborhood, target.getOrElse(TargetCell.All))
}
implicit lazy val encodeFocalStdDev: Encoder[FocalStdDev] =
Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym))
implicit lazy val decodeFocalSlope: Decoder[FocalSlope] =
Decoder.forProduct3[FocalSlope, List[Expression], Option[Double], Option[TargetCell]]("args", "zFactor", "target") {
(args, zFactor, target) => FocalSlope(args, zFactor, target.getOrElse(TargetCell.All))
}
implicit lazy val encodeFocalSlope: Encoder[FocalSlope] =
Encoder.forProduct4("args", "zFactor", "target", "symbol")(u => (u.children, u.zFactor, u.target, u.sym))
implicit lazy val decodeFocalHillshade: Decoder[FocalHillshade] =
Decoder.forProduct5[FocalHillshade, List[Expression], Double, Double, Option[Double], Option[TargetCell]]("args", "azimuth", "altitude", "zFactor", "target") {
(args, azimuth, altitude, zFactor, target) => FocalHillshade(args, azimuth, altitude, zFactor, target.getOrElse(TargetCell.All))
}
implicit lazy val encodeFocalHillshade: Encoder[FocalHillshade] =
Encoder.forProduct6("args", "azimuth", "altitude", "zFactor", "target", "symbol")(u =>
(u.children, u.azimuth, u.altitude, u.zFactor, u.target, u.sym)
)
implicit lazy val decodeFocalAspect: Decoder[FocalAspect] =
Decoder.forProduct2[FocalAspect, List[Expression], Option[TargetCell]]("args", "target") {
(args, target) => FocalAspect(args, target.getOrElse(TargetCell.All))
}
implicit lazy val encodeFocalAspect: Encoder[FocalAspect] =
Encoder.forProduct3("args", "target", "symbol")(u =>
(u.children, u.target, u.sym)
)
implicit lazy val decodeGreater: Decoder[Greater] =
Decoder.forProduct1("args"){ args: List[Expression] => Greater(args) }
implicit lazy val encodeGreater: Encoder[Greater] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeGreaterOrEqual: Decoder[GreaterOrEqual] =
Decoder.forProduct1("args"){ args: List[Expression] => GreaterOrEqual(args) }
implicit lazy val encodeGreaterOrEqual: Encoder[GreaterOrEqual] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeLesserOrEqual: Decoder[LesserOrEqual] =
Decoder.forProduct1("args"){ args: List[Expression] => LesserOrEqual(args) }
implicit lazy val encodeLesserOrEqual: Encoder[LesserOrEqual] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeLesser: Decoder[Lesser] =
Decoder.forProduct1("args"){ args: List[Expression] => Lesser(args) }
implicit lazy val encodeLesser: Encoder[Lesser] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeUnequal: Decoder[Unequal] =
Decoder.forProduct1("args"){ args: List[Expression] => Unequal(args) }
implicit lazy val encodeUnequal: Encoder[Unequal] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeEqual: Decoder[Equal] =
Decoder.forProduct1("args"){ args: List[Expression] => Equal(args) }
implicit lazy val encodeEqual: Encoder[Equal] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeOr: Decoder[Or] =
Decoder.forProduct1("args"){ args: List[Expression] => Or(args) }
implicit lazy val encodeOr: Encoder[Or] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeXor: Decoder[Xor] =
Decoder.forProduct1("args"){ args: List[Expression] => Xor(args) }
implicit lazy val encodeXor: Encoder[Xor] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeAnd: Decoder[And] =
Decoder.forProduct1("args"){ args: List[Expression] => And(args) }
implicit lazy val encodeAnd: Encoder[And] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeAtan2: Decoder[Atan2] =
Decoder.forProduct1("args"){ args: List[Expression] => Atan2(args) }
implicit lazy val encodeAtan2: Encoder[Atan2] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeBranch: Decoder[Branch] =
Decoder.forProduct1("args"){ args: List[Expression] => Branch(args) }
implicit lazy val encodeBranch: Encoder[Branch] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeSin: Decoder[Sin] =
Decoder.forProduct1("args"){ args: List[Expression] => Sin(args) }
implicit lazy val encodeSin: Encoder[Sin] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeCos: Decoder[Cos] =
Decoder.forProduct1("args"){ args: List[Expression] => Cos(args) }
implicit lazy val encodeCos: Encoder[Cos] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeTan: Decoder[Tan] =
Decoder.forProduct1("args"){ args: List[Expression] => Tan(args) }
implicit lazy val encodeTan: Encoder[Tan] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeSinh: Decoder[Sinh] =
Decoder.forProduct1("args"){ args: List[Expression] => Sinh(args) }
implicit lazy val encodeSinh: Encoder[Sinh] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeCosh: Decoder[Cosh] =
Decoder.forProduct1("args"){ args: List[Expression] => Cosh(args) }
implicit lazy val encodeCosh: Encoder[Cosh] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeTanh: Decoder[Tanh] =
Decoder.forProduct1("args"){ args: List[Expression] => Tanh(args) }
implicit lazy val encodeTanh: Encoder[Tanh] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeAsin: Decoder[Asin] =
Decoder.forProduct1("args"){ args: List[Expression] => Asin(args) }
implicit lazy val encodeAsin: Encoder[Asin] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeAcos: Decoder[Acos] =
Decoder.forProduct1("args"){ args: List[Expression] => Acos(args) }
implicit lazy val encodeAcos: Encoder[Acos] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeAtan: Decoder[Atan] =
Decoder.forProduct1("args"){ args: List[Expression] => Atan(args) }
implicit lazy val encodeAtan: Encoder[Atan] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeRound: Decoder[Round] =
Decoder.forProduct1("args"){ args: List[Expression] => Round(args) }
implicit lazy val encodeRound: Encoder[Round] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeFloor: Decoder[Floor] =
Decoder.forProduct1("args")(Floor.apply)
implicit lazy val encodeFloor: Encoder[Floor] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeCeil: Decoder[Ceil] =
Decoder.forProduct1("args")(Ceil.apply)
implicit lazy val encodeCeil: Encoder[Ceil] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeLogE: Decoder[LogE] =
Decoder.forProduct1("args")(LogE.apply)
implicit lazy val encodeLogE: Encoder[LogE] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeLog10: Decoder[Log10] =
Decoder.forProduct1("args")(Log10.apply)
implicit lazy val encodeLog10: Encoder[Log10] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeSquareRoot: Decoder[SquareRoot] =
Decoder.forProduct1("args")(SquareRoot.apply)
implicit lazy val encodeSquareRoot: Encoder[SquareRoot] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeAbs: Decoder[Abs] =
Decoder.forProduct1("args")(Abs.apply)
implicit lazy val encodeAbs: Encoder[Abs] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeDefined: Decoder[Defined] =
Decoder.forProduct1("args")(Defined.apply)
implicit lazy val encodeDefined: Encoder[Defined] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeUndefined: Decoder[Undefined] =
Decoder.forProduct1("args")(Undefined.apply)
implicit lazy val encodeUndefined: Encoder[Undefined] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeNumNeg: Decoder[NumericNegation] =
Decoder.forProduct1("args")(NumericNegation.apply)
implicit lazy val encodeNumNeg: Encoder[NumericNegation] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeLogicNeg: Decoder[LogicalNegation] =
Decoder.forProduct1("args")(LogicalNegation.apply)
implicit lazy val encodeLogicNeg: Encoder[LogicalNegation] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeImageSelect: Decoder[ImageSelect] =
Decoder.forProduct2("args", "labels")(ImageSelect.apply)
implicit lazy val encodeImageSelect: Encoder[ImageSelect] =
Encoder.forProduct3("args", "labels", "symbol")(u => (u.children, u.labels, u.sym))
implicit lazy val decodeIntLit: Decoder[IntLit] =
Decoder.forProduct1("value")(IntLit.apply)
implicit lazy val encodeIntLit: Encoder[IntLit] =
Encoder.forProduct2("value", "symbol")(u => (u.value, u.sym))
implicit lazy val decodeIntvar: Decoder[IntVar] =
Decoder.forProduct1("name")(IntVar.apply)
implicit lazy val encodeIntvar: Encoder[IntVar] =
Encoder.forProduct2("name", "symbol")(u => (u.name, u.sym))
implicit lazy val decodeDblLit: Decoder[DblLit] =
Decoder.forProduct1("value")(DblLit.apply)
implicit lazy val encodeDblLit: Encoder[DblLit] =
Encoder.forProduct2("value", "symbol")(u => (u.value, u.sym))
implicit lazy val decodeDblVar: Decoder[DblVar] =
Decoder.forProduct1("name")(DblVar.apply)
implicit lazy val encodeDblVar: Encoder[DblVar] =
Encoder.forProduct2("name", "symbol")(u => (u.name, u.sym))
implicit lazy val decodeBoolLit: Decoder[BoolLit] =
Decoder.forProduct1("value")(BoolLit.apply)
implicit lazy val encodeBoolLit: Encoder[BoolLit] =
Encoder.forProduct2("value", "symbol")(u => (u.value, u.sym))
implicit lazy val decodeBoolVar: Decoder[BoolVar] =
Decoder.forProduct1("name")(BoolVar.apply)
implicit lazy val encodeBoolVar: Encoder[BoolVar] =
Encoder.forProduct2("name", "symbol")(u => (u.name, u.sym))
implicit lazy val decodeGeomLit: Decoder[GeomLit] =
Decoder.forProduct1("geom")(GeomLit.apply)
implicit lazy val encodeGeomLit: Encoder[GeomLit] =
Encoder.forProduct2("geom", "symbol")(u => (u.geom, u.sym))
implicit lazy val decodeGeomVar: Decoder[GeomVar] =
Decoder.forProduct1("name")(GeomVar.apply)
implicit lazy val encodeGeomVar: Encoder[GeomVar] =
Encoder.forProduct2("name", "symbol")(u => (u.name, u.sym))
implicit lazy val decodeRasterVar: Decoder[RasterVar] =
Decoder.forProduct1("name")(RasterVar.apply)
implicit lazy val encodeRasterVar: Encoder[RasterVar] =
Encoder.forProduct2("name", "symbol")(u => (u.name, u.sym))
implicit lazy val decodeRGB: Decoder[RGB] =
Decoder.forProduct4[RGB, List[Expression], Option[String], Option[String], Option[String]]("args", "redBand", "greenBand", "blueBand") {
(args, redBand, greenBand, blueBand) => RGB(args, redBand.getOrElse("0"), greenBand.getOrElse("0"), blueBand.getOrElse("0"))
}
implicit lazy val encodeRGB: Encoder[RGB] =
Encoder.forProduct5("args", "redBand", "greenBand", "blueBand", "symbol")(u => (u.children, u.redBand, u.greenBand, u.blueBand, u.sym))
implicit lazy val decodeAssemble: Decoder[Assemble] =
Decoder.forProduct1[Assemble, List[Expression]]("args") {
(args) => Assemble(args)
}
implicit lazy val encodeAssemble: Encoder[Assemble] =
Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym))
implicit lazy val decodeRescale: Decoder[Rescale] =
Decoder.forProduct4("args", "newMin", "newMax", "band")(Rescale.apply)
implicit lazy val encodeRescale: Encoder[Rescale] =
Encoder.forProduct5("args", "newMin", "newMax", "band", "symbol")(u => (u.children, u.newMin, u.newMax, u.band, u.sym))
implicit lazy val decodeNormalize: Decoder[Normalize] =
Decoder.forProduct6("args", "oldMin", "oldMax", "newMin", "newMax", "band")(Normalize.apply)
implicit lazy val encodeNormalize: Encoder[Normalize] =
Encoder.forProduct7("args", "oldMin", "oldMax", "newMin", "newMax", "band", "symbol")(u => (u.children, u.oldMin, u.oldMax, u.newMin, u.newMax, u.band, u.sym))
implicit lazy val decodeClamp: Decoder[Clamp] =
Decoder.forProduct4("args", "min", "max", "band")(Clamp.apply)
implicit lazy val encodeClamp: Encoder[Clamp] =
Encoder.forProduct5("args", "min", "max", "band", "symbol")(u => (u.children, u.min, u.max, u.band, u.sym))
}
|
geotrellis/maml
|
shared/src/main/scala/ast/codec/MamlCodecInstances.scala
|
Scala
|
apache-2.0
| 18,966
|
/*
* Copyright 2015 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.unicomplex
import java.lang.management.ManagementFactory
import java.util.concurrent.TimeUnit
import javax.management.ObjectName
import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}
import com.typesafe.config.ConfigFactory
import org.scalatest._
import org.scalatest.concurrent.AsyncAssertions
import org.squbs.lifecycle.GracefulStop
import org.squbs.unicomplex.dummyextensions.DummyExtension
import spray.util.Utils
import scala.language.postfixOps
import scala.util.Try
object BadUnicomplexBootSpec {
val dummyJarsDir = getClass.getClassLoader.getResource("classpaths").getPath
val classPaths = Array(
"BadUnicomplexBoot",
"BadCube",
"BadCube1",
"BadCube2",
"NoMetaCube"
) map (dummyJarsDir + "/" + _)
val (_, port) = Utils.temporaryServerHostnameAndPort()
val config = ConfigFactory.parseString(
s"""
|squbs {
| actorsystem-name = BadUnicomplexBootSpec
| ${JMX.prefixConfig} = true
|}
|default-listener.bind-port = $port
""".stripMargin
) withFallback ConfigFactory.parseString(
"""
|
|akka.actor.deployment {
| /BadUnicomplexBoot/Prepender {
| router = round-robin-pool
| nr-of-instances = 5
| }
|}
|
|
""".stripMargin
)
//for coverage
UnicomplexBoot { (name, config) => ActorSystem(name, config) }
val boot = UnicomplexBoot(config)
.createUsing {
(name, config) => ActorSystem(name, config)
}
.scanComponents(classPaths)
.initExtensions.start()
}
class BadUnicomplexBootSpec extends TestKit(BadUnicomplexBootSpec.boot.actorSystem) with ImplicitSender
with WordSpecLike with Matchers with Inspectors with BeforeAndAfterAll
with AsyncAssertions {
import org.squbs.unicomplex.BadUnicomplexBootSpec._
implicit val timeout: akka.util.Timeout =
Try(System.getProperty("test.timeout").toLong) map {
millis =>
akka.util.Timeout(millis, TimeUnit.MILLISECONDS)
} getOrElse Timeouts.askTimeout
implicit val executionContext = system.dispatcher
override def afterAll() {
Unicomplex(system).uniActor ! GracefulStop
}
"UnicomplexBoot" must {
"start all cube actors" in {
val w = new Waiter
system.actorSelection("/user/BadUnicomplexBoot").resolveOne().onComplete(result => {
w {
assert(result.isSuccess)
}
w.dismiss()
})
w.await()
system.actorSelection("/user/BadUnicomplexBoot/AppendActor").resolveOne().onComplete(result => {
w {
assert(result.isSuccess)
}
w.dismiss()
})
w.await()
system.actorSelection("/user/BadUnicomplexBoot/Prepender").resolveOne().onComplete(result => {
w {
assert(result.isSuccess)
}
w.dismiss()
})
w.await()
}
"check cube MXbean" in {
import JMX._
val mbeanServer = ManagementFactory.getPlatformMBeanServer
val cubesObjName = new ObjectName(prefix(system) + cubesName)
val attr = mbeanServer.getAttribute(cubesObjName, "Cubes")
attr shouldBe a [Array[Any]]
all (attr.asInstanceOf[Array[Any]]) shouldBe a [javax.management.openmbean.CompositeData]
// 5 cube classpath entries are scanned above, but only one valid cube actually registers.
val cAttr = attr.asInstanceOf[Array[_]]
forAll (cAttr) (_ shouldBe a [javax.management.openmbean.CompositeData])
attr.asInstanceOf[Array[_]] should have size 1
}
"preInit, init and postInit all extensions" in {
boot.extensions.size should be (2)
//boot.extensions.forall(_.extLifecycle.get.isInstanceOf[DummyExtension]) should be(true)
boot.extensions(0).extLifecycle.get.asInstanceOf[DummyExtension].state should be ("CstartpreInitpostInit")
boot.extensions(1).extLifecycle should be (None)
}
"start again" in {
the[IllegalStateException] thrownBy {
boot.start()
} should have message "Unicomplex already started!"
}
"stopJVMOnExit" in {
boot.stopJVMOnExit shouldBe 'stopJVM
}
"externalConfigDir" in {
boot.externalConfigDir should be("squbsconfig")
}
"Constants" in {
UnicomplexBoot.extConfigDirKey should be("squbs.external-config-dir")
UnicomplexBoot.extConfigNameKey should be("squbs.external-config-files")
UnicomplexBoot.actorSystemNameKey should be("squbs.actorsystem-name")
}
}
}
|
keshin/squbs
|
squbs-unicomplex/src/test/scala/org/squbs/unicomplex/BadUnicomplexBootSpec.scala
|
Scala
|
apache-2.0
| 5,019
|
object Test extends App {
println("foo")
Macros.foo(42)
}
|
scala/scala
|
test/files/run/macro-impl-rename-context/Test_2.scala
|
Scala
|
apache-2.0
| 62
|
package sbt.inc
import xsbti.api.SourceAPI
import xsbt.api.ShowAPI
import xsbt.api.DefaultShowAPI._
import java.lang.reflect.Method
import java.util.{ List => JList }
/**
* A class which computes diffs (unified diffs) between two textual representations of an API.
*
* Internally, it uses java-diff-utils library but it calls it through reflection so there's
* no hard dependency on java-diff-utils.
*
* The reflective lookup of java-diff-utils library is performed in the constructor. Exceptions
* thrown by reflection are passed as-is to the caller of the constructor.
*
 * @throws ClassNotFoundException if the difflib.DiffUtils class cannot be located
 * @throws LinkageError if linking the java-diff-utils classes fails
 * @throws ExceptionInInitializerError if static initialization of a java-diff-utils class fails
*/
private[inc] class APIDiff {
import APIDiff._
private val diffUtilsClass = Class.forName(diffUtilsClassName)
// method signature: diff(List<?>, List<?>)
private val diffMethod: Method =
diffUtilsClass.getMethod(diffMethodName, classOf[JList[_]], classOf[JList[_]])
private val generateUnifiedDiffMethod: Method = {
val patchClass = Class.forName(patchClassName)
// method signature: generateUnifiedDiff(String, String, List<String>, Patch, int)
diffUtilsClass.getMethod(generateUnifiedDiffMethodName, classOf[String],
classOf[String], classOf[JList[String]], patchClass, classOf[Int])
}
/**
* Generates an unified diff between textual representations of `api1` and `api2`.
*/
def generateApiDiff(fileName: String, api1: SourceAPI, api2: SourceAPI, contextSize: Int): String = {
val api1Str = ShowAPI.show(api1)
val api2Str = ShowAPI.show(api2)
generateApiDiff(fileName, api1Str, api2Str, contextSize)
}
private def generateApiDiff(fileName: String, f1: String, f2: String, contextSize: Int): String = {
assert((diffMethod != null) && (generateUnifiedDiffMethod != null), "APIDiff isn't properly initialized.")
import scala.collection.JavaConverters._
def asJavaList[T](it: Iterator[T]): java.util.List[T] = it.toSeq.asJava
val f1Lines = asJavaList(f1.lines)
val f2Lines = asJavaList(f2.lines)
//val diff = DiffUtils.diff(f1Lines, f2Lines)
val diff /*: Patch*/ = diffMethod.invoke(null, f1Lines, f2Lines)
val unifiedPatch: JList[String] = generateUnifiedDiffMethod.invoke(null, fileName, fileName, f1Lines, diff,
(contextSize: java.lang.Integer)).asInstanceOf[JList[String]]
unifiedPatch.asScala.mkString("\n")
}
}
private[inc] object APIDiff {
private val diffUtilsClassName = "difflib.DiffUtils"
private val patchClassName = "difflib.Patch"
private val diffMethodName = "diff"
private val generateUnifiedDiffMethodName = "generateUnifiedDiff"
}
|
niktrop/sbt
|
compile/inc/src/main/scala/sbt/inc/APIDiff.scala
|
Scala
|
bsd-3-clause
| 2,694
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.status.api.v1
import java.util.{List => JList}
import javax.ws.rs.{GET, Produces, QueryParam}
import javax.ws.rs.core.MediaType
import org.apache.spark.ui.SparkUI
@Produces(Array(MediaType.APPLICATION_JSON))
private[v1] class AllStagesResource(ui: SparkUI) {
@GET
def stageList(@QueryParam("status") statuses: JList[StageStatus]): Seq[StageData] = {
ui.store.stageList(statuses)
}
}
|
cin/spark
|
core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala
|
Scala
|
apache-2.0
| 1,224
|
/*
,i::,
:;;;;;;;
;:,,::;.
1ft1;::;1tL
t1;::;1,
:;::; _____ __ ___ __
fCLff ;:: tfLLC / ___/ / |/ /____ _ _____ / /_
CLft11 :,, i1tffLi \__ \ ____ / /|_/ // __ `// ___// __ \
1t1i .;; .1tf ___/ //___// / / // /_/ // /__ / / / /
CLt1i :,: .1tfL. /____/ /_/ /_/ \__,_/ \___//_/ /_/
Lft1,:;: , 1tfL:
;it1i ,,,:::;;;::1tti s_mach.datadiff
.t1i .,::;;; ;1tt Copyright (c) 2014 S-Mach, Inc.
Lft11ii;::;ii1tfL: Author: lance.gatlin@gmail.com
.L1 1tt1ttt,,Li
...1LLLL...
*/
package s_mach.datadiff
/* WARNING: Generated code. To modify see s_mach.datadiff.TupleDataDiffTestCodeGen */
import scala.util.Random
import org.scalatest.{Matchers, FlatSpec}
class TupleDataDiffTest extends FlatSpec with Matchers {
"Tuple2Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2))
tuple calcDiff tuple should equal(patchFor[(Int,Int)].noChange)
}
"Tuple2Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
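// Note (added for clarity, based on the semantics visible in the tests above):
// a tuple patch is itself a tuple of per-element patches. For Int elements the
// patch is the numeric delta (newValue - oldValue), and `noChange` marks the
// positions that did not change, so `tuple applyPatch (tuple calcDiff modTuple)`
// reproduces `modTuple`. The remaining TupleNDiff tests repeat this pattern for
// higher arities.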
"Tuple3Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int)].noChange)
}
"Tuple3Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple4Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int)].noChange)
}
"Tuple4Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple5Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int)].noChange)
}
"Tuple5Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple6Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple6Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple7Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple7Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple8Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple8Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple9Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple9Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple10Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple10Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple11Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple11Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple12Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple12Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1,_12 = tuple._12 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple13Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple13Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1,_12 = tuple._12 + 1,_13 = tuple._13 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple14Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple14Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1,_12 = tuple._12 + 1,_13 = tuple._13 + 1,_14 = tuple._14 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple15Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple15Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1,_12 = tuple._12 + 1,_13 = tuple._13 + 1,_14 = tuple._14 + 1,_15 = tuple._15 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple16Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple16Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1,_12 = tuple._12 + 1,_13 = tuple._13 + 1,_14 = tuple._14 + 1,_15 = tuple._15 + 1,_16 = tuple._16 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple17Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple17Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1,_12 = tuple._12 + 1,_13 = tuple._13 + 1,_14 = tuple._14 + 1,_15 = tuple._15 + 1,_16 = tuple._16 + 1,_17 = tuple._17 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple18Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple18Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1,_12 = tuple._12 + 1,_13 = tuple._13 + 1,_14 = tuple._14 + 1,_15 = tuple._15 + 1,_16 = tuple._16 + 1,_17 = tuple._17 + 1,_18 = tuple._18 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple19Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple19Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1,_12 = tuple._12 + 1,_13 = tuple._13 + 1,_14 = tuple._14 + 1,_15 = tuple._15 + 1,_16 = tuple._16 + 1,_17 = tuple._17 + 1,_18 = tuple._18 + 1,_19 = tuple._19 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple20Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple20Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1,_12 = tuple._12 + 1,_13 = tuple._13 + 1,_14 = tuple._14 + 1,_15 = tuple._15 + 1,_16 = tuple._16 + 1,_17 = tuple._17 + 1,_18 = tuple._18 + 1,_19 = tuple._19 + 1,_20 = tuple._20 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple21Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple21Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1,_12 = tuple._12 + 1,_13 = tuple._13 + 1,_14 = tuple._14 + 1,_15 = tuple._15 + 1,_16 = tuple._16 + 1,_17 = tuple._17 + 1,_18 = tuple._18 + 1,_19 = tuple._19 + 1,_20 = tuple._20 + 1,_21 = tuple._21 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
"Tuple22Diff.diff" must "detect differences between the old and new value" in {
val noChange = patchFor[Int].noChange
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = Random.nextInt())
tuple calcDiff modTuple should equal((noChange,modTuple._2 - tuple._2,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange,noChange))
tuple calcDiff tuple should equal(patchFor[(Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int,Int)].noChange)
}
"Tuple22Diff.patch" must "apply changes to an old value to achieve new value" in {
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_1 = Random.nextInt())
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
{
val tuple = (Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt(),Random.nextInt())
val modTuple = tuple.copy(_2 = tuple._2 + 1,_3 = tuple._3 + 1,_4 = tuple._4 + 1,_5 = tuple._5 + 1,_6 = tuple._6 + 1,_7 = tuple._7 + 1,_8 = tuple._8 + 1,_9 = tuple._9 + 1,_10 = tuple._10 + 1,_11 = tuple._11 + 1,_12 = tuple._12 + 1,_13 = tuple._13 + 1,_14 = tuple._14 + 1,_15 = tuple._15 + 1,_16 = tuple._16 + 1,_17 = tuple._17 + 1,_18 = tuple._18 + 1,_19 = tuple._19 + 1,_20 = tuple._20 + 1,_21 = tuple._21 + 1,_22 = tuple._22 + 1)
val d = tuple calcDiff modTuple
tuple applyPatch d should equal(modTuple)
}
}
}
|
S-Mach/s_mach.datadiff
|
datadiff/src/test/scala/s_mach/datadiff/TupleDataDiffTest.scala
|
Scala
|
mit
| 36,854
|
package pspz3
import com.microsoft.z3
trait ZType[A] {
def make(name: scala.Symbol): A
}
object ZType {
def apply[A](f: String => A): ZType[A] = new ZType[A] { def make(x: scala.Symbol): A = f(x.name) }
}
trait ZTypes extends HasContext {
import ctx._
implicit val ZTypeInt = ZType[ZInt](mkIntConst(_))
implicit val ZTypeBool = ZType[ZBool](mkBoolConst(_))
implicit val ZTypeReal = ZType[ZReal](mkRealConst(_))
implicit val ZTypeInt32 = ZType[Int32](mkBVConst(_, 32).as[Int32])
implicit val ZTypeInt64 = ZType[Int64](mkBVConst(_, 64).as[Int64])
implicit val ZTypeUInt32 = ZType[UInt32](mkBVConst(_, 32).as[UInt32])
implicit val ZTypeUInt64 = ZType[UInt64](mkBVConst(_, 64).as[UInt64])
}
|
paulp/pspz3
|
src/main/scala/ztype.scala
|
Scala
|
mit
| 720
|
package io.continuum.bokeh
package examples
package models
import breeze.linalg.linspace
import breeze.numerics.sin
import math.{Pi=>pi}
import thirdparty._
object TwinAxis extends Example with Tools {
object source extends ColumnDataSource {
val x = column((-2*pi to 2*pi by 0.1).toArray)
val y1 = column(sin(x.value))
val y2 = column(linspace(0, 100, x.value.length))
}
import source.{x,y1,y2}
val xdr = new Range1d().start(-6.5).end(6.5)
val ydr = new Range1d().start(-1.1).end(1.1)
val plot = new Plot()
.title("Twin Axis Plot")
.x_range(xdr)
.y_range(ydr)
.min_border(80)
.tools(Pan|WheelZoom)
.extra_y_ranges(Map("foo" -> new Range1d().start(0).end(100)))
val xaxis = new LinearAxis().plot(plot)
val y1axis = new LinearAxis().plot(plot)
val y2axis = new LinearAxis().plot(plot).y_range_name("foo")
plot.below := xaxis :: Nil
plot.left := y1axis :: y2axis :: Nil
val circle1_glyph = Circle().x(x).y(y1).fill_color(Color.Red).size(5).line_color(Color.Black)
val circle1 = new GlyphRenderer().data_source(source).glyph(circle1_glyph)
val circle2_glyph = Circle().x(x).y(y2).fill_color(Color.Blue).size(5).line_color(Color.Black)
val circle2 = new GlyphRenderer().data_source(source).glyph(circle2_glyph).y_range_name("foo")
plot.renderers := xaxis :: y1axis :: y2axis :: circle1 :: circle2 :: Nil
val document = new Document(plot)
val html = document.save("twin_axis.html", config.resources)
info(s"Wrote ${html.file}. Open ${html.url} in a web browser.")
}
|
bokeh/bokeh-scala
|
examples/src/main/scala/models/TwinAxis.scala
|
Scala
|
mit
| 1,622
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.yarn
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.yarn.api.records._
import org.apache.hadoop.yarn.client.api.AMRMClient
import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.mockito.Mockito._
import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.yarn.config._
import org.apache.spark.deploy.yarn.YarnAllocator._
import org.apache.spark.deploy.yarn.YarnSparkHadoopUtil._
import org.apache.spark.rpc.RpcEndpointRef
import org.apache.spark.scheduler.SplitInfo
import org.apache.spark.util.ManualClock
class MockResolver extends SparkRackResolver {
override def resolve(conf: Configuration, hostName: String): String = {
if (hostName == "host3") "/rack2" else "/rack1"
}
}
class YarnAllocatorSuite extends SparkFunSuite with Matchers with BeforeAndAfterEach {
val conf = new YarnConfiguration()
val sparkConf = new SparkConf()
sparkConf.set("spark.driver.host", "localhost")
sparkConf.set("spark.driver.port", "4040")
sparkConf.set(SPARK_JARS, Seq("notarealjar.jar"))
sparkConf.set("spark.yarn.launchContainers", "false")
val appAttemptId = ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 0), 0)
// Resource returned by YARN. YARN can give larger containers than requested, so give 6 cores
// instead of the 5 requested and 3 GB instead of the 2 requested.
val containerResource = Resource.newInstance(3072, 6)
var rmClient: AMRMClient[ContainerRequest] = _
var containerNum = 0
override def beforeEach() {
super.beforeEach()
rmClient = AMRMClient.createAMRMClient()
rmClient.init(conf)
rmClient.start()
}
override def afterEach() {
try {
rmClient.stop()
} finally {
super.afterEach()
}
}
class MockSplitInfo(host: String) extends SplitInfo(null, host, null, 1, null) {
override def hashCode(): Int = 0
override def equals(other: Any): Boolean = false
}
def createAllocator(maxExecutors: Int = 5): YarnAllocator = {
val args = Array(
"--jar", "somejar.jar",
"--class", "SomeClass")
val sparkConfClone = sparkConf.clone()
sparkConfClone
.set("spark.executor.instances", maxExecutors.toString)
.set("spark.executor.cores", "5")
.set("spark.executor.memory", "2048")
new YarnAllocator(
"not used",
mock(classOf[RpcEndpointRef]),
conf,
sparkConfClone,
rmClient,
appAttemptId,
new SecurityManager(sparkConf),
Map(),
new MockResolver())
}
def createContainer(host: String): Container = {
// When YARN 2.6+ is required, avoid deprecation by using version with long second arg
val containerId = ContainerId.newInstance(appAttemptId, containerNum)
containerNum += 1
val nodeId = NodeId.newInstance(host, 1000)
Container.newInstance(containerId, nodeId, "", containerResource, RM_REQUEST_PRIORITY, null)
}
test("single container allocated") {
// request a single container and receive it
val handler = createAllocator(1)
handler.updateResourceRequests()
handler.getNumExecutorsRunning should be (0)
handler.getPendingAllocate.size should be (1)
val container = createContainer("host1")
handler.handleAllocatedContainers(Array(container))
handler.getNumExecutorsRunning should be (1)
handler.allocatedContainerToHostMap.get(container.getId).get should be ("host1")
handler.allocatedHostToContainersMap.get("host1").get should contain (container.getId)
val size = rmClient.getMatchingRequests(container.getPriority, "host1", containerResource).size
size should be (0)
}
test("container should not be created if requested number if met") {
// request a single container and receive it
val handler = createAllocator(1)
handler.updateResourceRequests()
handler.getNumExecutorsRunning should be (0)
handler.getPendingAllocate.size should be (1)
val container = createContainer("host1")
handler.handleAllocatedContainers(Array(container))
handler.getNumExecutorsRunning should be (1)
handler.allocatedContainerToHostMap.get(container.getId).get should be ("host1")
handler.allocatedHostToContainersMap.get("host1").get should contain (container.getId)
val container2 = createContainer("host2")
handler.handleAllocatedContainers(Array(container2))
handler.getNumExecutorsRunning should be (1)
}
test("some containers allocated") {
// request a few containers and receive some of them
val handler = createAllocator(4)
handler.updateResourceRequests()
handler.getNumExecutorsRunning should be (0)
handler.getPendingAllocate.size should be (4)
val container1 = createContainer("host1")
val container2 = createContainer("host1")
val container3 = createContainer("host2")
handler.handleAllocatedContainers(Array(container1, container2, container3))
handler.getNumExecutorsRunning should be (3)
handler.allocatedContainerToHostMap.get(container1.getId).get should be ("host1")
handler.allocatedContainerToHostMap.get(container2.getId).get should be ("host1")
handler.allocatedContainerToHostMap.get(container3.getId).get should be ("host2")
handler.allocatedHostToContainersMap.get("host1").get should contain (container1.getId)
handler.allocatedHostToContainersMap.get("host1").get should contain (container2.getId)
handler.allocatedHostToContainersMap.get("host2").get should contain (container3.getId)
}
test("receive more containers than requested") {
val handler = createAllocator(2)
handler.updateResourceRequests()
handler.getNumExecutorsRunning should be (0)
handler.getPendingAllocate.size should be (2)
val container1 = createContainer("host1")
val container2 = createContainer("host2")
val container3 = createContainer("host4")
handler.handleAllocatedContainers(Array(container1, container2, container3))
handler.getNumExecutorsRunning should be (2)
handler.allocatedContainerToHostMap.get(container1.getId).get should be ("host1")
handler.allocatedContainerToHostMap.get(container2.getId).get should be ("host2")
handler.allocatedContainerToHostMap.contains(container3.getId) should be (false)
handler.allocatedHostToContainersMap.get("host1").get should contain (container1.getId)
handler.allocatedHostToContainersMap.get("host2").get should contain (container2.getId)
handler.allocatedHostToContainersMap.contains("host4") should be (false)
}
test("decrease total requested executors") {
val handler = createAllocator(4)
handler.updateResourceRequests()
handler.getNumExecutorsRunning should be (0)
handler.getPendingAllocate.size should be (4)
handler.requestTotalExecutorsWithPreferredLocalities(3, 0, Map.empty)
handler.updateResourceRequests()
handler.getPendingAllocate.size should be (3)
val container = createContainer("host1")
handler.handleAllocatedContainers(Array(container))
handler.getNumExecutorsRunning should be (1)
handler.allocatedContainerToHostMap.get(container.getId).get should be ("host1")
handler.allocatedHostToContainersMap.get("host1").get should contain (container.getId)
handler.requestTotalExecutorsWithPreferredLocalities(2, 0, Map.empty)
handler.updateResourceRequests()
handler.getPendingAllocate.size should be (1)
}
test("decrease total requested executors to less than currently running") {
val handler = createAllocator(4)
handler.updateResourceRequests()
handler.getNumExecutorsRunning should be (0)
handler.getPendingAllocate.size should be (4)
handler.requestTotalExecutorsWithPreferredLocalities(3, 0, Map.empty)
handler.updateResourceRequests()
handler.getPendingAllocate.size should be (3)
val container1 = createContainer("host1")
val container2 = createContainer("host2")
handler.handleAllocatedContainers(Array(container1, container2))
handler.getNumExecutorsRunning should be (2)
handler.requestTotalExecutorsWithPreferredLocalities(1, 0, Map.empty)
handler.updateResourceRequests()
handler.getPendingAllocate.size should be (0)
handler.getNumExecutorsRunning should be (2)
}
test("kill executors") {
val handler = createAllocator(4)
handler.updateResourceRequests()
handler.getNumExecutorsRunning should be (0)
handler.getPendingAllocate.size should be (4)
val container1 = createContainer("host1")
val container2 = createContainer("host2")
handler.handleAllocatedContainers(Array(container1, container2))
handler.requestTotalExecutorsWithPreferredLocalities(1, 0, Map.empty)
handler.executorIdToContainer.keys.foreach { id => handler.killExecutor(id) }
val statuses = Seq(container1, container2).map { c =>
ContainerStatus.newInstance(c.getId(), ContainerState.COMPLETE, "Finished", 0)
}
handler.updateResourceRequests()
handler.processCompletedContainers(statuses.toSeq)
handler.getNumExecutorsRunning should be (0)
handler.getPendingAllocate.size should be (1)
}
test("lost executor removed from backend") {
val handler = createAllocator(4)
handler.updateResourceRequests()
handler.getNumExecutorsRunning should be (0)
handler.getPendingAllocate.size should be (4)
val container1 = createContainer("host1")
val container2 = createContainer("host2")
handler.handleAllocatedContainers(Array(container1, container2))
handler.requestTotalExecutorsWithPreferredLocalities(2, 0, Map())
val statuses = Seq(container1, container2).map { c =>
ContainerStatus.newInstance(c.getId(), ContainerState.COMPLETE, "Failed", -1)
}
handler.updateResourceRequests()
handler.processCompletedContainers(statuses.toSeq)
handler.updateResourceRequests()
handler.getNumExecutorsRunning should be (0)
handler.getPendingAllocate.size should be (2)
handler.getNumExecutorsFailed should be (2)
handler.getNumUnexpectedContainerRelease should be (2)
}
test("memory exceeded diagnostic regexes") {
val diagnostics =
"Container [pid=12465,containerID=container_1412887393566_0003_01_000002] is running " +
"beyond physical memory limits. Current usage: 2.1 MB of 2 GB physical memory used; " +
"5.8 GB of 4.2 GB virtual memory used. Killing container."
val vmemMsg = memLimitExceededLogMessage(diagnostics, VMEM_EXCEEDED_PATTERN)
val pmemMsg = memLimitExceededLogMessage(diagnostics, PMEM_EXCEEDED_PATTERN)
assert(vmemMsg.contains("5.8 GB of 4.2 GB virtual memory used."))
assert(pmemMsg.contains("2.1 MB of 2 GB physical memory used."))
}
test("window based failure executor counting") {
sparkConf.set("spark.yarn.executor.failuresValidityInterval", "100s")
val handler = createAllocator(4)
val clock = new ManualClock(0L)
handler.setClock(clock)
handler.updateResourceRequests()
handler.getNumExecutorsRunning should be (0)
handler.getPendingAllocate.size should be (4)
val containers = Seq(
createContainer("host1"),
createContainer("host2"),
createContainer("host3"),
createContainer("host4")
)
handler.handleAllocatedContainers(containers)
val failedStatuses = containers.map { c =>
ContainerStatus.newInstance(c.getId, ContainerState.COMPLETE, "Failed", -1)
}
handler.getNumExecutorsFailed should be (0)
clock.advance(100 * 1000L)
handler.processCompletedContainers(failedStatuses.slice(0, 1))
handler.getNumExecutorsFailed should be (1)
clock.advance(101 * 1000L)
handler.getNumExecutorsFailed should be (0)
handler.processCompletedContainers(failedStatuses.slice(1, 3))
handler.getNumExecutorsFailed should be (2)
clock.advance(50 * 1000L)
handler.processCompletedContainers(failedStatuses.slice(3, 4))
handler.getNumExecutorsFailed should be (3)
clock.advance(51 * 1000L)
handler.getNumExecutorsFailed should be (1)
clock.advance(50 * 1000L)
handler.getNumExecutorsFailed should be (0)
}
}
|
spark0001/spark2.1.1
|
yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala
|
Scala
|
apache-2.0
| 13,039
|
package io.github.edadma.numbers
import scala.math._
abstract class AbstractQuaternion[T: Numeric, Q <: Quaternion[T, Double, Q, QuaternionDouble]] extends Quaternion[T, Double, Q, QuaternionDouble] {
protected def promote(a: Double, b: Double, c: Double, d: Double): QuaternionDouble = QuaternionDouble(a, b, c, d)
protected def _floor(a: Double): Double = math.floor(a)
protected def _ceil(a: Double): Double = math.ceil(a)
protected def _sqrt(a: Double): Double = math.sqrt(a)
protected def _atan2(y: Double, x: Double): Double = atan2(y, x)
protected def _ln(a: Double): Double = log(a)
protected def _exp(a: Double): Double = math.exp(a)
protected def _sin(a: Double): Double = math.sin(a)
protected def _cos(a: Double): Double = math.cos(a)
protected def _acos(a: Double): Double = math.acos(a)
protected def _pow(a: Double, b: Double): Double = math.pow(a, b)
protected def fdivide(a: Double, b: Double): Double = a / b
protected def fmul(a: Double, b: Double): Double = a * b
}
|
edadma/numbers
|
shared/src/main/scala/io/github/edadma/numbers/AbstractQuaternion.scala
|
Scala
|
mit
| 1,028
|
/*
* Copyright (c) 2011, Daniel Spiewak
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* - Neither the name of "Anti-XML" nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.codecommit.antixml
import org.specs2.mutable._
class StAXSpecs extends Specification {
object StAXParser extends StAXParser
"StAXParser" should {
"parse a StreamSource and generate an Elem" in {
StAXParser.fromString("<a:a xmlns:a='a'>hi<b attr='value' /> there</a:a>") mustEqual Elem(Some("a"), "a", Attributes(), Map("a" -> "a"), Group(Text("hi"), Elem(None, "b", Attributes("attr" -> "value"), Map("a" -> "a"), Group()), Text(" there")))
}
"parse a simpleString with an non-prefixed namespace" in {
StAXParser.fromString("<a xmlns='urn:a'/>") mustEqual Elem(None, "a", Attributes(), Map("" -> "urn:a"), Group())
}
"parse a simpleString with both a namespace and an attribute" in {
StAXParser.fromString("<a xmlns='urn:a' key='val' />") mustEqual Elem(None, "a", Attributes("key"->"val"), Map("" -> "urn:a"), Group())
}
}
}
|
djspiewak/anti-xml
|
src/test/scala/com/codecommit/antixml/StAXSpecs.scala
|
Scala
|
bsd-3-clause
| 2,473
|
package module.detailtrait
/**
* Created by liguodong on 2016/10/9.
*/
object AOPScala extends App{
/**
* In Scala's multiple inheritance, trait methods are mixed in from left to right. (See the example below.)
* When a method is called on an object with multiple mixed-in traits, resolution proceeds from right to left. (See the example below.)
*/
val work = new Worker with toBefo with toBefo2
work.doAction
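// For illustration only: given the linearization of `Worker with toBefo with toBefo2`
// (toBefo2, toBefo, Worker, Action), the constructors run in reverse linearization order
// and the doAction call chains right to left, so the run should print:
// -----Action-----
// -----Worker-----
// -----toBefo-----
// -----toBefo2-----
// toBefo2 function
// toBefo function
// working .........
// toBefo function over
// toBefo2 function over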
}
trait Action{
println("-----Action-----")
def doAction
}
trait toBefo extends Action{
println("-----toBefo-----")
abstract override def doAction {
println("toBefo function")
super.doAction
println("toBefo function over")
}
}
trait toBefo2 extends Action /* with toBefo*/ {
println("-----toBefo2-----")
abstract override def doAction={
println("toBefo2 function")
super.doAction
println("toBefo2 function over")
}
}
class Worker extends Action{
println("-----Worker-----")
override def doAction=println("working .........")
}
|
liguodongIOT/java-scala-mix-sbt
|
src/main/scala/module/detailtrait/AOPScala.scala
|
Scala
|
apache-2.0
| 957
|
package com.arcusys.valamis.lesson.scorm.storage.sequencing
import com.arcusys.valamis.lesson.scorm.model.manifest.ChildrenSelection
trait ChildrenSelectionStorage {
def create(sequencingId: Long, entity: ChildrenSelection)
def get(sequencingId: Long): Option[ChildrenSelection]
def delete(sequencingId: Long)
}
|
igor-borisov/valamis
|
valamis-scorm-lesson/src/main/scala/com/arcusys/valamis/lesson/scorm/storage/sequencing/ChildrenSelectionStorage.scala
|
Scala
|
gpl-3.0
| 320
|
/*
* Copyright 2013 Marek Radonsky
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import models.Player
import play.api.mvc.RequestHeader
object Players extends GroupDependentController[Player] {
override val objectName = "Player"
override val modelObject = Player
override val objectFormat = Player.format
override def getObjectURI(id: Long)(implicit request: RequestHeader) = controllers.routes.Players.get(id).absoluteURL()
}
|
radonsky/Taurus
|
app/controllers/Players.scala
|
Scala
|
apache-2.0
| 981
|
package maastar.algorithm.maastar
import maastar.agent.{Agent, DecPomdpAgent}
import maastar.game._
import maastar.policy.PolicyNode
import org.scalamock.scalatest.MockFactory
import org.scalatest.{FlatSpec, ShouldMatchers}
/**
* Created by nwertzberger on 6/23/14.
*/
class PolicyEvaluatorTest extends FlatSpec with ShouldMatchers with MockFactory {
val agent1 = new DecPomdpAgent("agent1")
val agent2 = new DecPomdpAgent("agent2")
val action = new Action("stomp")
val doubleStomp: Map[Agent, Action] = Map(agent1 -> action, agent2 -> action)
val state = new State("wasted",
Map(
doubleStomp -> new Transition(0.1)
)
)
state.getJointActionTransition(doubleStomp)
.setNextStates(Map(
new StateObservation(state, Map(agent1 -> Map(), agent2 -> Map())) -> 1.0
)
)
val eval = new PolicyEvaluator()
"PolicyEvaluator" should "return the base value for a policy" in {
val stupidPolicy: Map[Agent, PolicyNode] = Map(
agent1 -> new PolicyNode(action),
agent2 -> new PolicyNode(action)
)
eval.utilityOf(stupidPolicy, Map(state -> 1.0), 1) should be(0.1)
}
it should "track down to sub-transitions" in {
val stupidPolicy: Map[Agent, PolicyNode] = Map(
agent1 -> new PolicyNode(action, Map(Set() -> new PolicyNode(action))),
agent2 -> new PolicyNode(action, Map(Set() -> new PolicyNode(action)))
)
eval.utilityOf(stupidPolicy, Map(state -> 1.0), 0) should be(0.2)
}
it should "give one response for no observations" in {
val noAgentObs: Map[Agent, Set[Observation]] = Map(agent1 -> Set(), agent2 -> Set())
eval.getAllAgentObservationCombinations(noAgentObs).toList should equal(List(noAgentObs))
}
it should "get four responses for one possible observation" in {
val observation = new Observation("stinks")
val oneAgentObs: Map[Agent, Set[Observation]] = Map(agent1 -> Set(observation), agent2 -> Set(observation))
eval.getAllAgentObservationCombinations(oneAgentObs).toList.size should equal(4)
}
}
|
nwertzberger/maastar
|
src/test/scala/maastar/algorithm/maastar/PolicyEvaluatorTest.scala
|
Scala
|
apache-2.0
| 2,157
|
import scala.io.Source
if (args.length > 0)
for (linea <- Source.fromFile(args(0)).getLines())
println(linea.length + " " + linea)
else
Console.err.println("Introduzca nombre de archivo")
|
fblupi/grado_informatica-NTP
|
Teoria/T2/printFile.scala
|
Scala
|
gpl-2.0
| 193
|
package com.github.soniex2.notebetter.util
import net.minecraft.util.registry.RegistryNamespacedDefaultedByKey
/**
* @author soniex2
*/
object MinecraftScalaHelper {
type ResourceLocation = net.minecraft.util.ResourceLocation
object ResourceLocation extends (String => ResourceLocation) with ((String, String) => ResourceLocation) {
@inline override def apply(v1: String) = new ResourceLocation(v1)
@inline override def apply(v1: String, v2: String) = new ResourceLocation(v1, v2)
def unapply(resLoc: ResourceLocation): Option[String] = Some(resLoc.toString)
}
object CachedResourceLocation extends (String => CachedResourceLocation) with ((String, String) => CachedResourceLocation) {
@inline override def apply(v1: String) = new CachedResourceLocation(v1)
@inline override def apply(v1: String, v2: String) = new CachedResourceLocation(v1, v2)
}
type BlockPos = net.minecraft.util.math.BlockPos
object BlockPos extends ((Int, Int, Int) => BlockPos) {
@inline override def apply(x: Int, y: Int, z: Int) = new BlockPos(x, y, z)
def unapply(blockPos: BlockPos): Option[(Int, Int, Int)] = {
Some((blockPos.getX, blockPos.getY, blockPos.getZ))
}
}
implicit class RegNamespacedDefaultedByKey[K, V](reg: RegistryNamespacedDefaultedByKey[K, V]) {
def byKey(k: K) = Option(reg.getObject(k))
def byVal(v: V) = Option(reg.getNameForObject(v))
}
}
|
eNByeX/NoteBetter
|
src/main/scala/com/github/soniex2/notebetter/util/MinecraftScalaHelper.scala
|
Scala
|
mit
| 1,422
|
/**
* Copyright 2014 Jorge Aliss (jaliss at gmail dot com) - twitter: @jaliss
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package securesocial.core.services
import play.api.mvc.{ Call, RequestHeader }
import play.api.{ Configuration, Environment }
import securesocial.core.SslEnabled
/**
* A RoutesService that resolves the routes for some of the pages
*/
trait RoutesService {
/**
* The login page url
*/
def loginPageUrl(implicit req: RequestHeader): String
/**
* The page where users get redirected when they deny access to their accounts using
* oauth logins
*/
def accessDeniedUrl(implicit req: RequestHeader): String
/**
* The page that starts the sign up flow
*/
def startSignUpUrl(implicit req: RequestHeader): String
/**
* The url that processes submissions from the start sign up page
*/
def handleStartSignUpUrl(implicit req: RequestHeader): String
/**
* The sign up page
*/
def signUpUrl(mailToken: String)(implicit req: RequestHeader): String
/**
* The url that processes submissions from the sign up page
*/
def handleSignUpUrl(mailToken: String)(implicit req: RequestHeader): String
/**
* The page that starts the reset password flow
*/
def startResetPasswordUrl(implicit req: RequestHeader): String
/**
* The url that processes submissions from the start reset password page
*/
def handleStartResetPasswordUrl(implicit req: RequestHeader): String
/**
* The reset password page
*/
def resetPasswordUrl(mailToken: String)(implicit req: RequestHeader): String
/**
* The url that processes submissions from the reset password page
*/
def handleResetPasswordUrl(mailToken: String)(implicit req: RequestHeader): String
/**
* The password change page
*/
def passwordChangeUrl(implicit req: RequestHeader): String
/**
* The url that processes submissions from the password change page
*/
def handlePasswordChangeUrl(implicit req: RequestHeader): String
/**
* The url to start an authentication flow with the given provider
*/
def authenticationUrl(provider: String, redirectTo: Option[String] = None, scope: Option[String] = None, authorizationUrlParams: Map[String, String] = Map(), saveMode: Option[String] = None, miscParam: Option[String] = None)(implicit req: RequestHeader): String
def faviconPath: Call
def jqueryPath: Call
def bootstrapCssPath: Call
def customCssPath: Option[Call]
}
object RoutesService {
/**
* The default RoutesService implementation. It points to the routes
* defined by the built in controllers.
*/
class Default(environment: Environment, configuration: Configuration) extends RoutesService {
private val logger = play.api.Logger("securesocial.core.DefaultRoutesService")
val sslEnabled = SslEnabled(environment, configuration)
val FaviconKey = "securesocial.faviconPath"
val JQueryKey = "securesocial.jqueryPath"
val BootstrapCssKey = "securesocial.bootstrapCssPath"
val CustomCssKey = "securesocial.customCssPath"
val ApplicationHostKey = "securesocial.applicationHost"
val ApplicationPortKey = "securesocial.applicationPort"
private lazy val applicationHost = configuration.get[Option[String]](ApplicationHostKey).getOrElse {
throw new RuntimeException(s"Missing property: $ApplicationHostKey")
}
private lazy val applicationPort =
configuration.get[Option[Int]](ApplicationPortKey).map(port => s":$port").getOrElse("")
private lazy val hostAndPort = s"$applicationHost$applicationPort"
protected def absoluteUrl(call: Call)(implicit req: RequestHeader): String = {
call.absoluteURL(sslEnabled.value, hostAndPort)
}
override def loginPageUrl(implicit req: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.LoginPage.login())
}
override def accessDeniedUrl(implicit req: RequestHeader): String = {
loginPageUrl
}
override def startSignUpUrl(implicit req: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.Registration.startSignUp())
}
override def handleStartSignUpUrl(implicit req: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.Registration.handleStartSignUp())
}
override def signUpUrl(mailToken: String)(implicit req: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.Registration.signUp(mailToken))
}
override def handleSignUpUrl(mailToken: String)(implicit req: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.Registration.handleSignUp(mailToken))
}
override def startResetPasswordUrl(implicit request: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.PasswordReset.startResetPassword())
}
override def handleStartResetPasswordUrl(implicit req: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.PasswordReset.handleStartResetPassword())
}
override def resetPasswordUrl(mailToken: String)(implicit req: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.PasswordReset.resetPassword(mailToken))
}
override def handleResetPasswordUrl(mailToken: String)(implicit req: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.PasswordReset.handleResetPassword(mailToken))
}
override def passwordChangeUrl(implicit req: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.PasswordChange.page())
}
override def handlePasswordChangeUrl(implicit req: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.PasswordChange.handlePasswordChange)
}
override def authenticationUrl(provider: String, redirectTo: Option[String] = None, scope: Option[String] = None, authorizationUrlParams: Map[String, String], saveMode: Option[String], miscParam: Option[String])(implicit req: RequestHeader): String = {
absoluteUrl(securesocial.controllers.routes.ProviderController.authenticate(provider, redirectTo, scope, authorizationUrlParams, saveMode, miscParam))
}
protected def valueFor(key: String) = {
val value = configuration.get[String](key)
logger.debug(s"[securesocial] $key = $value")
securesocial.controllers.routes.Assets.at(value)
}
protected def valueFor(key: String, default: String) = {
val value = configuration.get[Option[String]](key).getOrElse(default)
logger.debug(s"[securesocial] $key = $value")
securesocial.controllers.routes.Assets.at(value)
}
/**
* Loads the Favicon to use from configuration, using a default one if not provided
* @return the path to Favicon file to use
*/
override val faviconPath = valueFor(FaviconKey)
/**
* Loads the Jquery file to use from configuration, using a default one if not provided
* @return the path to Jquery file to use
*/
override val jqueryPath = valueFor(JQueryKey)
/**
* Loads the Bootstrap CSS file to use from configuration, using a default one if not provided
* @return the path to Bootstrap CSS file to use
*/
override val bootstrapCssPath = valueFor(BootstrapCssKey)
/**
* Loads the custom CSS file to use from configuration. If none is defined, none will be used
* @return Option containing a custom css file or None
*/
override val customCssPath: Option[Call] = {
val path = configuration.get[Option[String]](CustomCssKey).map(securesocial.controllers.routes.Assets.at)
logger.debug("[securesocial] custom css path = %s".format(path))
path
}
}
}
|
k4200/securesocial
|
module-code/app/securesocial/core/services/RoutesService.scala
|
Scala
|
apache-2.0
| 8,208
|
package org.dbpedia.spotlight.model
import java.util.Map
import java.lang.String
/**
* @author pablomendes
* @author Joachim Daiber
*/
trait SurfaceFormIndexer {
/**
* Adds the [[org.dbpedia.spotlight.model.SurfaceForm]] with the corresponding annotated count and
* total count. Total count is the number of times the surface form was observed,
* whether annotated or not.
*
* @param sf the surface form
* @param annotatedCount count of annotated occurrences of the surface form
* @param totalCount count of total occurrences of the surface form
*/
def addSurfaceForm(sf: SurfaceForm, annotatedCount: Int, totalCount: Int)
/**
* Adds every [[org.dbpedia.spotlight.model.SurfaceForm]] in the Map with its
* corresponding annotated and total count.
*
* @param sfCount Map from SurfaceForms to their annotated and total counts
   * @param lowercaseCounts Map from lowercased surface forms to their total counts
   * @param MIN_SF_COUNT minimum total count a surface form must reach to be indexed
   */
def addSurfaceForms(sfCount: Map[SurfaceForm, (Int, Int)], lowercaseCounts: Map[String, Int], MIN_SF_COUNT: Int)
}
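/**
 * A minimal in-memory sketch of the trait above (illustrative only): it assumes MIN_SF_COUNT is a
 * total-count threshold below which surface forms are skipped, and it ignores lowercaseCounts,
 * which a real indexer would persist as well.
 */
object InMemorySurfaceFormIndexer extends SurfaceFormIndexer {
  private val counts = new java.util.HashMap[SurfaceForm, (Int, Int)]()
  def addSurfaceForm(sf: SurfaceForm, annotatedCount: Int, totalCount: Int) {
    counts.put(sf, (annotatedCount, totalCount))
  }
  def addSurfaceForms(sfCount: Map[SurfaceForm, (Int, Int)], lowercaseCounts: Map[String, Int], MIN_SF_COUNT: Int) {
    val it = sfCount.entrySet().iterator()
    while (it.hasNext) {
      val e = it.next()
      // keep only surface forms whose total count reaches the (assumed) threshold
      if (e.getValue._2 >= MIN_SF_COUNT) addSurfaceForm(e.getKey, e.getValue._1, e.getValue._2)
    }
  }
}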
|
Skunnyk/dbpedia-spotlight-model
|
index/src/main/scala/org/dbpedia/spotlight/model/SurfaceFormIndexer.scala
|
Scala
|
apache-2.0
| 1,088
|
package com.blinkbox.books.agora.catalogue.book
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpecLike, Matchers}
import spray.testkit.ScalatestRouteTest
import spray.routing.HttpService
import com.blinkbox.books.test.MockitoSyrup
import org.mockito.Matchers._
import org.mockito.Mockito._
import scala.concurrent.duration._
import com.blinkbox.books.spray.v1.Version1JsonSupport
import com.blinkbox.books.spray.JsonFormats
import spray.http.StatusCodes._
import com.blinkbox.books.config.ApiConfig
import scala.concurrent.Future
import java.net.URL
import com.blinkbox.books.spray.Page
import com.blinkbox.books.spray.v1.ListPage
import com.blinkbox.books.spray.SortOrder
@RunWith(classOf[JUnitRunner])
class BookApiTest extends FlatSpecLike with ScalatestRouteTest with HttpService with Matchers with MockitoSyrup with Version1JsonSupport {
implicit override def version1JsonFormats = JsonFormats.blinkboxFormat()
implicit val actorRefFactory = system
implicit val routeTestTimeout = RouteTestTimeout(5.seconds)
val apiConfig = mock[ApiConfig]
when(apiConfig.localUrl).thenReturn(new URL("http://localhost"))
val bookConfig = mock[BookConfig]
when(bookConfig.path).thenReturn("/book")
when(bookConfig.maxAge).thenReturn(60.seconds)
when(bookConfig.maxResults).thenReturn(50)
when(bookConfig.synopsisPathLink).thenReturn("synopsis")
val service = mock[BookService]
val api = new BookApi(apiConfig, bookConfig, service)
val routes = api.routes
val book = BookRepresentation("guid", "id", "title", "date", sampleEligible = true, List(), None)
val defaultOrder = SortOrder("title", desc = false)
val defaultPage = Page(0, bookConfig.maxResults)
val expectedListPage = ListPage(1, 0, 1, List(book), None)
val emptyListPage = ListPage(0, 0, 0, List.empty[BookRepresentation], None)
"The service" should "return the book if it exists" in {
when(service.getBookByIsbn(anyString)).thenReturn(Future.successful(Option(book)))
Get("/book/isbn") ~> routes ~> check {
verify(service).getBookByIsbn("isbn")
status shouldEqual OK
responseAs[BookRepresentation] shouldEqual book
}
}
it should "return 404 if the book does not exist" in {
when(service.getBookByIsbn(anyString)).thenReturn(Future.successful(None))
Get("/book/cobblers") ~> routes ~> check {
verify(service).getBookByIsbn("cobblers")
status shouldEqual NotFound
}
}
it should "return the book synopsis if it exists" in {
val synopsis = BookSynopsis("id", "synopsis")
when(service.getBookSynopsis(anyString)).thenReturn(Future.successful(Some(synopsis)))
Get("/book/isbn/synopsis") ~> routes ~> check {
verify(service).getBookSynopsis("isbn")
status shouldEqual OK
responseAs[BookSynopsis] shouldEqual synopsis
}
}
it should "return 404 if the book synopsis does not exist" in {
when(service.getBookSynopsis(anyString)).thenReturn(Future.successful(None))
Get("/book/cobblers/synopsis") ~> routes ~> check {
verify(service).getBookSynopsis("cobblers")
status shouldEqual NotFound
}
}
it should "return bulk books" in {
val isbns = List("1", "2")
    val expected = ListPage(2, 0, 2, List(book, book), None)
when(service.getBooks(eql(isbns), any[Page])).thenReturn(Future.successful(expected))
Get("/book/?id=1&id=2") ~> routes ~> check {
verify(service).getBooks(isbns, Page(0, bookConfig.maxResults))
status shouldEqual OK
responseAs[ListPage[BookRepresentation]] shouldEqual expected
}
}
it should "return paginated bulk books" in {
val isbns = List("1", "2", "3")
    val expected = ListPage(3, 1, 1, List(book, book), None)
when(service.getBooks(isbns, Page(1, 1))).thenReturn(Future.successful(expected))
Get("/book/?id=1&id=2&id=3&offset=1&count=1") ~> routes ~> check {
verify(service).getBooks(isbns, Page(1, 1))
status shouldEqual OK
responseAs[ListPage[BookRepresentation]] shouldEqual expected
}
}
it should "return empty results for unknown books" in {
when(service.getBooks(any[List[String]], any[Page])).thenReturn(Future.successful(emptyListPage))
Get("/book/?id=999") ~> routes ~> check {
status shouldEqual OK
responseAs[ListPage[BookRepresentation]] shouldEqual emptyListPage
}
}
it should "return books by the given contributor" in {
when(service.getBooksByContributor("42", None, None, defaultPage, defaultOrder)).thenReturn(Future.successful(expectedListPage))
Get("/book/?contributor=42") ~> routes ~> check {
status shouldEqual OK
responseAs[ListPage[BookRepresentation]] shouldEqual expectedListPage
}
}
it should "return empty results for an unknown contributor" in {
when(service.getBooksByContributor("999", None, None, defaultPage, defaultOrder)).thenReturn(Future.successful(emptyListPage))
Get("/book/?contributor=999") ~> routes ~> check {
status shouldEqual OK
responseAs[ListPage[BookRepresentation]] shouldEqual emptyListPage
}
}
it should "return books by a given contributor with a specified publication date-range" in {
val dateParam = "2014-01-01"
val date = BookService.dateTimeFormat.parseDateTime(dateParam)
when(service.getBooksByContributor("42", Some(date), Some(date), Page(0, bookConfig.maxResults), SortOrder("title", desc = false))).thenReturn(Future.successful(expectedListPage))
Get(s"/book/?contributor=42&minPublicationDate=$dateParam&maxPublicationDate=$dateParam") ~> routes ~> check {
status shouldEqual OK
responseAs[ListPage[BookRepresentation]] shouldEqual expectedListPage
}
}
it should "fail if the end-date is before the start-date" in {
Get(s"/book/?contributor=42&minPublicationDate=2014-01-01&maxPublicationDate=2013-01-01") ~> routes ~> check {
status shouldEqual BadRequest
}
}
it should "Return books by a given contributor with a specific sort order" in {
when(service.getBooksByContributor("42", None, None, Page(0, bookConfig.maxResults), SortOrder("author", desc = true))).thenReturn(Future.successful(expectedListPage))
Get(s"/book/?contributor=42&order=author&desc=true") ~> routes ~> check {
status shouldEqual OK
responseAs[ListPage[BookRepresentation]] shouldEqual expectedListPage
}
}
it should "fail if given an invalid sort order" in {
Get(s"/book/?contributor=42&order=cobblers") ~> routes ~> check {
status shouldEqual BadRequest
}
}
}
|
blinkboxbooks/catalogue-v2.scala
|
catalogue2-service-public/src/test/scala/com/blinkbox/books/agora/catalogue/book/BookApiTest.scala
|
Scala
|
mit
| 6,574
|
package org.jetbrains.plugins.scala
package lang
package formatting
package processors
import com.intellij.psi.tree.TokenSet
import psi.api.ScalaFile
import scaladoc.psi.api.ScDocComment
import settings.ScalaCodeStyleSettings
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.lang.ASTNode
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypesEx
import com.intellij.psi.xml._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.packaging._
import org.jetbrains.plugins.scala.lang.psi.api.base._
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
import org.jetbrains.plugins.scala.lang.psi.api.base.types._
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import com.intellij.formatting.Spacing
import com.intellij.psi.codeStyle.CommonCodeStyleSettings
import com.intellij.psi.{PsiElement, PsiComment, PsiWhiteSpace}
import psi.api.toplevel.imports.{ScImportSelectors, ScImportStmt}
import psi.ScalaPsiUtil
import xml.ScXmlPattern
import com.intellij.psi.javadoc.PsiDocComment
import psi.api.toplevel.ScEarlyDefinitions
import com.intellij.openapi.util.TextRange
import com.intellij.openapi.diagnostic.Logger
import refactoring.util.ScalaNamesUtil
import scaladoc.lexer.ScalaDocTokenType
import parser.ScalaElementTypes
import psi.api.toplevel.typedef._
import extensions._
import scala.annotation.tailrec
import org.jetbrains.plugins.scala.util.MultilineStringUtil
object ScalaSpacingProcessor extends ScalaTokenTypes {
private val LOG = Logger.getInstance("#org.jetbrains.plugins.scala.lang.formatting.processors.ScalaSpacingProcessor")
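  // The Spacing.createSpacing arguments below are, in the IntelliJ formatting API,
  // (minSpaces, maxSpaces, minLineFeeds, keepLineBreaks, keepBlankLines); each constant is named
  // after the effect it produces between two adjacent blocks.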
val NO_SPACING_WITH_NEWLINE = Spacing.createSpacing(0, 0, 0, true, 1)
val NO_SPACING = Spacing.createSpacing(0, 0, 0, false, 0)
val COMMON_SPACING = Spacing.createSpacing(1, 1, 0, true, 100)
val IMPORT_BETWEEN_SPACING = Spacing.createSpacing(0, 0, 1, true, 100)
val IMPORT_OTHER_SPACING = Spacing.createSpacing(0, 0, 2, true, 100)
val BLOCK_ELEMENT_TYPES = {
import ScalaElementTypes._
TokenSet.create(BLOCK_EXPR, TEMPLATE_BODY, PACKAGING, TRY_BLOCK, MATCH_STMT, CATCH_BLOCK)
}
private def getText(node: ASTNode, fileText: String): String = {
node.getTextRange.substring(fileText)
}
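  // The two helpers below skip over sibling whitespace (both PSI whitespace and Scala in-line
  // whitespace tokens) to find the nearest non-whitespace sibling, returning null if none exists.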
private def nextNotWithspace(elem: PsiElement): PsiElement = {
var next = elem.getNextSibling
while (next != null && (next.isInstanceOf[PsiWhiteSpace] ||
next.getNode.getElementType == ScalaTokenTypes.tWHITE_SPACE_IN_LINE)) next = next.getNextSibling
next
}
private def prevNotWithspace(elem: PsiElement): PsiElement = {
var prev = elem.getPrevSibling
while (prev != null && (prev.isInstanceOf[PsiWhiteSpace] ||
prev.getNode.getElementType == ScalaTokenTypes.tWHITE_SPACE_IN_LINE)) prev = prev.getPrevSibling
prev
}
def getSpacing(left: ScalaBlock, right: ScalaBlock): Spacing = {
val settings = right.getCommonSettings
val keepBlankLinesInCode = settings.KEEP_BLANK_LINES_IN_CODE
val keepLineBreaks = settings.KEEP_LINE_BREAKS
val keepBlankLinesInDeclarations = settings.KEEP_BLANK_LINES_IN_DECLARATIONS
val keepBlankLinesBeforeRBrace = settings.KEEP_BLANK_LINES_BEFORE_RBRACE
def getSpacing(x: Int, y: Int, z: Int) = {
if (keepLineBreaks) Spacing.createSpacing(y, y, z, true, x)
else Spacing.createSpacing(y, y, z, false, 0)
}
if (left == null) {
return getSpacing(keepBlankLinesInCode, 0, 0) //todo:
}
val scalaSettings: ScalaCodeStyleSettings =
left.getSettings.getCustomSettings(classOf[ScalaCodeStyleSettings])
def getDependentLFSpacing(x: Int, y: Int, range: TextRange) = {
if (keepLineBreaks) Spacing.createDependentLFSpacing(y, y, range, true, x)
else Spacing.createDependentLFSpacing(y, y, range, false, 0)
}
val WITHOUT_SPACING = getSpacing(keepBlankLinesInCode, 0, 0)
val WITHOUT_SPACING_NO_KEEP = Spacing.createSpacing(0, 0, 0, false, 0)
val WITHOUT_SPACING_DEPENDENT = (range: TextRange) => getDependentLFSpacing(keepBlankLinesInCode, 0, range)
val WITH_SPACING = getSpacing(keepBlankLinesInCode, 1, 0)
val WITH_SPACING_NO_KEEP = Spacing.createSpacing(1, 1, 0, false, 0)
val WITH_SPACING_DEPENDENT = (range: TextRange) => getDependentLFSpacing(keepBlankLinesInCode, 1, range)
val ON_NEW_LINE = getSpacing(keepBlankLinesInCode, 0, 1)
val DOUBLE_LINE = getSpacing(keepBlankLinesInCode, 0, 2)
val leftNode = left.getNode
val rightNode = right.getNode
val fileText = leftNode.getPsi.getContainingFile.getText
//new formatter spacing
val leftElementType = leftNode.getElementType
val rightElementType = rightNode.getElementType
val leftPsi = leftNode.getPsi
val rightPsi = rightNode.getPsi
val fileTextRange = new TextRange(0, fileText.length())
/**
     * This is not the nodes' text! This is the blocks' text, which can differ from the node text.
*/
val (leftString, rightString) =
if (!fileTextRange.contains(left.getTextRange) ||
!fileTextRange.contains(right.getTextRange)) {
LOG.error("File text: \\n%s\\n\\nDoesn't contains nodes:\\n(%s, %s)".
format(fileText, leftPsi.getText, rightPsi.getText))
(leftPsi.getText, rightPsi.getText)
} else (left.getTextRange.substring(fileText),
right.getTextRange.substring(fileText))
import ScalaTokenTypes._
if ((leftPsi.isInstanceOf[PsiComment] || leftPsi.isInstanceOf[PsiDocComment]) &&
(rightPsi.isInstanceOf[PsiComment] || rightPsi.isInstanceOf[PsiDocComment])) {
return ON_NEW_LINE
}
//ScalaDocs
def docCommentOf(node: ASTNode) = node.getPsi.parentsInFile.findByType(classOf[ScDocComment]).getOrElse {
throw new RuntimeException("Unable to find parent doc comment")
}
(leftNode.getElementType, rightNode.getElementType,
leftNode.getTreeParent.getElementType, rightNode.getTreeParent.getElementType) match {
case (_, ScalaDocTokenType.DOC_COMMENT_LEADING_ASTERISKS, _, _) => return NO_SPACING_WITH_NEWLINE
case (_, ScalaDocTokenType.DOC_COMMENT_END, _, _) =>
return if (docCommentOf(rightNode).version == 1) {
NO_SPACING_WITH_NEWLINE
} else {
if (leftString(leftString.length() - 1) != ' ') WITH_SPACING else WITHOUT_SPACING
}
case (ScalaDocTokenType.DOC_COMMENT_START, _, _, _) =>
return if (docCommentOf(leftNode).version == 1) {
NO_SPACING_WITH_NEWLINE
} else {
if (getText(rightNode, fileText)(0) != ' ') WITH_SPACING else WITHOUT_SPACING
}
case (ScalaDocTokenType.DOC_COMMENT_LEADING_ASTERISKS, _, _, _) =>
if (getText(rightNode, fileText).apply(0) != ' ') return WITH_SPACING
else return WITHOUT_SPACING
case (ScalaDocTokenType.DOC_TAG_NAME, ScalaDocTokenType.DOC_TAG_VALUE_TOKEN, _, _) => return WITH_SPACING
case (_, x, _, _) if ScalaDocTokenType.ALL_SCALADOC_TOKENS.contains(x) => return Spacing.getReadOnlySpacing
case (x, _, _, _) if ScalaDocTokenType.ALL_SCALADOC_TOKENS.contains(x) => return Spacing.getReadOnlySpacing
case _ =>
}
//Xml
(leftNode.getElementType, rightNode.getElementType,
leftNode.getTreeParent.getElementType, rightNode.getTreeParent.getElementType) match {
case (ScalaElementTypes.XML_START_TAG, ScalaElementTypes.XML_END_TAG, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return WITHOUT_SPACING
case (ScalaElementTypes.XML_START_TAG, XmlTokenType.XML_DATA_CHARACTERS, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return WITHOUT_SPACING
case (XmlTokenType.XML_DATA_CHARACTERS, ScalaElementTypes.XML_END_TAG, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return WITHOUT_SPACING
case (ScalaElementTypes.XML_START_TAG, _, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return ON_NEW_LINE
case (_, ScalaElementTypes.XML_END_TAG, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return ON_NEW_LINE
case (XmlTokenType.XML_DATA_CHARACTERS, XmlTokenType.XML_DATA_CHARACTERS, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return WITH_SPACING
case (XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN, XmlTokenType.XML_CHAR_ENTITY_REF, _, _) =>
return Spacing.getReadOnlySpacing
case (XmlTokenType.XML_CHAR_ENTITY_REF, XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN, _, _) =>
return Spacing.getReadOnlySpacing
case (XmlTokenType.XML_DATA_CHARACTERS, XmlTokenType.XML_CDATA_END, _, _) =>
return Spacing.getReadOnlySpacing
case (XmlTokenType.XML_DATA_CHARACTERS, _, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return ON_NEW_LINE
case (XmlTokenType.XML_CDATA_START, XmlTokenType.XML_DATA_CHARACTERS, _, _) =>
return Spacing.getReadOnlySpacing
case (_, XmlTokenType.XML_DATA_CHARACTERS, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return ON_NEW_LINE
case (ScalaElementTypes.XML_EMPTY_TAG, ScalaElementTypes.XML_EMPTY_TAG, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return ON_NEW_LINE
case (_, ScalaTokenTypesEx.SCALA_IN_XML_INJECTION_START | ScalaTokenTypesEx.SCALA_IN_XML_INJECTION_END, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return NO_SPACING
case (XmlTokenType.XML_START_TAG_START | XmlTokenType.XML_END_TAG_START |
XmlTokenType.XML_CDATA_START | XmlTokenType.XML_PI_START, _, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return NO_SPACING
case (_, XmlTokenType.XML_TAG_END | XmlTokenType.XML_EMPTY_ELEMENT_END |
XmlTokenType.XML_CDATA_END | XmlTokenType.XML_PI_END, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return NO_SPACING
case (XmlTokenType.XML_NAME, ScalaElementTypes.XML_ATTRIBUTE, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return COMMON_SPACING
case (XmlTokenType.XML_NAME, XmlTokenType.XML_EQ, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return NO_SPACING
case (XmlTokenType.XML_EQ, XmlTokenType.XML_ATTRIBUTE_VALUE_START_DELIMITER |
ScalaTokenTypesEx.SCALA_IN_XML_INJECTION_START, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return NO_SPACING
case (XmlTokenType.XML_ATTRIBUTE_VALUE_START_DELIMITER, XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return NO_SPACING
case (XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN, XmlTokenType.XML_ATTRIBUTE_VALUE_END_DELIMITER, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return NO_SPACING
case (ScalaTokenTypesEx.SCALA_IN_XML_INJECTION_START | ScalaTokenTypesEx.SCALA_IN_XML_INJECTION_END, _, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return NO_SPACING
case (_, XmlTokenType.XML_DATA_CHARACTERS | XmlTokenType.XML_COMMENT_END
| XmlTokenType.XML_COMMENT_CHARACTERS, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return NO_SPACING
case (XmlTokenType.XML_DATA_CHARACTERS | XmlTokenType.XML_COMMENT_START
| XmlTokenType.XML_COMMENT_CHARACTERS, _, _, _) =>
if (scalaSettings.KEEP_XML_FORMATTING) return Spacing.getReadOnlySpacing
return NO_SPACING
case (el1, el2, _, _) if scalaSettings.KEEP_XML_FORMATTING &&
(XML_ELEMENTS.contains(el1) || XML_ELEMENTS.contains(el2)) => return Spacing.getReadOnlySpacing
case _ =>
}
if (leftElementType == tLPARENTHESIS &&
(leftPsi.getParent.isInstanceOf[ScParenthesisedExpr] ||
leftPsi.getParent.isInstanceOf[ScParameterizedTypeElement] ||
leftPsi.getParent.isInstanceOf[ScParenthesisedPattern])) {
if (settings.PARENTHESES_EXPRESSION_LPAREN_WRAP) {
if (settings.SPACE_WITHIN_PARENTHESES) return WITH_SPACING_DEPENDENT(leftPsi.getParent.getTextRange)
else return WITHOUT_SPACING_DEPENDENT(leftPsi.getParent.getTextRange)
}
else if (settings.SPACE_WITHIN_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
}
//for interpolated strings
if (rightElementType == tINTERPOLATED_STRING_ESCAPE) return Spacing.getReadOnlySpacing
if (Set(tINTERPOLATED_STRING, tINTERPOLATED_MULTILINE_STRING).contains(rightElementType)) return WITHOUT_SPACING
if (Set(leftElementType, rightElementType).contains(tINTERPOLATED_STRING_INJECTION) ||
rightElementType == tINTERPOLATED_STRING_END) return Spacing.getReadOnlySpacing
if (Option(leftNode.getTreeParent.getTreePrev).exists(_.getElementType == tINTERPOLATED_STRING_ID)) {
return Spacing.getReadOnlySpacing
}
@tailrec
def isMultiLineStringCase(psiElem: PsiElement): Boolean = {
psiElem match {
case ml: ScLiteral if ml.isMultiLineString =>
right.getTextRange.contains(new TextRange(rightNode.getTextRange.getStartOffset, rightNode.getTextRange.getStartOffset + 3))
case _: ScInfixExpr | _: ScReferenceExpression | _: ScMethodCall => isMultiLineStringCase(psiElem.getFirstChild)
case _ => false
}
}
//multiline strings
if (scalaSettings.MULTILINE_STRING_SUPORT != ScalaCodeStyleSettings.MULTILINE_STRING_NONE && isMultiLineStringCase(rightPsi)) {
(scalaSettings.MULTI_LINE_QUOTES_ON_NEW_LINE, scalaSettings.KEEP_MULTI_LINE_QUOTES) match {
case (true, true) =>
return if (rightPsi.getPrevSibling != null && rightPsi.getPrevSibling.getText.contains("\\n")) ON_NEW_LINE else WITH_SPACING
case (true, false) => return ON_NEW_LINE
case (false, false) => return WITH_SPACING_NO_KEEP
case (false, true) => return Spacing.createDependentLFSpacing(1, 1, rightPsi.getParent.getTextRange, true, 1)
}
}
leftPsi match {
case l: ScLiteral if l.isMultiLineString && rightNode == leftNode =>
val marginChar = "" + MultilineStringUtil.getMarginChar(leftPsi)
if (leftString == marginChar && rightString != "\\"\\"\\"" && rightString != marginChar) {
return Spacing.getReadOnlySpacing
}
case _ =>
}
if (rightElementType == tRPARENTHESIS &&
(rightPsi.getParent.isInstanceOf[ScParenthesisedExpr] ||
rightPsi.getParent.isInstanceOf[ScParameterizedTypeElement] ||
rightPsi.getParent.isInstanceOf[ScParenthesisedPattern])) {
if (settings.PARENTHESES_EXPRESSION_RPAREN_WRAP) {
if (settings.SPACE_WITHIN_PARENTHESES) return WITH_SPACING_DEPENDENT(rightPsi.getParent.getTextRange)
else return WITHOUT_SPACING_DEPENDENT(rightPsi.getParent.getTextRange)
}
else if (settings.SPACE_WITHIN_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
}
if (leftElementType == tIDENTIFIER &&
rightPsi.isInstanceOf[ScArgumentExprList] && !getText(rightNode, fileText).trim.startsWith("{")) {
if (settings.SPACE_BEFORE_METHOD_CALL_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
}
if (leftElementType == tLPARENTHESIS && (leftPsi.getParent.isInstanceOf[ScArgumentExprList] ||
leftPsi.getParent.isInstanceOf[ScPatternArgumentList])) {
if (settings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE) {
if (settings.SPACE_WITHIN_METHOD_CALL_PARENTHESES) return WITH_SPACING_DEPENDENT(leftPsi.getParent.getTextRange)
else return WITHOUT_SPACING_DEPENDENT(leftPsi.getParent.getTextRange)
} else if (settings.SPACE_WITHIN_METHOD_CALL_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
}
if (rightElementType == tRPARENTHESIS && (rightPsi.getParent.isInstanceOf[ScArgumentExprList] ||
rightPsi.getParent.isInstanceOf[ScPatternArgumentList])) {
if (settings.CALL_PARAMETERS_RPAREN_ON_NEXT_LINE) {
if (settings.SPACE_WITHIN_METHOD_CALL_PARENTHESES)
return WITH_SPACING_DEPENDENT(rightPsi.getParent.getTextRange)
else return WITHOUT_SPACING_DEPENDENT(rightPsi.getParent.getTextRange)
} else if (settings.SPACE_WITHIN_METHOD_CALL_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
}
if (leftElementType == tLPARENTHESIS && leftPsi.getParent.isInstanceOf[ScParameterClause]) {
if (settings.METHOD_PARAMETERS_LPAREN_ON_NEXT_LINE) {
if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING_DEPENDENT(leftPsi.getParent.getTextRange)
else return WITHOUT_SPACING_DEPENDENT(leftPsi.getParent.getTextRange)
} else if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
}
if (rightElementType == tRPARENTHESIS && rightPsi.getParent.isInstanceOf[ScParameterClause]) {
if (settings.METHOD_PARAMETERS_RPAREN_ON_NEXT_LINE) {
if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING_DEPENDENT(rightPsi.getParent.getTextRange)
else return WITHOUT_SPACING_DEPENDENT(rightPsi.getParent.getTextRange)
} else if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
}
//todo: spacing for early definitions
if (getText(rightNode, fileText).trim.startsWith("{")) {
if (rightPsi.isInstanceOf[ScImportSelectors]) {
return WITHOUT_SPACING
}
if (leftPsi.getParent.isInstanceOf[ScParenthesisedTypeElement]) {
return WITHOUT_SPACING
}
if (rightPsi.isInstanceOf[ScExtendsBlock] || rightPsi.isInstanceOf[ScEarlyDefinitions] || rightPsi.isInstanceOf[ScTemplateBody]) {
val extendsBlock = rightPsi match {
case e: ScExtendsBlock => e
case t: ScEarlyDefinitions => t.getParent
case t: ScTemplateBody => t.getParent
}
settings.CLASS_BRACE_STYLE match {
case CommonCodeStyleSettings.NEXT_LINE => return ON_NEW_LINE
case CommonCodeStyleSettings.NEXT_LINE_SHIFTED => return ON_NEW_LINE
case CommonCodeStyleSettings.NEXT_LINE_SHIFTED2 => return ON_NEW_LINE
case CommonCodeStyleSettings.END_OF_LINE =>
if (settings.SPACE_BEFORE_CLASS_LBRACE) return WITH_SPACING_NO_KEEP
else return WITHOUT_SPACING_NO_KEEP
case CommonCodeStyleSettings.NEXT_LINE_IF_WRAPPED =>
val startOffset = extendsBlock.getParent.getParent match {
case b: ScTypeDefinition => b.nameId.getTextRange.getStartOffset
case b: ScTemplateDefinition => b.nameId.getTextRange.getStartOffset
case b => b.getTextRange.getStartOffset
}
val range = new TextRange(startOffset, rightPsi.getTextRange.getStartOffset)
if (settings.SPACE_BEFORE_CLASS_LBRACE) return WITH_SPACING_DEPENDENT(range)
else return WITHOUT_SPACING_DEPENDENT(range)
}
} else {
rightPsi.getParent match {
case fun: ScFunction =>
settings.METHOD_BRACE_STYLE match {
case CommonCodeStyleSettings.NEXT_LINE => return ON_NEW_LINE
case CommonCodeStyleSettings.NEXT_LINE_SHIFTED => return ON_NEW_LINE
case CommonCodeStyleSettings.NEXT_LINE_SHIFTED2 => return ON_NEW_LINE
case CommonCodeStyleSettings.END_OF_LINE =>
if (settings.SPACE_BEFORE_METHOD_LBRACE) return WITH_SPACING_NO_KEEP
else return WITHOUT_SPACING_NO_KEEP
case CommonCodeStyleSettings.NEXT_LINE_IF_WRAPPED =>
val startOffset = fun.nameId.getTextRange.getStartOffset
val range = new TextRange(startOffset, rightPsi.getTextRange.getStartOffset)
if (settings.SPACE_BEFORE_METHOD_LBRACE) return WITH_SPACING_DEPENDENT(range)
else return WITHOUT_SPACING_DEPENDENT(range)
}
case _: ScBlock | _: ScEarlyDefinitions | _: ScTemplateBody if !rightPsi.getParent.isInstanceOf[ScTryBlock] => return ON_NEW_LINE
case parent =>
settings.BRACE_STYLE match {
case CommonCodeStyleSettings.NEXT_LINE => return ON_NEW_LINE
case CommonCodeStyleSettings.NEXT_LINE_SHIFTED => return ON_NEW_LINE
case CommonCodeStyleSettings.NEXT_LINE_SHIFTED2 => return ON_NEW_LINE
case CommonCodeStyleSettings.END_OF_LINE =>
return WITH_SPACING_NO_KEEP //todo: spacing settings
case CommonCodeStyleSettings.NEXT_LINE_IF_WRAPPED =>
val startOffset = parent.getTextRange.getStartOffset
val range = new TextRange(startOffset, rightPsi.getTextRange.getStartOffset)
return WITH_SPACING_DEPENDENT(range) //todo: spacing settings
}
}
}
}
if (leftPsi.isInstanceOf[ScStableCodeReferenceElement] && !rightPsi.isInstanceOf[ScPackaging]) {
leftPsi.getParent match {
case p: ScPackaging if p.reference == Some(leftPsi) =>
if (rightElementType != ScalaTokenTypes.tSEMICOLON && rightElementType != ScalaTokenTypes.tLBRACE) {
return Spacing.createSpacing(0, 0, settings.BLANK_LINES_AFTER_PACKAGE + 1, keepLineBreaks,
keepBlankLinesInCode)
}
case _ =>
}
}
if (leftPsi.isInstanceOf[ScPackaging]) {
if (rightElementType != ScalaTokenTypes.tSEMICOLON) {
return Spacing.createSpacing(0, 0, settings.BLANK_LINES_AFTER_PACKAGE + 1, keepLineBreaks, keepBlankLinesInCode)
}
}
if (rightPsi.isInstanceOf[ScPackaging]) {
if (leftPsi.isInstanceOf[ScStableCodeReferenceElement] || leftElementType == tLBRACE)
return Spacing.createSpacing(0, 0, 1, keepLineBreaks, keepBlankLinesInCode)
else
return Spacing.createSpacing(0, 0, settings.BLANK_LINES_BEFORE_PACKAGE + 1, keepLineBreaks,
keepBlankLinesInCode)
}
if (leftPsi.isInstanceOf[ScImportStmt] && !rightPsi.isInstanceOf[ScImportStmt]) {
if (rightElementType != ScalaTokenTypes.tSEMICOLON) {
leftPsi.getParent match {
case _: ScEarlyDefinitions | _: ScTemplateBody | _: ScalaFile | _: ScPackaging =>
return Spacing.createSpacing(0, 0, settings.BLANK_LINES_AFTER_IMPORTS + 1, keepLineBreaks,
keepBlankLinesInCode)
case _ =>
}
} else if (settings.SPACE_BEFORE_SEMICOLON) return WITH_SPACING
else return WITHOUT_SPACING
}
if (rightPsi.isInstanceOf[ScImportStmt] && !leftPsi.isInstanceOf[ScImportStmt]) {
if (leftElementType != ScalaTokenTypes.tSEMICOLON || !prevNotWithspace(leftPsi).isInstanceOf[ScImportStmt]) {
rightPsi.getParent match {
case _: ScEarlyDefinitions | _: ScTemplateBody | _: ScalaFile | _: ScPackaging =>
return Spacing.createSpacing(0, 0, settings.BLANK_LINES_AFTER_IMPORTS + 1, keepLineBreaks,
keepBlankLinesInCode)
case _ =>
}
}
}
if (leftPsi.isInstanceOf[ScImportStmt] || rightPsi.isInstanceOf[ScImportStmt]) {
return Spacing.createSpacing(0, 0, 1, keepLineBreaks, keepBlankLinesInDeclarations)
}
if (leftPsi.isInstanceOf[ScTypeDefinition]) {
if (rightElementType != ScalaTokenTypes.tSEMICOLON) {
leftPsi.getParent match {
case _: ScEarlyDefinitions | _: ScTemplateBody | _: ScalaFile | _: ScPackaging =>
return Spacing.createSpacing(0, 0, settings.BLANK_LINES_AROUND_CLASS + 1, keepLineBreaks,
keepBlankLinesInDeclarations)
case _ =>
}
}
}
if (rightPsi.isInstanceOf[PsiComment] || rightPsi.isInstanceOf[PsiDocComment]) {
var pseudoRightPsi = nextNotWithspace(rightPsi)
while (pseudoRightPsi != null &&
(pseudoRightPsi.isInstanceOf[PsiComment] || pseudoRightPsi.isInstanceOf[PsiDocComment])) {
pseudoRightPsi = nextNotWithspace(pseudoRightPsi)
}
if (pseudoRightPsi.isInstanceOf[ScTypeDefinition]) {
pseudoRightPsi.getParent match {
case _: ScEarlyDefinitions | _: ScTemplateBody | _: ScalaFile | _: ScPackaging =>
return Spacing.createSpacing(0, 0, settings.BLANK_LINES_AROUND_CLASS + 1, keepLineBreaks,
keepBlankLinesInDeclarations)
case _ =>
}
}
}
if (rightPsi.isInstanceOf[ScTypeDefinition]) {
if (leftPsi.isInstanceOf[PsiComment] || leftPsi.isInstanceOf[PsiDocComment]) {
return ON_NEW_LINE
}
rightPsi.getParent match {
case _: ScEarlyDefinitions | _: ScTemplateBody | _: ScalaFile | _: ScPackaging =>
return Spacing.createSpacing(0, 0, settings.BLANK_LINES_AROUND_CLASS + 1, keepLineBreaks,
keepBlankLinesInDeclarations)
case _ =>
}
}
if (rightNode.getElementType == ScalaTokenTypes.tRBRACE) {
rightNode.getTreeParent.getPsi match {
case block@(_: ScEarlyDefinitions | _: ScTemplateBody | _: ScPackaging | _: ScBlockExpr | _: ScMatchStmt |
_: ScTryBlock | _: ScCatchBlock) =>
val oneLineNonEmpty = leftString != "{" && !block.getText.contains('\\n')
val spaceInsideOneLineMethod = scalaSettings.INSERT_WHITESPACES_IN_SIMPLE_ONE_LINE_METHOD &&
rightNode.getTreeParent.getTreeParent != null && rightNode.getTreeParent.getTreeParent.getPsi.isInstanceOf[ScFunction]
val spaceInsideClosure = scalaSettings.SPACE_INSIDE_CLOSURE_BRACES && scalaSettings.KEEP_ONE_LINE_LAMBDAS_IN_ARG_LIST &&
(leftPsi.isInstanceOf[ScFunctionExpr] || block.isInstanceOf[ScBlockExpr] || leftPsi.isInstanceOf[ScCaseClauses])
val needsSpace = (oneLineNonEmpty && (spaceInsideOneLineMethod || spaceInsideClosure ||
scalaSettings.SPACES_IN_ONE_LINE_BLOCKS)) ||
leftPsi.isInstanceOf[PsiComment] && scalaSettings.KEEP_ONE_LINE_LAMBDAS_IN_ARG_LIST
val spaces = if (needsSpace) 1 else 0
return Spacing.createDependentLFSpacing(spaces, spaces, block.getTextRange, keepLineBreaks, keepBlankLinesBeforeRBrace)
case _ => return Spacing.createSpacing(0, 0, 0, keepLineBreaks, keepBlankLinesBeforeRBrace)
}
}
if (leftNode.getElementType == ScalaTokenTypes.tLBRACE) {
if (!scalaSettings.PLACE_CLOSURE_PARAMETERS_ON_NEW_LINE) {
val b = leftNode.getTreeParent.getPsi
val spacing = if (scalaSettings.SPACE_INSIDE_CLOSURE_BRACES) WITH_SPACING else WITHOUT_SPACING
rightNode.getElementType match {
case ScalaElementTypes.FUNCTION_EXPR => return spacing
case ScalaElementTypes.CASE_CLAUSES =>
if (b.getParent.isInstanceOf[ScArgumentExprList] || b.getParent.isInstanceOf[ScInfixExpr]) return spacing
case _ =>
}
}
leftNode.getTreeParent.getPsi match {
case b: ScTemplateBody if rightPsi.isInstanceOf[ScSelfTypeElement] =>
if (scalaSettings.PLACE_SELF_TYPE_ON_NEW_LINE) {
return ON_NEW_LINE
} else return WITHOUT_SPACING_NO_KEEP //todo: spacing setting
case b @ (_: ScEarlyDefinitions | _: ScTemplateBody) =>
if (settings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE && !getText(b.getNode, fileText).contains('\\n')) {
return Spacing.createDependentLFSpacing(0, 0, b.getTextRange, keepLineBreaks,
keepBlankLinesBeforeRBrace)
}
val c = PsiTreeUtil.getParentOfType(b, classOf[ScTemplateDefinition])
val setting = if (c.isInstanceOf[ScTypeDefinition]) settings.BLANK_LINES_AFTER_CLASS_HEADER
else settings.BLANK_LINES_AFTER_ANONYMOUS_CLASS_HEADER
return Spacing.createSpacing(0, 0, setting + 1, keepLineBreaks, keepBlankLinesInDeclarations)
case b: ScBlockExpr if b.getParent.isInstanceOf[ScFunction] =>
if (settings.KEEP_SIMPLE_METHODS_IN_ONE_LINE && !getText(b.getNode, fileText).contains('\\n')) {
val spaces = if (scalaSettings.INSERT_WHITESPACES_IN_SIMPLE_ONE_LINE_METHOD) 1 else 0
return Spacing.createDependentLFSpacing(spaces, spaces, b.getTextRange, keepLineBreaks,
keepBlankLinesBeforeRBrace)
}
return Spacing.createSpacing(0, 0, settings.BLANK_LINES_BEFORE_METHOD_BODY + 1, keepLineBreaks, keepBlankLinesInDeclarations)
case b: ScBlockExpr if scalaSettings.KEEP_ONE_LINE_LAMBDAS_IN_ARG_LIST &&
!b.getText.contains('\\n') && (rightPsi.isInstanceOf[ScCaseClauses] && b.getParent != null &&
b.getParent.isInstanceOf[ScArgumentExprList] || rightPsi.isInstanceOf[ScFunctionExpr]) =>
return Spacing.createDependentLFSpacing(1, 1, b.getTextRange, keepLineBreaks, keepBlankLinesBeforeRBrace)
case b: ScBlockExpr if scalaSettings.SPACE_INSIDE_CLOSURE_BRACES && !b.getText.contains('\\n') &&
scalaSettings.KEEP_ONE_LINE_LAMBDAS_IN_ARG_LIST &&
(b.getParent.isInstanceOf[ScArgumentExprList] || b.getParent.isInstanceOf[ScInfixExpr]) => return WITH_SPACING
case block@(_: ScPackaging | _: ScBlockExpr | _: ScMatchStmt |
_: ScTryBlock | _: ScCatchBlock) =>
val prev = block.getPrevSibling
if (settings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE || prev != null &&
prev.getNode.getElementType == tINTERPOLATED_STRING_INJECTION) {
val spaces = if (scalaSettings.SPACES_IN_ONE_LINE_BLOCKS) 1 else 0
return Spacing.createDependentLFSpacing(spaces, spaces, block.getTextRange, keepLineBreaks,
keepBlankLinesBeforeRBrace)
} else {
return ON_NEW_LINE
}
case _ => return Spacing.createSpacing(0, 0, 0, keepLineBreaks, keepBlankLinesBeforeRBrace)
}
}
if (leftPsi.isInstanceOf[ScSelfTypeElement]) {
val c = PsiTreeUtil.getParentOfType(leftPsi, classOf[ScTemplateDefinition])
val setting = if (c.isInstanceOf[ScTypeDefinition]) settings.BLANK_LINES_AFTER_CLASS_HEADER
else settings.BLANK_LINES_AFTER_ANONYMOUS_CLASS_HEADER
return Spacing.createSpacing(0, 0, setting + 1, keepLineBreaks, keepBlankLinesInDeclarations)
}
if (leftPsi.isInstanceOf[ScFunction] || leftPsi.isInstanceOf[ScValue] || leftPsi.isInstanceOf[ScVariable] || leftPsi.isInstanceOf[ScTypeAlias]) {
if (rightElementType != tSEMICOLON) {
leftPsi.getParent match {
case b @ (_: ScEarlyDefinitions | _: ScTemplateBody) =>
val p = PsiTreeUtil.getParentOfType(b, classOf[ScTemplateDefinition])
val setting = leftPsi match {
case _: ScFunction if p.isInstanceOf[ScTrait] => settings.BLANK_LINES_AROUND_METHOD_IN_INTERFACE
case _: ScFunction => settings.BLANK_LINES_AROUND_METHOD
case _ =>
rightPsi match {
case _: ScFunction if p.isInstanceOf[ScTrait] => settings.BLANK_LINES_AROUND_METHOD_IN_INTERFACE
case _: ScFunction => settings.BLANK_LINES_AROUND_METHOD
case _ if p.isInstanceOf[ScTrait] => settings.BLANK_LINES_AROUND_FIELD_IN_INTERFACE
case _ => settings.BLANK_LINES_AROUND_FIELD
}
}
if (rightPsi.isInstanceOf[PsiComment] && !fileText.
substring(leftPsi.getTextRange.getEndOffset, rightPsi.getTextRange.getEndOffset).contains("\\n"))
return COMMON_SPACING
else
return Spacing.createSpacing(0, 0, setting + 1, keepLineBreaks, keepBlankLinesInDeclarations)
case _ =>
}
}
}
if (rightPsi.isInstanceOf[PsiComment] || rightPsi.isInstanceOf[PsiDocComment]) {
var pseudoRightPsi = nextNotWithspace(rightPsi)
while (pseudoRightPsi != null &&
(pseudoRightPsi.isInstanceOf[PsiComment] || pseudoRightPsi.isInstanceOf[PsiDocComment])) {
pseudoRightPsi = nextNotWithspace(pseudoRightPsi)
}
if (pseudoRightPsi.isInstanceOf[ScFunction] || pseudoRightPsi.isInstanceOf[ScValue] ||
pseudoRightPsi.isInstanceOf[ScVariable] || pseudoRightPsi.isInstanceOf[ScTypeAlias]) {
pseudoRightPsi.getParent match {
case b @ (_: ScEarlyDefinitions | _: ScTemplateBody) =>
val p = PsiTreeUtil.getParentOfType(b, classOf[ScTemplateDefinition])
val setting = (pseudoRightPsi, p) match {
case (_: ScFunction, _: ScTrait) => settings.BLANK_LINES_AROUND_METHOD_IN_INTERFACE
case (_: ScFunction, _) => settings.BLANK_LINES_AROUND_METHOD
case (_, _: ScTrait) => settings.BLANK_LINES_AROUND_FIELD_IN_INTERFACE
case _ => settings.BLANK_LINES_AROUND_FIELD
}
return Spacing.createSpacing(0, 0, setting + 1, keepLineBreaks, keepBlankLinesInDeclarations)
case _ =>
}
}
}
if (rightPsi.isInstanceOf[ScFunction] || rightPsi.isInstanceOf[ScValue] || rightPsi.isInstanceOf[ScVariable] || rightPsi.isInstanceOf[ScTypeAlias]) {
if (leftPsi.isInstanceOf[PsiComment] || leftPsi.isInstanceOf[PsiDocComment]) {
return ON_NEW_LINE
}
rightPsi.getParent match {
case b @ (_: ScEarlyDefinitions | _: ScTemplateBody) =>
val p = PsiTreeUtil.getParentOfType(b, classOf[ScTemplateDefinition])
val setting = (rightPsi, p) match {
case (_: ScFunction, _: ScTrait) => settings.BLANK_LINES_AROUND_METHOD_IN_INTERFACE
case (_: ScFunction, _) => settings.BLANK_LINES_AROUND_METHOD
case (_, _: ScTrait) => settings.BLANK_LINES_AROUND_FIELD_IN_INTERFACE
case _ => settings.BLANK_LINES_AROUND_FIELD
}
return Spacing.createSpacing(0, 0, setting + 1, keepLineBreaks, keepBlankLinesInDeclarations)
case _ =>
}
}
//special else if treatment
if (leftNode.getElementType == ScalaTokenTypes.kELSE && rightNode.getPsi.isInstanceOf[ScIfStmt]) {
if (settings.SPECIAL_ELSE_IF_TREATMENT) return WITH_SPACING
else return ON_NEW_LINE
}
if (rightNode.getElementType == ScalaTokenTypes.kELSE && right.myLastNode != null) {
var lastNode = left.myLastNode
while (lastNode != null && (ScalaPsiUtil.isLineTerminator(lastNode.getPsi) ||
lastNode.getPsi.isInstanceOf[PsiWhiteSpace])) lastNode = lastNode.getTreePrev
if (lastNode == null) return WITH_SPACING_DEPENDENT(rightNode.getTreeParent.getTextRange)
else if (getText(lastNode, fileText).endsWith("}")) {
if (settings.ELSE_ON_NEW_LINE) return ON_NEW_LINE
else return WITH_SPACING
} else return WITH_SPACING_DEPENDENT(rightNode.getTreeParent.getTextRange)
}
if (leftElementType == ScalaElementTypes.MODIFIERS) {
if (rightPsi.isInstanceOf[ScParameters]) {
if (scalaSettings.SPACE_AFTER_MODIFIERS_CONSTRUCTOR) return WITH_SPACING
else return WITHOUT_SPACING
}
if (settings.MODIFIER_LIST_WRAP) return WITH_SPACING_DEPENDENT(leftNode.getTreeParent.getTextRange)
else return WITH_SPACING
}
if (rightPsi.isInstanceOf[ScCatchBlock]) {
if (settings.CATCH_ON_NEW_LINE) return ON_NEW_LINE
else return WITH_SPACING
}
if (rightPsi.isInstanceOf[ScFinallyBlock]) {
if (settings.FINALLY_ON_NEW_LINE) return ON_NEW_LINE
else return WITH_SPACING
}
if (rightElementType == kWHILE) {
if (settings.WHILE_ON_NEW_LINE) return WITH_SPACING_DEPENDENT(rightPsi.getParent.getTextRange)
else return WITH_SPACING
}
//old formatter spacing
//comments processing
if (leftNode.getPsi.isInstanceOf[ScDocComment]) return ON_NEW_LINE
if (rightNode.getPsi.isInstanceOf[ScDocComment] && leftNode.getElementType == ScalaTokenTypes.tLBRACE) return ON_NEW_LINE
if (rightNode.getPsi.isInstanceOf[ScDocComment]) return DOUBLE_LINE
if (rightNode.getPsi.isInstanceOf[PsiComment] || leftNode.getPsi.isInstanceOf[PsiComment])
return COMMON_SPACING
//; : . and , processing
if (rightString.length > 0 && rightString(0) == '.') {
if (rightNode.getElementType != ScalaTokenTypes.tFLOAT && !rightNode.getPsi.isInstanceOf[ScLiteral]) return WITHOUT_SPACING
}
if (rightString.length > 0 && rightString(0) == ',') {
if (settings.SPACE_BEFORE_COMMA) return WITH_SPACING
else return WITHOUT_SPACING
}
if (rightNode.getElementType == ScalaTokenTypes.tCOLON) {
var left = leftNode
// For operations like
// var Object_!= : Symbol = _
if (scalaSettings.SPACE_BEFORE_TYPE_COLON) return WITH_SPACING //todo:
while (left != null && left.getLastChildNode != null) {
left = left.getLastChildNode
}
val tp = PsiTreeUtil.getParentOfType(left.getPsi, classOf[ScTypeParam])
if (tp ne null) {
return if (tp.nameId.getNode eq left) WITHOUT_SPACING else WITH_SPACING
}
return if (left.getElementType == ScalaTokenTypes.tIDENTIFIER &&
ScalaNamesUtil.isIdentifier(getText(left, fileText) + ":")) WITH_SPACING else WITHOUT_SPACING
}
if (rightString.length > 0 && rightString(0) == ';') {
if (settings.SPACE_BEFORE_SEMICOLON && !rightNode.getTreeParent.getPsi.isInstanceOf[ScalaFile] &&
rightNode.getPsi.getParent.getParent.isInstanceOf[ScForStatement]) return WITH_SPACING
else if (!rightNode.getTreeParent.getPsi.isInstanceOf[ScalaFile] &&
rightNode.getPsi.getParent.getParent.isInstanceOf[ScForStatement]) return WITHOUT_SPACING
}
if (leftString.length > 0 && leftString(leftString.length - 1) == '.') {
return WITHOUT_SPACING
}
if (leftString.length > 0 && leftString(leftString.length - 1) == ',') {
if (settings.SPACE_AFTER_COMMA) return WITH_SPACING
else return WITHOUT_SPACING
}
if (leftNode.getElementType == ScalaTokenTypes.tCOLON) {
if (scalaSettings.SPACE_AFTER_TYPE_COLON) return WITH_SPACING
else return WITHOUT_SPACING
}
if (leftString.length > 0 && leftString(leftString.length - 1) == ';') {
if (settings.SPACE_AFTER_SEMICOLON && !rightNode.getTreeParent.getPsi.isInstanceOf[ScalaFile] &&
rightNode.getPsi.getParent.getParent.isInstanceOf[ScForStatement]) return WITH_SPACING
else if (!rightNode.getTreeParent.getPsi.isInstanceOf[ScalaFile] &&
rightNode.getPsi.getParent.getParent.isInstanceOf[ScForStatement]) return WITHOUT_SPACING
}
if (leftNode.getElementType == ScalaTokenTypes.tSEMICOLON) {
if (getText(leftNode.getTreeParent, fileText).indexOf('\\n') == -1) return WITH_SPACING
      else return ON_NEW_LINE
}
//processing left parenthesis (if it's from right) as Java cases
if (rightNode.getElementType == ScalaTokenTypes.tLPARENTHESIS) {
leftNode.getElementType match {
case ScalaTokenTypes.kIF =>
if (settings.SPACE_BEFORE_IF_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case ScalaTokenTypes.kWHILE =>
if (settings.SPACE_BEFORE_WHILE_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case ScalaTokenTypes.kFOR =>
if (settings.SPACE_BEFORE_FOR_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _ =>
}
}
if (rightNode.getPsi.isInstanceOf[ScParameters] &&
leftNode.getTreeParent.getPsi.isInstanceOf[ScFunction]) {
if (settings.SPACE_BEFORE_METHOD_PARENTHESES || (scalaSettings.SPACE_BEFORE_INFIX_LIKE_METHOD_PARENTHESES &&
ScalaNamesUtil.isOperatorName(leftNode.getTreeParent.getPsi.asInstanceOf[ScFunction].name)) ||
(scalaSettings.PRESERVE_SPACE_AFTER_METHOD_DECLARATION_NAME &&
rightNode.getTreePrev.getPsi.isInstanceOf[PsiWhiteSpace]))
return WITH_SPACING
else return WITHOUT_SPACING
}
if (rightPsi.isInstanceOf[ScArguments] &&
(leftNode.getTreeParent.getPsi.isInstanceOf[ScMethodCall] ||
leftNode.getTreeParent.getPsi.isInstanceOf[ScConstructor]) ||
rightPsi.isInstanceOf[ScArguments] && rightNode.getTreeParent.getPsi.isInstanceOf[ScSelfInvocation] &&
leftNode.getText == "this") {
if (settings.SPACE_BEFORE_METHOD_CALL_PARENTHESES && !rightString.startsWith("{") &&
(leftNode.getLastChildNode == null || !leftNode.getLastChildNode.getPsi.isInstanceOf[ScArguments]) &&
!leftPsi.isInstanceOf[ScArguments])
return WITH_SPACING
else if (scalaSettings.SPACE_BEFORE_BRACE_METHOD_CALL && rightString.startsWith("{")) return WITH_SPACING
else return WITHOUT_SPACING
}
if (rightNode.getTreeParent.getPsi.isInstanceOf[ScSelfInvocation] &&
leftNode.getTreeParent.getPsi.isInstanceOf[ScSelfInvocation] && leftPsi.isInstanceOf[ScArguments] &&
rightPsi.isInstanceOf[ScArguments]) {
return WITHOUT_SPACING
}
// SCL-2601
if ((rightNode.getPsi.isInstanceOf[ScUnitExpr] || rightNode.getPsi.isInstanceOf[ScTuple]) &&
leftNode.getTreeParent.getPsi.isInstanceOf[ScInfixExpr]) {
if (scalaSettings.SPACE_BEFORE_INFIX_METHOD_CALL_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
}
//processing left parenthesis (if it's from right) only Scala cases
if (rightNode.getPsi.isInstanceOf[ScParameters] &&
leftNode.getTreeParent.getPsi.isInstanceOf[ScPrimaryConstructor]) {
if (settings.SPACE_BEFORE_METHOD_PARENTHESES || (scalaSettings.SPACE_BEFORE_INFIX_LIKE_METHOD_PARENTHESES &&
ScalaNamesUtil.isOperatorName(leftNode.getTreeParent.getPsi.asInstanceOf[ScPrimaryConstructor].name)) ||
(scalaSettings.PRESERVE_SPACE_AFTER_METHOD_DECLARATION_NAME &&
rightNode.getTreePrev.getPsi.isInstanceOf[PsiWhiteSpace]))
return WITH_SPACING
else return WITHOUT_SPACING
}
rightNode.getPsi match {
case _: ScPrimaryConstructor if rightString.startsWith("(") =>
if (settings.SPACE_BEFORE_METHOD_PARENTHESES ||
(scalaSettings.SPACE_BEFORE_INFIX_LIKE_METHOD_PARENTHESES && ScalaNamesUtil.isOperatorName(leftString)) ||
(scalaSettings.PRESERVE_SPACE_AFTER_METHOD_DECLARATION_NAME &&
rightNode.getTreePrev.getPsi.isInstanceOf[PsiWhiteSpace]))
return WITH_SPACING
else return WITHOUT_SPACING
case _: ScPrimaryConstructor =>
return WITH_SPACING
case _ =>
}
if (leftNode.getPsi.isInstanceOf[ScParameterClause] &&
rightNode.getPsi.isInstanceOf[ScParameterClause]) {
return WITHOUT_SPACING //todo: add setting
}
if (rightNode.getPsi.isInstanceOf[ScPatternArgumentList] &&
rightNode.getTreeParent.getPsi.isInstanceOf[ScConstructorPattern]) {
if (settings.SPACE_BEFORE_METHOD_CALL_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
}
//processing left parenthesis (if it's from left)
if (leftNode.getElementType == ScalaTokenTypes.tLPARENTHESIS) {
if (rightNode.getElementType == ScalaTokenTypes.tRPARENTHESIS)
return WITHOUT_SPACING
leftNode.getTreeParent.getPsi match {
case _: ScForStatement =>
if (settings.SPACE_WITHIN_FOR_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScIfStmt =>
if (settings.SPACE_WITHIN_IF_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScWhileStmt | _: ScDoStmt =>
if (settings.SPACE_WITHIN_WHILE_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScParenthesisedExpr =>
if (settings.SPACE_WITHIN_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case x: ScParameterClause if x.getParent.getParent.isInstanceOf[ScFunction] =>
if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case x: ScParameterClause if x.getParent.getParent.isInstanceOf[ScPrimaryConstructor] =>
if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScPatternArgumentList =>
if (settings.SPACE_WITHIN_METHOD_CALL_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScArguments =>
if (settings.SPACE_WITHIN_METHOD_CALL_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScParenthesisedPattern =>
if (settings.SPACE_WITHIN_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScTuplePattern =>
WITHOUT_SPACING //todo: add setting
case _: ScParenthesisedTypeElement =>
if (settings.SPACE_WITHIN_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScTupleTypeElement =>
WITHOUT_SPACING //todo: add setting
case _: ScTuple =>
WITHOUT_SPACING //todo: add setting
case _: ScBindings =>
if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScFunctionalTypeElement =>
if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _ =>
}
}
//processing right parenthesis (if it's from right)
if (rightNode.getElementType == ScalaTokenTypes.tRPARENTHESIS) {
if (leftNode.getElementType == ScalaTokenTypes.tLPARENTHESIS)
return WITHOUT_SPACING
rightNode.getTreeParent.getPsi match {
case _: ScForStatement =>
if (settings.SPACE_WITHIN_FOR_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScIfStmt =>
if (settings.SPACE_WITHIN_IF_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScWhileStmt | _: ScDoStmt =>
if (settings.SPACE_WITHIN_WHILE_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScParenthesisedExpr =>
if (settings.SPACE_WITHIN_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case x: ScParameterClause if x.getParent.getParent.isInstanceOf[ScFunction] =>
if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case x: ScParameterClause if x.getParent.getParent.isInstanceOf[ScPrimaryConstructor] =>
if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScPatternArgumentList =>
if (settings.SPACE_WITHIN_METHOD_CALL_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScArguments =>
if (settings.SPACE_WITHIN_METHOD_CALL_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScParenthesisedPattern =>
if (settings.SPACE_WITHIN_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScTuplePattern =>
WITHOUT_SPACING //todo: add setting
case _: ScParenthesisedTypeElement =>
if (settings.SPACE_WITHIN_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScTupleTypeElement =>
WITHOUT_SPACING //todo: add setting
case _: ScTuple =>
WITHOUT_SPACING //todo: add setting
case _: ScBindings =>
if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScFunctionalTypeElement =>
if (settings.SPACE_WITHIN_METHOD_PARENTHESES) return WITH_SPACING
else return WITHOUT_SPACING
case _ =>
}
}
    //processing square brackets
if (leftNode.getElementType == ScalaTokenTypes.tLSQBRACKET) {
if (rightNode.getElementType == ScalaTokenTypes.tRSQBRACKET) {
return WITHOUT_SPACING
}
else {
if (settings.SPACE_WITHIN_BRACKETS) return WITH_SPACING
else return WITHOUT_SPACING
}
}
if (rightNode.getElementType == ScalaTokenTypes.tRSQBRACKET) {
if (settings.SPACE_WITHIN_BRACKETS) return WITH_SPACING
else return WITHOUT_SPACING
}
if (rightString.length > 0 &&
rightString(0) == '[') {
return WITHOUT_SPACING
}
//processing before left brace
if (rightString.length > 0 && rightString(0) == '{' && rightNode.getElementType != ScalaTokenTypesEx.SCALA_IN_XML_INJECTION_START) {
val parentPsi = rightNode.getTreeParent.getPsi
parentPsi match {
case _: ScTypeDefinition =>
if (settings.SPACE_BEFORE_CLASS_LBRACE) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScFunction =>
if (settings.SPACE_BEFORE_METHOD_LBRACE) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScIfStmt =>
        if (settings.SPACE_BEFORE_IF_LBRACE && leftNode.getElementType != ScalaTokenTypes.kELSE) return WITH_SPACING
else if (settings.SPACE_BEFORE_ELSE_LBRACE && leftNode.getElementType == ScalaTokenTypes.kELSE) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScWhileStmt =>
if (settings.SPACE_BEFORE_WHILE_LBRACE) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScForStatement =>
        if (settings.SPACE_BEFORE_FOR_LBRACE && leftNode.getElementType != ScalaTokenTypes.kFOR) return WITH_SPACING
        else return WITHOUT_SPACING
case _: ScDoStmt =>
if (settings.SPACE_BEFORE_DO_LBRACE) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScMatchStmt =>
if (scalaSettings.SPACE_BEFORE_MATCH_LBRACE) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScTryBlock =>
if (settings.SPACE_BEFORE_TRY_LBRACE) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScCatchBlock =>
if (settings.SPACE_BEFORE_CATCH_LBRACE) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScFinallyBlock =>
if (settings.SPACE_BEFORE_FINALLY_LBRACE) return WITH_SPACING
else return WITHOUT_SPACING
case _: ScExistentialClause =>
return WITH_SPACING //todo: add setting
case _: ScAnnotationExpr =>
return WITH_SPACING //todo: add setting
case _: ScExtendsBlock =>
return WITH_SPACING //todo: add setting
case _: ScPackaging =>
return WITH_SPACING //todo: add setting
case _ =>
return WITH_SPACING
}
}
//special for "case <caret> =>" (for SurroundWith)
if (leftNode.getElementType == ScalaTokenTypes.kCASE &&
rightNode.getElementType == ScalaTokenTypes.tFUNTYPE) return Spacing.createSpacing(2, 2, 0, false, 0)
//Case Clauses case
if (leftNode.getElementType == ScalaElementTypes.CASE_CLAUSE && rightNode.getElementType == ScalaElementTypes.CASE_CLAUSE) {
      val block = leftNode.getTreeParent
      return WITH_SPACING_DEPENDENT(block.getTreeParent.getTextRange)
}
(leftNode.getElementType, rightNode.getElementType,
leftNode.getTreeParent.getElementType, rightNode.getTreeParent.getElementType) match {
case (ScalaTokenTypes.tFUNTYPE, ScalaElementTypes.PARAM_CLAUSES, ScalaElementTypes.FUNCTION_EXPR, _) //TODO: is this even ever used?
if !scalaSettings.PLACE_CLOSURE_PARAMETERS_ON_NEW_LINE =>
if (rightString.startsWith("{")) WITH_SPACING
else if (leftNode.getTreeParent.getTextRange.substring(fileText).contains("\\n")) ON_NEW_LINE
else WITH_SPACING
//annotation
case (_, ScalaElementTypes.ANNOTATIONS, ScalaElementTypes.ANNOT_TYPE, _) => WITHOUT_SPACING
//case for package statement
case (ScalaElementTypes.REFERENCE, ret, _, _) if ret != ScalaElementTypes.PACKAGING &&
leftNode.getTreePrev != null && leftNode.getTreePrev.getTreePrev != null &&
leftNode.getTreePrev.getTreePrev.getElementType == ScalaTokenTypes.kPACKAGE => DOUBLE_LINE
case (ScalaElementTypes.REFERENCE, ScalaElementTypes.PACKAGING, _, _) if leftNode.getTreePrev != null &&
leftNode.getTreePrev.getTreePrev != null &&
leftNode.getTreePrev.getTreePrev.getElementType == ScalaTokenTypes.kPACKAGE => ON_NEW_LINE
      //case for covariant or contravariant type params
case (ScalaTokenTypes.tIDENTIFIER, ScalaTokenTypes.tIDENTIFIER, ScalaElementTypes.TYPE_PARAM, ScalaElementTypes.TYPE_PARAM) => NO_SPACING
//class params
case (ScalaTokenTypes.tIDENTIFIER | ScalaElementTypes.TYPE_PARAM_CLAUSE, ScalaElementTypes.PRIMARY_CONSTRUCTOR, _, _)
if rightNode.getPsi.asInstanceOf[ScPrimaryConstructor].annotations.isEmpty &&
!rightNode.getPsi.asInstanceOf[ScPrimaryConstructor].hasModifier => NO_SPACING
//Type*
case (_, ScalaTokenTypes.tIDENTIFIER, _, ScalaElementTypes.PARAM_TYPE) if rightString == "*" => NO_SPACING
//Parameters
case (ScalaTokenTypes.tIDENTIFIER, ScalaElementTypes.PARAM_CLAUSES, _, _) => NO_SPACING
case (_, ScalaElementTypes.TYPE_ARGS, _, (ScalaElementTypes.TYPE_GENERIC_CALL | ScalaElementTypes.GENERIC_CALL)) => NO_SPACING
case (_, ScalaElementTypes.PATTERN_ARGS, _, ScalaElementTypes.CONSTRUCTOR_PATTERN) => NO_SPACING
//Annotation
case (ScalaTokenTypes.tAT, _, _, _) if rightPsi.isInstanceOf[ScXmlPattern] => WITH_SPACING
case (ScalaTokenTypes.tAT, _, _, _) => NO_SPACING
case (ScalaTokenTypes.tIDENTIFIER, ScalaTokenTypes.tAT, ScalaElementTypes.NAMING_PATTERN, _) => NO_SPACING
case (_, ScalaTokenTypes.tAT, _, _) => NO_SPACING_WITH_NEWLINE
case (ScalaElementTypes.ANNOTATION, _, _, _) => COMMON_SPACING
//Prefix Identifier
case ((ScalaElementTypes.REFERENCE_EXPRESSION | ScalaTokenTypes.tIDENTIFIER), _,
(ScalaElementTypes.LITERAL | ScalaElementTypes.PREFIX_EXPR
| ScalaElementTypes.VARIANT_TYPE_PARAM), _) => NO_SPACING
//Braces
case (ScalaTokenTypes.tLBRACE, ScalaTokenTypes.tRBRACE, _, _) => NO_SPACING
case (ScalaTokenTypes.tLBRACE, _,
(ScalaElementTypes.TEMPLATE_BODY | ScalaElementTypes.MATCH_STMT | ScalaElementTypes.REFINEMENT |
ScalaElementTypes.EXISTENTIAL_CLAUSE | ScalaElementTypes.BLOCK_EXPR), _) => IMPORT_BETWEEN_SPACING
case (ScalaTokenTypes.tLBRACE, _, _, _) => NO_SPACING_WITH_NEWLINE
case (_, ScalaTokenTypes.tRBRACE, (ScalaElementTypes.TEMPLATE_BODY | ScalaElementTypes.MATCH_STMT | ScalaElementTypes.REFINEMENT |
ScalaElementTypes.EXISTENTIAL_CLAUSE | ScalaElementTypes.BLOCK_EXPR), _) => IMPORT_BETWEEN_SPACING
case (_, ScalaTokenTypes.tRBRACE, _, _) => NO_SPACING_WITH_NEWLINE
//Semicolon
case (ScalaTokenTypes.tSEMICOLON, _, parentType, _) =>
if ((BLOCK_ELEMENT_TYPES contains parentType) && !getText(leftNode.getTreeParent, fileText).contains("\\n")) COMMON_SPACING
else IMPORT_BETWEEN_SPACING
case (_, ScalaTokenTypes.tSEMICOLON, _, _) =>
NO_SPACING
//Imports
case (ScalaElementTypes.IMPORT_STMT, ScalaElementTypes.IMPORT_STMT, _, _) => IMPORT_BETWEEN_SPACING
case (ScalaElementTypes.IMPORT_STMT, _, ScalaElementTypes.FILE, _) => DOUBLE_LINE
case (ScalaElementTypes.IMPORT_STMT, _, ScalaElementTypes.PACKAGING, _) => DOUBLE_LINE
case (ScalaElementTypes.IMPORT_STMT, _, _, _) => IMPORT_BETWEEN_SPACING
//Dot
case (ScalaTokenTypes.tDOT, _, _, _) => NO_SPACING_WITH_NEWLINE
case (_, ScalaTokenTypes.tDOT, _, _) => NO_SPACING
//Comma
case (ScalaTokenTypes.tCOMMA, _, _, _) => COMMON_SPACING
case (_, ScalaTokenTypes.tCOMMA, _, _) => NO_SPACING
//Parenthesises and Brackets
case ((ScalaTokenTypes.tLPARENTHESIS | ScalaTokenTypes.tLSQBRACKET), _, _, _) => NO_SPACING_WITH_NEWLINE
case (_, ScalaTokenTypes.tLSQBRACKET, _, _) => NO_SPACING
case (_, ScalaTokenTypes.tLPARENTHESIS, ScalaElementTypes.CONSTRUCTOR_PATTERN, _) => NO_SPACING
case ((ScalaTokenTypes.tRPARENTHESIS | ScalaTokenTypes.tRSQBRACKET), _, _, _) => COMMON_SPACING
case (_, (ScalaTokenTypes.tRPARENTHESIS | ScalaTokenTypes.tRSQBRACKET), _, _) => NO_SPACING_WITH_NEWLINE
//Case clauses
case (ScalaElementTypes.CASE_CLAUSE, _, _, _) => IMPORT_BETWEEN_SPACING
case (_, ScalaElementTypes.CASE_CLAUSE, _, _) => IMPORT_BETWEEN_SPACING
//#
case (ScalaTokenTypes.tINNER_CLASS, _, _, _) => NO_SPACING
case (ScalaTokenTypes.tUNDER, ScalaTokenTypes.tIDENTIFIER, _, _) =>
leftNode.getPsi.getNextSibling match {
case _: PsiWhiteSpace => COMMON_SPACING
case _ => NO_SPACING
}
case (_, ScalaTokenTypes.tINNER_CLASS, _, _) => NO_SPACING
//Other cases
case _ =>
COMMON_SPACING
}
}
}
|
consulo/consulo-scala
|
src/org/jetbrains/plugins/scala/lang/formatting/processors/ScalaSpacingProcessor.scala
|
Scala
|
apache-2.0
| 58,102
|
package scrabble
abstract class Tile {
val letter: Char
val value: Int
override def toString = letter.toString
}
case class Letter(letter: Char, value: Int) extends Tile {
override def equals(that: Any) = that match {
case Letter(chr, vl) => (chr == letter) && (vl == value)
case _ => false
}
}
case class BlankLetter(letter: Char = '_') extends Tile {
val value = 0
override def equals(that: Any) = that match {
case BlankLetter(chr) => true
case _ => false
}
}
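// A quick sanity check (not part of the original file) of the value-aware equality
// defined above; all three lines are plain REPL-style expressions:
//   Letter('A', 1) == Letter('A', 1)   // true
//   Letter('A', 1) == Letter('A', 2)   // false: same letter, different value
//   BlankLetter() == BlankLetter('z')  // true: blanks compare equal regardless of letter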
|
Happy0/scalascrabble
|
src/main/scala/Letter.scala
|
Scala
|
gpl-2.0
| 505
|
/*
* Licensed to Intel Corporation under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Intel Corporation licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.{Activity, AbstractModule}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{T, Table}
import scala.reflect.ClassTag
/**
* This class is a container for a single module which will be applied
* to all input elements. The member module is cloned as necessary to
* process all input elements.
*
* @param module the member module that is applied to each input element
*/
@SerialVersionUID( 4403280698280280268L)
class MapTable[T: ClassTag](
var module: AbstractModule[_ <: Activity, _ <: Activity, T] = null)
(implicit ev: TensorNumeric[T]) extends Container[Table, Table, T] {
private def extend(n: Int): Unit = {
modules.update(0, module.asInstanceOf[AbstractModule[Activity, Activity, T]])
var i = 1
while (i <= n && modules.size <= i) {
modules.append(module
.cloneModule()
.asInstanceOf[AbstractModule[Activity, Activity, T]])
i += 1
}
}
override def add(module: AbstractModule[_ <: Activity, _ <: Activity, T]): this.type = {
require(module != null, "Single module required")
this.module = module
if (modules.nonEmpty) {
modules.update(0, module.asInstanceOf[AbstractModule[Activity, Activity, T]])
} else {
modules.append(module.asInstanceOf[AbstractModule[Activity, Activity, T]])
}
this
}
override def updateOutput(input: Table): Table = {
extend(input.length())
var i = 0
while (i < input.length()) {
output.update(i + 1, modules(i).updateOutput(input(i + 1)))
i += 1
}
output
}
override def updateGradInput(input: Table, gradOutput: Table): Table = {
extend(input.length())
var i = 0
while (i < input.length()) {
gradInput.update(i + 1, modules(i).updateGradInput(input(i + 1), gradOutput(i + 1)))
i += 1
}
gradInput
}
override def accGradParameters(input: Table, gradOutput: Table,
scale: Double = 1.0): Unit = {
extend(input.length())
var i = 0
while (i < input.length()) {
modules(i).accGradParameters(input(i + 1), gradOutput(i + 1), scale)
i += 1
}
}
override def zeroGradParameters(): Unit = {
if (module != null) {
module.zeroGradParameters()
}
}
override def updateParameters(learningRate: T): Unit = {
if (module != null) {
module.updateParameters(learningRate)
}
}
override def toString(): String = {
val tab = " "
val extlast = " "
val line = "\n"
var str = "nn.MapTable"
if (module != null) {
str += s"{$line$tab$module$line}"
} else {
str += " { }"
}
str
}
}
object MapTable {
def apply[@specialized(Float, Double) T: ClassTag](
module: AbstractModule[_ <: Activity, _ <: Activity, T] = null
)(implicit ev: TensorNumeric[T]) : MapTable[T] = {
new MapTable[T](module)
}
}
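// Minimal usage sketch (not part of the original file). `Linear`, `Tensor` and `T`
// are assumed from the surrounding BigDL API; `forward` comes from AbstractModule.
//
//   val map = MapTable[Float](Linear(10, 5))
//   val input = T(Tensor[Float](2, 10).rand(), Tensor[Float](2, 10).rand())
//   val output = map.forward(input)  // a Table with one Linear output per input element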
|
dding3/BigDL
|
dl/src/main/scala/com/intel/analytics/bigdl/nn/MapTable.scala
|
Scala
|
apache-2.0
| 3,710
|
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.scalastyle.scalariform
import org.junit.Test
import org.scalastyle.file.CheckerTest
import org.scalatest.junit.AssertionsForJUnit
// scalastyle:off magic.number
class SimplifyBooleanExpressionCheckerTest extends AssertionsForJUnit with CheckerTest {
protected val classUnderTest = classOf[SimplifyBooleanExpressionChecker]
protected val key = "simplify.boolean.expression"
@Test def testEquals(): Unit = {
val source = """
package foobar
object Foobar {
val b = true
val foo01 = (b == true)
val foo02 = (b != true)
val foo03 = (b == false)
val foo04 = (b != false)
}"""
assertErrors(List(columnError(6, 15), columnError(7, 15), columnError(8, 15), columnError(9, 15)), source)
}
@Test def testErrors(): Unit = {
val source = """
package foobar
object Foobar {
val b = true
val foo01 = (b == true)
val foo02 = !false
val foo03 = !true
}"""
assertErrors(List(columnError(6, 15), columnError(7, 14), columnError(8, 14)), source)
}
@Test def testErrors2(): Unit = {
val source = """
package foobar
object Foobar {
val b = true
val foo04 = b && true
val foo05 = true && b
val foo06 = b && false
val foo07 = false && b
}"""
assertErrors(List(columnError(6, 14), columnError(7, 14), columnError(8, 14), columnError(9, 14)), source)
}
@Test def testErrors3(): Unit = {
val source = """
package foobar
object Foobar {
val b = true
val foo08 = b || true
val foo09 = true || b
val foo10 = b || false
val foo11 = false || b
}"""
assertErrors(List(columnError(6, 14), columnError(7, 14), columnError(8, 14), columnError(9, 14)), source)
}
@Test def testOK(): Unit = {
val source = """
package foobar
object Foobar {
val b = true
val foo12 = b && b // doesn't match
val foo13 = (b && b) || b
val foo14 = b && (true)
}"""
assertErrors(List(columnError(8, 14)), source)
}
}
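// For reference (not part of the original file), the simplifications exercised by the
// tests above; the checker flags the left-hand forms as reducible:
//   b == true   -->  b           !false      -->  true
//   b != false  -->  b           b && true   -->  b
//   b || false  -->  b           false || b  -->  b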
|
scalastyle/scalastyle
|
src/test/scala/org/scalastyle/scalariform/SimplifyBooleanExpressionCheckerTest.scala
|
Scala
|
apache-2.0
| 2,629
|
package com.github.mtailor.srtplayground.actors
import akka.pattern.ask
import com.github.mtailor.srtplayground.actors.MonitoringActor.DownloadsStarted
import com.github.mtailor.srtplayground.helpers.BaseActor
import org.jsoup.Jsoup
import spray.http.HttpResponse
import scala.collection.JavaConverters._
class MediaPagesActor extends BaseActor {
override def receive = {
case movieNameInUrl: String =>
(subsceneHttpCallsActor ? s"/subtitles/$movieNameInUrl")
.mapTo[HttpResponse]
.foreach { response =>
// find all links
extractSubtitleUrlsOfMediaPage(response.entity.asString) match {
case Seq() => throw new RuntimeException(s"0 subtitle found for $movieNameInUrl")
case urls =>
monitoringActor ! DownloadsStarted(urls.size)
urls.foreach { url =>
// and send them
subtitlePagesActor ! url
}
}
}
}
private def extractSubtitleUrlsOfMediaPage(bodyOfMediaPage: String): Seq[String] =
Jsoup
.parse(bodyOfMediaPage)
.select(".subtitles table td.a1 a")
.asScala
.map (_.attr("href"))
.toSeq
}
|
mtailor/srt-playground
|
src/main/scala/com/github/mtailor/srtplayground/actors/MediaPagesActor.scala
|
Scala
|
apache-2.0
| 1,196
|
package com.bot4s.telegram.api
import cats.Applicative
package object declarative {
type Action[F[_], T] = T => F[Unit]
type Filter[T] = T => Boolean
type Args = Seq[String]
type ActionWithArgs[F[_], T] = T => Args => F[Unit]
type Extractor[T, R] = T => Option[R]
/**
* Adds a filter to an action handler.
*
* {{{
* when(onCommand('secret), isSenderAuthenticated) {
* implicit msg =>
* reply("42")
* }
* }}}
*
* @param actionInstaller e.g onMessage, onCommand('hello)
* @param action Action executed if the filter pass.
*/
def when[F[_]: Applicative, T](actionInstaller: Action[F, T] => Unit, filter: Filter[T])(
action: Action[F, T]
): Unit = {
val newAction = { t: T =>
if (filter(t)) {
action(t)
} else {
Applicative[F].pure(())
}
}
actionInstaller(newAction)
}
/**
* Adds a filter to an action handler; including a fallback action.
*
* {{{
* whenOrElse(onCommand('secret), isSenderAuthenticated) {
* implicit msg =>
* reply("42")
* } /* or else */ {
* reply("You must /login first")(_)
* }
* }}}
*
* @param actionInstaller e.g onMessage, onCommand('hello)
* @param action Action executed if the filter pass.
* @param elseAction Action executed if the filter does not pass.
*/
def whenOrElse[F[_], T](actionInstaller: Action[F, T] => Unit, filter: Filter[T])(
action: Action[F, T]
)(elseAction: Action[F, T]): Unit = {
val newAction = { t: T =>
if (filter(t))
action(t)
else
elseAction(t)
}
actionInstaller(newAction)
}
}
|
mukel/telegrambot4s
|
core/src/com/bot4s/telegram/api/declarative/package.scala
|
Scala
|
apache-2.0
| 1,698
|
/*
* RunningWindowMax.scala
* (FScape)
*
* Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.fscape
package stream
import akka.stream.{Attributes, FanInShape3, Inlet, Outlet}
import de.sciss.fscape.stream.impl.{NodeImpl, RunningWindowValueLogic, StageImpl}
import scala.math.max
object RunningWindowMax {
def apply[A, E <: BufElem[A]](in: Outlet[E], size: OutI, gate: OutI)
(implicit b: Builder, tpe: StreamType[A, E]): Outlet[E] = {
val stage0 = new Stage[A, E](b.layer)
val stage = b.add(stage0)
b.connect(in , stage.in0)
b.connect(size, stage.in1)
b.connect(gate, stage.in2)
stage.out
}
private final val name = "RunningWindowMax"
private type Shp[E] = FanInShape3[E, BufI, BufI, E]
private final class Stage[A, E <: BufElem[A]](layer: Layer)(implicit a: Allocator, tpe: StreamType[A, E])
extends StageImpl[Shp[E]](name) { stage =>
val shape: Shape = new FanInShape3(
in0 = Inlet [E] (s"${stage.name}.in" ),
in1 = InI (s"${stage.name}.size"),
in2 = InI (s"${stage.name}.gate"),
out = Outlet[E] (s"${stage.name}.out" )
)
def createLogic(attr: Attributes): NodeImpl[Shape] = {
val res: RunningWindowValueLogic[_, _] = if (tpe.isDouble) {
new RunningWindowValueLogic[Double, BufD](stage.name, layer, shape.asInstanceOf[Shp[BufD]])(max)
} else if (tpe.isInt) {
new RunningWindowValueLogic[Int , BufI](stage.name, layer, shape.asInstanceOf[Shp[BufI]])(max)
} else {
assert (tpe.isLong)
new RunningWindowValueLogic[Long , BufL](stage.name, layer, shape.asInstanceOf[Shp[BufL]])(max)
}
res.asInstanceOf[RunningWindowValueLogic[A, E]]
}
}
}
|
Sciss/FScape-next
|
core/shared/src/main/scala/de/sciss/fscape/stream/RunningWindowMax.scala
|
Scala
|
agpl-3.0
| 1,953
|
package controllers
import javax.inject._
import play.api._
import play.api.mvc._
/**
* This controller creates an `Action` to handle HTTP requests to the
* application's home page.
*/
@Singleton
class HomeController @Inject() extends Controller {
/**
* Create an Action to render an HTML page.
*
* The configuration in the `routes` file means that this method
* will be called when the application receives a `GET` request with
* a path of `/`.
*/
def index = Action { implicit request =>
// Ok(views.html.index())
Ok("Welcome to PlayBack!")
}
}
|
makersu/playback-scala.g8
|
src/main/g8/app/controllers/HomeController.scala
|
Scala
|
apache-2.0
| 585
|
/**
* Generated by API Builder - https://www.apibuilder.io
* Service version: 0.14.85
* apibuilder 0.14.93 app.apibuilder.io/apicollective/apibuilder-generator/latest/play_2_8_client
*/
package io.apibuilder.generator.v0.models {
/**
* An attribute represents a key/value pair that is optionally used to provide
* additional instructions / data to the code generator. An example could be an
* attribute to specify the root import path for a go client.
*/
final case class Attribute(
name: String,
value: String
)
/**
* @param code Machine readable code for this specific error message
* @param message Description of the error
*/
final case class Error(
code: String,
message: String
)
/**
* Represents a source file
*
* @param name The recommended name for the file.
* @param dir The recommended directory path for the file where appropriate.
* @param contents The actual source code.
*/
final case class File(
name: String,
dir: _root_.scala.Option[String] = None,
contents: String,
flags: _root_.scala.Option[Seq[io.apibuilder.generator.v0.models.FileFlag]] = None
)
/**
* The generator metadata.
*
* @param language A comma separated list of the programming language(s) that this generator
* produces
* @param attributes The list of attributes that this code generator can use. You can find the full
* list of available attributes and their descriptions at
* http://apibuilder.io/doc/attributes
*/
final case class Generator(
key: String,
name: String,
language: _root_.scala.Option[String] = None,
description: _root_.scala.Option[String] = None,
attributes: Seq[String] = Nil
)
final case class Healthcheck(
status: String
)
/**
* The result of invoking a generator.
*
* @param source The actual source code.
* @param files A collection of source files
*/
final case class Invocation(
@deprecated("Use files instead") source: String,
files: Seq[io.apibuilder.generator.v0.models.File]
)
/**
* The invocation form is the payload sent to the code generators when requesting
* generation of client code.
*/
final case class InvocationForm(
service: io.apibuilder.spec.v0.models.Service,
attributes: Seq[io.apibuilder.generator.v0.models.Attribute] = Nil,
userAgent: _root_.scala.Option[String] = None,
importedServices: _root_.scala.Option[Seq[io.apibuilder.spec.v0.models.Service]] = None
)
/**
* Allows generator authors to flag files with special characteristics. It is up to
* the client (i.e. the cli) to decide how to interpret them.
*/
sealed trait FileFlag extends _root_.scala.Product with _root_.scala.Serializable
object FileFlag {
/**
* Indicates files that an end user starts from but should edit. Not intended to be
* the final product (see:
* https://stackoverflow.com/questions/235018/what-is-scaffolding-is-it-a-term-for-a-particular-platform).
* Consider not overwriting these files when code is re-generated.
*/
case object Scaffolding extends FileFlag { override def toString = "scaffolding" }
/**
* UNDEFINED captures values that are sent either in error or
* that were added by the server after this library was
* generated. We want to make it easy and obvious for users of
* this library to handle this case gracefully.
*
* We use all CAPS for the variable name to avoid collisions
* with the camel cased values above.
*/
final case class UNDEFINED(override val toString: String) extends FileFlag
/**
* all returns a list of all the valid, known values. We use
* lower case to avoid collisions with the camel cased values
* above.
*/
val all: scala.List[FileFlag] = scala.List(Scaffolding)
private[this]
val byName: Map[String, FileFlag] = all.map(x => x.toString.toLowerCase -> x).toMap
def apply(value: String): FileFlag = fromString(value).getOrElse(UNDEFINED(value))
def fromString(value: String): _root_.scala.Option[FileFlag] = byName.get(value.toLowerCase)
}
}
package io.apibuilder.generator.v0.models {
package object json {
import play.api.libs.json.__
import play.api.libs.json.JsString
import play.api.libs.json.Writes
import play.api.libs.functional.syntax._
import io.apibuilder.common.v0.models.json._
import io.apibuilder.generator.v0.models.json._
import io.apibuilder.spec.v0.models.json._
private[v0] implicit val jsonReadsUUID = __.read[String].map { str =>
_root_.java.util.UUID.fromString(str)
}
private[v0] implicit val jsonWritesUUID = new Writes[_root_.java.util.UUID] {
def writes(x: _root_.java.util.UUID) = JsString(x.toString)
}
private[v0] implicit val jsonReadsJodaDateTime = __.read[String].map { str =>
_root_.org.joda.time.format.ISODateTimeFormat.dateTimeParser.parseDateTime(str)
}
private[v0] implicit val jsonWritesJodaDateTime = new Writes[_root_.org.joda.time.DateTime] {
def writes(x: _root_.org.joda.time.DateTime) = {
JsString(_root_.org.joda.time.format.ISODateTimeFormat.dateTime.print(x))
}
}
private[v0] implicit val jsonReadsJodaLocalDate = __.read[String].map { str =>
_root_.org.joda.time.format.ISODateTimeFormat.dateTimeParser.parseLocalDate(str)
}
private[v0] implicit val jsonWritesJodaLocalDate = new Writes[_root_.org.joda.time.LocalDate] {
def writes(x: _root_.org.joda.time.LocalDate) = {
JsString(_root_.org.joda.time.format.ISODateTimeFormat.date.print(x))
}
}
implicit val jsonReadsApibuilderGeneratorFileFlag = new play.api.libs.json.Reads[io.apibuilder.generator.v0.models.FileFlag] {
def reads(js: play.api.libs.json.JsValue): play.api.libs.json.JsResult[io.apibuilder.generator.v0.models.FileFlag] = {
js match {
case v: play.api.libs.json.JsString => play.api.libs.json.JsSuccess(io.apibuilder.generator.v0.models.FileFlag(v.value))
case _ => {
(js \ "value").validate[String] match {
case play.api.libs.json.JsSuccess(v, _) => play.api.libs.json.JsSuccess(io.apibuilder.generator.v0.models.FileFlag(v))
case err: play.api.libs.json.JsError =>
(js \ "file_flag").validate[String] match {
case play.api.libs.json.JsSuccess(v, _) => play.api.libs.json.JsSuccess(io.apibuilder.generator.v0.models.FileFlag(v))
case err: play.api.libs.json.JsError => err
}
}
}
}
}
}
def jsonWritesApibuilderGeneratorFileFlag(obj: io.apibuilder.generator.v0.models.FileFlag) = {
play.api.libs.json.JsString(obj.toString)
}
def jsObjectFileFlag(obj: io.apibuilder.generator.v0.models.FileFlag) = {
play.api.libs.json.Json.obj("value" -> play.api.libs.json.JsString(obj.toString))
}
implicit def jsonWritesApibuilderGeneratorFileFlag: play.api.libs.json.Writes[FileFlag] = {
new play.api.libs.json.Writes[io.apibuilder.generator.v0.models.FileFlag] {
def writes(obj: io.apibuilder.generator.v0.models.FileFlag) = {
jsonWritesApibuilderGeneratorFileFlag(obj)
}
}
}
implicit def jsonReadsApibuilderGeneratorAttribute: play.api.libs.json.Reads[Attribute] = {
for {
name <- (__ \ "name").read[String]
value <- (__ \ "value").read[String]
} yield Attribute(name, value)
}
def jsObjectAttribute(obj: io.apibuilder.generator.v0.models.Attribute): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"name" -> play.api.libs.json.JsString(obj.name),
"value" -> play.api.libs.json.JsString(obj.value)
)
}
implicit def jsonWritesApibuilderGeneratorAttribute: play.api.libs.json.Writes[Attribute] = {
new play.api.libs.json.Writes[io.apibuilder.generator.v0.models.Attribute] {
def writes(obj: io.apibuilder.generator.v0.models.Attribute) = {
jsObjectAttribute(obj)
}
}
}
implicit def jsonReadsApibuilderGeneratorError: play.api.libs.json.Reads[Error] = {
for {
code <- (__ \ "code").read[String]
message <- (__ \ "message").read[String]
} yield Error(code, message)
}
def jsObjectError(obj: io.apibuilder.generator.v0.models.Error): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"code" -> play.api.libs.json.JsString(obj.code),
"message" -> play.api.libs.json.JsString(obj.message)
)
}
implicit def jsonWritesApibuilderGeneratorError: play.api.libs.json.Writes[Error] = {
new play.api.libs.json.Writes[io.apibuilder.generator.v0.models.Error] {
def writes(obj: io.apibuilder.generator.v0.models.Error) = {
jsObjectError(obj)
}
}
}
implicit def jsonReadsApibuilderGeneratorFile: play.api.libs.json.Reads[File] = {
for {
name <- (__ \ "name").read[String]
dir <- (__ \ "dir").readNullable[String]
contents <- (__ \ "contents").read[String]
flags <- (__ \ "flags").readNullable[Seq[io.apibuilder.generator.v0.models.FileFlag]]
} yield File(name, dir, contents, flags)
}
def jsObjectFile(obj: io.apibuilder.generator.v0.models.File): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"name" -> play.api.libs.json.JsString(obj.name),
"contents" -> play.api.libs.json.JsString(obj.contents)
) ++ (obj.dir match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("dir" -> play.api.libs.json.JsString(x))
}) ++
(obj.flags match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("flags" -> play.api.libs.json.Json.toJson(x))
})
}
implicit def jsonWritesApibuilderGeneratorFile: play.api.libs.json.Writes[File] = {
new play.api.libs.json.Writes[io.apibuilder.generator.v0.models.File] {
def writes(obj: io.apibuilder.generator.v0.models.File) = {
jsObjectFile(obj)
}
}
}
implicit def jsonReadsApibuilderGeneratorGenerator: play.api.libs.json.Reads[Generator] = {
for {
key <- (__ \ "key").read[String]
name <- (__ \ "name").read[String]
language <- (__ \ "language").readNullable[String]
description <- (__ \ "description").readNullable[String]
attributes <- (__ \ "attributes").read[Seq[String]]
} yield Generator(key, name, language, description, attributes)
}
def jsObjectGenerator(obj: io.apibuilder.generator.v0.models.Generator): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"key" -> play.api.libs.json.JsString(obj.key),
"name" -> play.api.libs.json.JsString(obj.name),
"attributes" -> play.api.libs.json.Json.toJson(obj.attributes)
) ++ (obj.language match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("language" -> play.api.libs.json.JsString(x))
}) ++
(obj.description match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("description" -> play.api.libs.json.JsString(x))
})
}
implicit def jsonWritesApibuilderGeneratorGenerator: play.api.libs.json.Writes[Generator] = {
new play.api.libs.json.Writes[io.apibuilder.generator.v0.models.Generator] {
def writes(obj: io.apibuilder.generator.v0.models.Generator) = {
jsObjectGenerator(obj)
}
}
}
implicit def jsonReadsApibuilderGeneratorHealthcheck: play.api.libs.json.Reads[Healthcheck] = {
(__ \ "status").read[String].map { x => new Healthcheck(status = x) }
}
def jsObjectHealthcheck(obj: io.apibuilder.generator.v0.models.Healthcheck): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"status" -> play.api.libs.json.JsString(obj.status)
)
}
implicit def jsonWritesApibuilderGeneratorHealthcheck: play.api.libs.json.Writes[Healthcheck] = {
new play.api.libs.json.Writes[io.apibuilder.generator.v0.models.Healthcheck] {
def writes(obj: io.apibuilder.generator.v0.models.Healthcheck) = {
jsObjectHealthcheck(obj)
}
}
}
implicit def jsonReadsApibuilderGeneratorInvocation: play.api.libs.json.Reads[Invocation] = {
for {
source <- (__ \ "source").read[String]
files <- (__ \ "files").read[Seq[io.apibuilder.generator.v0.models.File]]
} yield Invocation(source, files)
}
def jsObjectInvocation(obj: io.apibuilder.generator.v0.models.Invocation): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"source" -> play.api.libs.json.JsString(obj.source),
"files" -> play.api.libs.json.Json.toJson(obj.files)
)
}
implicit def jsonWritesApibuilderGeneratorInvocation: play.api.libs.json.Writes[Invocation] = {
new play.api.libs.json.Writes[io.apibuilder.generator.v0.models.Invocation] {
def writes(obj: io.apibuilder.generator.v0.models.Invocation) = {
jsObjectInvocation(obj)
}
}
}
implicit def jsonReadsApibuilderGeneratorInvocationForm: play.api.libs.json.Reads[InvocationForm] = {
for {
service <- (__ \ "service").read[io.apibuilder.spec.v0.models.Service]
attributes <- (__ \ "attributes").read[Seq[io.apibuilder.generator.v0.models.Attribute]]
userAgent <- (__ \ "user_agent").readNullable[String]
importedServices <- (__ \ "imported_services").readNullable[Seq[io.apibuilder.spec.v0.models.Service]]
} yield InvocationForm(service, attributes, userAgent, importedServices)
}
def jsObjectInvocationForm(obj: io.apibuilder.generator.v0.models.InvocationForm): play.api.libs.json.JsObject = {
play.api.libs.json.Json.obj(
"service" -> io.apibuilder.spec.v0.models.json.jsObjectService(obj.service),
"attributes" -> play.api.libs.json.Json.toJson(obj.attributes)
) ++ (obj.userAgent match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("user_agent" -> play.api.libs.json.JsString(x))
}) ++
(obj.importedServices match {
case None => play.api.libs.json.Json.obj()
case Some(x) => play.api.libs.json.Json.obj("imported_services" -> play.api.libs.json.Json.toJson(x))
})
}
implicit def jsonWritesApibuilderGeneratorInvocationForm: play.api.libs.json.Writes[InvocationForm] = {
new play.api.libs.json.Writes[io.apibuilder.generator.v0.models.InvocationForm] {
def writes(obj: io.apibuilder.generator.v0.models.InvocationForm) = {
jsObjectInvocationForm(obj)
}
}
}
}
}
package io.apibuilder.generator.v0 {
object Bindables {
import play.api.mvc.{PathBindable, QueryStringBindable}
// import models directly for backwards compatibility with prior versions of the generator
import Core._
import Models._
object Core {
implicit def pathBindableDateTimeIso8601(implicit stringBinder: QueryStringBindable[String]): PathBindable[_root_.org.joda.time.DateTime] = ApibuilderPathBindable(ApibuilderTypes.dateTimeIso8601)
implicit def queryStringBindableDateTimeIso8601(implicit stringBinder: QueryStringBindable[String]): QueryStringBindable[_root_.org.joda.time.DateTime] = ApibuilderQueryStringBindable(ApibuilderTypes.dateTimeIso8601)
implicit def pathBindableDateIso8601(implicit stringBinder: QueryStringBindable[String]): PathBindable[_root_.org.joda.time.LocalDate] = ApibuilderPathBindable(ApibuilderTypes.dateIso8601)
implicit def queryStringBindableDateIso8601(implicit stringBinder: QueryStringBindable[String]): QueryStringBindable[_root_.org.joda.time.LocalDate] = ApibuilderQueryStringBindable(ApibuilderTypes.dateIso8601)
}
object Models {
import io.apibuilder.generator.v0.models._
val fileFlagConverter: ApibuilderTypeConverter[io.apibuilder.generator.v0.models.FileFlag] = new ApibuilderTypeConverter[io.apibuilder.generator.v0.models.FileFlag] {
override def convert(value: String): io.apibuilder.generator.v0.models.FileFlag = io.apibuilder.generator.v0.models.FileFlag(value)
override def convert(value: io.apibuilder.generator.v0.models.FileFlag): String = value.toString
override def example: io.apibuilder.generator.v0.models.FileFlag = io.apibuilder.generator.v0.models.FileFlag.Scaffolding
override def validValues: Seq[io.apibuilder.generator.v0.models.FileFlag] = io.apibuilder.generator.v0.models.FileFlag.all
}
implicit def pathBindableFileFlag(implicit stringBinder: QueryStringBindable[String]): PathBindable[io.apibuilder.generator.v0.models.FileFlag] = ApibuilderPathBindable(fileFlagConverter)
implicit def queryStringBindableFileFlag(implicit stringBinder: QueryStringBindable[String]): QueryStringBindable[io.apibuilder.generator.v0.models.FileFlag] = ApibuilderQueryStringBindable(fileFlagConverter)
}
trait ApibuilderTypeConverter[T] {
def convert(value: String): T
def convert(value: T): String
def example: T
def validValues: Seq[T] = Nil
def errorMessage(key: String, value: String, ex: java.lang.Exception): String = {
val base = s"Invalid value '$value' for parameter '$key'. "
validValues.toList match {
case Nil => base + "Ex: " + convert(example)
case values => base + ". Valid values are: " + values.mkString("'", "', '", "'")
}
}
}
object ApibuilderTypes {
val dateTimeIso8601: ApibuilderTypeConverter[_root_.org.joda.time.DateTime] = new ApibuilderTypeConverter[_root_.org.joda.time.DateTime] {
override def convert(value: String): _root_.org.joda.time.DateTime = _root_.org.joda.time.format.ISODateTimeFormat.dateTimeParser.parseDateTime(value)
override def convert(value: _root_.org.joda.time.DateTime): String = _root_.org.joda.time.format.ISODateTimeFormat.dateTime.print(value)
override def example: _root_.org.joda.time.DateTime = _root_.org.joda.time.DateTime.now
}
val dateIso8601: ApibuilderTypeConverter[_root_.org.joda.time.LocalDate] = new ApibuilderTypeConverter[_root_.org.joda.time.LocalDate] {
override def convert(value: String): _root_.org.joda.time.LocalDate = _root_.org.joda.time.format.ISODateTimeFormat.dateTimeParser.parseLocalDate(value)
override def convert(value: _root_.org.joda.time.LocalDate): String = _root_.org.joda.time.format.ISODateTimeFormat.date.print(value)
override def example: _root_.org.joda.time.LocalDate = _root_.org.joda.time.LocalDate.now
}
}
final case class ApibuilderQueryStringBindable[T](
converters: ApibuilderTypeConverter[T]
) extends QueryStringBindable[T] {
override def bind(key: String, params: Map[String, Seq[String]]): _root_.scala.Option[_root_.scala.Either[String, T]] = {
params.getOrElse(key, Nil).headOption.map { v =>
try {
Right(
converters.convert(v)
)
} catch {
case ex: java.lang.Exception => Left(
converters.errorMessage(key, v, ex)
)
}
}
}
override def unbind(key: String, value: T): String = {
s"$key=${converters.convert(value)}"
}
}
final case class ApibuilderPathBindable[T](
converters: ApibuilderTypeConverter[T]
) extends PathBindable[T] {
override def bind(key: String, value: String): _root_.scala.Either[String, T] = {
try {
Right(
converters.convert(value)
)
} catch {
case ex: java.lang.Exception => Left(
converters.errorMessage(key, value, ex)
)
}
}
override def unbind(key: String, value: T): String = {
converters.convert(value)
}
}
}
}
package io.apibuilder.generator.v0 {
object Constants {
val BaseUrl = "https://api.apibuilder.io"
val Namespace = "io.apibuilder.generator.v0"
val UserAgent = "apibuilder 0.14.93 app.apibuilder.io/apicollective/apibuilder-generator/latest/play_2_8_client"
val Version = "0.14.85"
val VersionMajor = 0
}
class Client(
ws: play.api.libs.ws.WSClient,
val baseUrl: String = "https://api.apibuilder.io",
auth: scala.Option[io.apibuilder.generator.v0.Authorization] = None,
defaultHeaders: Seq[(String, String)] = Nil
) extends interfaces.Client {
import io.apibuilder.common.v0.models.json._
import io.apibuilder.generator.v0.models.json._
import io.apibuilder.spec.v0.models.json._
private[this] val logger = play.api.Logger("io.apibuilder.generator.v0.Client")
logger.info(s"Initializing io.apibuilder.generator.v0.Client for url $baseUrl")
def generators: Generators = Generators
def healthchecks: Healthchecks = Healthchecks
def invocations: Invocations = Invocations
object Generators extends Generators {
override def get(
key: _root_.scala.Option[String] = None,
limit: Int = 100,
offset: Int = 0,
requestHeaders: Seq[(String, String)] = Nil
)(implicit ec: scala.concurrent.ExecutionContext): scala.concurrent.Future[Seq[io.apibuilder.generator.v0.models.Generator]] = {
val queryParameters = Seq(
key.map("key" -> _),
Some("limit" -> limit.toString),
Some("offset" -> offset.toString)
).flatten
_executeRequest("GET", s"/generators", queryParameters = queryParameters, requestHeaders = requestHeaders).map {
case r if r.status == 200 => _root_.io.apibuilder.generator.v0.Client.parseJson("Seq[io.apibuilder.generator.v0.models.Generator]", r, _.validate[Seq[io.apibuilder.generator.v0.models.Generator]])
case r => throw io.apibuilder.generator.v0.errors.FailedRequest(r.status, s"Unsupported response code[${r.status}]. Expected: 200")
}
}
override def getByKey(
key: String,
requestHeaders: Seq[(String, String)] = Nil
)(implicit ec: scala.concurrent.ExecutionContext): scala.concurrent.Future[io.apibuilder.generator.v0.models.Generator] = {
_executeRequest("GET", s"/generators/${play.utils.UriEncoding.encodePathSegment(key, "UTF-8")}", requestHeaders = requestHeaders).map {
case r if r.status == 200 => _root_.io.apibuilder.generator.v0.Client.parseJson("io.apibuilder.generator.v0.models.Generator", r, _.validate[io.apibuilder.generator.v0.models.Generator])
case r if r.status == 404 => throw io.apibuilder.generator.v0.errors.UnitResponse(r.status)
case r => throw io.apibuilder.generator.v0.errors.FailedRequest(r.status, s"Unsupported response code[${r.status}]. Expected: 200, 404")
}
}
}
object Healthchecks extends Healthchecks {
override def get(
requestHeaders: Seq[(String, String)] = Nil
)(implicit ec: scala.concurrent.ExecutionContext): scala.concurrent.Future[io.apibuilder.generator.v0.models.Healthcheck] = {
_executeRequest("GET", s"/_internal_/healthcheck", requestHeaders = requestHeaders).map {
case r if r.status == 200 => _root_.io.apibuilder.generator.v0.Client.parseJson("io.apibuilder.generator.v0.models.Healthcheck", r, _.validate[io.apibuilder.generator.v0.models.Healthcheck])
case r => throw io.apibuilder.generator.v0.errors.FailedRequest(r.status, s"Unsupported response code[${r.status}]. Expected: 200")
}
}
}
object Invocations extends Invocations {
override def postByKey(
key: String,
invocationForm: io.apibuilder.generator.v0.models.InvocationForm,
requestHeaders: Seq[(String, String)] = Nil
)(implicit ec: scala.concurrent.ExecutionContext): scala.concurrent.Future[io.apibuilder.generator.v0.models.Invocation] = {
val payload = play.api.libs.json.Json.toJson(invocationForm)
_executeRequest("POST", s"/invocations/${play.utils.UriEncoding.encodePathSegment(key, "UTF-8")}", body = Some(payload), requestHeaders = requestHeaders).map {
case r if r.status == 200 => _root_.io.apibuilder.generator.v0.Client.parseJson("io.apibuilder.generator.v0.models.Invocation", r, _.validate[io.apibuilder.generator.v0.models.Invocation])
case r if r.status == 409 => throw io.apibuilder.generator.v0.errors.ErrorsResponse(r)
case r => throw io.apibuilder.generator.v0.errors.FailedRequest(r.status, s"Unsupported response code[${r.status}]. Expected: 200, 409")
}
}
}
def _requestHolder(path: String): play.api.libs.ws.WSRequest = {
val holder = ws.url(baseUrl + path).addHttpHeaders(
"User-Agent" -> Constants.UserAgent,
"X-Apidoc-Version" -> Constants.Version,
"X-Apidoc-Version-Major" -> Constants.VersionMajor.toString
).addHttpHeaders(defaultHeaders : _*)
auth.fold(holder) {
case Authorization.Basic(username, password) => {
holder.withAuth(username, password.getOrElse(""), play.api.libs.ws.WSAuthScheme.BASIC)
}
case a => sys.error("Invalid authorization scheme[" + a.getClass + "]")
}
}
def _logRequest(method: String, req: play.api.libs.ws.WSRequest): play.api.libs.ws.WSRequest = {
val queryComponents = for {
(name, values) <- req.queryString
value <- values
} yield s"$name=$value"
val url = s"${req.url}${queryComponents.mkString("?", "&", "")}"
auth.fold(logger.info(s"curl -X $method '$url'")) { _ =>
logger.info(s"curl -X $method -u '[REDACTED]:' '$url'")
}
req
}
def _executeRequest(
method: String,
path: String,
queryParameters: Seq[(String, String)] = Nil,
requestHeaders: Seq[(String, String)] = Nil,
body: Option[play.api.libs.json.JsValue] = None
): scala.concurrent.Future[play.api.libs.ws.WSResponse] = {
method.toUpperCase match {
case "GET" => {
_logRequest("GET", _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*)).get()
}
case "POST" => {
_logRequest("POST", _requestHolder(path).addHttpHeaders(_withJsonContentType(requestHeaders):_*).addQueryStringParameters(queryParameters:_*)).post(body.getOrElse(play.api.libs.json.Json.obj()))
}
case "PUT" => {
_logRequest("PUT", _requestHolder(path).addHttpHeaders(_withJsonContentType(requestHeaders):_*).addQueryStringParameters(queryParameters:_*)).put(body.getOrElse(play.api.libs.json.Json.obj()))
}
case "PATCH" => {
_logRequest("PATCH", _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*)).patch(body.getOrElse(play.api.libs.json.Json.obj()))
}
case "DELETE" => {
_logRequest("DELETE", _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*)).delete()
}
case "HEAD" => {
_logRequest("HEAD", _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*)).head()
}
case "OPTIONS" => {
_logRequest("OPTIONS", _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*)).options()
}
case _ => {
_logRequest(method, _requestHolder(path).addHttpHeaders(requestHeaders:_*).addQueryStringParameters(queryParameters:_*))
sys.error("Unsupported method[%s]".format(method))
}
}
}
/**
* Adds a Content-Type: application/json header unless the specified requestHeaders
* already contain a Content-Type header
*/
def _withJsonContentType(headers: Seq[(String, String)]): Seq[(String, String)] = {
headers.find { _._1.toUpperCase == "CONTENT-TYPE" } match {
case None => headers ++ Seq(("Content-Type" -> "application/json; charset=UTF-8"))
case Some(_) => headers
}
}
}
object Client {
def parseJson[T](
className: String,
r: play.api.libs.ws.WSResponse,
f: (play.api.libs.json.JsValue => play.api.libs.json.JsResult[T])
): T = {
f(play.api.libs.json.Json.parse(r.body)) match {
case play.api.libs.json.JsSuccess(x, _) => x
case play.api.libs.json.JsError(errors) => {
throw io.apibuilder.generator.v0.errors.FailedRequest(r.status, s"Invalid json for class[" + className + "]: " + errors.mkString(" "))
}
}
}
}
sealed trait Authorization extends _root_.scala.Product with _root_.scala.Serializable
object Authorization {
final case class Basic(username: String, password: Option[String] = None) extends Authorization
}
package interfaces {
trait Client {
def baseUrl: String
def generators: io.apibuilder.generator.v0.Generators
def healthchecks: io.apibuilder.generator.v0.Healthchecks
def invocations: io.apibuilder.generator.v0.Invocations
}
}
trait Generators {
/**
* Get all available generators
*
* @param key Filter generators with this key
* @param limit The number of records to return
* @param offset Used to paginate. First page of results is 0.
*/
def get(
key: _root_.scala.Option[String] = None,
limit: Int = 100,
offset: Int = 0,
requestHeaders: Seq[(String, String)] = Nil
)(implicit ec: scala.concurrent.ExecutionContext): scala.concurrent.Future[Seq[io.apibuilder.generator.v0.models.Generator]]
/**
* Get generator with this key
*/
def getByKey(
key: String,
requestHeaders: Seq[(String, String)] = Nil
)(implicit ec: scala.concurrent.ExecutionContext): scala.concurrent.Future[io.apibuilder.generator.v0.models.Generator]
}
trait Healthchecks {
def get(
requestHeaders: Seq[(String, String)] = Nil
)(implicit ec: scala.concurrent.ExecutionContext): scala.concurrent.Future[io.apibuilder.generator.v0.models.Healthcheck]
}
trait Invocations {
/**
* Invoke a generator
*/
def postByKey(
key: String,
invocationForm: io.apibuilder.generator.v0.models.InvocationForm,
requestHeaders: Seq[(String, String)] = Nil
)(implicit ec: scala.concurrent.ExecutionContext): scala.concurrent.Future[io.apibuilder.generator.v0.models.Invocation]
}
package errors {
import io.apibuilder.common.v0.models.json._
import io.apibuilder.generator.v0.models.json._
import io.apibuilder.spec.v0.models.json._
final case class ErrorsResponse(
response: play.api.libs.ws.WSResponse,
message: Option[String] = None
) extends Exception(message.getOrElse(response.status + ": " + response.body)){
lazy val errors = _root_.io.apibuilder.generator.v0.Client.parseJson("Seq[io.apibuilder.generator.v0.models.Error]", response, _.validate[Seq[io.apibuilder.generator.v0.models.Error]])
}
final case class UnitResponse(status: Int) extends Exception(s"HTTP $status")
final case class FailedRequest(responseCode: Int, message: String, requestUri: Option[_root_.java.net.URI] = None) extends _root_.java.lang.Exception(s"HTTP $responseCode: $message")
}
}
|
mbryzek/apidoc
|
generated/app/ApicollectiveApibuilderGeneratorV0Client.scala
|
Scala
|
mit
| 31,616
|
package slick.compiler
import slick.ast._
import Util._
import slick.util.ConstArray
/** Specialize the AST for edge cases of query parameters. This is required for
* compiling `take(0)` for some databases which do not allow `LIMIT 0`. */
class SpecializeParameters extends Phase {
val name = "specializeParameters"
def apply(state: CompilerState): CompilerState =
state.map(ClientSideOp.mapServerSide(_, keepType = true)(transformServerSide))
def transformServerSide(n: Node): Node = {
val cs = n.collect { case c @ Comprehension(_, _, _, _, _, _, _, _, Some(_: QueryParameter), _, _) => c }
logger.debug("Affected fetch clauses in: "+cs.mkString(", "))
cs.foldLeft(n) { case (n, c @ Comprehension(_, _, _, _, _, _, _, _, Some(fetch: QueryParameter), _, _)) =>
val compiledFetchParam = QueryParameter(fetch.extractor, ScalaBaseType.longType)
val guarded = n.replace({ case c2: Comprehension if c2 == c => c2.copy(fetch = Some(LiteralNode(0L))) }, keepType = true)
val fallback = n.replace({ case c2: Comprehension if c2 == c => c2.copy(fetch = Some(compiledFetchParam)) }, keepType = true)
ParameterSwitch(ConstArray(compare(fetch.extractor, 0L) -> guarded), fallback).infer()
}
}
/** Create a function that calls an extractor for a value and compares the result with a fixed value. */
def compare(f: (Any => Any), v: Any) = new (Any => Boolean) {
def apply(param: Any) = v == f(param)
override def toString = s"$f(...) == $v"
}
}
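// Illustration (not part of the original file) of why this phase exists: a query such
// as `q.take(n)` is compiled once and executed with different runtime values of `n`.
// The ParameterSwitch built above selects the plan whose fetch clause is the literal 0
// when the extracted parameter equals 0 (letting later, database-specific phases rewrite
// it into something the database accepts), and falls back to the parameterised plan for
// every other value.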
|
nafg/slick
|
slick/src/main/scala/slick/compiler/SpecializeParameters.scala
|
Scala
|
bsd-2-clause
| 1,505
|
package pep_089
object Solution {
val stream = getClass.getResourceAsStream("/pep_089/p089_roman.txt")
val lines = scala.io.Source.fromInputStream(stream).getLines()
def solve(): String = lines
.map(s => s.length - s.replaceAll("DCCCC|LXXXX|VIIII|CCCC|XXXX|IIII", " ").length)
.sum.toString
}
|
filippovitale/pe
|
pe-solution/src/main/scala/pep_089/Solution.scala
|
Scala
|
mit
| 312
|
package scala.offheap
package internal
import java.{lang => jl}
import internal.SunMisc.UNSAFE
object Sanitizer {
private[this] final val UNPACKED_ID_MASK = 65535L
private[this] final val ID_MASK = jl.Long.MAX_VALUE << 48
private[this] final val ADDR_MASK = jl.Long.MAX_VALUE >> 16
def pack(id: Long, addr: Addr): Addr = (id << 48) | addr
def unpackId(addr: Addr): Long = (addr >> 48) & UNPACKED_ID_MASK
def unpackAddr(addr: Addr): Addr = addr & ADDR_MASK
private[this] final val MAX = 65536
private[this] val valid = new java.util.concurrent.atomic.AtomicIntegerArray(MAX)
private[this] val trans = new java.util.concurrent.atomic.AtomicLong(0L)
private def truncate(v: Int): Int =
if (v == 0) 1 else v
private def advance(prev: Int): Int = {
val inc = prev + 1
if (inc < MAX) inc
else 1
}
def register(): Long = {
var commit = false
var res = 0
do {
val prevtrans = trans.get
val start = truncate((prevtrans % MAX).toInt)
res = advance(start)
while (valid.get(res) == 1 && res != start)
res = advance(res)
if (res == start && trans.get == prevtrans)
throw new IllegalArgumentException(
s"can't have more than ${MAX-1} regions open in checked memory mode")
commit = valid.compareAndSet(res, 0, 1)
trans.incrementAndGet
} while (!commit)
res.toLong
}
def unregister(id: Long): Unit = {
valid.compareAndSet(id.toInt, 1, 0)
trans.incrementAndGet
}
def validate(addr: Addr): Addr =
if (Checked.MEMORY) {
val id = unpackId(addr).toInt
if (id != 0 && valid.get(id) != 1) {
throw new InaccessibleMemoryException
}
unpackAddr(addr)
} else {
addr
}
}
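// Worked example (not part of the original file) of the packing scheme above, which
// stores a 16-bit region id in the top bits of an address:
//
//   val packed = Sanitizer.pack(3L, 0x1234L)  // (3L << 48) | 0x1234L
//   Sanitizer.unpackId(packed)                // 3
//   Sanitizer.unpackAddr(packed)              // 0x1234
//
// In checked-memory mode, `validate` strips the id after confirming the region is still
// registered, so touching memory from an unregistered region throws InaccessibleMemoryException.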
|
ignasi35/scala-offheap
|
core/src/main/scala/offheap/internal/Sanitizer.scala
|
Scala
|
bsd-3-clause
| 1,747
|
package org.jetbrains.plugins.scala
import scala.language.implicitConversions
/**
* @author ven
*/
class Suspension[T](fun: () => T) {
def this(t: T) = this ({() => t})
lazy val v = fun()
}
object Suspension {
implicit def any2Susp[T](t: T): Suspension[T] = new Suspension(t)
}
|
triggerNZ/intellij-scala
|
src/org/jetbrains/plugins/scala/Suspension.scala
|
Scala
|
apache-2.0
| 289
|
type Elem = Object
def newElem() = new Object()
val MissFactor = 2
val Runs = 100 // number of runs to warm-up, and then number of runs to test
val ItersPerRun = 1000
val elems = Array.fill(1024 * MissFactor)(newElem())
def testJava =
val set = java.util.HashMap[Elem, Elem]()
var count = 0
var iter = 0
while iter < ItersPerRun do
var i = 0
while i < elems.length do
val e = elems(i)
if i % MissFactor == 0 then
set.put(e, e)
i += 1
while i > 0 do
i -= 1
val v = set.get(elems(i))
if v != null then
count += 1
iter += 1
count
def testJavaId =
val set = java.util.IdentityHashMap[Elem, Elem]()
var count = 0
var iter = 0
while iter < ItersPerRun do
var i = 0
while i < elems.length do
val e = elems(i)
if i % MissFactor == 0 then
set.put(e, e)
i += 1
while i > 0 do
i -= 1
val v = set.get(elems(i))
if v != null then
count += 1
iter += 1
count
def testScalaMap =
val set = scala.collection.mutable.HashMap[Elem, Elem]()
var count = 0
var iter = 0
while iter < ItersPerRun do
var i = 0
while i < elems.length do
val e = elems(i)
if i % MissFactor == 0 then
set.update(e, e)
i += 1
while i > 0 do
i -= 1
set.get(elems(i)) match
case Some(_) => count += 1
case None =>
iter += 1
count
def testAnyRefMap =
val set = scala.collection.mutable.AnyRefMap[Elem, Elem]()
var count = 0
var iter = 0
while iter < ItersPerRun do
var i = 0
while i < elems.length do
val e = elems(i)
if i % MissFactor == 0 then
set.update(e, e)
i += 1
while i > 0 do
i -= 1
val v = set.getOrNull(elems(i))
if v != null then
count += 1
iter += 1
count
def testDottyMap =
val set = dotty.tools.dotc.util.HashMap[Elem, Elem](128)
var count = 0
var iter = 0
while iter < ItersPerRun do
var i = 0
while i < elems.length do
val e = elems(i)
if i % MissFactor == 0 then
set.update(e, e)
i += 1
while i > 0 do
i -= 1
val v = set.lookup(elems(i))
if v != null then
count += 1
iter += 1
count
def testDottySet =
val set = dotty.tools.dotc.util.HashSet[Elem](64)
var count = 0
var iter = 0
while iter < ItersPerRun do
var i = 0
while i < elems.length do
val e = elems(i)
if i % MissFactor == 0 then
set += e
i += 1
while i > 0 do
i -= 1
if set.contains(elems(i)) then
count += 1
iter += 1
count
def testScalaSet =
val set = scala.collection.mutable.HashSet[Elem]()
var count = 0
var iter = 0
while iter < ItersPerRun do
var i = 0
while i < elems.length do
val e = elems(i)
if i % MissFactor == 0 then
set += e
i += 1
while i > 0 do
i -= 1
if set.contains(elems(i)) then
count += 1
iter += 1
count
def testLinearSet =
var set = dotty.tools.dotc.util.LinearSet.empty[Elem]
var count = 0
var iter = 0
while iter < ItersPerRun do
var i = 0
while i < elems.length do
val e = elems(i)
if i % MissFactor == 0 then
set += e
i += 1
while i > 0 do
i -= 1
if set.contains(elems(i)) then
count += 1
iter += 1
count
val expected = (elems.size / MissFactor) * ItersPerRun
def profile(name: String, op: => Int) =
System.gc()
for i <- 0 until Runs do assert(op == expected)
val start = System.nanoTime()
var count = 0
for i <- 0 until Runs do count += op
//println(count)
assert(count == expected * Runs)
println(s"$name took ${(System.nanoTime().toDouble - start)/1_000_000} ms")
@main def Test =
profile("dotty.tools.dotc.LinearSet", testLinearSet)
profile("dotty.tools.dotc.HashSet ", testDottySet)
profile("dotty.tools.dotc.HashMap ", testDottyMap)
profile("scala.collection.HashSet ", testScalaSet)
profile("scala.collection.AnyRefMap", testAnyRefMap)
profile("scala.collection.HashMap ", testScalaMap)
profile("java.util.IdentityHashMap ", testJavaId)
profile("java.util.HashMap ", testJava)
profile("java.util.HashMap ", testJava)
profile("java.util.IdentityHashMap ", testJavaId)
profile("scala.collection.HashMap ", testScalaMap)
profile("scala.collection.AnyRefMap", testAnyRefMap)
profile("scala.collection.HashSet ", testScalaSet)
profile("dotty.tools.dotc.HashMap ", testDottyMap)
profile("dotty.tools.dotc.HashSet ", testDottySet)
profile("dotty.tools.dotc.LinearSet", testLinearSet)
|
dotty-staging/dotty
|
tests/pos-with-compiler/benchSets.scala
|
Scala
|
apache-2.0
| 4,645
|
package propertynder.util
import akka.http.scaladsl.model.HttpResponse
import akka.stream.ActorMaterializer
import akka.util.ByteString
import scala.concurrent.Future
object HTTP {
implicit class ResponseWithBody(response: HttpResponse) {
def body(implicit mat: ActorMaterializer): Future[String] = {
implicit val ec = mat.system.dispatcher
response.entity.dataBytes
.runFold(ByteString(""))(_ ++ _)
.map(_.utf8String)
}
}
}
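// Minimal usage sketch (not part of the original file); only `.body` comes from this
// file, the Akka HTTP setup is assumed:
//
//   implicit val system = akka.actor.ActorSystem()
//   implicit val mat = ActorMaterializer()
//   import propertynder.util.HTTP._
//   import system.dispatcher
//
//   Http().singleRequest(HttpRequest(uri = "https://example.org"))
//     .flatMap(_.body)  // Future[String] containing the fully drained response entity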
|
ostapneko/propertynder
|
src/main/scala/propertynder/util/HTTP.scala
|
Scala
|
mit
| 471
|
package dielectric
object Dielectric
extends spark.AllInstances
with syntax.AllSyntax
|
adelbertc/dielectric
|
src/main/scala/dielectric/Dielectric.scala
|
Scala
|
apache-2.0
| 94
|
package com.redislabs.provider.redis.df.cluster
import com.redislabs.provider.redis.df.CsvDataframeSuite
import com.redislabs.provider.redis.env.RedisClusterEnv
class CsvDataframeClusterSuite extends CsvDataframeSuite with RedisClusterEnv
|
RedisLabs/spark-redis
|
src/test/scala/com/redislabs/provider/redis/df/cluster/CsvDataframeClusterSuite.scala
|
Scala
|
bsd-3-clause
| 241
|
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.execution.Ack.{Continue, Stop}
import monix.execution.cancelables.{CompositeCancelable, SingleAssignCancelable}
import monix.execution.{Ack, Cancelable}
import monix.reactive.Observable
import monix.reactive.observers.Subscriber
import scala.concurrent.Future
private[reactive] final class ThrottleLastObservable[+A, S](
source: Observable[A],
sampler: Observable[S],
shouldRepeatOnSilence: Boolean)
extends Observable[A] {
def unsafeSubscribeFn(downstream: Subscriber[A]): Cancelable = {
val upstreamSubscription = SingleAssignCancelable()
val samplerSubscription = SingleAssignCancelable()
val composite = CompositeCancelable(upstreamSubscription, samplerSubscription)
upstreamSubscription := source.unsafeSubscribeFn(new Subscriber.Sync[A] { upstreamSubscriber =>
implicit val scheduler = downstream.scheduler
// Value is volatile to keep write to lastValue visible
// after this one is seen as being true
@volatile private[this] var hasValue = false
// MUST BE written before `hasValue = true`
private[this] var lastValue: A = _
// To be written in onComplete/onError, to be read from tick
private[this] var upstreamIsDone = false
// MUST BE synchronized by `upstreamSubscriber`.
private[this] var downstreamIsDone = false
def onNext(elem: A): Ack =
if (downstreamIsDone) Stop
else {
lastValue = elem
hasValue = true
Continue
}
def onError(ex: Throwable): Unit =
upstreamSubscriber.synchronized {
if (!downstreamIsDone) {
downstreamIsDone = true
samplerSubscription.cancel()
downstream.onError(ex)
}
}
def onComplete(): Unit =
upstreamSubscriber.synchronized {
upstreamIsDone = true
}
samplerSubscription := sampler.unsafeSubscribeFn(new Subscriber[S] { self =>
implicit val scheduler = downstream.scheduler
def onNext(elem: S): Future[Ack] =
upstreamSubscriber.synchronized(signalNext())
def onError(ex: Throwable): Unit =
upstreamSubscriber.onError(ex)
def onComplete(): Unit =
upstreamSubscriber.synchronized {
upstreamIsDone = true
signalNext()
()
}
def signalNext(): Future[Ack] =
if (downstreamIsDone) Stop
else {
val next =
if (!hasValue) Continue
else {
hasValue = shouldRepeatOnSilence
val ack = downstream.onNext(lastValue)
ack.syncOnStopOrFailure { _ =>
downstreamIsDone = true
upstreamSubscription.cancel()
}
}
if (!upstreamIsDone) next
else {
downstreamIsDone = true
upstreamSubscription.cancel()
if (next ne Stop) downstream.onComplete()
Stop
}
}
})
})
composite
}
}
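// Usage sketch (not part of the original file). This class backs sampling operators on
// Observable such as `sample`/`throttleLast`: only the last upstream value seen in each
// sampler window is pushed downstream.
//
//   import scala.concurrent.duration._
//   import monix.reactive.Observable
//
//   // Emits roughly one element per second: the latest tick observed in that second.
//   Observable.intervalAtFixedRate(10.millis).throttleLast(1.second)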
|
alexandru/monifu
|
monix-reactive/shared/src/main/scala/monix/reactive/internal/operators/ThrottleLastObservable.scala
|
Scala
|
apache-2.0
| 3,800
|
package org.ensime.indexer
import java.sql.SQLException
import akka.actor._
import akka.event.slf4j.SLF4JLogging
import org.apache.commons.vfs2._
import org.ensime.api._
import org.ensime.indexer.DatabaseService._
import org.ensime.util.file._
//import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent._
import scala.concurrent.duration._
/**
* Provides methods to perform ENSIME-specific indexing tasks,
* receives events that require an index update, and provides
* searches against the index.
*
* We have an H2 database for storing relational information
* and Lucene for advanced indexing.
*/
class SearchService(
config: EnsimeConfig,
resolver: SourceResolver
)(
implicit
actorSystem: ActorSystem,
vfs: EnsimeVFS
) extends ClassfileIndexer
with ClassfileListener
with SLF4JLogging {
private val QUERY_TIMEOUT = 30 seconds
private val version = "1.0"
private val index = new IndexService(config.cacheDir / ("index-" + version))
private val db = new DatabaseService(config.cacheDir / ("sql-" + version))
implicit val workerEC = actorSystem.dispatchers.lookup("akka.search-service-dispatcher")
// FIXME: apologies, this is pretty messy. We should move to an
// actor based system for all of the persisting instead of this
// hybrid approach.
/**
* Indexes everything, making best endeavours to avoid scanning what
* is unnecessary (e.g. we already know that a jar or classfile has
* been indexed).
*
* @return the number of rows (removed, indexed) from the database.
*/
def refresh(): Future[(Int, Int)] = {
def scan(f: FileObject) = f.findFiles(EnsimeVFS.ClassfileSelector) match {
case null => Nil
case res => res.toList
}
// it is much faster during startup to obtain the full list of
// known files from the DB then and check against the disk, than
// check each file against DatabaseService.outOfDate
def findStaleFileChecks(checks: Seq[FileCheck]): List[FileCheck] = {
log.info("findStaleFileChecks")
val jarUris = config.allJars.map(vfs.vfile).map(_.getName.getURI)
for {
check <- checks
name = check.file.getName.getURI
if !check.file.exists || check.changed ||
(!name.endsWith(".class") && !jarUris(name))
} yield check
}.toList
// delete the stale data before adding anything new
// returns number of rows deleted
def deleteReferences(checks: List[FileCheck]): Future[Int] = {
log.info(s"removing ${checks.size} stale files from the index")
deleteInBatches(checks.map(_.file))
}
// a snapshot of everything that we want to index
def findBases(): Set[FileObject] = {
log.info("findBases")
config.modules.flatMap {
case (name, m) =>
m.targetDirs.flatMap { d => scan(vfs.vfile(d)) } :::
m.testTargetDirs.flatMap { d => scan(vfs.vfile(d)) } :::
m.compileJars.map(vfs.vfile) ::: m.testJars.map(vfs.vfile)
}
}.toSet ++ config.javaLibs.map(vfs.vfile)
// if the filecheck is outdated, extract symbols and then persist.
def indexBase(base: FileObject): Future[Option[Int]] =
db.outOfDate(base).flatMap { outOfDate =>
if (!outOfDate) Future.successful(None)
else base match {
case classfile if classfile.getName.getExtension == "class" =>
// too noisy to log
val check = FileCheck(classfile)
val symbols = Future {
blocking {
extractSymbols(classfile, classfile)
}
}
symbols.flatMap(persist(check, _))
case jar =>
log.debug(s"indexing $jar")
val check = FileCheck(jar)
val symbols = Future {
blocking {
scan(vfs.vjar(jar)) flatMap (extractSymbols(jar, _))
}
}
symbols.flatMap(persist(check, _))
}
}
// index all the given bases and return number of rows written
def indexBases(bases: Set[FileObject]): Future[Int] = {
log.info("indexBases")
Future.sequence(bases.map(indexBase)).map(_.flatten.sum)
}
def commitIndex(): Future[Unit] = Future {
blocking {
log.debug("committing index to disk...")
index.commit()
log.debug("...done committing index")
}
}
// chain together all the future tasks
for {
checks <- db.knownFiles()
stale = findStaleFileChecks(checks)
deletes <- deleteReferences(stale)
bases = findBases()
added <- indexBases(bases)
_ <- commitIndex()
} yield (deletes, added)
}
def refreshResolver(): Unit = resolver.update()
def persist(check: FileCheck, symbols: List[FqnSymbol]): Future[Option[Int]] = {
val iwork = Future { blocking { index.persist(check, symbols) } }
val dwork = db.persist(check, symbols)
iwork.flatMap { _ => dwork }
}
private val blacklist = Set("sun/", "sunw/", "com/sun/")
private val ignore = Set("$$anon$", "$$anonfun$", "$worker$")
import org.ensime.util.RichFileObject._
private def extractSymbols(container: FileObject, f: FileObject): List[FqnSymbol] = {
f.pathWithinArchive match {
case Some(relative) if blacklist.exists(relative.startsWith) => Nil
case _ =>
val name = container.getName.getURI
val path = f.getName.getURI
val (clazz, refs) = indexClassfile(f)
val depickler = new ClassfileDepickler(f)
val source = resolver.resolve(clazz.name.pack, clazz.source)
val sourceUri = source.map(_.getName.getURI)
// TODO: other types of visibility when we get more sophisticated
if (clazz.access != Public) Nil
else FqnSymbol(None, name, path, clazz.name.fqnString, None, None, sourceUri, clazz.source.line) ::
clazz.methods.toList.filter(_.access == Public).map { method =>
val descriptor = method.descriptor.descriptorString
FqnSymbol(None, name, path, method.name.fqnString, Some(descriptor), None, sourceUri, method.line)
} ::: clazz.fields.toList.filter(_.access == Public).map { field =>
val internal = field.clazz.internalString
FqnSymbol(None, name, path, field.name.fqnString, None, Some(internal), sourceUri, clazz.source.line)
} ::: depickler.getTypeAliases.toList.filter(_.access == Public).map { rawType =>
FqnSymbol(None, name, path, rawType.fqnString, None, None, sourceUri, None)
}
}
}.filterNot(sym => ignore.exists(sym.fqn.contains))
// TODO: provide context (user's current module and main/test)
/** free-form search for classes */
def searchClasses(query: String, max: Int): List[FqnSymbol] = {
val fqns = index.searchClasses(query, max)
Await.result(db.find(fqns), QUERY_TIMEOUT) take max
}
/** free-form search for classes and methods */
def searchClassesMethods(terms: List[String], max: Int): List[FqnSymbol] = {
val fqns = index.searchClassesMethods(terms, max)
Await.result(db.find(fqns), QUERY_TIMEOUT) take max
}
/** only for exact fqns */
def findUnique(fqn: String): Option[FqnSymbol] = Await.result(db.find(fqn), QUERY_TIMEOUT)
/* DELETE then INSERT in H2 is ridiculously slow, so we put all modifications
* into a blocking queue and dedicate a thread to block on draining the queue.
* This has the effect that we always react to a single change on disc but we
* will work through backlogs in bulk.
*
* We always do a DELETE, even if the entries are new, but only INSERT if
* the list of symbols is non-empty.
*/
val backlogActor = actorSystem.actorOf(Props(new IndexingQueueActor(this)), "ClassfileIndexer")
// deletion in both Lucene and H2 is really slow, batching helps
def deleteInBatches(
files: List[FileObject],
batchSize: Int = 1000
): Future[Int] = {
val removing = files.grouped(batchSize).map(delete)
Future.sequence(removing).map(_.sum)
}
// returns number of rows removed
def delete(files: List[FileObject]): Future[Int] = {
// this doesn't speed up Lucene deletes, but it means that we
// don't wait for Lucene before starting the H2 deletions.
val iwork = Future { blocking { index.remove(files) } }
val dwork = db.removeFiles(files)
iwork.flatMap(_ => dwork)
}
def classfileAdded(f: FileObject): Unit = classfileChanged(f)
def classfileRemoved(f: FileObject): Unit = {
backlogActor ! FileUpdate(f, Nil)
}
def classfileChanged(f: FileObject): Unit = Future {
val symbols = extractSymbols(f, f)
backlogActor ! FileUpdate(f, symbols)
}(workerEC)
def shutdown(): Future[Unit] = {
db.shutdown()
}
}
case class FileUpdate(
fileObject: FileObject,
symbolList: List[FqnSymbol]
)
class IndexingQueueActor(searchService: SearchService) extends Actor with ActorLogging {
import context.system
import scala.concurrent.duration._
case object Process
// De-dupes files that have been updated since we were last told to
// index them. No need to aggregate values: the latest wins.
var todo = Map.empty[FileObject, List[FqnSymbol]]
// debounce and give us a chance to batch (which is *much* faster)
var worker: Cancellable = _
private def debounce(): Unit = {
Option(worker).foreach(_.cancel())
import context.dispatcher
worker = system.scheduler.scheduleOnce(5 seconds, self, Process)
}
override def receive: Receive = {
case FileUpdate(fo, syms) =>
todo += fo -> syms
debounce()
case Process if todo.isEmpty => // nothing to do
case Process =>
val (batch, remaining) = todo.splitAt(500)
todo = remaining
log.debug(s"Indexing ${batch.size} classfiles")
// blocks the actor thread intentionally -- this is real work
// and the underlying I/O implementation is blocking. Give me an
// async SQL database and we can talk...
//
// UPDATE 2015-10-24: Slick no longer blocking. This can most likely be fixed now
// (in whatever way the author of the above comment imagined)
{
import searchService.workerEC // TODO: check the right EC is used here
// batch the deletion (big bottleneck)
Await.ready(
for {
_ <- searchService.delete(batch.keys.toList)
_ <- Future.sequence(
// opportunity to do more batching here
batch.collect {
case (file, syms) if syms.nonEmpty =>
searchService.persist(FileCheck(file), syms)
}
)
} yield (),
Duration.Inf
)
}
}
}
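// --- Illustrative sketch (not part of the original SearchService.scala above) ---
// A minimal, self-contained version of the debounce-and-batch pattern that
// IndexingQueueActor uses: de-dupe updates by key, restart a timer on every
// message, and only flush the accumulated batch once the input goes quiet.
// All names here (DebouncedBatcher, Update, Flush) and the 5 second delay are
// invented for the example; they are not part of the ENSIME codebase.
import akka.actor.{ Actor, Cancellable, Props }
import scala.concurrent.duration._
object DebouncedBatcher {
  final case class Update(file: String, symbols: List[String])
  case object Flush
  def props(processBatch: Map[String, List[String]] => Unit): Props =
    Props(new DebouncedBatcher(processBatch))
}
class DebouncedBatcher(processBatch: Map[String, List[String]] => Unit) extends Actor {
  import DebouncedBatcher._
  import context.dispatcher
  private var pending = Map.empty[String, List[String]] // latest update per file wins
  private var timer: Option[Cancellable] = None
  override def receive: Receive = {
    case Update(file, symbols) =>
      pending += file -> symbols
      timer.foreach(_.cancel()) // restart the quiet-period timer on every message
      timer = Some(context.system.scheduler.scheduleOnce(5.seconds, self, Flush))
    case Flush if pending.isEmpty => // nothing buffered
    case Flush =>
      val batch = pending
      pending = Map.empty
      processBatch(batch) // one bulk write instead of many small ones
  }
}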
|
eddsteel/ensime
|
core/src/main/scala/org/ensime/indexer/SearchService.scala
|
Scala
|
gpl-3.0
| 10,698
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.businessactivities
import connectors.DataCacheConnector
import controllers.actions.SuccessfulAuthAction
import models.Country
import models.businessactivities._
import org.jsoup.Jsoup
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.PrivateMethodTester
import play.api.test.Helpers._
import uk.gov.hmrc.http.cache.client.CacheMap
import utils.{AmlsSpec, AuthorisedFixture, AutoCompleteServiceMocks}
import views.html.businessactivities.who_is_your_accountant_is_uk_address
import scala.concurrent.Future
class WhoIsYourAccountantIsUkControllerSpec extends AmlsSpec with PrivateMethodTester {
trait Fixture extends AuthorisedFixture with AutoCompleteServiceMocks{
self =>
val request = addToken(authRequest)
lazy val view = app.injector.instanceOf[who_is_your_accountant_is_uk_address]
val controller = new WhoIsYourAccountantIsUkController (
dataCacheConnector = mock[DataCacheConnector],
authAction = SuccessfulAuthAction,
autoCompleteService = mockAutoComplete,
ds = commonDependencies,
cc = mockMcc,
who_is_your_accountant_is_uk_address = view
)
}
val emptyCache = CacheMap("", Map.empty)
val mockCacheMap = mock[CacheMap]
"InvolvedInOtherController" when {
"get is called" must {
"show the who is your accountant page when there is no existing data" in new Fixture {
when(controller.dataCacheConnector.fetch[BusinessActivities](any(), any())(any(), any()))
.thenReturn(Future.successful(None))
val result = controller.get()(request)
status(result) must be(OK)
val page = Jsoup.parse(contentAsString(result))
page.getElementById("isUK-true").hasAttr("checked") must be(false)
page.getElementById("isUK-false").hasAttr("checked") must be(false)
}
"show the who is your accountant page when there is existing data" in new Fixture {
when(controller.dataCacheConnector.fetch[BusinessActivities](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(BusinessActivities(
whoIsYourAccountant = Some(WhoIsYourAccountant(
Some(WhoIsYourAccountantName("testname", Some("testtradingName"))),
Some(WhoIsYourAccountantIsUk(false)),
Some(NonUkAccountantsAddress("line1","line2",Some("line3"),Some("line4"), Country("Albania", "AL")))
))
))))
val result = controller.get()(request)
status(result) must be(OK)
val page = Jsoup.parse(contentAsString(result))
page.getElementById("isUK-true").hasAttr("checked") must be(false)
page.getElementById("isUK-false").hasAttr("checked") must be(true)
}
}
"post is called" when {
"given invalid data" must {
"respond with BAD_REQUEST" in new Fixture {
when(controller.dataCacheConnector.fetch[BusinessActivities](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(BusinessActivities(
whoIsYourAccountant = Some(WhoIsYourAccountant(
Some(WhoIsYourAccountantName("testname", Some("testtradingName"))),
None,
None
))
))))
val result = controller.post()(request)
status(result) must be(BAD_REQUEST)
}
}
"edit is true" must {
"respond with SEE_OTHER and redirect to the SummaryController" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"isUK" -> "true"
)
when(controller.dataCacheConnector.fetch[BusinessActivities](any(), any())(any(), any()))
.thenReturn(Future.successful(None))
when(controller.dataCacheConnector.save[BusinessActivities](any(), any(), any())(any(), any()))
.thenReturn(Future.successful(emptyCache))
val result = controller.post(true)(newRequest)
status(result) must be(SEE_OTHER)
redirectLocation(result) must be(Some(routes.WhoIsYourAccountantUkAddressController.get(true).url))
}
}
"edit is false" must {
"respond with SEE_OTHER and redirect to the WhoIsYourAccountantUkAddressController" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"isUK" -> "true"
)
when(controller.dataCacheConnector.fetch[BusinessActivities](any(), any())(any(), any()))
.thenReturn(Future.successful(None))
when(controller.dataCacheConnector.save[BusinessActivities](any(), any(), any())(any(), any()))
.thenReturn(Future.successful(emptyCache))
val result = controller.post(false)(newRequest)
status(result) must be(SEE_OTHER)
redirectLocation(result) must be(Some(routes.WhoIsYourAccountantUkAddressController.get().url))
}
"respond with SEE_OTHER and redirect to the WhoIsYourAccountantNonUkAddressController" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"isUK" -> "false"
)
when(controller.dataCacheConnector.fetch[BusinessActivities](any(), any())(any(), any()))
.thenReturn(Future.successful(None))
when(controller.dataCacheConnector.save[BusinessActivities](any(), any(), any())(any(), any()))
.thenReturn(Future.successful(emptyCache))
val result = controller.post(false)(newRequest)
status(result) must be(SEE_OTHER)
redirectLocation(result) must be(Some(routes.WhoIsYourAccountantNonUkAddressController.get().url))
}
}
}
}
}
|
hmrc/amls-frontend
|
test/controllers/businessactivities/WhoIsYourAccountantIsUkControllerSpec.scala
|
Scala
|
apache-2.0
| 6,265
|
package at.forsyte.apalache.tla.typecheck.passes
import at.forsyte.apalache.tla.imp.src.SourceStore
import at.forsyte.apalache.tla.typecheck.{TlaType1, TypeCheckerListener}
import at.forsyte.apalache.tla.typecheck.etc.{EtcRef, ExactRef}
import com.typesafe.scalalogging.LazyLogging
class LoggingTypeCheckerListener(sourceStore: SourceStore) extends TypeCheckerListener with LazyLogging {
/**
* This method is called when the type checker finds the type of an expression.
*
* @param sourceRef a reference to the source expression; this reference must be exact
* @param monotype its monotype
*/
override def onTypeFound(sourceRef: ExactRef, monotype: TlaType1): Unit = {
}
/**
* This method is called when the type checker finds a type error.
*
* @param sourceRef a reference to the source expression; this one does not have to be exact
* @param message the error description
*/
override def onTypeError(sourceRef: EtcRef, message: String): Unit = {
val locStr = sourceStore.find(sourceRef.tlaId).map(_.toString).getOrElse("Unknown location")
logger.error("[%s]: %s".format(locStr, message))
}
}
|
konnov/apalache
|
tla-types/src/main/scala/at/forsyte/apalache/tla/typecheck/passes/LoggingTypeCheckerListener.scala
|
Scala
|
apache-2.0
| 1,160
|
package org.jetbrains.plugins.scala.codeInsight
import com.intellij.codeInsight.editorActions.moveLeftRight.MoveElementLeftRightHandler
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScArgumentExprList
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameterClause
/**
* @author Nikolay.Tropin
*/
class ScalaMoveLeftRightHandler extends MoveElementLeftRightHandler {
override def getMovableSubElements(element: PsiElement): Array[PsiElement] = {
element match {
case argList: ScArgumentExprList =>
argList.exprs.toArray
case paramClause: ScParameterClause =>
paramClause.parameters.toArray
case _ =>
Array.empty
}
}
}
|
katejim/intellij-scala
|
src/org/jetbrains/plugins/scala/codeInsight/ScalaMoveLeftRightHandler.scala
|
Scala
|
apache-2.0
| 740
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s
import java.util.concurrent.TimeUnit
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.{PYSPARK_DRIVER_PYTHON, PYSPARK_PYTHON}
import org.apache.spark.internal.config.ConfigBuilder
private[spark] object Config extends Logging {
val DECOMMISSION_SCRIPT =
ConfigBuilder("spark.kubernetes.decommission.script")
.doc("The location of the script to use for graceful decommissioning")
.version("3.2.0")
.stringConf
.createWithDefault("/opt/decom.sh")
val KUBERNETES_CONTEXT =
ConfigBuilder("spark.kubernetes.context")
.doc("The desired context from your K8S config file used to configure the K8S " +
"client for interacting with the cluster. Useful if your config file has " +
"multiple clusters or user identities defined. The client library used " +
"locates the config file via the KUBECONFIG environment variable or by defaulting " +
"to .kube/config under your home directory. If not specified then your current " +
"context is used. You can always override specific aspects of the config file " +
"provided configuration using other Spark on K8S configuration options.")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_DRIVER_MASTER_URL =
ConfigBuilder("spark.kubernetes.driver.master")
.doc("The internal Kubernetes master (API server) address " +
"to be used for driver to request executors.")
.version("3.0.0")
.stringConf
.createWithDefault(KUBERNETES_MASTER_INTERNAL_URL)
val KUBERNETES_DRIVER_SERVICE_DELETE_ON_TERMINATION =
ConfigBuilder("spark.kubernetes.driver.service.deleteOnTermination")
.doc("If true, driver service will be deleted on Spark application termination. " +
"If false, it will be cleaned up when the driver pod is deletion.")
.version("3.2.0")
.booleanConf
.createWithDefault(true)
val KUBERNETES_DRIVER_OWN_PVC =
ConfigBuilder("spark.kubernetes.driver.ownPersistentVolumeClaim")
.doc("If true, driver pod becomes the owner of on-demand persistent volume claims " +
"instead of the executor pods")
.version("3.2.0")
.booleanConf
.createWithDefault(false)
val KUBERNETES_DRIVER_REUSE_PVC =
ConfigBuilder("spark.kubernetes.driver.reusePersistentVolumeClaim")
.doc("If true, driver pod tries to reuse driver-owned on-demand persistent volume claims " +
"of the deleted executor pods if exists. This can be useful to reduce executor pod " +
"creation delay by skipping persistent volume creations. Note that a pod in " +
"`Terminating` pod status is not a deleted pod by definition and its resources " +
"including persistent volume claims are not reusable yet. Spark will create new " +
"persistent volume claims when there exists no reusable one. In other words, the total " +
"number of persistent volume claims can be larger than the number of running executors " +
s"sometimes. This config requires ${KUBERNETES_DRIVER_OWN_PVC.key}=true.")
.version("3.2.0")
.booleanConf
.createWithDefault(false)
val KUBERNETES_NAMESPACE =
ConfigBuilder("spark.kubernetes.namespace")
.doc("The namespace that will be used for running the driver and executor pods.")
.version("2.3.0")
.stringConf
.createWithDefault("default")
val CONTAINER_IMAGE =
ConfigBuilder("spark.kubernetes.container.image")
.doc("Container image to use for Spark containers. Individual container types " +
"(e.g. driver or executor) can also be configured to use different images if desired, " +
"by setting the container type-specific image name.")
.version("2.3.0")
.stringConf
.createOptional
val DRIVER_CONTAINER_IMAGE =
ConfigBuilder("spark.kubernetes.driver.container.image")
.doc("Container image to use for the driver.")
.version("2.3.0")
.fallbackConf(CONTAINER_IMAGE)
val EXECUTOR_CONTAINER_IMAGE =
ConfigBuilder("spark.kubernetes.executor.container.image")
.doc("Container image to use for the executors.")
.version("2.3.0")
.fallbackConf(CONTAINER_IMAGE)
val CONTAINER_IMAGE_PULL_POLICY =
ConfigBuilder("spark.kubernetes.container.image.pullPolicy")
.doc("Kubernetes image pull policy. Valid values are Always, Never, and IfNotPresent.")
.version("2.3.0")
.stringConf
.checkValues(Set("Always", "Never", "IfNotPresent"))
.createWithDefault("IfNotPresent")
val IMAGE_PULL_SECRETS =
ConfigBuilder("spark.kubernetes.container.image.pullSecrets")
.doc("Comma separated list of the Kubernetes secrets used " +
"to access private image registries.")
.version("2.4.0")
.stringConf
.toSequence
.createWithDefault(Nil)
val CONFIG_MAP_MAXSIZE =
ConfigBuilder("spark.kubernetes.configMap.maxSize")
.doc("Max size limit for a config map. This is configurable as per" +
" https://etcd.io/docs/v3.4.0/dev-guide/limit/ on k8s server end.")
.version("3.1.0")
.longConf
.createWithDefault(1572864) // 1.5 MiB
val KUBERNETES_AUTH_DRIVER_CONF_PREFIX = "spark.kubernetes.authenticate.driver"
val KUBERNETES_AUTH_EXECUTOR_CONF_PREFIX = "spark.kubernetes.authenticate.executor"
val KUBERNETES_AUTH_DRIVER_MOUNTED_CONF_PREFIX = "spark.kubernetes.authenticate.driver.mounted"
val KUBERNETES_AUTH_CLIENT_MODE_PREFIX = "spark.kubernetes.authenticate"
val OAUTH_TOKEN_CONF_SUFFIX = "oauthToken"
val OAUTH_TOKEN_FILE_CONF_SUFFIX = "oauthTokenFile"
val CLIENT_KEY_FILE_CONF_SUFFIX = "clientKeyFile"
val CLIENT_CERT_FILE_CONF_SUFFIX = "clientCertFile"
val CA_CERT_FILE_CONF_SUFFIX = "caCertFile"
val SUBMISSION_CLIENT_REQUEST_TIMEOUT =
ConfigBuilder("spark.kubernetes.submission.requestTimeout")
.doc("request timeout to be used in milliseconds for starting the driver")
.version("3.0.0")
.intConf
.createWithDefault(10000)
val SUBMISSION_CLIENT_CONNECTION_TIMEOUT =
ConfigBuilder("spark.kubernetes.submission.connectionTimeout")
.doc("connection timeout to be used in milliseconds for starting the driver")
.version("3.0.0")
.intConf
.createWithDefault(10000)
val DRIVER_CLIENT_REQUEST_TIMEOUT =
ConfigBuilder("spark.kubernetes.driver.requestTimeout")
.doc("request timeout to be used in milliseconds for driver to request executors")
.version("3.0.0")
.intConf
.createWithDefault(10000)
val DRIVER_CLIENT_CONNECTION_TIMEOUT =
ConfigBuilder("spark.kubernetes.driver.connectionTimeout")
.doc("connection timeout to be used in milliseconds for driver to request executors")
.version("3.0.0")
.intConf
.createWithDefault(10000)
val KUBERNETES_DRIVER_SERVICE_ACCOUNT_NAME =
ConfigBuilder(s"$KUBERNETES_AUTH_DRIVER_CONF_PREFIX.serviceAccountName")
.doc("Service account that is used when running the driver pod. The driver pod uses " +
"this service account when requesting executor pods from the API server. If specific " +
"credentials are given for the driver pod to use, the driver will favor " +
"using those credentials instead.")
.version("2.3.0")
.stringConf
.createOptional
val KUBERNETES_EXECUTOR_SERVICE_ACCOUNT_NAME =
ConfigBuilder(s"$KUBERNETES_AUTH_EXECUTOR_CONF_PREFIX.serviceAccountName")
.doc("Service account that is used when running the executor pod." +
"If this parameter is not setup, the fallback logic will use the driver's service account.")
.version("3.1.0")
.stringConf
.createOptional
val KUBERNETES_DRIVER_LIMIT_CORES =
ConfigBuilder("spark.kubernetes.driver.limit.cores")
.doc("Specify the hard cpu limit for the driver pod")
.version("2.3.0")
.stringConf
.createOptional
val KUBERNETES_DRIVER_REQUEST_CORES =
ConfigBuilder("spark.kubernetes.driver.request.cores")
.doc("Specify the cpu request for the driver pod")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_DRIVER_SUBMIT_CHECK =
ConfigBuilder("spark.kubernetes.submitInDriver")
.internal()
.version("2.4.0")
.booleanConf
.createWithDefault(false)
val KUBERNETES_EXECUTOR_LIMIT_CORES =
ConfigBuilder("spark.kubernetes.executor.limit.cores")
.doc("Specify the hard cpu limit for each executor pod")
.version("2.3.0")
.stringConf
.createOptional
val KUBERNETES_EXECUTOR_SCHEDULER_NAME =
ConfigBuilder("spark.kubernetes.executor.scheduler.name")
.doc("Specify the scheduler name for each executor pod")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_DRIVER_SCHEDULER_NAME =
ConfigBuilder("spark.kubernetes.driver.scheduler.name")
.doc("Specify the scheduler name for driver pod")
.version("3.3.0")
.stringConf
.createOptional
val KUBERNETES_EXECUTOR_REQUEST_CORES =
ConfigBuilder("spark.kubernetes.executor.request.cores")
.doc("Specify the cpu request for each executor pod")
.version("2.4.0")
.stringConf
.createOptional
val KUBERNETES_DRIVER_POD_NAME =
ConfigBuilder("spark.kubernetes.driver.pod.name")
.doc("Name of the driver pod.")
.version("2.3.0")
.stringConf
.createOptional
// For testing only.
val KUBERNETES_DRIVER_POD_NAME_PREFIX =
ConfigBuilder("spark.kubernetes.driver.resourceNamePrefix")
.internal()
.version("3.0.0")
.stringConf
.createOptional
// the definition of a label in DNS (RFC 1123).
private val dns1123LabelFmt = "[a-z0-9]([-a-z0-9]*[a-z0-9])?"
private val podConfValidator = (s"^$dns1123LabelFmt(\\.$dns1123LabelFmt)*$$").r.pattern
// The possible longest executor name would be "$prefix-exec-${Int.MaxValue}"
private def isValidExecutorPodNamePrefix(prefix: String): Boolean = {
// 6 is length of '-exec-'
val reservedLen = Int.MaxValue.toString.length + 6
val validLength = prefix.length + reservedLen <= KUBERNETES_DNSNAME_MAX_LENGTH
validLength && podConfValidator.matcher(prefix).matches()
}
val KUBERNETES_EXECUTOR_POD_NAME_PREFIX =
ConfigBuilder("spark.kubernetes.executor.podNamePrefix")
.doc("Prefix to use in front of the executor pod names. It must conform the rules defined " +
"by the Kubernetes <a href=\\"https://kubernetes.io/docs/concepts/overview/" +
"working-with-objects/names/#dns-label-names\\">DNS Label Names</a>. " +
"The prefix will be used to generate executor pod names in the form of " +
"<code>$podNamePrefix-exec-$id</code>, where the `id` is a positive int value, " +
"so the length of the `podNamePrefix` needs to be <= 47(= 63 - 10 - 6).")
.version("2.3.0")
.stringConf
.checkValue(isValidExecutorPodNamePrefix,
"must conform https://kubernetes.io/docs/concepts/overview/working-with-objects" +
"/names/#dns-label-names and the value length <= 47")
.createOptional
val KUBERNETES_EXECUTOR_DISABLE_CONFIGMAP =
ConfigBuilder("spark.kubernetes.executor.disableConfigMap")
.doc("If true, disable ConfigMap creation for executors.")
.version("3.2.0")
.booleanConf
.createWithDefault(false)
val KUBERNETES_DRIVER_POD_FEATURE_STEPS =
ConfigBuilder("spark.kubernetes.driver.pod.featureSteps")
.doc("Class names of an extra driver pod feature step implementing " +
"KubernetesFeatureConfigStep. This is a developer API. Comma separated. " +
"Runs after all of Spark internal feature steps.")
.version("3.2.0")
.stringConf
.toSequence
.createWithDefault(Nil)
val KUBERNETES_EXECUTOR_POD_FEATURE_STEPS =
ConfigBuilder("spark.kubernetes.executor.pod.featureSteps")
.doc("Class name of an extra executor pod feature step implementing " +
"KubernetesFeatureConfigStep. This is a developer API. Comma separated. " +
"Runs after all of Spark internal feature steps.")
.version("3.2.0")
.stringConf
.toSequence
.createWithDefault(Nil)
val KUBERNETES_EXECUTOR_DECOMMISSION_LABEL =
ConfigBuilder("spark.kubernetes.executor.decommmissionLabel")
.doc("Label to apply to a pod which is being decommissioned." +
" Designed for use with pod disruption budgets and similar mechanism" +
" such as pod-deletion-cost.")
.version("3.3.0")
.stringConf
.createOptional
val KUBERNETES_EXECUTOR_DECOMMISSION_LABEL_VALUE =
ConfigBuilder("spark.kubernetes.executor.decommmissionLabelValue")
.doc("Label value to apply to a pod which is being decommissioned." +
" Designed for use with pod disruption budgets and similar mechanism" +
" such as pod-deletion-cost.")
.version("3.3.0")
.stringConf
.createOptional
val KUBERNETES_ALLOCATION_PODS_ALLOCATOR =
ConfigBuilder("spark.kubernetes.allocation.pods.allocator")
.doc("Allocator to use for pods. Possible values are direct (the default) and statefulset " +
", or a full class name of a class implementing AbstractPodsAllocator. " +
"Future version may add Job or replicaset. This is a developer API and may change " +
"or be removed at anytime.")
.version("3.3.0")
.stringConf
.createWithDefault("direct")
val KUBERNETES_ALLOCATION_BATCH_SIZE =
ConfigBuilder("spark.kubernetes.allocation.batch.size")
.doc("Number of pods to launch at once in each round of executor allocation.")
.version("2.3.0")
.intConf
.checkValue(value => value > 0, "Allocation batch size should be a positive integer")
.createWithDefault(5)
val KUBERNETES_ALLOCATION_BATCH_DELAY =
ConfigBuilder("spark.kubernetes.allocation.batch.delay")
.doc("Time to wait between each round of executor allocation.")
.version("2.3.0")
.timeConf(TimeUnit.MILLISECONDS)
.checkValue(value => value > 0, "Allocation batch delay must be a positive time value.")
.createWithDefaultString("1s")
val KUBERNETES_ALLOCATION_DRIVER_READINESS_TIMEOUT =
ConfigBuilder("spark.kubernetes.allocation.driver.readinessTimeout")
.doc("Time to wait for driver pod to get ready before creating executor pods. This wait " +
"only happens on application start. If timeout happens, executor pods will still be " +
"created.")
.version("3.1.3")
.timeConf(TimeUnit.SECONDS)
.checkValue(value => value > 0, "Allocation driver readiness timeout must be a positive "
+ "time value.")
.createWithDefaultString("1s")
val KUBERNETES_ALLOCATION_EXECUTOR_TIMEOUT =
ConfigBuilder("spark.kubernetes.allocation.executor.timeout")
.doc("Time to wait before a newly created executor POD request, which does not reached " +
"the POD pending state yet, considered timedout and will be deleted.")
.version("3.1.0")
.timeConf(TimeUnit.MILLISECONDS)
.checkValue(value => value > 0, "Allocation executor timeout must be a positive time value.")
.createWithDefaultString("600s")
val KUBERNETES_EXECUTOR_LOST_REASON_CHECK_MAX_ATTEMPTS =
ConfigBuilder("spark.kubernetes.executor.lostCheck.maxAttempts")
.doc("Maximum number of attempts allowed for checking the reason of an executor loss " +
"before it is assumed that the executor failed.")
.version("2.3.0")
.intConf
.checkValue(value => value > 0, "Maximum attempts of checks of executor lost reason " +
"must be a positive integer")
.createWithDefault(10)
val WAIT_FOR_APP_COMPLETION =
ConfigBuilder("spark.kubernetes.submission.waitAppCompletion")
.doc("In cluster mode, whether to wait for the application to finish before exiting the " +
"launcher process.")
.version("2.3.0")
.booleanConf
.createWithDefault(true)
val REPORT_INTERVAL =
ConfigBuilder("spark.kubernetes.report.interval")
.doc("Interval between reports of the current app status in cluster mode.")
.version("2.3.0")
.timeConf(TimeUnit.MILLISECONDS)
.checkValue(interval => interval > 0, s"Logging interval must be a positive time value.")
.createWithDefaultString("1s")
val KUBERNETES_EXECUTOR_API_POLLING_INTERVAL =
ConfigBuilder("spark.kubernetes.executor.apiPollingInterval")
.doc("Interval between polls against the Kubernetes API server to inspect the " +
"state of executors.")
.version("2.4.0")
.timeConf(TimeUnit.MILLISECONDS)
.checkValue(interval => interval > 0, s"API server polling interval must be a" +
" positive time value.")
.createWithDefaultString("30s")
val KUBERNETES_EXECUTOR_API_POLLING_WITH_RESOURCE_VERSION =
ConfigBuilder("spark.kubernetes.executor.enablePollingWithResourceVersion")
.doc("If true, `resourceVersion` is set with `0` during invoking pod listing APIs " +
"in order to allow API Server-side caching. This should be used carefully.")
.version("3.3.0")
.booleanConf
.createWithDefault(false)
val KUBERNETES_EXECUTOR_EVENT_PROCESSING_INTERVAL =
ConfigBuilder("spark.kubernetes.executor.eventProcessingInterval")
.doc("Interval between successive inspection of executor events sent from the" +
" Kubernetes API.")
.version("2.4.0")
.timeConf(TimeUnit.MILLISECONDS)
.checkValue(interval => interval > 0, s"Event processing interval must be a positive" +
" time value.")
.createWithDefaultString("1s")
val MEMORY_OVERHEAD_FACTOR =
ConfigBuilder("spark.kubernetes.memoryOverheadFactor")
.doc("This sets the Memory Overhead Factor that will allocate memory to non-JVM jobs " +
"which in the case of JVM tasks will default to 0.10 and 0.40 for non-JVM jobs")
.version("2.4.0")
.doubleConf
.checkValue(mem_overhead => mem_overhead >= 0 && mem_overhead < 1,
"Ensure that memory overhead is a double between 0 --> 1.0")
.createWithDefault(0.1)
val PYSPARK_MAJOR_PYTHON_VERSION =
ConfigBuilder("spark.kubernetes.pyspark.pythonVersion")
.doc(
s"(Deprecated since Spark 3.1, please set '${PYSPARK_PYTHON.key}' and " +
s"'${PYSPARK_DRIVER_PYTHON.key}' configurations or $ENV_PYSPARK_PYTHON and " +
s"$ENV_PYSPARK_DRIVER_PYTHON environment variables instead.)")
.version("2.4.0")
.stringConf
.checkValue("3" == _,
"Python 2 was dropped from Spark 3.1, and only 3 is allowed in " +
"this configuration. Note that this configuration was deprecated in Spark 3.1. " +
s"Please set '${PYSPARK_PYTHON.key}' and '${PYSPARK_DRIVER_PYTHON.key}' " +
s"configurations or $ENV_PYSPARK_PYTHON and $ENV_PYSPARK_DRIVER_PYTHON environment " +
"variables instead.")
.createOptional
val KUBERNETES_KERBEROS_KRB5_FILE =
ConfigBuilder("spark.kubernetes.kerberos.krb5.path")
.doc("Specify the local location of the krb5.conf file to be mounted on the driver " +
"and executors for Kerberos. Note: The KDC defined needs to be " +
"visible from inside the containers ")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_KERBEROS_KRB5_CONFIG_MAP =
ConfigBuilder("spark.kubernetes.kerberos.krb5.configMapName")
.doc("Specify the name of the ConfigMap, containing the krb5.conf file, to be mounted " +
"on the driver and executors for Kerberos. Note: The KDC defined" +
"needs to be visible from inside the containers ")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_HADOOP_CONF_CONFIG_MAP =
ConfigBuilder("spark.kubernetes.hadoop.configMapName")
.doc("Specify the name of the ConfigMap, containing the HADOOP_CONF_DIR files, " +
"to be mounted on the driver and executors for custom Hadoop configuration.")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_KERBEROS_DT_SECRET_NAME =
ConfigBuilder("spark.kubernetes.kerberos.tokenSecret.name")
.doc("Specify the name of the secret where your existing delegation tokens are stored. " +
"This removes the need for the job user to provide any keytab for launching a job")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_KERBEROS_DT_SECRET_ITEM_KEY =
ConfigBuilder("spark.kubernetes.kerberos.tokenSecret.itemKey")
.doc("Specify the item key of the data where your existing delegation tokens are stored. " +
"This removes the need for the job user to provide any keytab for launching a job")
.version("3.0.0")
.stringConf
.createOptional
val APP_RESOURCE_TYPE =
ConfigBuilder("spark.kubernetes.resource.type")
.internal()
.doc("This sets the resource type internally")
.version("2.4.1")
.stringConf
.checkValues(Set(APP_RESOURCE_TYPE_JAVA, APP_RESOURCE_TYPE_PYTHON, APP_RESOURCE_TYPE_R))
.createOptional
val KUBERNETES_LOCAL_DIRS_TMPFS =
ConfigBuilder("spark.kubernetes.local.dirs.tmpfs")
.doc("If set to true then emptyDir volumes created to back SPARK_LOCAL_DIRS will have " +
"their medium set to Memory so that they will be created as tmpfs (i.e. RAM) backed " +
"volumes. This may improve performance but scratch space usage will count towards " +
"your pods memory limit so you may wish to request more memory.")
.version("3.0.0")
.booleanConf
.createWithDefault(false)
val KUBERNETES_DRIVER_PODTEMPLATE_FILE =
ConfigBuilder("spark.kubernetes.driver.podTemplateFile")
.doc("File containing a template pod spec for the driver")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_EXECUTOR_PODTEMPLATE_FILE =
ConfigBuilder("spark.kubernetes.executor.podTemplateFile")
.doc("File containing a template pod spec for executors")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_DRIVER_PODTEMPLATE_CONTAINER_NAME =
ConfigBuilder("spark.kubernetes.driver.podTemplateContainerName")
.doc("container name to be used as a basis for the driver in the given pod template")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_EXECUTOR_PODTEMPLATE_CONTAINER_NAME =
ConfigBuilder("spark.kubernetes.executor.podTemplateContainerName")
.doc("container name to be used as a basis for executors in the given pod template")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_AUTH_SUBMISSION_CONF_PREFIX =
"spark.kubernetes.authenticate.submission"
val KUBERNETES_TRUST_CERTIFICATES =
ConfigBuilder("spark.kubernetes.trust.certificates")
.doc("If set to true then client can submit to kubernetes cluster only with token")
.version("3.2.0")
.booleanConf
.createWithDefault(false)
val KUBERNETES_NODE_SELECTOR_PREFIX = "spark.kubernetes.node.selector."
val KUBERNETES_DRIVER_NODE_SELECTOR_PREFIX = "spark.kubernetes.driver.node.selector."
val KUBERNETES_EXECUTOR_NODE_SELECTOR_PREFIX = "spark.kubernetes.executor.node.selector."
val KUBERNETES_DELETE_EXECUTORS =
ConfigBuilder("spark.kubernetes.executor.deleteOnTermination")
.doc("If set to false then executor pods will not be deleted in case " +
"of failure or normal termination.")
.version("3.0.0")
.booleanConf
.createWithDefault(true)
val KUBERNETES_DYN_ALLOC_KILL_GRACE_PERIOD =
ConfigBuilder("spark.kubernetes.dynamicAllocation.deleteGracePeriod")
.doc("How long to wait for executors to shut down gracefully before a forceful kill.")
.version("3.0.0")
.timeConf(TimeUnit.MILLISECONDS)
.createWithDefaultString("5s")
val KUBERNETES_SUBMIT_GRACE_PERIOD =
ConfigBuilder("spark.kubernetes.appKillPodDeletionGracePeriod")
.doc("Time to wait for graceful deletion of Spark pods when spark-submit" +
" is used for killing an application.")
.version("3.0.0")
.timeConf(TimeUnit.SECONDS)
.createOptional
val KUBERNETES_FILE_UPLOAD_PATH =
ConfigBuilder("spark.kubernetes.file.upload.path")
.doc("Hadoop compatible file system path where files from the local file system " +
"will be uploaded to in cluster mode.")
.version("3.0.0")
.stringConf
.createOptional
val KUBERNETES_EXECUTOR_CHECK_ALL_CONTAINERS =
ConfigBuilder("spark.kubernetes.executor.checkAllContainers")
.doc("If set to true, all containers in the executor pod will be checked when reporting" +
"executor status.")
.version("3.1.0")
.booleanConf
.createWithDefault(false)
val KUBERNETES_EXECUTOR_MISSING_POD_DETECT_DELTA =
ConfigBuilder("spark.kubernetes.executor.missingPodDetectDelta")
.doc("When a registered executor's POD is missing from the Kubernetes API server's polled " +
"list of PODs then this delta time is taken as the accepted time difference between the " +
"registration time and the time of the polling. After this time the POD is considered " +
"missing from the cluster and the executor will be removed.")
.version("3.1.1")
.timeConf(TimeUnit.MILLISECONDS)
.checkValue(delay => delay > 0, "delay must be a positive time value")
.createWithDefaultString("30s")
val KUBERNETES_MAX_PENDING_PODS =
ConfigBuilder("spark.kubernetes.allocation.maxPendingPods")
.doc("Maximum number of pending PODs allowed during executor allocation for this " +
"application. Those newly requested executors which are unknown by Kubernetes yet are " +
"also counted into this limit as they will change into pending PODs by time. " +
"This limit is independent from the resource profiles as it limits the sum of all " +
"allocation for all the used resource profiles.")
.version("3.2.0")
.intConf
.checkValue(value => value > 0, "Maximum number of pending pods should be a positive integer")
.createWithDefault(Int.MaxValue)
val KUBERNETES_DRIVER_LABEL_PREFIX = "spark.kubernetes.driver.label."
val KUBERNETES_DRIVER_ANNOTATION_PREFIX = "spark.kubernetes.driver.annotation."
val KUBERNETES_DRIVER_SERVICE_ANNOTATION_PREFIX = "spark.kubernetes.driver.service.annotation."
val KUBERNETES_DRIVER_SECRETS_PREFIX = "spark.kubernetes.driver.secrets."
val KUBERNETES_DRIVER_SECRET_KEY_REF_PREFIX = "spark.kubernetes.driver.secretKeyRef."
val KUBERNETES_DRIVER_VOLUMES_PREFIX = "spark.kubernetes.driver.volumes."
val KUBERNETES_EXECUTOR_LABEL_PREFIX = "spark.kubernetes.executor.label."
val KUBERNETES_EXECUTOR_ANNOTATION_PREFIX = "spark.kubernetes.executor.annotation."
val KUBERNETES_EXECUTOR_SECRETS_PREFIX = "spark.kubernetes.executor.secrets."
val KUBERNETES_EXECUTOR_SECRET_KEY_REF_PREFIX = "spark.kubernetes.executor.secretKeyRef."
val KUBERNETES_EXECUTOR_VOLUMES_PREFIX = "spark.kubernetes.executor.volumes."
val KUBERNETES_VOLUMES_HOSTPATH_TYPE = "hostPath"
val KUBERNETES_VOLUMES_PVC_TYPE = "persistentVolumeClaim"
val KUBERNETES_VOLUMES_EMPTYDIR_TYPE = "emptyDir"
val KUBERNETES_VOLUMES_NFS_TYPE = "nfs"
val KUBERNETES_VOLUMES_MOUNT_PATH_KEY = "mount.path"
val KUBERNETES_VOLUMES_MOUNT_SUBPATH_KEY = "mount.subPath"
val KUBERNETES_VOLUMES_MOUNT_READONLY_KEY = "mount.readOnly"
val KUBERNETES_VOLUMES_OPTIONS_PATH_KEY = "options.path"
val KUBERNETES_VOLUMES_OPTIONS_CLAIM_NAME_KEY = "options.claimName"
val KUBERNETES_VOLUMES_OPTIONS_CLAIM_STORAGE_CLASS_KEY = "options.storageClass"
val KUBERNETES_VOLUMES_OPTIONS_MEDIUM_KEY = "options.medium"
val KUBERNETES_VOLUMES_OPTIONS_SIZE_LIMIT_KEY = "options.sizeLimit"
val KUBERNETES_VOLUMES_OPTIONS_SERVER_KEY = "options.server"
val KUBERNETES_DRIVER_ENV_PREFIX = "spark.kubernetes.driverEnv."
val KUBERNETES_DNSNAME_MAX_LENGTH = 63
}
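// --- Illustrative sketch (not part of the original Config.scala above) ---
// Shows, in isolation, the arithmetic behind the "length <= 47" rule for
// spark.kubernetes.executor.podNamePrefix: Kubernetes DNS labels are capped at
// 63 characters, and the generated name "$prefix-exec-$id" must leave room for
// "-exec-" (6 chars) plus the largest possible id, Int.MaxValue (10 digits).
// The object and method names below are invented for the example.
object PodNamePrefixCheckSketch {
  private val dns1123Label = "[a-z0-9]([-a-z0-9]*[a-z0-9])?"
  private val pattern = s"^$dns1123Label(\\.$dns1123Label)*$$".r.pattern
  def isValid(prefix: String): Boolean = {
    val reserved = "-exec-".length + Int.MaxValue.toString.length // 6 + 10 = 16
    prefix.length + reserved <= 63 && pattern.matcher(prefix).matches()
  }
  def main(args: Array[String]): Unit = {
    println(isValid("my-spark-app")) // true: 12 + 16 <= 63 and a valid DNS-1123 label
    println(isValid("a" * 48))       // false: 48 + 16 > 63
    println(isValid("Bad_Prefix"))   // false: uppercase and '_' are not allowed
  }
}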
|
nchammas/spark
|
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
|
Scala
|
apache-2.0
| 28,977
|
package gitbucket.core.service
import gitbucket.core.model._
import gitbucket.core.model.Profile._
import org.specs2.mutable.Specification
class PullRequestServiceSpec extends Specification with ServiceSpecBase with PullRequestService with IssuesService {
def swap(r: (Issue, PullRequest)) = (r._2 -> r._1)
"PullRequestService.getPullRequestFromBranch" should {
"""
|return the pull request if an open pull request exists from `branch` to `defaultBranch`.
|return the pull request if an open pull request exists from `branch` to another branch.
|return None if all pull requests are closed""".stripMargin.trim in { withTestDB { implicit se =>
generateNewUserWithDBRepository("user1", "repo1")
generateNewUserWithDBRepository("user1", "repo2")
generateNewUserWithDBRepository("user2", "repo1")
generateNewPullRequest("user1/repo1/master", "user1/repo1/head2") // not target branch
generateNewPullRequest("user1/repo1/head1", "user1/repo1/master") // not target branch ( swap from, to )
generateNewPullRequest("user1/repo1/master", "user2/repo1/head1") // othre user
generateNewPullRequest("user1/repo1/master", "user1/repo2/head1") // othre repository
val r1 = swap(generateNewPullRequest("user1/repo1/master2", "user1/repo1/head1"))
val r2 = swap(generateNewPullRequest("user1/repo1/master", "user1/repo1/head1"))
val r3 = swap(generateNewPullRequest("user1/repo1/master4", "user1/repo1/head1"))
getPullRequestFromBranch("user1", "repo1", "head1", "master") must_== Some(r2)
updateClosed("user1", "repo1", r2._1.issueId, true)
getPullRequestFromBranch("user1", "repo1", "head1", "master").get must beOneOf(r1, r2)
updateClosed("user1", "repo1", r1._1.issueId, true)
updateClosed("user1", "repo1", r3._1.issueId, true)
getPullRequestFromBranch("user1", "repo1", "head1", "master") must beNone
} }
}
}
|
doron123/gitbucket
|
src/test/scala/gitbucket/core/service/PullRequestServiceSpec.scala
|
Scala
|
apache-2.0
| 1,926
|
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qsu
import slamdata.Predef.{Map => SMap, _}
import quasar.contrib.iota._
import quasar.contrib.matryoshka.safe
import quasar.fp._
import quasar.qscript.{
construction,
Center,
Hole,
LeftSide,
LeftSide3,
RightSide,
RightSide3
}
import quasar.qscript.RecFreeS._
import quasar.qsu.{QScriptUniform => QSU}
import cats.{Monoid, MonoidK}
import cats.instances.map._
import matryoshka.BirecursiveT
import matryoshka.data.free._
import matryoshka.patterns.CoEnv
import scalaz.syntax.bind._
import scalaz.syntax.equal._
import shims.monoidToScalaz
/** Coalesces adjacent mappable regions of a single root. */
final class CoalesceUnaryMappable[T[_[_]]: BirecursiveT] private () extends QSUTTypes[T] {
import QSUGraph.Extractors._
import MappableRegion.MaximalUnary
private val mf = construction.Func[T]
private implicit val vertsMonoid: Monoid[QSUVerts[T]] = MonoidK[SMap[Symbol, ?]].algebra
def apply(graph: QSUGraph): QSUGraph = {
val coalesced = graph rewrite {
case g @ MaximalUnary(src, fm) if g.root =/= src.root =>
g.overwriteAtRoot(QScriptUniform.Map(src.root, fm.asRec))
case g @ AutoJoin2(left, right, combine) =>
val nodes = mapNodes(List(left, right))
if (nodes.isEmpty)
g
else {
val (l, lf) = nodes.getOrElse(0, (left.root, mf.Hole))
val (r, rf) = nodes.getOrElse(1, (right.root, mf.Hole))
val cmb = combine flatMap {
case LeftSide => lf >> mf.LeftSide
case RightSide => rf >> mf.RightSide
}
g.overwriteAtRoot(QSU.AutoJoin2(l, r, cmb))
}
case g @ AutoJoin3(left, center, right, combine) =>
val nodes = mapNodes(List(left, center, right))
if (nodes.isEmpty)
g
else {
val (l, lf) = nodes.getOrElse(0, (left.root, mf.Hole))
val (c, cf) = nodes.getOrElse(1, (center.root, mf.Hole))
val (r, rf) = nodes.getOrElse(2, (right.root, mf.Hole))
val cmb = combine flatMap {
case LeftSide3 => lf >> mf.LeftSide3
case Center => cf >> mf.Center
case RightSide3 => rf >> mf.RightSide3
}
g.overwriteAtRoot(QSU.AutoJoin3(l, c, r, cmb))
}
}
// Make all coalesced FreeMaps strict, eliminating all lazy nodes that may refer to a
// QSUGraph due to how `MaximumUnary` works. This is necessary to prevent loops in
// QSUGraph#reverseIndex when we attempt to compute `Free#hashCode`.
val strictVerts = coalesced foldMapDown { g =>
g.vertices(g.root) match {
case QScriptUniform.Map(src, fm) =>
val strictFm =
safe.transCata(fm.linearize)((ft: CoEnv[Hole, MapFunc, FreeMap]) => ft)
SMap(g.root -> QScriptUniform.Map(src, strictFm.asRec))
case other => SMap(g.root -> other)
}
}
QSUGraph(coalesced.root, strictVerts)
}
def mapNodes(gs: List[QSUGraph]): SMap[Int, (Symbol, FreeMap)] =
gs.zipWithIndex.foldLeft(SMap[Int, (Symbol, FreeMap)]()) {
case (acc, (Map(s, fm), i)) => acc.updated(i, (s.root, fm.linearize))
case (acc, _) => acc
}
}
object CoalesceUnaryMappable {
def apply[T[_[_]]: BirecursiveT](graph: QSUGraph[T]): QSUGraph[T] =
(new CoalesceUnaryMappable[T]).apply(graph)
}
|
quasar-analytics/quasar
|
qsu/src/main/scala/quasar/qsu/CoalesceUnaryMappable.scala
|
Scala
|
apache-2.0
| 3,920
|
package com.twitter.finatra.httpclient.test
import com.twitter.finagle.Service
import com.twitter.finagle.http.Method._
import com.twitter.finagle.http.{Method, Request, Response}
import com.twitter.finatra.utils.Resettable
import com.twitter.inject.app.Banner
import com.twitter.inject.{Injector, Logging}
import com.twitter.util.Future
import java.lang.annotation.{Annotation => JavaAnnotation}
import scala.collection._
import scala.collection.mutable.ArrayBuffer
object InMemoryHttpService {
def fromInjector[Ann <: JavaAnnotation : Manifest](injector: Injector): InMemoryHttpService = {
injector.instance[Service[Request, Response], Ann].asInstanceOf[InMemoryHttpService]
}
}
class InMemoryHttpService
extends Service[Request, Response]
with Resettable
with Logging {
private val responseMap = mutable.Map[RequestKey, ArrayBuffer[ResponseWithExpectedBody]]().withDefaultValue(ArrayBuffer())
val recordedRequests = ArrayBuffer[Request]()
var overrideResponse: Option[Response] = None
/* Service Apply */
def apply(request: Request): Future[Response] = synchronized {
recordedRequests += request
Future {
overrideResponse getOrElse lookupResponse(request)
}
}
/* Mock Support */
def mockGet(path: String, andReturn: Response, sticky: Boolean = false) {
mock(Get, path, andReturn, sticky)
}
def mockPost(path: String, withBody: String = null, andReturn: Response, sticky: Boolean = false) {
mock(Post, path, andReturn, sticky, Option(withBody))
}
def mockPut(path: String, withBody: String = null, andReturn: Response, sticky: Boolean = false) {
mock(Put, path, andReturn, sticky, Option(withBody))
}
def mock(method: Method, path: String, andReturn: Response, sticky: Boolean, withBody: Option[String] = None): Unit = {
val existing = responseMap(RequestKey(method, path))
val newEntry = ResponseWithExpectedBody(andReturn, withBody, sticky = sticky)
responseMap(
RequestKey(method, path)) = existing :+ newEntry
}
override def reset() {
responseMap.clear()
recordedRequests.clear()
overrideResponse = None
}
def printRequests() {
Banner.banner("Requests")
for (request <- recordedRequests) {
println(request + " " + request.contentString)
}
}
/* Private */
private def lookupResponse(request: Request): Response = {
val key = RequestKey(request.method, request.uri)
val existing = responseMap(key)
if (existing.isEmpty) {
throw new Exception(key + " not mocked in\\n" + responseMap.mkString("\\n"))
}
if (request.method != Method.Get && hasExpectedBodies(existing))
lookupPostResponseWithBody(request, existing)
else if (existing.head.sticky)
existing.head.response
else
existing.remove(0).response
}
private def hasExpectedBodies(existing: ArrayBuffer[ResponseWithExpectedBody]): Boolean = {
existing exists {_.expectedBody.isDefined}
}
private def lookupPostResponseWithBody(request: Request, existing: ArrayBuffer[ResponseWithExpectedBody]): Response = {
val found = existing find {_.expectedBody == Some(request.contentString)} getOrElse {
throw new PostRequestWithIncorrectBodyException(request + " with expected body not mocked")
}
if (!found.sticky) {
existing -= found
}
found.response
}
case class RequestKey(
method: Method,
path: String)
case class ResponseWithExpectedBody(
response: Response,
expectedBody: Option[String],
sticky: Boolean)
}
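// --- Illustrative usage sketch (not part of the original InMemoryHttpService.scala above) ---
// How a test might drive the in-memory service: mock a GET, send a request through
// the Service, and inspect the recorded traffic. The path and body are invented;
// in real Finatra tests the instance is usually obtained via fromInjector rather
// than constructed directly.
import com.twitter.finagle.http.{ Request, Response }
import com.twitter.util.Await
object InMemoryHttpServiceUsageSketch {
  def main(args: Array[String]): Unit = {
    val httpService = new InMemoryHttpService
    val ok = Response()
    ok.contentString = """{"id": 123}"""
    httpService.mockGet("/users/123", andReturn = ok, sticky = true) // sticky: survives repeated calls
    val response = Await.result(httpService(Request("/users/123")))
    assert(response.contentString.contains("123"))
    assert(httpService.recordedRequests.size == 1)
    httpService.reset() // clears mocked responses and recorded requests
  }
}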
|
syamantm/finatra
|
httpclient/src/test/scala/com/twitter/finatra/httpclient/test/InMemoryHttpService.scala
|
Scala
|
apache-2.0
| 3,520
|
package akka.ainterface.remote
import akka.actor.{Actor, ActorLogging, ActorRef, Props, Terminated}
import akka.ainterface.local.LocalNode
import akka.ainterface.remote.RemoteHubProtocol.{Accepted, Tick}
import akka.ainterface.remote.handshake.HandshakeInitiator
import akka.ainterface.util.actor.DynamicSupervisorProtocol.ChildRef
import akka.ainterface.util.actor.{ActorPathUtil, DynamicSupervisorProtocol}
import akka.ainterface.util.collection.BiMap
import akka.ainterface.{ControlMessage, NodeName, Target}
import akka.pattern.ask
import akka.util.Timeout
import scala.collection.immutable.Queue
import scala.concurrent.duration._
import scala.util.{Failure, Success}
/**
* A hub of remote nodes.
* Currently, all messages to remote nodes pass over this actor.
*/
private final class RemoteHub(localNode: LocalNode,
auth: Auth,
nodeStatusEventBus: RemoteNodeStatusEventBus,
nodeSupSup: ActorRef,
initiatorSupervisor: ActorRef,
tcpClient: ActorRef,
epmdClient: ActorRef) extends Actor with ActorLogging {
private[remote] var nodes: BiMap[NodeName, ActorRef] = BiMap.empty
private[remote] var pendings: Map[NodeName, (Option[ActorRef], Queue[ControlMessage])] = Map.empty
implicit private[this] val timeout = Timeout(10.seconds)
private[this] def initiatorProps(remoteNodeName: NodeName): Props = {
HandshakeInitiator.props(
localNode.nodeName,
remoteNodeName,
auth.getCookie(remoteNodeName),
tcpClient,
epmdClient,
isHidden
)
}
private[this] def startNode(nodeName: NodeName): Unit = {
val suffix = ActorPathUtil.orUUID(nodeName.asString)
val name = s"remote-node-supervisor-$suffix"
val props = RemoteNodeSupervisor.props(localNode, nodeName, nodeStatusEventBus)
nodeSupSup ! DynamicSupervisorProtocol.StartChild(self, props, name, nodeName)
}
override def receive: Receive = {
case RemoteHubProtocol.Send(event) =>
val target = event.target match {
case Target.Pid(pid) => NodeName(pid.nodeName)
case Target.Name(_, host) => host
}
nodes.get(target) match {
case Some(node) => node ! RemoteNodeProtocol.Send(event)
case None =>
pendings.get(target) match {
case Some((acceptor, buffer)) =>
pendings = pendings.updated(target, (acceptor, buffer.enqueue(event)))
case None =>
startNode(target)
pendings = pendings.updated(target, (None, Queue(event)))
}
}
case Accepted(nodeName, acceptor) =>
nodes.get(nodeName) match {
case Some(node) => node ! RemoteNodeProtocol.Accepted(acceptor)
case None =>
pendings.get(nodeName) match {
case Some((None, buffer)) =>
pendings = pendings.updated(nodeName, (Some(acceptor), buffer))
case Some((Some(old), buffer)) =>
log.debug("Accepted multiple connections from the same node. {}", nodeName)
context.stop(old)
pendings = pendings.updated(nodeName, (Some(acceptor), buffer))
case None =>
startNode(nodeName)
pendings = pendings.updated(nodeName, (Some(acceptor), Queue.empty))
}
}
case DynamicSupervisorProtocol.ChildRef(node, Some(id: NodeName)) =>
pendings.get(id) match {
case None => log.error("Received an unexpected child. {}", id)
case Some((acceptor, buffer)) =>
context.watch(node)
buffer.foreach { message =>
node ! RemoteNodeProtocol.Send(message)
}
acceptor match {
case Some(a) => node ! RemoteNodeProtocol.Accepted(a)
case None =>
import context.dispatcher
val start = DynamicSupervisorProtocol.StartChild(initiatorProps(id))
(initiatorSupervisor ? start).mapTo[ChildRef].onComplete {
case Success(ChildRef(initiator, _)) =>
node ! RemoteNodeProtocol.Initiate(initiator)
case Failure(_) => context.stop(node)
}
}
pendings = pendings - id
nodes = nodes + (id -> node)
}
case Tick =>
nodes.values.foreach(_ ! RemoteNodeProtocol.Tick)
case Terminated(ref) =>
nodes = (nodes.inverse - ref).inverse
}
}
private[remote] object RemoteHub {
def props(localNode: LocalNode,
auth: Auth,
nodeStatusEventBus: RemoteNodeStatusEventBus,
nodeSupSup: ActorRef,
initiatorSupervisor: ActorRef,
tcpClient: ActorRef,
epmdClient: ActorRef): Props = {
Props(
classOf[RemoteHub],
localNode,
auth,
nodeStatusEventBus,
nodeSupSup,
initiatorSupervisor,
tcpClient,
epmdClient
)
}
}
private[ainterface] object RemoteHubProtocol {
private[remote] final case class Accepted(name: NodeName, handshakeAcceptor: ActorRef)
private[remote] case object Tick
final case class Send(event: ControlMessage)
}
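// --- Illustrative sketch (not part of the original RemoteHub.scala above) ---
// A stripped-down version of the "buffer until the worker is ready" pattern used by
// RemoteHub: messages for an unknown destination are queued, a worker is started
// lazily on the first message, and the backlog is drained in order once the worker
// reports in. All names below (Deliver, WorkerReady, BufferingHub, startWorker) are
// invented for the example.
import akka.actor.{ Actor, ActorRef }
import scala.collection.immutable.Queue
final case class Deliver(destination: String, payload: String)
final case class WorkerReady(destination: String, worker: ActorRef)
abstract class BufferingHub extends Actor {
  // How a worker is started (e.g. context.actorOf of some Props) is left abstract here.
  protected def startWorker(destination: String): Unit
  private var workers = Map.empty[String, ActorRef]
  private var pending = Map.empty[String, Queue[String]]
  override def receive: Receive = {
    case Deliver(dest, payload) =>
      workers.get(dest) match {
        case Some(worker) => worker ! payload // fast path: worker already known
        case None =>
          if (!pending.contains(dest)) startWorker(dest) // first message triggers the start
          pending = pending.updated(dest, pending.getOrElse(dest, Queue.empty).enqueue(payload))
      }
    case WorkerReady(dest, worker) =>
      pending.getOrElse(dest, Queue.empty).foreach(worker ! _) // drain the backlog in order
      pending -= dest
      workers += dest -> worker
  }
}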
|
ainterface/ainterface
|
ainterface/src/main/scala/akka/ainterface/remote/RemoteHub.scala
|
Scala
|
apache-2.0
| 5,216
|
import scala.concurrent.duration._
class DurationTest {
//Should be triggering DurationInt implicit class inside duration package object
val timeout: FiniteDuration = /*start*/1 millis span/*end*///This is valid, millis and span highlighted in red.
}
//DurationConversions.spanConvert.R
|
ilinum/intellij-scala
|
testdata/typeInference/bugs5/SCL4938.scala
|
Scala
|
apache-2.0
| 291
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.sql.{Date, Timestamp}
import java.text.SimpleDateFormat
import java.util.Locale
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.unsafe.types.CalendarInterval
class DateFunctionsSuite extends QueryTest with SharedSQLContext {
import testImplicits._
test("function current_date") {
val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
val d0 = DateTimeUtils.millisToDays(System.currentTimeMillis())
val d1 = DateTimeUtils.fromJavaDate(df1.select(current_date()).collect().head.getDate(0))
val d2 = DateTimeUtils.fromJavaDate(
sql("""SELECT CURRENT_DATE()""").collect().head.getDate(0))
val d3 = DateTimeUtils.millisToDays(System.currentTimeMillis())
assert(d0 <= d1 && d1 <= d2 && d2 <= d3 && d3 - d0 <= 1)
}
test("function current_timestamp and now") {
val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
checkAnswer(df1.select(countDistinct(current_timestamp())), Row(1))
// Execution in one query should return the same value
checkAnswer(sql("""SELECT CURRENT_TIMESTAMP() = CURRENT_TIMESTAMP()"""), Row(true))
// Current timestamp should return the current timestamp ...
val before = System.currentTimeMillis
val got = sql("SELECT CURRENT_TIMESTAMP()").collect().head.getTimestamp(0).getTime
val after = System.currentTimeMillis
assert(got >= before && got <= after)
// Now alias
checkAnswer(sql("""SELECT CURRENT_TIMESTAMP() = NOW()"""), Row(true))
}
val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
val sdfDate = new SimpleDateFormat("yyyy-MM-dd", Locale.US)
val d = new Date(sdf.parse("2015-04-08 13:10:15").getTime)
val ts = new Timestamp(sdf.parse("2013-04-08 13:10:15").getTime)
test("timestamp comparison with date strings") {
val df = Seq(
(1, Timestamp.valueOf("2015-01-01 00:00:00")),
(2, Timestamp.valueOf("2014-01-01 00:00:00"))).toDF("i", "t")
checkAnswer(
df.select("t").filter($"t" <= "2014-06-01"),
Row(Timestamp.valueOf("2014-01-01 00:00:00")) :: Nil)
checkAnswer(
df.select("t").filter($"t" >= "2014-06-01"),
Row(Timestamp.valueOf("2015-01-01 00:00:00")) :: Nil)
}
test("date comparison with date strings") {
val df = Seq(
(1, Date.valueOf("2015-01-01")),
(2, Date.valueOf("2014-01-01"))).toDF("i", "t")
checkAnswer(
df.select("t").filter($"t" <= "2014-06-01"),
Row(Date.valueOf("2014-01-01")) :: Nil)
checkAnswer(
df.select("t").filter($"t" >= "2015"),
Row(Date.valueOf("2015-01-01")) :: Nil)
}
test("date format") {
val df = Seq((d, sdf.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(date_format($"a", "y"), date_format($"b", "y"), date_format($"c", "y")),
Row("2015", "2015", "2013"))
checkAnswer(
df.selectExpr("date_format(a, 'y')", "date_format(b, 'y')", "date_format(c, 'y')"),
Row("2015", "2015", "2013"))
}
test("year") {
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(year($"a"), year($"b"), year($"c")),
Row(2015, 2015, 2013))
checkAnswer(
df.selectExpr("year(a)", "year(b)", "year(c)"),
Row(2015, 2015, 2013))
}
test("quarter") {
val ts = new Timestamp(sdf.parse("2013-11-08 13:10:15").getTime)
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(quarter($"a"), quarter($"b"), quarter($"c")),
Row(2, 2, 4))
checkAnswer(
df.selectExpr("quarter(a)", "quarter(b)", "quarter(c)"),
Row(2, 2, 4))
}
test("month") {
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(month($"a"), month($"b"), month($"c")),
Row(4, 4, 4))
checkAnswer(
df.selectExpr("month(a)", "month(b)", "month(c)"),
Row(4, 4, 4))
}
test("dayofmonth") {
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(dayofmonth($"a"), dayofmonth($"b"), dayofmonth($"c")),
Row(8, 8, 8))
checkAnswer(
df.selectExpr("day(a)", "day(b)", "dayofmonth(c)"),
Row(8, 8, 8))
}
test("dayofyear") {
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(dayofyear($"a"), dayofyear($"b"), dayofyear($"c")),
Row(98, 98, 98))
checkAnswer(
df.selectExpr("dayofyear(a)", "dayofyear(b)", "dayofyear(c)"),
Row(98, 98, 98))
}
test("hour") {
val df = Seq((d, sdf.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(hour($"a"), hour($"b"), hour($"c")),
Row(0, 13, 13))
checkAnswer(
df.selectExpr("hour(a)", "hour(b)", "hour(c)"),
Row(0, 13, 13))
}
test("minute") {
val df = Seq((d, sdf.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(minute($"a"), minute($"b"), minute($"c")),
Row(0, 10, 10))
checkAnswer(
df.selectExpr("minute(a)", "minute(b)", "minute(c)"),
Row(0, 10, 10))
}
test("second") {
val df = Seq((d, sdf.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(second($"a"), second($"b"), second($"c")),
Row(0, 15, 15))
checkAnswer(
df.selectExpr("second(a)", "second(b)", "second(c)"),
Row(0, 15, 15))
}
test("weekofyear") {
val df = Seq((d, sdfDate.format(d), ts)).toDF("a", "b", "c")
checkAnswer(
df.select(weekofyear($"a"), weekofyear($"b"), weekofyear($"c")),
Row(15, 15, 15))
checkAnswer(
df.selectExpr("weekofyear(a)", "weekofyear(b)", "weekofyear(c)"),
Row(15, 15, 15))
}
test("function date_add") {
val st1 = "2015-06-01 12:34:56"
val st2 = "2015-06-02 12:34:56"
val t1 = Timestamp.valueOf(st1)
val t2 = Timestamp.valueOf(st2)
val s1 = "2015-06-01"
val s2 = "2015-06-02"
val d1 = Date.valueOf(s1)
val d2 = Date.valueOf(s2)
val df = Seq((t1, d1, s1, st1), (t2, d2, s2, st2)).toDF("t", "d", "s", "ss")
checkAnswer(
df.select(date_add(col("d"), 1)),
Seq(Row(Date.valueOf("2015-06-02")), Row(Date.valueOf("2015-06-03"))))
checkAnswer(
df.select(date_add(col("t"), 3)),
Seq(Row(Date.valueOf("2015-06-04")), Row(Date.valueOf("2015-06-05"))))
checkAnswer(
df.select(date_add(col("s"), 5)),
Seq(Row(Date.valueOf("2015-06-06")), Row(Date.valueOf("2015-06-07"))))
checkAnswer(
df.select(date_add(col("ss"), 7)),
Seq(Row(Date.valueOf("2015-06-08")), Row(Date.valueOf("2015-06-09"))))
checkAnswer(df.selectExpr("DATE_ADD(null, 1)"), Seq(Row(null), Row(null)))
checkAnswer(
df.selectExpr("""DATE_ADD(d, 1)"""),
Seq(Row(Date.valueOf("2015-06-02")), Row(Date.valueOf("2015-06-03"))))
}
test("function date_sub") {
val st1 = "2015-06-01 12:34:56"
val st2 = "2015-06-02 12:34:56"
val t1 = Timestamp.valueOf(st1)
val t2 = Timestamp.valueOf(st2)
val s1 = "2015-06-01"
val s2 = "2015-06-02"
val d1 = Date.valueOf(s1)
val d2 = Date.valueOf(s2)
val df = Seq((t1, d1, s1, st1), (t2, d2, s2, st2)).toDF("t", "d", "s", "ss")
checkAnswer(
df.select(date_sub(col("d"), 1)),
Seq(Row(Date.valueOf("2015-05-31")), Row(Date.valueOf("2015-06-01"))))
checkAnswer(
df.select(date_sub(col("t"), 1)),
Seq(Row(Date.valueOf("2015-05-31")), Row(Date.valueOf("2015-06-01"))))
checkAnswer(
df.select(date_sub(col("s"), 1)),
Seq(Row(Date.valueOf("2015-05-31")), Row(Date.valueOf("2015-06-01"))))
checkAnswer(
df.select(date_sub(col("ss"), 1)),
Seq(Row(Date.valueOf("2015-05-31")), Row(Date.valueOf("2015-06-01"))))
checkAnswer(
df.select(date_sub(lit(null), 1)).limit(1), Row(null))
checkAnswer(df.selectExpr("""DATE_SUB(d, null)"""), Seq(Row(null), Row(null)))
checkAnswer(
df.selectExpr("""DATE_SUB(d, 1)"""),
Seq(Row(Date.valueOf("2015-05-31")), Row(Date.valueOf("2015-06-01"))))
}
test("time_add") {
val t1 = Timestamp.valueOf("2015-07-31 23:59:59")
val t2 = Timestamp.valueOf("2015-12-31 00:00:00")
val d1 = Date.valueOf("2015-07-31")
val d2 = Date.valueOf("2015-12-31")
val i = new CalendarInterval(2, 2000000L)
val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
checkAnswer(
df.selectExpr(s"d + $i"),
Seq(Row(Date.valueOf("2015-09-30")), Row(Date.valueOf("2016-02-29"))))
checkAnswer(
df.selectExpr(s"t + $i"),
Seq(Row(Timestamp.valueOf("2015-10-01 00:00:01")),
Row(Timestamp.valueOf("2016-02-29 00:00:02"))))
}
test("time_sub") {
val t1 = Timestamp.valueOf("2015-10-01 00:00:01")
val t2 = Timestamp.valueOf("2016-02-29 00:00:02")
val d1 = Date.valueOf("2015-09-30")
val d2 = Date.valueOf("2016-02-29")
val i = new CalendarInterval(2, 2000000L)
val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
checkAnswer(
df.selectExpr(s"d - $i"),
Seq(Row(Date.valueOf("2015-07-30")), Row(Date.valueOf("2015-12-30"))))
checkAnswer(
df.selectExpr(s"t - $i"),
Seq(Row(Timestamp.valueOf("2015-07-31 23:59:59")),
Row(Timestamp.valueOf("2015-12-31 00:00:00"))))
}
test("function add_months") {
val d1 = Date.valueOf("2015-08-31")
val d2 = Date.valueOf("2015-02-28")
val df = Seq((1, d1), (2, d2)).toDF("n", "d")
checkAnswer(
df.select(add_months(col("d"), 1)),
Seq(Row(Date.valueOf("2015-09-30")), Row(Date.valueOf("2015-03-31"))))
checkAnswer(
df.selectExpr("add_months(d, -1)"),
Seq(Row(Date.valueOf("2015-07-31")), Row(Date.valueOf("2015-01-31"))))
}
test("function months_between") {
val d1 = Date.valueOf("2015-07-31")
val d2 = Date.valueOf("2015-02-16")
val t1 = Timestamp.valueOf("2014-09-30 23:30:00")
val t2 = Timestamp.valueOf("2015-09-16 12:00:00")
val s1 = "2014-09-15 11:30:00"
val s2 = "2015-10-01 00:00:00"
val df = Seq((t1, d1, s1), (t2, d2, s2)).toDF("t", "d", "s")
checkAnswer(df.select(months_between(col("t"), col("d"))), Seq(Row(-10.0), Row(7.0)))
checkAnswer(df.selectExpr("months_between(t, s)"), Seq(Row(0.5), Row(-0.5)))
checkAnswer(df.selectExpr("months_between(t, s, true)"), Seq(Row(0.5), Row(-0.5)))
Seq(true, false).foreach { roundOff =>
checkAnswer(df.select(months_between(col("t"), col("d"), roundOff)),
Seq(Row(-10.0), Row(7.0)))
checkAnswer(df.withColumn("r", lit(false)).selectExpr("months_between(t, s, r)"),
Seq(Row(0.5), Row(-0.5)))
}
}
test("function last_day") {
val df1 = Seq((1, "2015-07-23"), (2, "2015-07-24")).toDF("i", "d")
val df2 = Seq((1, "2015-07-23 00:11:22"), (2, "2015-07-24 11:22:33")).toDF("i", "t")
checkAnswer(
df1.select(last_day(col("d"))),
Seq(Row(Date.valueOf("2015-07-31")), Row(Date.valueOf("2015-07-31"))))
checkAnswer(
df2.select(last_day(col("t"))),
Seq(Row(Date.valueOf("2015-07-31")), Row(Date.valueOf("2015-07-31"))))
}
test("function next_day") {
val df1 = Seq(("mon", "2015-07-23"), ("tuesday", "2015-07-20")).toDF("dow", "d")
val df2 = Seq(("th", "2015-07-23 00:11:22"), ("xx", "2015-07-24 11:22:33")).toDF("dow", "t")
checkAnswer(
df1.select(next_day(col("d"), "MONDAY")),
Seq(Row(Date.valueOf("2015-07-27")), Row(Date.valueOf("2015-07-27"))))
checkAnswer(
df2.select(next_day(col("t"), "th")),
Seq(Row(Date.valueOf("2015-07-30")), Row(Date.valueOf("2015-07-30"))))
}
test("function to_date") {
val d1 = Date.valueOf("2015-07-22")
val d2 = Date.valueOf("2015-07-01")
val d3 = Date.valueOf("2014-12-31")
val t1 = Timestamp.valueOf("2015-07-22 10:00:00")
val t2 = Timestamp.valueOf("2014-12-31 23:59:59")
val t3 = Timestamp.valueOf("2014-12-31 23:59:59")
val s1 = "2015-07-22 10:00:00"
val s2 = "2014-12-31"
val s3 = "2014-31-12"
val df = Seq((d1, t1, s1), (d2, t2, s2), (d3, t3, s3)).toDF("d", "t", "s")
checkAnswer(
df.select(to_date(col("t"))),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2014-12-31")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.select(to_date(col("d"))),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2015-07-01")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.select(to_date(col("s"))),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2014-12-31")), Row(null)))
checkAnswer(
df.selectExpr("to_date(t)"),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2014-12-31")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.selectExpr("to_date(d)"),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2015-07-01")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.selectExpr("to_date(s)"),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2014-12-31")), Row(null)))
// now with format
checkAnswer(
df.select(to_date(col("t"), "yyyy-MM-dd")),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2014-12-31")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.select(to_date(col("d"), "yyyy-MM-dd")),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2015-07-01")),
Row(Date.valueOf("2014-12-31"))))
checkAnswer(
df.select(to_date(col("s"), "yyyy-MM-dd")),
Seq(Row(Date.valueOf("2015-07-22")), Row(Date.valueOf("2014-12-31")), Row(null)))
// now switch format
checkAnswer(
df.select(to_date(col("s"), "yyyy-dd-MM")),
Seq(Row(null), Row(null), Row(Date.valueOf("2014-12-31"))))
// invalid format
checkAnswer(
df.select(to_date(col("s"), "yyyy-hh-MM")),
Seq(Row(null), Row(null), Row(null)))
checkAnswer(
df.select(to_date(col("s"), "yyyy-dd-aa")),
Seq(Row(null), Row(null), Row(null)))
// february
val x1 = "2016-02-29"
val x2 = "2017-02-29"
val df1 = Seq(x1, x2).toDF("x")
checkAnswer(
df1.select(to_date(col("x"))), Row(Date.valueOf("2016-02-29")) :: Row(null) :: Nil)
}
test("function trunc") {
val df = Seq(
(1, Timestamp.valueOf("2015-07-22 10:00:00")),
(2, Timestamp.valueOf("2014-12-31 00:00:00"))).toDF("i", "t")
checkAnswer(
df.select(trunc(col("t"), "YY")),
Seq(Row(Date.valueOf("2015-01-01")), Row(Date.valueOf("2014-01-01"))))
checkAnswer(
df.selectExpr("trunc(t, 'Month')"),
Seq(Row(Date.valueOf("2015-07-01")), Row(Date.valueOf("2014-12-01"))))
}
test("function date_trunc") {
val df = Seq(
(1, Timestamp.valueOf("2015-07-22 10:01:40.523")),
(2, Timestamp.valueOf("2014-12-31 05:29:06.876"))).toDF("i", "t")
checkAnswer(
df.select(date_trunc("YY", col("t"))),
Seq(Row(Timestamp.valueOf("2015-01-01 00:00:00")),
Row(Timestamp.valueOf("2014-01-01 00:00:00"))))
checkAnswer(
df.selectExpr("date_trunc('MONTH', t)"),
Seq(Row(Timestamp.valueOf("2015-07-01 00:00:00")),
Row(Timestamp.valueOf("2014-12-01 00:00:00"))))
checkAnswer(
df.selectExpr("date_trunc('DAY', t)"),
Seq(Row(Timestamp.valueOf("2015-07-22 00:00:00")),
Row(Timestamp.valueOf("2014-12-31 00:00:00"))))
checkAnswer(
df.selectExpr("date_trunc('HOUR', t)"),
Seq(Row(Timestamp.valueOf("2015-07-22 10:00:00")),
Row(Timestamp.valueOf("2014-12-31 05:00:00"))))
checkAnswer(
df.selectExpr("date_trunc('MINUTE', t)"),
Seq(Row(Timestamp.valueOf("2015-07-22 10:01:00")),
Row(Timestamp.valueOf("2014-12-31 05:29:00"))))
checkAnswer(
df.selectExpr("date_trunc('SECOND', t)"),
Seq(Row(Timestamp.valueOf("2015-07-22 10:01:40")),
Row(Timestamp.valueOf("2014-12-31 05:29:06"))))
checkAnswer(
df.selectExpr("date_trunc('WEEK', t)"),
Seq(Row(Timestamp.valueOf("2015-07-20 00:00:00")),
Row(Timestamp.valueOf("2014-12-29 00:00:00"))))
checkAnswer(
df.selectExpr("date_trunc('QUARTER', t)"),
Seq(Row(Timestamp.valueOf("2015-07-01 00:00:00")),
Row(Timestamp.valueOf("2014-10-01 00:00:00"))))
}
test("from_unixtime") {
val sdf1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
val fmt2 = "yyyy-MM-dd HH:mm:ss.SSS"
val sdf2 = new SimpleDateFormat(fmt2, Locale.US)
val fmt3 = "yy-MM-dd HH-mm-ss"
val sdf3 = new SimpleDateFormat(fmt3, Locale.US)
val df = Seq((1000, "yyyy-MM-dd HH:mm:ss.SSS"), (-1000, "yy-MM-dd HH-mm-ss")).toDF("a", "b")
checkAnswer(
df.select(from_unixtime(col("a"))),
Seq(Row(sdf1.format(new Timestamp(1000000))), Row(sdf1.format(new Timestamp(-1000000)))))
checkAnswer(
df.select(from_unixtime(col("a"), fmt2)),
Seq(Row(sdf2.format(new Timestamp(1000000))), Row(sdf2.format(new Timestamp(-1000000)))))
checkAnswer(
df.select(from_unixtime(col("a"), fmt3)),
Seq(Row(sdf3.format(new Timestamp(1000000))), Row(sdf3.format(new Timestamp(-1000000)))))
checkAnswer(
df.selectExpr("from_unixtime(a)"),
Seq(Row(sdf1.format(new Timestamp(1000000))), Row(sdf1.format(new Timestamp(-1000000)))))
checkAnswer(
df.selectExpr(s"from_unixtime(a, '$fmt2')"),
Seq(Row(sdf2.format(new Timestamp(1000000))), Row(sdf2.format(new Timestamp(-1000000)))))
checkAnswer(
df.selectExpr(s"from_unixtime(a, '$fmt3')"),
Seq(Row(sdf3.format(new Timestamp(1000000))), Row(sdf3.format(new Timestamp(-1000000)))))
}
test("unix_timestamp") {
val date1 = Date.valueOf("2015-07-24")
val date2 = Date.valueOf("2015-07-25")
val ts1 = Timestamp.valueOf("2015-07-24 10:00:00.3")
val ts2 = Timestamp.valueOf("2015-07-25 02:02:02.2")
val s1 = "2015/07/24 10:00:00.5"
val s2 = "2015/07/25 02:02:02.6"
val ss1 = "2015-07-24 10:00:00"
val ss2 = "2015-07-25 02:02:02"
val fmt = "yyyy/MM/dd HH:mm:ss.S"
val df = Seq((date1, ts1, s1, ss1), (date2, ts2, s2, ss2)).toDF("d", "ts", "s", "ss")
checkAnswer(df.select(unix_timestamp(col("ts"))), Seq(
Row(ts1.getTime / 1000L), Row(ts2.getTime / 1000L)))
checkAnswer(df.select(unix_timestamp(col("ss"))), Seq(
Row(ts1.getTime / 1000L), Row(ts2.getTime / 1000L)))
checkAnswer(df.select(unix_timestamp(col("d"), fmt)), Seq(
Row(date1.getTime / 1000L), Row(date2.getTime / 1000L)))
checkAnswer(df.select(unix_timestamp(col("s"), fmt)), Seq(
Row(ts1.getTime / 1000L), Row(ts2.getTime / 1000L)))
checkAnswer(df.selectExpr("unix_timestamp(ts)"), Seq(
Row(ts1.getTime / 1000L), Row(ts2.getTime / 1000L)))
checkAnswer(df.selectExpr("unix_timestamp(ss)"), Seq(
Row(ts1.getTime / 1000L), Row(ts2.getTime / 1000L)))
checkAnswer(df.selectExpr(s"unix_timestamp(d, '$fmt')"), Seq(
Row(date1.getTime / 1000L), Row(date2.getTime / 1000L)))
checkAnswer(df.selectExpr(s"unix_timestamp(s, '$fmt')"), Seq(
Row(ts1.getTime / 1000L), Row(ts2.getTime / 1000L)))
val x1 = "2015-07-24 10:00:00"
val x2 = "2015-25-07 02:02:02"
val x3 = "2015-07-24 25:02:02"
val x4 = "2015-24-07 26:02:02"
val ts3 = Timestamp.valueOf("2015-07-24 02:25:02")
val ts4 = Timestamp.valueOf("2015-07-24 00:10:00")
val df1 = Seq(x1, x2, x3, x4).toDF("x")
checkAnswer(df1.select(unix_timestamp(col("x"))), Seq(
Row(ts1.getTime / 1000L), Row(null), Row(null), Row(null)))
checkAnswer(df1.selectExpr("unix_timestamp(x)"), Seq(
Row(ts1.getTime / 1000L), Row(null), Row(null), Row(null)))
checkAnswer(df1.select(unix_timestamp(col("x"), "yyyy-dd-MM HH:mm:ss")), Seq(
Row(null), Row(ts2.getTime / 1000L), Row(null), Row(null)))
checkAnswer(df1.selectExpr(s"unix_timestamp(x, 'yyyy-MM-dd mm:HH:ss')"), Seq(
Row(ts4.getTime / 1000L), Row(null), Row(ts3.getTime / 1000L), Row(null)))
// invalid format
checkAnswer(df1.selectExpr(s"unix_timestamp(x, 'yyyy-MM-dd aa:HH:ss')"), Seq(
Row(null), Row(null), Row(null), Row(null)))
// february
val y1 = "2016-02-29"
val y2 = "2017-02-29"
val ts5 = Timestamp.valueOf("2016-02-29 00:00:00")
val df2 = Seq(y1, y2).toDF("y")
checkAnswer(df2.select(unix_timestamp(col("y"), "yyyy-MM-dd")), Seq(
Row(ts5.getTime / 1000L), Row(null)))
val now = sql("select unix_timestamp()").collect().head.getLong(0)
checkAnswer(sql(s"select cast ($now as timestamp)"), Row(new java.util.Date(now * 1000)))
}
test("to_unix_timestamp") {
val date1 = Date.valueOf("2015-07-24")
val date2 = Date.valueOf("2015-07-25")
val ts1 = Timestamp.valueOf("2015-07-24 10:00:00.3")
val ts2 = Timestamp.valueOf("2015-07-25 02:02:02.2")
val s1 = "2015/07/24 10:00:00.5"
val s2 = "2015/07/25 02:02:02.6"
val ss1 = "2015-07-24 10:00:00"
val ss2 = "2015-07-25 02:02:02"
val fmt = "yyyy/MM/dd HH:mm:ss.S"
val df = Seq((date1, ts1, s1, ss1), (date2, ts2, s2, ss2)).toDF("d", "ts", "s", "ss")
checkAnswer(df.selectExpr("to_unix_timestamp(ts)"), Seq(
Row(ts1.getTime / 1000L), Row(ts2.getTime / 1000L)))
checkAnswer(df.selectExpr("to_unix_timestamp(ss)"), Seq(
Row(ts1.getTime / 1000L), Row(ts2.getTime / 1000L)))
checkAnswer(df.selectExpr(s"to_unix_timestamp(d, '$fmt')"), Seq(
Row(date1.getTime / 1000L), Row(date2.getTime / 1000L)))
checkAnswer(df.selectExpr(s"to_unix_timestamp(s, '$fmt')"), Seq(
Row(ts1.getTime / 1000L), Row(ts2.getTime / 1000L)))
val x1 = "2015-07-24 10:00:00"
val x2 = "2015-25-07 02:02:02"
val x3 = "2015-07-24 25:02:02"
val x4 = "2015-24-07 26:02:02"
val ts3 = Timestamp.valueOf("2015-07-24 02:25:02")
val ts4 = Timestamp.valueOf("2015-07-24 00:10:00")
val df1 = Seq(x1, x2, x3, x4).toDF("x")
checkAnswer(df1.selectExpr("to_unix_timestamp(x)"), Seq(
Row(ts1.getTime / 1000L), Row(null), Row(null), Row(null)))
checkAnswer(df1.selectExpr(s"to_unix_timestamp(x, 'yyyy-MM-dd mm:HH:ss')"), Seq(
Row(ts4.getTime / 1000L), Row(null), Row(ts3.getTime / 1000L), Row(null)))
// february
val y1 = "2016-02-29"
val y2 = "2017-02-29"
val ts5 = Timestamp.valueOf("2016-02-29 00:00:00")
val df2 = Seq(y1, y2).toDF("y")
checkAnswer(df2.select(unix_timestamp(col("y"), "yyyy-MM-dd")), Seq(
Row(ts5.getTime / 1000L), Row(null)))
// invalid format
checkAnswer(df1.selectExpr(s"to_unix_timestamp(x, 'yyyy-MM-dd bb:HH:ss')"), Seq(
Row(null), Row(null), Row(null), Row(null)))
}
test("to_timestamp") {
val date1 = Date.valueOf("2015-07-24")
val date2 = Date.valueOf("2015-07-25")
val ts_date1 = Timestamp.valueOf("2015-07-24 00:00:00")
val ts_date2 = Timestamp.valueOf("2015-07-25 00:00:00")
val ts1 = Timestamp.valueOf("2015-07-24 10:00:00")
val ts2 = Timestamp.valueOf("2015-07-25 02:02:02")
val s1 = "2015/07/24 10:00:00.5"
val s2 = "2015/07/25 02:02:02.6"
val ss1 = "2015-07-24 10:00:00"
val ss2 = "2015-07-25 02:02:02"
val fmt = "yyyy/MM/dd HH:mm:ss.S"
val df = Seq((date1, ts1, s1, ss1), (date2, ts2, s2, ss2)).toDF("d", "ts", "s", "ss")
checkAnswer(df.select(to_timestamp(col("ss"))),
df.select(unix_timestamp(col("ss")).cast("timestamp")))
checkAnswer(df.select(to_timestamp(col("ss"))), Seq(
Row(ts1), Row(ts2)))
checkAnswer(df.select(to_timestamp(col("s"), fmt)), Seq(
Row(ts1), Row(ts2)))
checkAnswer(df.select(to_timestamp(col("ts"), fmt)), Seq(
Row(ts1), Row(ts2)))
checkAnswer(df.select(to_timestamp(col("d"), "yyyy-MM-dd")), Seq(
Row(ts_date1), Row(ts_date2)))
}
test("datediff") {
val df = Seq(
(Date.valueOf("2015-07-24"), Timestamp.valueOf("2015-07-24 01:00:00"),
"2015-07-23", "2015-07-23 03:00:00"),
(Date.valueOf("2015-07-25"), Timestamp.valueOf("2015-07-25 02:00:00"),
"2015-07-24", "2015-07-24 04:00:00")
).toDF("a", "b", "c", "d")
checkAnswer(df.select(datediff(col("a"), col("b"))), Seq(Row(0), Row(0)))
checkAnswer(df.select(datediff(col("a"), col("c"))), Seq(Row(1), Row(1)))
checkAnswer(df.select(datediff(col("d"), col("b"))), Seq(Row(-1), Row(-1)))
checkAnswer(df.selectExpr("datediff(a, d)"), Seq(Row(1), Row(1)))
}
test("from_utc_timestamp with literal zone") {
val df = Seq(
(Timestamp.valueOf("2015-07-24 00:00:00"), "2015-07-24 00:00:00"),
(Timestamp.valueOf("2015-07-25 00:00:00"), "2015-07-25 00:00:00")
).toDF("a", "b")
checkAnswer(
df.select(from_utc_timestamp(col("a"), "PST")),
Seq(
Row(Timestamp.valueOf("2015-07-23 17:00:00")),
Row(Timestamp.valueOf("2015-07-24 17:00:00"))))
checkAnswer(
df.select(from_utc_timestamp(col("b"), "PST")),
Seq(
Row(Timestamp.valueOf("2015-07-23 17:00:00")),
Row(Timestamp.valueOf("2015-07-24 17:00:00"))))
}
test("from_utc_timestamp with column zone") {
val df = Seq(
(Timestamp.valueOf("2015-07-24 00:00:00"), "2015-07-24 00:00:00", "CET"),
(Timestamp.valueOf("2015-07-25 00:00:00"), "2015-07-25 00:00:00", "PST")
).toDF("a", "b", "c")
checkAnswer(
df.select(from_utc_timestamp(col("a"), col("c"))),
Seq(
Row(Timestamp.valueOf("2015-07-24 02:00:00")),
Row(Timestamp.valueOf("2015-07-24 17:00:00"))))
checkAnswer(
df.select(from_utc_timestamp(col("b"), col("c"))),
Seq(
Row(Timestamp.valueOf("2015-07-24 02:00:00")),
Row(Timestamp.valueOf("2015-07-24 17:00:00"))))
}
test("to_utc_timestamp with literal zone") {
val df = Seq(
(Timestamp.valueOf("2015-07-24 00:00:00"), "2015-07-24 00:00:00"),
(Timestamp.valueOf("2015-07-25 00:00:00"), "2015-07-25 00:00:00")
).toDF("a", "b")
checkAnswer(
df.select(to_utc_timestamp(col("a"), "PST")),
Seq(
Row(Timestamp.valueOf("2015-07-24 07:00:00")),
Row(Timestamp.valueOf("2015-07-25 07:00:00"))))
checkAnswer(
df.select(to_utc_timestamp(col("b"), "PST")),
Seq(
Row(Timestamp.valueOf("2015-07-24 07:00:00")),
Row(Timestamp.valueOf("2015-07-25 07:00:00"))))
}
test("to_utc_timestamp with column zone") {
val df = Seq(
(Timestamp.valueOf("2015-07-24 00:00:00"), "2015-07-24 00:00:00", "PST"),
(Timestamp.valueOf("2015-07-25 00:00:00"), "2015-07-25 00:00:00", "CET")
).toDF("a", "b", "c")
checkAnswer(
df.select(to_utc_timestamp(col("a"), col("c"))),
Seq(
Row(Timestamp.valueOf("2015-07-24 07:00:00")),
Row(Timestamp.valueOf("2015-07-24 22:00:00"))))
checkAnswer(
df.select(to_utc_timestamp(col("b"), col("c"))),
Seq(
Row(Timestamp.valueOf("2015-07-24 07:00:00")),
Row(Timestamp.valueOf("2015-07-24 22:00:00"))))
}
test("SPARK-23715: to/from_utc_timestamp can retain the previous behavior") {
withSQLConf(SQLConf.REJECT_TIMEZONE_IN_STRING.key -> "false") {
checkAnswer(
sql("SELECT from_utc_timestamp('2000-10-10 00:00:00+00:00', 'GMT+1')"),
Row(Timestamp.valueOf("2000-10-09 18:00:00")))
}
}
}
|
tejasapatil/spark
|
sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
|
Scala
|
apache-2.0
| 28,003
|
package org.apache.spark.sql.types
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.unsafe.types.UTF8String
import ch.ninecode.model.BasicElement
import ch.ninecode.model.Element
/**
* User-defined type for [[ch.ninecode.model.Element]].
* HACK:
* This needs to be defined within package org.apache.spark.sql.types
 * because UserDefinedType became private with Spark 2.0.x;
* it will be reintroduced with Spark 2.1.x.
*
* @see https://issues.apache.org/jira/browse/SPARK-13326
*/
class ElementUDT extends UserDefinedType[Element]
{
    // The following type and its serialization took a lot of trial and error.
    // This is what didn't work as a data type for sup:
// this
// - leads to infinite recursion and stack overflow
// new StructType ()
// - results in illegal index (1) because something doesn't use member count
// NullType
// - works at the cluster level (only uses serialization),
// but a second "smarter" deserialization at the client always returns null in the GenericInternalRow
// stub
// - where stub is an instance of another class ElementUDT_stub extends UserDefinedType[Element]
// results in the same serialization/deserialization issues as above, but one level away
// StructType (StructField ("sup", StringType, true) :: Nil)
// - works except for hive-thriftserver
// where it doesn't handle user defined types (UDT)
// see addNonNullColumnValue in https://github.com/apache/spark/blob/master/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
// results in scala.MatchError: ch.ninecode.model.ElementUDT@7c008354 (of class ch.ninecode.model.ElementUDT)
override def sqlType: DataType = NullType
override def pyUDT: String = "org.apache.spark.sql.types.ElementUDT"
override def serialize (obj: Element): Any =
{
val output = new Array[Any](1)
output(0) = UTF8String.fromString(obj.id.toString)
val r = new GenericInternalRow(output)
r
}
override def deserialize (datum: Any): Element =
{
if (null == datum)
BasicElement(null, "")
else
datum match
{
case _: UnsafeRow => // only GenericInternalRow and InternalRow are used, kept for reference
BasicElement(null, "")
case _: GenericInternalRow =>
BasicElement(null, "")
case _: InternalRow =>
BasicElement(null, "")
case _: Any =>
BasicElement(null, "")
}
}
override def userClass: Class[Element] = classOf[Element]
override def equals (o: Any): Boolean =
{
o match
{
case _: ElementUDT => true
case _ => false
}
}
override def hashCode (): Int = classOf[ElementUDT].getName.hashCode()
override def typeName: String = "element"
override def asNullable: ElementUDT = this
}
object ElementRegistration
{
def register (): Unit =
{
if (!UDTRegistration.exists(classOf[Element].getName))
UDTRegistration.register(classOf[Element].getName, classOf[ElementUDT].getName)
}
}
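// A minimal usage sketch (added for illustration; not part of the original file): the
// intent is that register() runs once, before Element values are used in DataFrames,
// so that Spark resolves Element to this ElementUDT.
object ElementRegistrationExample
{
    def prepare (): Unit =
        // idempotent: register() itself checks UDTRegistration.exists before registering
        ElementRegistration.register ()
}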
|
derrickoswald/CIMScala
|
CIMReader/src/main/scala/org/apache/spark/sql/types/ElementUDT.scala
|
Scala
|
mit
| 3,447
|
package archery
import scala.collection.mutable.{ArrayBuffer, PriorityQueue}
import scala.math.{min, max}
import scala.util.Try
object RTree {
/**
* Construct an empty RTree.
*/
def empty[A]: RTree[A] = new RTree(Node.empty[A], 0)
/**
* Construct an RTree from a sequence of entries.
*/
def apply[A](entries: Entry[A]*): RTree[A] =
entries.foldLeft(RTree.empty[A])(_ insert _)
}
/**
* This is the magnificent RTree, which makes searching ad-hoc
* geographic data fast and fun.
*
* The RTree wraps a node called 'root' that is the actual root of the
* tree structure. RTree also keeps track of the total size of the
* tree (something that individual nodes don't do).
*/
case class RTree[A](root: Node[A], size: Int) {
/**
* Typesafe equality test.
*
* In order to be considered equal, two trees must have the same
* number of entries, and each entry found in one should be found in
* the other.
*/
def ===(that: RTree[A]): Boolean =
size == that.size && entries.forall(that.contains)
/**
* Universal equality test.
*
* Trees can only be equal to other trees. Unlike some other
* containers, the trees must be parameterized on the same type, or
* else the comparison will fail.
*
* This means comparing an RTree[Int] and an RTree[BigInt] will
* always return false.
*/
override def equals(that: Any): Boolean =
that match {
case rt: RTree[_] =>
Try(this === rt.asInstanceOf[RTree[A]]).getOrElse(false)
case _ =>
false
}
/**
* Universal hash code method.
*/
override def hashCode(): Int = {
var x = 0xbadd0995
val it = entries
while (it.hasNext) x ^= (it.next.hashCode * 777 + 1)
x
}
/**
* Insert a value into the tree at (x, y), returning a new tree.
*/
def insert(x: Float, y: Float, value: A): RTree[A] =
insert(Entry(Point(x, y), value))
/**
* Insert an entry into the tree, returning a new tree.
*/
def insert(entry: Entry[A]): RTree[A] = {
val r = root.insert(entry) match {
case Left(rs) => Branch(rs, rs.foldLeft(Box.empty)(_ expand _.box))
case Right(r) => r
}
RTree(r, size + 1)
}
/**
* Insert entries into the tree, returning a new tree.
*/
def insertAll(entries: Iterable[Entry[A]]): RTree[A] =
entries.foldLeft(this)(_ insert _)
/**
* Remove an entry from the tree, returning a new tree.
*
* If the entry was not present, the tree is simply returned.
*/
def remove(entry: Entry[A]): RTree[A] =
root.remove(entry) match {
case None =>
this
case Some((es, None)) =>
es.foldLeft(RTree.empty[A])(_ insert _)
case Some((es, Some(node))) =>
es.foldLeft(RTree(node, size - es.size - 1))(_ insert _)
}
/**
* Remove entries from the tree, returning a new tree.
*/
def removeAll(entries: Iterable[Entry[A]]): RTree[A] =
entries.foldLeft(this)(_ remove _)
/**
* Return a sequence of all entries found in the given search space.
*/
def search(space: Box): Seq[Entry[A]] =
root.search(space, _ => true)
/**
   * Return a sequence of all entries found in the given search space that satisfy the predicate `f`.
*/
def search(space: Box, f: Entry[A] => Boolean): Seq[Entry[A]] =
root.search(space, f)
/**
* Return a sequence of all entries intersecting the given search space.
*/
def searchIntersection(space: Box): Seq[Entry[A]] =
root.searchIntersection(space, _ => true)
/**
   * Return a sequence of all entries intersecting the given search space that satisfy the predicate `f`.
*/
def searchIntersection(space: Box, f: Entry[A] => Boolean): Seq[Entry[A]] =
root.searchIntersection(space, f)
/**
   * Construct a result from an initial value, the entries found in a
* search space, and a binary function `f`.
*
* rtree.foldSearch(space, init)(f)
*
* is equivalent to (but more efficient than):
*
* rtree.search(space).foldLeft(init)(f)
*/
def foldSearch[B](space: Box, init: B)(f: (B, Entry[A]) => B): B =
root.foldSearch(space, init)(f)
/**
   * Return the entry nearest to the given point, if the tree is non-empty.
*/
def nearest(pt: Point): Option[Entry[A]] =
root.nearest(pt, Float.PositiveInfinity).map(_._2)
/**
   * Return up to k entries nearest to the given point, ordered from nearest to farthest.
*/
def nearestK(pt: Point, k: Int): IndexedSeq[Entry[A]] =
if (k < 1) {
Vector.empty
} else {
implicit val ord = Ordering.by[(Double, Entry[A]), Double](_._1)
val pq = PriorityQueue.empty[(Double, Entry[A])]
root.nearestK(pt, k, Double.PositiveInfinity, pq)
val arr = new Array[Entry[A]](pq.size)
var i = arr.length - 1
while (i >= 0) {
val (_, e) = pq.dequeue
arr(i) = e
i -= 1
}
arr
}
/**
* Return a count of all entries found in the given search space.
*/
def count(space: Box): Int =
root.count(space)
/**
* Return whether or not the value exists in the tree at (x, y).
*/
def contains(x: Float, y: Float, value: A): Boolean =
root.contains(Entry(Point(x, y), value))
/**
* Return whether or not the given entry exists in the tree.
*/
def contains(entry: Entry[A]): Boolean =
root.contains(entry)
/**
* Map the entry values from A to B.
*/
def map[B](f: A => B): RTree[B] =
RTree(root.map(f), size)
/**
* Return an iterator over all entries in the tree.
*/
def entries: Iterator[Entry[A]] =
root.iterator
/**
* Return an iterator over all values in the tree.
*/
def values: Iterator[A] =
entries.map(_.value)
/**
* Return a nice depiction of the tree.
*
* This method should only be called on small-ish trees! It will
* print one line for every branch, leaf, and entry, so for a tree
* with thousands of entries this will result in a very large
* string!
*/
def pretty: String = root.pretty
override def toString: String =
s"RTree(<$size entries>)"
}
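/**
 * A small usage sketch (added for illustration; not part of the original source).
 * It only uses constructors visible in this package: Point, Entry and the RTree
 * companion. The coordinates and payload strings are arbitrary.
 */
object RTreeExample {
  def example(): Unit = {
    val tree: RTree[String] = RTree(
      Entry(Point(1.0F, 1.0F), "a"),
      Entry(Point(5.0F, 5.0F), "b"))
    // insert is persistent: it returns a new tree and leaves `tree` unchanged
    val bigger: RTree[String] = tree.insert(9.0F, 9.0F, "c")
    // nearest returns the closest entry (None only for an empty tree)
    val closest: Option[Entry[String]] = bigger.nearest(Point(2.0F, 2.0F))
    // nearestK returns up to k entries, ordered from nearest to farthest
    val twoNearest: IndexedSeq[Entry[String]] = bigger.nearestK(Point(2.0F, 2.0F), 2)
    println((closest, twoNearest, bigger.size))
  }
}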
|
arunma/archery
|
core/src/main/scala/archery/RTree.scala
|
Scala
|
mit
| 6,005
|
package net.hearthstats.ui
import net.hearthstats.core.HeroClass
import net.hearthstats.core.HearthstoneMatch
trait HearthstatsPresenter {
def setOpponentClass(heroClass: HeroClass): Unit
def setYourClass(heroClass: HeroClass): Unit
def setOpponentName(n: String): Unit
def setCoin(coin: Boolean): Unit
def matchSubmitted(m: HearthstoneMatch, description: String): Unit
}
|
HearthStats/HearthStats.net-Uploader
|
companion/src/main/scala/net/hearthstats/ui/HearthstatsPresenter.scala
|
Scala
|
bsd-3-clause
| 383
|
package com.twitter.finagle.thrift
import com.twitter.finagle.Service
import com.twitter.util.{Return, Throw, Promise, Time}
import org.apache.thrift.protocol.{TBinaryProtocol, TMessage, TMessageType}
import org.apache.thrift.transport.TMemoryBuffer
import org.junit.runner.RunWith
import org.mockito.{Matchers, ArgumentCaptor}
import org.mockito.Mockito.{verify, when}
import org.scalatest.{OneInstancePerTest, FunSuite}
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
@RunWith(classOf[JUnitRunner])
class SeqIdFilterTest extends FunSuite with MockitoSugar with OneInstancePerTest {
val protocolFactory = new TBinaryProtocol.Factory()
def mkmsg(tmsg: TMessage, strictWrite: Boolean) = {
val trans = new TMemoryBuffer(24)
val oprot = (new TBinaryProtocol.Factory(false, strictWrite)).getProtocol(trans)
oprot.writeMessageBegin(tmsg)
oprot.writeMessageEnd()
trans.getArray()
}
def getmsg(buf: Array[Byte]) =
new InputBuffer(buf, protocolFactory)().readMessageBegin
for (seqId <- Seq(0, 1, -1, 123, -123, Int.MaxValue, Int.MinValue)) {
testFilter("strict(%d)".format(seqId), seqId, mkmsg(_, true))
testFilter("nonstrict(%s)".format(seqId), seqId, mkmsg(_, false))
}
def testFilter(how: String, seqId: Int, mkmsg: TMessage => Array[Byte]) {
val service = mock[Service[ThriftClientRequest, Array[Byte]]]
val p = new Promise[Array[Byte]]
when(service(Matchers.any[ThriftClientRequest])).thenReturn(p)
val filter = new SeqIdFilter
val filtered = filter andThen service
test("SeqIdFilter(%s) maintain seqids passed in by the client".format(how)) {
val f = filtered(new ThriftClientRequest(mkmsg(new TMessage("proc", TMessageType.CALL, seqId)), false))
assert(f.poll == None)
val req = ArgumentCaptor.forClass(classOf[ThriftClientRequest])
verify(service).apply(req.capture)
p.setValue(mkmsg(new TMessage("proc", TMessageType.REPLY, getmsg(req.getValue.message).seqid)))
f.poll match {
case Some(Return(buf)) => assert(getmsg(buf).seqid == seqId)
case _ => fail()
}
}
test("SeqIdFilter(%s) use its own seqids to the server".format(how)) {Time.withCurrentTimeFrozen { _ =>
val filtered = new SeqIdFilter andThen service
val expected = (new scala.util.Random(Time.now.inMilliseconds)).nextInt()
val f = filtered(new ThriftClientRequest(mkmsg(new TMessage("proc", TMessageType.CALL, seqId)), false))
val req = ArgumentCaptor.forClass(classOf[ThriftClientRequest])
verify(service).apply(req.capture)
assert(getmsg(req.getValue.message).seqid == expected)
}}
test("SeqIdFilter(%s) fail when sequence ids are out of order".format(how)) { Time.withCurrentTimeFrozen { _ =>
val filtered = new SeqIdFilter andThen service
val expected = (new scala.util.Random(Time.now.inMilliseconds)).nextInt()
val f = filtered(new ThriftClientRequest(mkmsg(new TMessage("proc", TMessageType.CALL, seqId)), false))
p.setValue(mkmsg(new TMessage("proc", TMessageType.REPLY, 1111)))
assert(f.poll match {
case Some(Throw(SeqMismatchException(1111, expected))) => true
case _ => false
})
}}
def mustExcept(bytes: Array[Byte], exceptionMsg: String) {
filtered(new ThriftClientRequest(bytes, false)).poll match {
case Some(Throw(exc: IllegalArgumentException)) => assert(exc.getMessage == exceptionMsg)
case _ => fail()
}
}
test("SeqIdFilter(%s) must not modify the underlying request buffer".format(how)) {
val reqBuf = mkmsg(new TMessage("proc", TMessageType.CALL, 0))
val origBuf = reqBuf.clone()
filtered(new ThriftClientRequest(reqBuf, false))
verify(service).apply(Matchers.any[ThriftClientRequest])
assert(reqBuf.toSeq == origBuf.toSeq)
}
test("SeqIdFilter(%s) handle empty TMessage".format(how)) {
mustExcept(Array(), "short header")
}
test("SeqIdFilter(%s) handle short name size".format(how)) {
mustExcept(Array(-128, 1, 0, 0, 0, 0, 0), "short name size")
}
test("SeqIdFilter(%s) handle old short buffer".format(how)) {
mustExcept(Array(0, 0, 0, 1, 0, 0, 0, 0, 1), "short buffer")
}
}
}
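// A composition sketch (added for illustration; not part of the original test): in client
// code the filter is stacked in front of a raw thrift transport service, so callers keep
// the sequence ids they supplied while the wire uses ids chosen by the filter. The
// `rawThriftService` parameter is a hypothetical placeholder.
object SeqIdFilterUsageSketch {
  def withManagedSeqIds(
    rawThriftService: Service[ThriftClientRequest, Array[Byte]]
  ): Service[ThriftClientRequest, Array[Byte]] =
    new SeqIdFilter andThen rawThriftService
}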
|
koshelev/finagle
|
finagle-thrift/src/test/scala/com/twitter/finagle/thrift/SeqIdFilterTest.scala
|
Scala
|
apache-2.0
| 4,262
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.image
import java.awt.Color
import java.awt.color.ColorSpace
import java.io.ByteArrayInputStream
import javax.imageio.ImageIO
import scala.collection.JavaConverters._
import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.input.PortableDataStream
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.sql.types._
/**
* :: Experimental ::
* Defines the image schema and methods to read and manipulate images.
*/
@Experimental
@Since("2.3.0")
object ImageSchema {
val undefinedImageType = "Undefined"
/**
* (Scala-specific) OpenCV type mapping supported
*/
val ocvTypes: Map[String, Int] = Map(
undefinedImageType -> -1,
"CV_8U" -> 0, "CV_8UC1" -> 0, "CV_8UC3" -> 16, "CV_8UC4" -> 24
)
/**
* (Java-specific) OpenCV type mapping supported
*/
val javaOcvTypes: java.util.Map[String, Int] = ocvTypes.asJava
/**
* Schema for the image column: Row(String, Int, Int, Int, Int, Array[Byte])
*/
val columnSchema = StructType(
StructField("origin", StringType, true) ::
StructField("height", IntegerType, false) ::
StructField("width", IntegerType, false) ::
StructField("nChannels", IntegerType, false) ::
// OpenCV-compatible type: CV_8UC3 in most cases
StructField("mode", IntegerType, false) ::
// Bytes in OpenCV-compatible order: row-wise BGR in most cases
StructField("data", BinaryType, false) :: Nil)
val imageFields: Array[String] = columnSchema.fieldNames
/**
* DataFrame with a single column of images named "image" (nullable)
*/
val imageSchema = StructType(StructField("image", columnSchema, true) :: Nil)
/**
* Gets the origin of the image
*
* @return The origin of the image
*/
def getOrigin(row: Row): String = row.getString(0)
/**
* Gets the height of the image
*
* @return The height of the image
*/
def getHeight(row: Row): Int = row.getInt(1)
/**
* Gets the width of the image
*
* @return The width of the image
*/
def getWidth(row: Row): Int = row.getInt(2)
/**
* Gets the number of channels in the image
*
* @return The number of channels in the image
*/
def getNChannels(row: Row): Int = row.getInt(3)
/**
* Gets the OpenCV representation as an int
*
* @return The OpenCV representation as an int
*/
def getMode(row: Row): Int = row.getInt(4)
/**
* Gets the image data
*
* @return The image data
*/
def getData(row: Row): Array[Byte] = row.getAs[Array[Byte]](5)
/**
* Default values for the invalid image
*
* @param origin Origin of the invalid image
* @return Row with the default values
*/
private[spark] def invalidImageRow(origin: String): Row =
Row(Row(origin, -1, -1, -1, ocvTypes(undefinedImageType), Array.ofDim[Byte](0)))
/**
* Convert the compressed image (jpeg, png, etc.) into OpenCV
* representation and store it in DataFrame Row
*
* @param origin Arbitrary string that identifies the image
* @param bytes Image bytes (for example, jpeg)
* @return DataFrame Row or None (if the decompression fails)
*/
private[spark] def decode(origin: String, bytes: Array[Byte]): Option[Row] = {
val img = try {
ImageIO.read(new ByteArrayInputStream(bytes))
} catch {
      // Catch runtime exceptions because `ImageIO` may throw an unexpected `RuntimeException`.
// But do not catch the declared `IOException` (regarded as FileSystem failure)
case _: RuntimeException => null
}
if (img == null) {
None
} else {
val isGray = img.getColorModel.getColorSpace.getType == ColorSpace.TYPE_GRAY
val hasAlpha = img.getColorModel.hasAlpha
val height = img.getHeight
val width = img.getWidth
val (nChannels, mode) = if (isGray) {
(1, ocvTypes("CV_8UC1"))
} else if (hasAlpha) {
(4, ocvTypes("CV_8UC4"))
} else {
(3, ocvTypes("CV_8UC3"))
}
val imageSize = height * width * nChannels
assert(imageSize < 1e9, "image is too large")
val decoded = Array.ofDim[Byte](imageSize)
// Grayscale images in Java require special handling to get the correct intensity
if (isGray) {
var offset = 0
val raster = img.getRaster
for (h <- 0 until height) {
for (w <- 0 until width) {
decoded(offset) = raster.getSample(w, h, 0).toByte
offset += 1
}
}
} else {
var offset = 0
for (h <- 0 until height) {
for (w <- 0 until width) {
val color = new Color(img.getRGB(w, h), hasAlpha)
decoded(offset) = color.getBlue.toByte
decoded(offset + 1) = color.getGreen.toByte
decoded(offset + 2) = color.getRed.toByte
if (hasAlpha) {
decoded(offset + 3) = color.getAlpha.toByte
}
offset += nChannels
}
}
}
// the internal "Row" is needed, because the image is a single DataFrame column
Some(Row(Row(origin, height, width, nChannels, mode, decoded)))
}
}
/**
* Read the directory of images from the local or remote source
*
* @note If multiple jobs are run in parallel with different sampleRatio or recursive flag,
* there may be a race condition where one job overwrites the hadoop configs of another.
* @note If sample ratio is less than 1, sampling uses a PathFilter that is efficient but
* potentially non-deterministic.
*
* @param path Path to the image directory
* @return DataFrame with a single column "image" of images;
* see ImageSchema for the details
*/
@deprecated("use `spark.read.format(\"image\").load(path)` and this `readImages` will be " +
"removed in 3.0.0.", "2.4.0")
def readImages(path: String): DataFrame = readImages(path, null, false, -1, false, 1.0, 0)
/**
* Read the directory of images from the local or remote source
*
* @note If multiple jobs are run in parallel with different sampleRatio or recursive flag,
* there may be a race condition where one job overwrites the hadoop configs of another.
* @note If sample ratio is less than 1, sampling uses a PathFilter that is efficient but
* potentially non-deterministic.
*
* @param path Path to the image directory
* @param sparkSession Spark Session, if omitted gets or creates the session
* @param recursive Recursive path search flag
* @param numPartitions Number of the DataFrame partitions,
* if omitted uses defaultParallelism instead
* @param dropImageFailures Drop the files that are not valid images from the result
   * @param sampleRatio Fraction of the files loaded
   * @param seed Random number seed used by the sampling path filter
* @return DataFrame with a single column "image" of images;
* see ImageSchema for the details
*/
@deprecated("use `spark.read.format(\"image\").load(path)` and this `readImages` will be " +
"removed in 3.0.0.", "2.4.0")
def readImages(
path: String,
sparkSession: SparkSession,
recursive: Boolean,
numPartitions: Int,
dropImageFailures: Boolean,
sampleRatio: Double,
seed: Long): DataFrame = {
require(sampleRatio <= 1.0 && sampleRatio >= 0, "sampleRatio should be between 0 and 1")
val session = if (sparkSession != null) sparkSession else SparkSession.builder().getOrCreate
val partitions =
if (numPartitions > 0) {
numPartitions
} else {
session.sparkContext.defaultParallelism
}
RecursiveFlag.withRecursiveFlag(recursive, session) {
SamplePathFilter.withPathFilter(sampleRatio, session, seed) {
val binResult = session.sparkContext.binaryFiles(path, partitions)
val streams = if (numPartitions == -1) binResult else binResult.repartition(partitions)
val convert = (origin: String, bytes: PortableDataStream) =>
decode(origin, bytes.toArray())
val images = if (dropImageFailures) {
streams.flatMap { case (origin, bytes) => convert(origin, bytes) }
} else {
streams.map { case (origin, bytes) =>
convert(origin, bytes).getOrElse(invalidImageRow(origin))
}
}
session.createDataFrame(images, imageSchema)
}
}
}
}
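// A brief usage sketch (added for illustration; not part of the original file). It relies
// only on the deprecated readImages API and the row accessors defined above; the directory
// path is a hypothetical placeholder.
private[image] object ImageSchemaUsageSketch {
  def summarize(): Unit = {
    val df: DataFrame = ImageSchema.readImages("/tmp/images")
    df.select("image").collect().foreach { outer =>
      // the image struct is the single nested Row inside the "image" column
      val image: Row = outer.getStruct(0)
      println(s"${ImageSchema.getOrigin(image)}: " +
        s"${ImageSchema.getWidth(image)}x${ImageSchema.getHeight(image)} " +
        s"(${ImageSchema.getNChannels(image)} channels, mode ${ImageSchema.getMode(image)})")
    }
  }
}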
|
aosagie/spark
|
mllib/src/main/scala/org/apache/spark/ml/image/ImageSchema.scala
|
Scala
|
apache-2.0
| 9,131
|
package org.bitcoins.protocol.rpc.bitcoincore.blockchain.softforks
/**
* Created by Tom on 1/11/2016.
*/
trait SoftForks {
def id : String
def version : Int
def enforce : EnforcementProgress
def reject : RejectionProgress
}
case class SoftForksImpl(id : String, version : Int, enforce : EnforcementProgress,
reject : RejectionProgress) extends SoftForks
|
Christewart/scalacoin
|
src/main/scala/org/bitcoins/protocol/rpc/bitcoincore/blockchain/softforks/SoftForks.scala
|
Scala
|
mit
| 392
|
package sttp.client3.okhttp
import sttp.capabilities.WebSockets
import sttp.client3._
import sttp.client3.testing.ConvertToFuture
import sttp.client3.testing.websocket.{WebSocketBufferOverflowTest, WebSocketConcurrentTest, WebSocketTest}
import sttp.monad.{FutureMonad, MonadError}
import scala.concurrent.duration._
import scala.concurrent.{Future, blocking}
class OkHttpFutureWebsocketTest
extends WebSocketTest[Future]
with WebSocketBufferOverflowTest[Future]
with WebSocketConcurrentTest[Future] {
override val backend: SttpBackend[Future, WebSockets] = OkHttpFutureBackend()
override implicit val convertToFuture: ConvertToFuture[Future] = ConvertToFuture.future
override implicit val monad: MonadError[Future] = new FutureMonad()
override def throwsWhenNotAWebSocket: Boolean = true
override def bufferCapacity: Int = OkHttpBackend.DefaultWebSocketBufferCapacity.get
override def eventually[T](interval: FiniteDuration, attempts: Int)(f: => Future[T]): Future[T] = {
Future(blocking(Thread.sleep(interval.toMillis))).flatMap(_ => f).recoverWith {
case _ if attempts > 0 => eventually(interval, attempts - 1)(f)
}
}
override def concurrently[T](fs: List[() => Future[T]]): Future[List[T]] = Future.sequence(fs.map(_()))
}
|
softwaremill/sttp
|
okhttp-backend/src/test/scala/sttp/client3/okhttp/OkHttpFutureWebsocketTest.scala
|
Scala
|
apache-2.0
| 1,275
|
package pl.msitko.xml.bench
import pl.msitko.xml.parsing.XmlParser
import pl.msitko.xml.printing.XmlPrinter
object SmallRoundtripLens extends SmallRoundtrip {
override def roundtrip(input: String): String = {
val parsed = XmlParser.parse(input).right.get
XmlPrinter.print(parsed)
}
}
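// A one-line usage sketch (added for illustration; not part of the original file):
// parse a small document and print it back out.
object SmallRoundtripLensExample {
  def example(): String = SmallRoundtripLens.roundtrip("<a><b>hello</b></a>")
}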
|
note/xml-lens
|
bench/src/main/scala/pl/msitko/xml/bench/SmallRoundtripLens.scala
|
Scala
|
mit
| 298
|
/*
Copyright 2016-17, Hasso-Plattner-Institut fuer Softwaresystemtechnik GmbH
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package de.hpi.ingestion.textmining.models
/**
 * An Alias appearing in Wikipedia, containing data about the pages it points to and how often it appears.
*
* @param alias the alias as it appears in the readable text
* @param pages all Wikipedia pages this alias points to and how often it does
* @param pagesreduced all Wikipedia pages this alias points to and how often it does
* @param linkoccurrences in how many articles this alias appears as link
* @param totaloccurrences in how many articles this alias appears in the plain text
*/
case class Alias(
alias: String,
pages: Map[String, Int] = Map(),
pagesreduced: Map[String, Int] = Map(),
var linkoccurrences: Option[Int] = None,
var totaloccurrences: Option[Int] = None
)
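// A construction sketch (added for illustration; not part of the original file). The
// alias text and page title are made up; the counts follow the field descriptions above.
object AliasExample {
  val example: Alias = Alias(
    alias = "Obama",
    pages = Map("Barack Obama" -> 10),
    linkoccurrences = Some(10),
    totaloccurrences = Some(12))
}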
|
bpn1/ingestion
|
src/main/scala/de/hpi/ingestion/textmining/models/Alias.scala
|
Scala
|
apache-2.0
| 1,398
|