code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
package com.lightning.walletapp.test import java.net.{Inet4Address, Inet6Address, InetAddress} import com.google.common.net.InetAddresses import com.lightning.walletapp.ln.Announcements import com.lightning.walletapp.ln.crypto.{Hmac256, Sphinx} import com.lightning.walletapp.ln.wire._ import com.lightning.walletapp.ln.wire.LightningMessageCodecs._ import scodec.bits.{BitVector, ByteVector} import fr.acinq.bitcoin.{Block, Crypto, Protocol} import fr.acinq.bitcoin.Crypto.{PrivateKey, PublicKey, Scalar} import fr.acinq.eclair.UInt64 import scala.util.Random class WireSpec { def randomKey: PrivateKey = PrivateKey({ val bin = Array.fill[Byte](32)(0) Random.nextBytes(bin) ByteVector.view(bin) }, compressed = true) def randomBytes(size: Int) = { val bin = new Array[Byte](size) Random.nextBytes(bin) ByteVector.view(bin) } def randomSignature: ByteVector = { val priv = randomBytes(32) val data = randomBytes(32) val (r, s) = Crypto.sign(data, PrivateKey(priv, compressed = true)) Crypto.encodeSignature(r, s) :+ fr.acinq.bitcoin.SIGHASH_ALL.toByte } def allTests = { def bin(size: Int, fill: Byte) = ByteVector.view(Array.fill[Byte](size)(fill)) def scalar(fill: Byte) = Scalar(bin(32, fill)) def point(fill: Byte) = Scalar(bin(32, fill)).toPoint def publicKey(fill: Byte) = PrivateKey(bin(32, fill), compressed = true).publicKey { println("encode/decode all kind of IPv6 addresses with ipv6address codec") { // IPv4 mapped val bin = BitVector.fromValidHex("00000000000000000000ffffae8a0b08") val ipv6 = Inet6Address.getByAddress(null, bin.toByteArray, null) val bin2 = ipv6address.encode(ipv6).require assert(bin == bin2) } { // regular IPv6 address val ipv6 = InetAddresses.forString("1080:0:0:0:8:800:200C:417A").asInstanceOf[Inet6Address] val bin = ipv6address.encode(ipv6).require val ipv62 = ipv6address.decode(bin).require.value assert(ipv6 == ipv62) } } { println("encode/decode with rgb codec") val color = (47.toByte, 255.toByte, 142.toByte) val bin = rgb.encode(color).toOption.get 
assert(bin == BitVector.fromValidHex("2f ff 8e")) val color2 = rgb.decode(bin).toOption.get.value assert(color == color2) } { println("encode/decode all kind of IPv6 addresses with ipv6address codec") { // IPv4 mapped val bin = BitVector.fromValidHex("00000000000000000000ffffae8a0b08") val ipv6 = Inet6Address.getByAddress(null, bin.toByteArray, null) val bin2 = ipv6address.encode(ipv6).require assert(bin == bin2) } { // regular IPv6 address val ipv6 = InetAddresses.forString("1080:0:0:0:8:800:200C:417A").asInstanceOf[Inet6Address] val bin = ipv6address.encode(ipv6).require val ipv62 = ipv6address.decode(bin).require.value assert(ipv6 == ipv62) } } { println("encode/decode with nodeaddress codec") { val ipv4addr = InetAddress.getByAddress(Array[Byte](192.toByte, 168.toByte, 1.toByte, 42.toByte)).asInstanceOf[Inet4Address] val nodeaddr = IPv4(ipv4addr, 4231) val bin = nodeaddress.encode(nodeaddr).require assert(bin == BitVector.fromValidHex("01 C0 A8 01 2A 10 87")) val nodeaddr2 = nodeaddress.decode(bin).require.value assert(nodeaddr == nodeaddr2) } { val ipv6addr = InetAddress.getByAddress(ByteVector.fromValidHex("2001 0db8 0000 85a3 0000 0000 ac1f 8001").toArray).asInstanceOf[Inet6Address] val nodeaddr = IPv6(ipv6addr, 4231) val bin = nodeaddress.encode(nodeaddr).require assert(bin == BitVector.fromValidHex("02 2001 0db8 0000 85a3 0000 0000 ac1f 8001 1087")) val nodeaddr2 = nodeaddress.decode(bin).require.value assert(nodeaddr == nodeaddr2) } } { println("encode/decode with signature codec") val sig = randomSignature val wire = LightningMessageCodecs.signature.encode(sig).toOption.get val sig1 = LightningMessageCodecs.signature.decode(wire).toOption.get.value assert(sig1 == sig) } { println("encode/decode with scalar codec") val value = Scalar(randomBytes(32)) val wire = LightningMessageCodecs.scalar.encode(value).toOption.get assert(wire.length == 256) val value1 = LightningMessageCodecs.scalar.decode(wire).toOption.get.value assert(value1 == value) } { 
println("encode/decode with point codec") val value = Scalar(randomBytes(32)).toPoint val wire = LightningMessageCodecs.point.encode(value).toOption.get assert(wire.length == 33 * 8) val value1 = LightningMessageCodecs.point.decode(wire).toOption.get.value assert(value1 == value) } { println("encode/decode with public key codec") val value = PrivateKey(randomBytes(32), compressed = true).publicKey val wire = LightningMessageCodecs.publicKey.encode(value).toOption.get assert(wire.length == 33 * 8) val value1 = LightningMessageCodecs.publicKey.decode(wire).toOption.get.value assert(value1 == value) } { println("encode/decode with zeropaddedstring codec") val c = zeropaddedstring { val alias = "IRATEMONK" val bin = c.encode(alias).toOption.get assert(bin == BitVector(alias.getBytes("UTF-8") ++ Array.fill[Byte](32 - alias.length)(0))) val alias2 = c.decode(bin).toOption.get.value assert(alias == alias2) } { val alias = "this-alias-is-exactly-32-B-long." val bin = c.encode(alias).toOption.get assert(bin == BitVector(alias.getBytes("UTF-8") ++ Array.fill[Byte](32 - alias.length)(0))) val alias2 = c.decode(bin).toOption.get.value assert(alias == alias2) } { val alias = "this-alias-is-far-too-long-because-we-are-limited-to-32-bytes" assert(c.encode(alias).isFailure) } } { println("encode/decode with uint64 codec") val expected = Map( UInt64(0) -> ByteVector.fromValidHex("0000000000000000"), UInt64(42) -> ByteVector.fromValidHex("000000000000002a"), UInt64(6211610197754262546L) -> ByteVector.fromValidHex("5634129078563412"), UInt64(ByteVector.fromValidHex("ffffffffffffffff")) -> ByteVector.fromValidHex("ffffffffffffffff") ).mapValues(_.toBitVector) for ((uint, ref) <- expected) { val encoded = uint64.encode(uint).require assert(ref == encoded) val decoded = uint64.decode(encoded).require.value assert(uint == decoded) } } { println("encode/decode with varint codec") val expected = Map( UInt64(0L) -> ByteVector.fromValidHex("00"), UInt64(42L) -> ByteVector.fromValidHex("2a"), 
UInt64(253L) -> ByteVector.fromValidHex("fd 00 fd"), UInt64(254L) -> ByteVector.fromValidHex("fd 00 fe"), UInt64(255L) -> ByteVector.fromValidHex("fd 00 ff"), UInt64(550L) -> ByteVector.fromValidHex("fd 02 26"), UInt64(998000L) -> ByteVector.fromValidHex("fe 00 0f 3a 70"), UInt64(1311768467284833366L) -> ByteVector.fromValidHex("ff 12 34 56 78 90 12 34 56"), UInt64.MaxValue -> ByteVector.fromValidHex("ff ff ff ff ff ff ff ff ff") ).mapValues(_.toBitVector) for ((uint, ref) <- expected) { val encoded = varint.encode(uint).require assert(ref == encoded, ref) val decoded = varint.decode(encoded).require.value assert(uint == decoded, uint) } } { println("decode invalid varint") val testCases = Seq( ByteVector.fromValidHex("fd"), // truncated ByteVector.fromValidHex("fe 01"), // truncated ByteVector.fromValidHex("fe"), // truncated ByteVector.fromValidHex("fe 12 34"), // truncated ByteVector.fromValidHex("ff"), // truncated ByteVector.fromValidHex("ff 12 34 56 78"), // truncated ByteVector.fromValidHex("fd 00 00"), // not minimally-encoded ByteVector.fromValidHex("fd 00 fc"), // not minimally-encoded ByteVector.fromValidHex("fe 00 00 00 00"), // not minimally-encoded ByteVector.fromValidHex("fe 00 00 ff ff"), // not minimally-encoded ByteVector.fromValidHex("ff 00 00 00 00 00 00 00 00"), // not minimally-encoded ByteVector.fromValidHex("ff 00 00 00 00 01 ff ff ff"), // not minimally-encoded ByteVector.fromValidHex("ff 00 00 00 00 ff ff ff ff") // not minimally-encoded ).map(_.toBitVector) for (testCase <- testCases) { assert(varint.decode(testCase).isFailure, testCase.toByteVector) } } { println("encode/decode with varintoverflow codec") val expected = Map( 0L -> ByteVector.fromValidHex("00"), 42L -> ByteVector.fromValidHex("2a"), 253L -> ByteVector.fromValidHex("fd 00 fd"), 254L -> ByteVector.fromValidHex("fd 00 fe"), 255L -> ByteVector.fromValidHex("fd 00 ff"), 550L -> ByteVector.fromValidHex("fd 02 26"), 998000L -> ByteVector.fromValidHex("fe 00 0f 3a 70"), 
1311768467284833366L -> ByteVector.fromValidHex("ff 12 34 56 78 90 12 34 56"), Long.MaxValue -> ByteVector.fromValidHex("ff 7f ff ff ff ff ff ff ff") ).mapValues(_.toBitVector) for ((long, ref) <- expected) { val encoded = varintoverflow.encode(long).require assert(ref == encoded, ref) val decoded = varintoverflow.decode(encoded).require.value assert(long == decoded, long) } } { println("decode invalid varintoverflow") val testCases = Seq( ByteVector.fromValidHex("ff 80 00 00 00 00 00 00 00"), ByteVector.fromValidHex("ff ff ff ff ff ff ff ff ff") ).map(_.toBitVector) for (testCase <- testCases) { assert(varintoverflow.decode(testCase).isFailure, testCase.toByteVector) } } { println("encode/decode UInt64") val refs = Seq( UInt64(ByteVector.fromValidHex("ffffffffffffffff")), UInt64(ByteVector.fromValidHex("fffffffffffffffe")), UInt64(ByteVector.fromValidHex("efffffffffffffff")), UInt64(ByteVector.fromValidHex("effffffffffffffe")) ) assert(refs.forall(value => uint64.decode(uint64.encode(value).require).require.value == value)) } { println("encode/decode with prependmac codec") val mac = Hmac256(Protocol.Zeroes) val testCases = Seq( (uint64, UInt64(561), ByteVector.fromValidHex("d5b500b8843e19a34d8ab54740db76a7ea597e4ff2ada3827420f87c7e60b7c6 0000000000000231")), (varint, UInt64(65535), ByteVector.fromValidHex("71e17e5b97deb6916f7ad97a53650769d4e4f0b1e580ff35ca332200d61e765c fdffff")) ) for ((codec, expected, bin) <- testCases) { val macCodec = prependmac(codec, mac) val decoded = macCodec.decode(bin.toBitVector).require.value assert(decoded == expected) val encoded = macCodec.encode(expected).require.toByteVector assert(encoded == bin) } } { println("encode/decode all channel messages") val open = OpenChannel(randomBytes(32), randomBytes(32), 3, 4, 5, UInt64(6), 7, 8, 9, 10, 11, publicKey(1), point(2), point(3), point(4), point(5), point(6), ChannelFlags(0.toByte)) val accept = AcceptChannel(randomBytes(32), 3, UInt64(4), 5, 6, 7, 8, 9, publicKey(1), point(2), 
point(3), point(4), point(5), point(6)) val funding_created = FundingCreated(randomBytes(32), bin(32, 0), 3, randomSignature) val funding_signed = FundingSigned(randomBytes(32), randomSignature) val funding_locked = FundingLocked(randomBytes(32), point(2)) val update_fee = UpdateFee(randomBytes(32), 2) val shutdown = Shutdown(randomBytes(32), bin(47, 0)) val closing_signed = ClosingSigned(randomBytes(32), 2, randomSignature) val update_add_htlc = UpdateAddHtlc(randomBytes(32), 2, 3, bin(32, 0), 4) val update_fulfill_htlc = UpdateFulfillHtlc(randomBytes(32), 2, bin(32, 0)) val update_fail_htlc = UpdateFailHtlc(randomBytes(32), 2, bin(154, 0)) val update_fail_malformed_htlc = UpdateFailMalformedHtlc(randomBytes(32), 2, randomBytes(32), 1111) val commit_sig = CommitSig(randomBytes(32), randomSignature, randomSignature :: randomSignature :: randomSignature :: Nil) val revoke_and_ack = RevokeAndAck(randomBytes(32), scalar(0), point(1)) val channel_announcement = ChannelAnnouncement(randomSignature, randomSignature, randomSignature, randomSignature, bin(7, 9), Block.RegtestGenesisBlock.hash, 1, randomKey.publicKey, randomKey.publicKey, randomKey.publicKey, randomKey.publicKey) val node_announcement = NodeAnnouncement(randomSignature, bin(1, 2), 1, randomKey.publicKey, (100.toByte, 200.toByte, 300.toByte), "node-alias", IPv4(InetAddress.getByAddress(Array[Byte](192.toByte, 168.toByte, 1.toByte, 42.toByte)).asInstanceOf[Inet4Address], 42000) :: Nil) val channel_update = ChannelUpdate(randomSignature, Block.RegtestGenesisBlock.hash, 1, 2, 42, 0, 3, 4, 5, 6, None) val announcement_signatures = AnnouncementSignatures(randomBytes(32), 42, randomSignature, randomSignature) val ping = Ping(100, ByteVector.fromValidHex("01" * 10)) val pong = Pong(ByteVector.fromValidHex("01" * 10)) val channel_reestablish = ChannelReestablish(randomBytes(32), 242842L, 42L, None, None) val invoke_hosted_channel = InvokeHostedChannel(randomBytes(32), bin(47, 0), bin(112, 0)) val init_hosted_channel 
= InitHostedChannel(UInt64(6), 10, 20, 500000000L, 5000, 1000000, 1000000, ByteVector.empty) val state_override = StateOverride(50000L, 500000, 70000, 700000, randomSignature) val state_update = StateUpdate(50000L, 10, 20, randomSignature, isTerminal = false) val lcss1 = LastCrossSignedState(bin(47, 0), init_hosted_channel, 10000, 10000, 20000, 10, 20, List(update_add_htlc, update_add_htlc, update_add_htlc), List(update_add_htlc, update_add_htlc, update_add_htlc), randomSignature, randomSignature) val lcss2 = LastCrossSignedState(bin(47, 0), init_hosted_channel, 10000, 10000, 20000, 10, 20, Nil, List(update_add_htlc, update_add_htlc), randomSignature, randomSignature) val lcss3 = LastCrossSignedState(bin(47, 0), init_hosted_channel, 10000, 10000, 20000, 10, 20, List(update_add_htlc, update_add_htlc), Nil, randomSignature, randomSignature) val lcss4 = LastCrossSignedState(bin(47, 0), init_hosted_channel, 10000, 10000, 20000, 10, 20, Nil, Nil, randomSignature, randomSignature) val msgs: List[LightningMessage] = open :: accept :: funding_created :: funding_signed :: funding_locked :: update_fee :: shutdown :: closing_signed :: update_add_htlc :: update_fulfill_htlc :: update_fail_htlc :: update_fail_malformed_htlc :: commit_sig :: revoke_and_ack :: channel_announcement :: node_announcement :: channel_update :: announcement_signatures :: ping :: pong :: channel_reestablish :: invoke_hosted_channel :: init_hosted_channel :: state_override :: state_update :: lcss1 :: lcss2 :: lcss3 :: lcss4 :: Nil msgs foreach { msg => val encoded = lightningMessageCodec.encode(msg).require val decoded = lightningMessageCodec.decode(encoded).require assert(msg == decoded.value) } } { println("decode channel_update with htlc_maximum_msat") val bin = 
ByteVector.fromValidHex("010258fff7d0e987e2cdd560e3bb5a046b4efe7b26c969c2f51da1dceec7bcb8ae1b634790503d5290c1a6c51d681cf8f4211d27ed33a257dcc1102862571bf1792306226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f0005a100000200005bc75919010100060000000000000001000000010000000a000000003a699d00") val update = LightningMessageCodecs.lightningMessageCodec.decode(BitVector(bin.toArray)).require.value.asInstanceOf[ChannelUpdate] assert(update == ChannelUpdate(ByteVector.fromValidHex("3044022058fff7d0e987e2cdd560e3bb5a046b4efe7b26c969c2f51da1dceec7bcb8ae1b0220634790503d5290c1a6c51d681cf8f4211d27ed33a257dcc1102862571bf1792301"), ByteVector.fromValidHex("06226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f"), 0x5a10000020000L, 1539791129, 1, 1, 6, 1, 1, 10, Some(980000000L))) val nodeId = PublicKey(ByteVector.fromValidHex("03370c9bac836e557eb4f017fe8f9cc047f44db39c1c4e410ff0f7be142b817ae4")) assert(Announcements.checkSig(update, nodeId)) val bin2 = ByteVector.view(LightningMessageCodecs.lightningMessageCodec.encode(update).require.toByteArray) assert(bin == bin2) } } }
btcontract/lnwallet
app/src/main/java/com/lightning/walletapp/test/WireSpec.scala
Scala
apache-2.0
16,770
package org.jetbrains.plugins.scala.util import com.intellij.psi.util.PsiTreeUtil import com.intellij.psi.{PsiClass, PsiElement, PsiPackage} import org.jetbrains.plugins.scala.extensions._ import org.jetbrains.plugins.scala.lang.psi.api.statements.ScTypeAlias import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject import org.jetbrains.plugins.scala.lang.psi.types.result.TypeResult import org.jetbrains.plugins.scala.lang.psi.types.{ScType, ScTypeExt} /** * @author Alexander Podkhalyuzin */ object ScEquivalenceUtil { def areClassesEquivalent(clazz1: PsiClass, clazz2: PsiClass): Boolean = { if (clazz1 == clazz2) return true if (clazz1.name != clazz2.name) return false val containingClazz1: PsiClass = clazz1.containingClass val containingClass2: PsiClass = clazz2.containingClass if (containingClazz1 != null) { if (containingClass2 != null) { if (!areClassesEquivalent(containingClazz1, containingClass2)) return false } else return false } else if (containingClass2 != null) return false if (clazz1.qualifiedName != clazz2.qualifiedName) return false val isSomeClassLocalOrAnonymous = clazz1.qualifiedName == null || clazz2.qualifiedName == null || (PsiTreeUtil.getContextOfType(clazz1, true, classOf[PsiClass]) != null && clazz1.getContainingClass == null) || (PsiTreeUtil.getContextOfType(clazz2, true, classOf[PsiClass]) != null && clazz2.getContainingClass == null) if (isSomeClassLocalOrAnonymous) return false clazz1 match { case _: ScObject => clazz2.isInstanceOf[ScObject] case _ => !clazz2.isInstanceOf[ScObject] } } def arePackagesEquivalent(p1: PsiPackage, p2: PsiPackage): Boolean = { p1 != null && p2 != null && p1.getManager == p2.getManager && p1.getQualifiedName == p2.getQualifiedName } private def areTypeAliasesEquivalent(ta1: ScTypeAlias, ta2: ScTypeAlias): Boolean = { def equiv(tr1: TypeResult[ScType], tr2: TypeResult[ScType]): Boolean = { if (tr1.isEmpty || tr2.isEmpty) false else tr1.get.equiv(tr2.get) } if (ta1.isExistentialTypeAlias && 
ta2.isExistentialTypeAlias) { equiv(ta1.lowerBound, ta2.lowerBound) && equiv(ta1.upperBound, ta2.upperBound) } else ta1 == ta2 } def smartEquivalence(elem1: PsiElement, elem2: PsiElement): Boolean = { (elem1, elem2) match { case (clazz1: PsiClass, clazz2: PsiClass) => areClassesEquivalent(clazz1, clazz2) case (p1: PsiPackage, p2: PsiPackage) => arePackagesEquivalent(p1, p2) case (ta1: ScTypeAlias, ta2: ScTypeAlias) => areTypeAliasesEquivalent(ta1, ta2) case _ => elem1 == elem2 } } }
ilinum/intellij-scala
src/org/jetbrains/plugins/scala/util/ScEquivalenceUtil.scala
Scala
apache-2.0
2,693
package org.jetbrains.plugins.scala package codeInsight package intention package types import com.intellij.openapi.editor.Editor import com.intellij.psi.PsiElement import org.jetbrains.annotations.Nls import org.jetbrains.plugins.scala.ScalaBundle.message import org.jetbrains.plugins.scala.lang.psi.TypeAdjuster import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScBindingPattern, ScTypedPattern, ScWildcardPattern} import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScExpression, ScUnderscoreSection} import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunctionDefinition, ScPatternDefinition, ScVariableDefinition} import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory import org.jetbrains.plugins.scala.lang.psi.types.api.presentation.ScTypeText import org.jetbrains.plugins.scala.lang.psi.types.{BaseTypes, ScType, ScTypeExt, TypePresentationContext} /** * Author: Svyatoslav Ilinskiy * Date: 22.12.15. 
*/ class MakeTypeMoreSpecificIntention extends AbstractTypeAnnotationIntention { import MakeTypeMoreSpecificIntention._ override def getFamilyName: String = FamilyName override protected def descriptionStrategy: Strategy = new Strategy { override def variableWithType(variable: ScVariableDefinition, typeElement: ScTypeElement): Boolean = setTextIfCanBeMoreSpecific(variable.declaredType, variable.expr, message("make.type.more.specific")) override def valueWithType(value: ScPatternDefinition, typeElement: ScTypeElement): Boolean = setTextIfCanBeMoreSpecific(value.declaredType, value.expr, message("make.type.more.specific")) override def functionWithType(function: ScFunctionDefinition, typeElement: ScTypeElement): Boolean = setTextIfCanBeMoreSpecific(function.returnType.toOption, function.body, message("make.type.more.specific.fun")) private def setTextIfCanBeMoreSpecific(declTypeOpt: Option[ScType], exprOpt: Option[ScExpression], @Nls text: String): Boolean = { if (canBeMoreSpecific(declTypeOpt, exprOpt)) { setText(text) true } else { false } } override def functionWithoutType(function: ScFunctionDefinition): Boolean = false override def valueWithoutType(value: ScPatternDefinition): Boolean = false override def variableWithoutType(variable: ScVariableDefinition): Boolean = false override def patternWithoutType(pattern: ScBindingPattern): Boolean = false override def wildcardPatternWithoutType(pattern: ScWildcardPattern): Boolean = false override def patternWithType(pattern: ScTypedPattern): Boolean = false override def parameterWithoutType(param: ScParameter): Boolean = false override def parameterWithType(param: ScParameter): Boolean = false override def underscoreSectionWithoutType(underscore: ScUnderscoreSection) = false override def underscoreSectionWithType(underscore: ScUnderscoreSection) = false } override protected def invocationStrategy(maybeEditor: Option[Editor]): Strategy = new Strategy { private def doTemplate(te: ScTypeElement, declaredType: ScType, 
dynamicType: ScType, context: PsiElement): Unit = { val types = computeBaseTypes(declaredType, dynamicType).sortWith((t1, t2) => t1.conforms(t2)) if (types.size == 1) { val replaced = te.replace(ScalaPsiElementFactory.createTypeElementFromText(types.head.canonicalText, te.getContext, te)) TypeAdjuster.markToAdjust(replaced) } else { implicit val tpc: TypePresentationContext = TypePresentationContext(context) val texts = types.map(ScTypeText(_)) val expr = new ChooseTypeTextExpression(texts, ScTypeText(declaredType)) startTemplate(te, context.getParent, expr, maybeEditor.get) } } override def functionWithType(function: ScFunctionDefinition, typeElement: ScTypeElement): Boolean = { for { body <- function.body if maybeEditor.isDefined tp <- body.`type`() declared <- typeElement.`type`() } doTemplate(typeElement, declared, tp, function) true } override def valueWithType(value: ScPatternDefinition, typeElement: ScTypeElement): Boolean = { for { body <- value.expr if maybeEditor.isDefined tp <- body.`type`() declared <- typeElement.`type`() } doTemplate(typeElement, declared, tp, value) true } override def variableWithType(variable: ScVariableDefinition, typeElement: ScTypeElement): Boolean = { for { body <- variable.expr if maybeEditor.isDefined tp <- body.`type`() declared <- typeElement.`type`() } doTemplate(typeElement, declared, tp, variable) true } } } object MakeTypeMoreSpecificIntention { private[types] val FamilyName: String = message("make.type.more.specific") private def computeBaseTypes(declaredType: ScType, dynamicType: ScType): Seq[ScType] = { val baseTypes = dynamicType +: BaseTypes.get(dynamicType) baseTypes.filter(_.conforms(declaredType)) .filter(!_.equiv(declaredType)) } private def canBeMoreSpecific(declTypeOpt: Option[ScType], exprOpt: Option[ScExpression]): Boolean = { val baseTypes = for { declared <- declTypeOpt expr <- exprOpt tp <- expr.`type`().toOption } yield computeBaseTypes(declared, tp) baseTypes.exists(_.nonEmpty) } }
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/codeInsight/intention/types/MakeTypeMoreSpecificIntention.scala
Scala
apache-2.0
5,657
package fpinscala.state trait RNG { def nextInt: (Int, RNG) // Should generate a random `Int`. We'll later define other functions in terms of `nextInt`. } object RNG { // NB - this was called SimpleRNG in the book text case class Simple(seed: Long) extends RNG { def nextInt: (Int, RNG) = { val newSeed = (seed * 0x5DEECE66DL + 0xBL) & 0xFFFFFFFFFFFFL // `&` is bitwise AND. We use the current seed to generate a new seed. val nextRNG = Simple(newSeed) // The next state, which is an `RNG` instance created from the new seed. val n = (newSeed >>> 16).toInt // `>>>` is right binary shift with zero fill. The value `n` is our new pseudo-random integer. (n, nextRNG) // The return value is a tuple containing both a pseudo-random integer and the next `RNG` state. } } type Rand[+A] = RNG => (A, RNG) val int: Rand[Int] = _.nextInt def unit[A](a: A): Rand[A] = rng => (a, rng) def map[A,B](s: Rand[A])(f: A => B): Rand[B] = rng => { val (a, rng2) = s(rng) (f(a), rng2) } // helper method, not nested so that it can be tested: def intToNonNegativeInt(i: Int): Int = if (i >= 0) i else -(i + 1) // i.e. 
use 2's complement to handle the situation where i1 = Int.MinValue def nonNegativeInt(rng: RNG): (Int, RNG) = { val (i1, rng2) = rng.nextInt (intToNonNegativeInt(i1), rng2) } def double(rng: RNG): (Double, RNG) = { val (i1, rng1) = nonNegativeInt(rng) val frac = i1 / (Int.MaxValue.toDouble + 1) (frac, rng1) } def intDouble(rng: RNG): ((Int,Double), RNG) = { val (i1, rng1) = rng.nextInt val (d1, rng2) = double(rng1) ((i1, d1), rng2) } def doubleInt(rng: RNG): ((Double,Int), RNG) = { val (d1, rng1) = double(rng) val (i1, rng2) = rng1.nextInt ((d1, i1), rng2) } def double3(rng: RNG): ((Double,Double,Double), RNG) = { val (d1, rng1) = double(rng) val (d2, rng2) = double(rng1) val (d3, rng3) = double(rng2) ((d1, d2, d3), rng3) } def ints(count: Int)(rng: RNG): (List[Int], RNG) = if (count == 0) (List(), rng) else { val (i, r1) = rng.nextInt val (lst, r2) = ints(count - 1)(r1) (i :: lst, r2) } def doubleViaMap(rng: RNG): (Double, RNG) = (map(int) { intToNonNegativeInt(_) / (Int.MaxValue.toDouble + 1) })(rng) def map2[A,B,C](ra: Rand[A], rb: Rand[B])(f: (A, B) => C): Rand[C] = rng => { val (a, rng1) = ra(rng) val (b, rng2) = rb(rng1) val c = f(a, b) (c, rng2) } def sequence[A](fs: List[Rand[A]]): Rand[List[A]] = { fs.foldRight((r: RNG) => (List[A](), r)) { (r: Rand[A], rs: Rand[List[A]]) => map2(r, rs)(_ :: _) } } def intsViaSequence(count: Int)(rng: RNG): (List[Int], RNG) = { (sequence(List.fill(count)(int)))(rng) } // After looking at the model answer, I should have done this rather: def intsViaSequenceAsRand(count: Int): Rand[List[Int]] = (sequence(List.fill(count)(int))) def flatMap[A,B](f: Rand[A])(g: A => Rand[B]): Rand[B] = rng => { val (a, r2) = f(rng) g(a)(r2) } /* Explanation of "i - mod + n - 1" formula below: * * i - mod n is the start of the block of n numbers that i is in. * If the final block fits into the Int range perfectly, then * adding "n-1" allows i to be any item in the block, * * including the final or (n-1)th item, without overflowing. 
* But adding n would cause overflow even if the final block fitted perfectly. * If the final block doesn't fit, then the (n-1)th item will overflow. */ def nonNegativeLessThan(n: Int): Rand[Int] = flatMap(nonNegativeInt) { i: Int => { val mod = i % n if (i - mod + n - 1 < 0) { // try again, using the next random number: nonNegativeLessThan(n) } else { // return the correct value and the current RNG: unit(i % n) } } } def mapUsingFlatMap[A,B](s: Rand[A])(f: A => B): Rand[B] = flatMap(s) { a => unit(f(a)) } def map2UsingFlatMap[A,B,C](ra: Rand[A], rb: Rand[B])(f: (A, B) => C): Rand[C] = flatMap(ra) { a => flatMap(rb) { b => unit(f(a, b)) } } // Checked against model answer. Just use map, not flatMap, in the inner computation // For testing mapUsingFlatMap: def doubleViaMapUsingFlatMap(rng: RNG): (Double, RNG) = (mapUsingFlatMap(int) { intToNonNegativeInt(_) / (Int.MaxValue.toDouble + 1) })(rng) } case class State[S,+A](run: S => (A, S)) { def map[B](f: A => B): State[S, B] = State(s => { val (a, s1) = run(s) (f(a), s1) } ) def map2[B,C](sb: State[S, B])(f: (A, B) => C): State[S, C] = State(s => { val (a, s1) = run(s) val (b, s2) = sb.run(s1) (f(a,b), s2) } ) def flatMap[B](f: A => State[S, B]): State[S, B] = State(s => { val (a, s1) = run(s) f(a).run(s1) } ) } sealed trait Input case object Coin extends Input case object Turn extends Input case class Machine(locked: Boolean, candies: Int, coins: Int) object State { def unit[S, A](a: A): State[S, A] = State(s => (a, s)) def sequence[S, A](states: List[State[S, A]]): State[S, List[A]] = { val ini = unit[S, List[A]](List()) states.foldRight(ini) { (state, lstState) => state.map2(lstState)(_ :: _) } } type Rand[A] = State[RNG, A] // Naive implementation of the vending machine simulator: def simulateMachineNaively(inputs: List[Input]): State[Machine, (Int, Int)] = { State[Machine, (Int, Int)]( pre => { val states = inputs.map(inputToState) val finalMachine = states.foldLeft(pre) { (pre, r) => { val ((_, _), post) = 
r.run(pre) // It feels wrong to generate then ignore the (Int, Int) pairs post } } ((finalMachine.coins, finalMachine.candies), finalMachine) } ) } def inputToState(input: Input): State[Machine, (Int, Int)] = { input match { case Coin => coinToState case Turn => turnToState } } val coinToState = State[Machine, (Int, Int)]{ pre => val post = if (pre.locked && pre.candies > 0) pre.copy(locked = false, coins = pre.coins + 1) else pre ((post.coins, post.candies), post) } val turnToState = State[Machine, (Int, Int)] { pre => val post = if (pre.locked || pre.candies == 0) pre else pre.copy(locked = true, candies = pre.candies - 1) ((post.coins, post.candies), post) } /* One issue with the above approach, is that the (coins, candies) "A value" * is artificially created and then ignored most of the time. */ // In practice, there was no need for modify. Avoid States until the end: def getMachineModifier(input: Input): Machine => Machine = input match { case Coin => coinMachineModifier case Turn => turnMachineModifier } val coinMachineModifier: Machine => Machine = pre => if (pre.locked && pre.candies > 0) pre.copy(locked = false, coins = pre.coins + 1) else pre val turnMachineModifier: Machine => Machine = pre => if (pre.locked || pre.candies == 0) pre else pre.copy(locked = true, candies = pre.candies - 1) def simulateMachineViaModifiers(inputs: List[Input]): State[Machine, (Int, Int)] = { State[Machine, (Int, Int)]( pre => { val modifiers = inputs.map(getMachineModifier(_)) val post = modifiers.foldLeft(pre) { (mach, modif) => modif(mach) } ((post.coins, post.candies ), post) } ) } /* The model answer uses get and modify * i.e. the approach advised in section 6.6. * * It also uses Sequence, but in a kludgy way. * It feels like an alternative to Sequence is needed * which only returns the final value and state e.g. combine. * Re-write the model answer using combine: */ // Name with underscores, otherwise Applicative.scala breaks... 
def _modify[S](f: S => S): State[S, Unit] = for { s <- _get _ <- _set(f(s)) } yield () def _get[S]: State[S, S] = State(s => (s, s)) def _set[S](s: S): State[S, Unit] = State(_ => ((), s)) def combine[S, A](stateActions: List[State[S, A]]): State[S, A] = stateActions.reduce((s1, s2) => s1.map2(s2) { (a1, a2) => a2 }) def modifyStateWithInput(i: Input)(s: Machine): Machine = { (i, s) match { case (_, Machine(_, 0, _)) => s case (Turn, Machine(true, _, _)) => s case (Coin, Machine(false, _, _)) => s case (Turn, Machine(_, cdy, _)) => s.copy(locked=true, candies = cdy - 1) case (Coin, Machine(_, _, cns)) => s.copy(locked=false, coins = cns + 1) } } def simulateMachineViaModify(inputs: List[Input]): State[Machine, (Int, Int)] = { State( pre => { val states = inputs.map(i => _modify(modifyStateWithInput(i))) val postState = combine(states) val (_, post) = postState.run(pre) ((post.coins, post.candies ), post) } ) } val simulateMachine = simulateMachineViaModifiers _ }
AndrewTweddle/fpinscala
exercises/src/main/scala/fpinscala/state/State.scala
Scala
mit
9,250
package com.avsystem.commons package serialization import org.scalatest.funsuite.AnyFunSuite case class CodeSizeTester00( int: Int, string: String, double: Double, map: Map[String, List[Boolean]], people: Set[Person] ) object CodeSizeTester00 { implicit val codec: GenCodec[CodeSizeTester00] = GenCodec.materialize[CodeSizeTester00] } case class CodeSizeTester01( int: Int, string: String, double: Double, map: Map[String, List[Boolean]], people: Set[Person] ) object CodeSizeTester01 { implicit val codec: GenCodec[CodeSizeTester01] = GenCodec.materialize } case class CodeSizeTester02( int: Int, string: String, double: Double, map: Map[String, List[Boolean]], people: Set[Person] ) object CodeSizeTester02 { implicit val codec: GenCodec[CodeSizeTester02] = GenCodec.materialize } case class CodeSizeTester03( int: Int, string: String, double: Double, map: Map[String, List[Boolean]], people: Set[Person] ) object CodeSizeTester03 { implicit val codec: GenCodec[CodeSizeTester03] = GenCodec.materialize } case class CodeSizeTester04( int: Int, string: String, double: Double, map: Map[String, List[Boolean]], people: Set[Person] ) object CodeSizeTester04 { implicit val codec: GenCodec[CodeSizeTester04] = GenCodec.materialize } case class CodeSizeTester05( int: Int, string: String, double: Double, map: Map[String, List[Boolean]], people: Set[Person] ) object CodeSizeTester05 { implicit val codec: GenCodec[CodeSizeTester05] = GenCodec.materialize } case class CodeSizeTester06( int: Int, string: String, double: Double, map: Map[String, List[Boolean]], people: Set[Person] ) object CodeSizeTester06 { implicit val codec: GenCodec[CodeSizeTester06] = GenCodec.materialize } case class CodeSizeTester07( int: Int, string: String, double: Double, map: Map[String, List[Boolean]], people: Set[Person] ) object CodeSizeTester07 { implicit val codec: GenCodec[CodeSizeTester07] = GenCodec.materialize } case class CodeSizeTester08( int: Int, string: String, double: Double, map: Map[String, 
List[Boolean]], people: Set[Person] ) object CodeSizeTester08 { implicit val codec: GenCodec[CodeSizeTester08] = GenCodec.materialize } case class CodeSizeTester09( int: Int, string: String, double: Double, map: Map[String, List[Boolean]], people: Set[Person] ) object CodeSizeTester09 { implicit val codec: GenCodec[CodeSizeTester09] = GenCodec.materialize } case class Person(name: String, birthYear: Int, planet: String = "Earth") object Person { implicit val codec: GenCodec[Person] = GenCodec.materialize } class CodeSizeTester extends AnyFunSuite { ignore("fake test to see how much JS is generated") { println(CodeSizeTester00.codec.write(null, null)) // println(CodeSizeTester01.codec.write(null, null)) println(CodeSizeTester00.codec.read(null)) // println(CodeSizeTester01.codec.read(null)) } }
AVSystem/scala-commons
commons-core/src/test/scala/com/avsystem/commons/serialization/CodeSizeTester.scala
Scala
mit
2,950
package org.openapitools.models import io.circe._ import io.finch.circe._ import io.circe.generic.semiauto._ import io.circe.java8.time._ import org.openapitools._ /** * * @param Underscoreclass * @param expectedBuildNumber * @param id * @param pipeline * @param queuedTime */ case class QueueItemImpl(Underscoreclass: Option[String], expectedBuildNumber: Option[Int], id: Option[String], pipeline: Option[String], queuedTime: Option[Int] ) object QueueItemImpl { /** * Creates the codec for converting QueueItemImpl from and to JSON. */ implicit val decoder: Decoder[QueueItemImpl] = deriveDecoder implicit val encoder: ObjectEncoder[QueueItemImpl] = deriveEncoder }
cliffano/swaggy-jenkins
clients/scala-finch/generated/src/main/scala/org/openapitools/models/QueueItemImpl.scala
Scala
mit
786
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.examples import org.apache.carbondata.core.constants.CarbonCommonConstants import org.apache.carbondata.core.util.CarbonProperties import org.apache.carbondata.examples.util.ExampleUtils object CarbonExample { def main(args: Array[String]) { val cc = ExampleUtils.createCarbonContext("CarbonExample") val testData = ExampleUtils.currentPath + "/src/main/resources/data.csv" // Specify timestamp format based on raw data CarbonProperties.getInstance() .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd") cc.sql("DROP TABLE IF EXISTS t3") // Create table, 6 dimensions, 1 measure cc.sql(""" CREATE TABLE IF NOT EXISTS t3 (ID Int, date Timestamp, country String, name String, phonetype String, serialname char(10), salary Int) STORED BY 'carbondata' """) // Currently there are two data loading flows in CarbonData, one uses Kettle as ETL tool // in each node to do data loading, another uses a multi-thread framework without Kettle (See // AbstractDataLoadProcessorStep) // Load data with Kettle cc.sql(s""" LOAD DATA LOCAL INPATH '$testData' into table t3 """) // Perform a query cc.sql(""" SELECT country, count(salary) AS amount FROM t3 WHERE country IN 
('china','france') GROUP BY country """).show() // Load data without kettle cc.sql(s""" LOAD DATA LOCAL INPATH '$testData' into table t3 OPTIONS('USE_KETTLE'='false') """) // Perform a query cc.sql(""" SELECT country, count(salary) AS amount FROM t3 WHERE country IN ('china','france') GROUP BY country """).show() // Drop table cc.sql("DROP TABLE IF EXISTS t3") } }
JihongMA/incubator-carbondata
examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala
Scala
apache-2.0
2,698
/** * Copyright 2013 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.edda.elasticsearch import com.netflix.edda.Record import com.netflix.edda.RecordSet import com.netflix.edda.Collection import com.netflix.edda.Datastore import com.netflix.edda.Utils import com.netflix.edda.RequestId import org.joda.time.DateTime import java.util.Date import org.slf4j.LoggerFactory import org.elasticsearch.index.query.FilterBuilders import org.elasticsearch.index.query.FilterBuilder import org.elasticsearch.index.query.QueryBuilders import org.elasticsearch.index.query.QueryBuilder import org.elasticsearch.action.search.SearchRequestBuilder import org.elasticsearch.action.WriteConsistencyLevel import org.elasticsearch.action.support.replication.ReplicationType import org.elasticsearch.search.sort.SortOrder import org.elasticsearch.search.SearchHitField import org.elasticsearch.search.facet.FacetBuilders import org.elasticsearch.search.facet.terms.TermsFacet import org.elasticsearch.common.settings.ImmutableSettings import org.elasticsearch.client.Client import org.elasticsearch.node.NodeBuilder import org.elasticsearch.common.settings.Settings import org.elasticsearch.client.transport.TransportClient import org.elasticsearch.common.transport.InetSocketTransportAddress import org.elasticsearch.rest.RestStatus // /** helper object to store common ElasticSearch related routines */ object ElasticSearchDatastore { import org.joda.time.format.ISODateTimeFormat val 
basicDateTime = ISODateTimeFormat.dateTime val basicDateTimeNoMillis = ISODateTimeFormat.dateTimeNoMillis /** converts a ElasticSearch source object to a Record */ def esToRecord(obj: Any): Record = { obj match { case o: java.util.Map[_,_] => Record( Option(o.get("id")).getOrElse(o.get("_id")).asInstanceOf[String], Option(o.get("ftime")) match { case Some(date:String) => basicDateTime.parseDateTime(date) case _ => Option(o.get("ctime")) match { case Some(date:String) => basicDateTime.parseDateTime(date) case _ => null } }, Option(o.get("ctime")) match { case Some(date:String) => basicDateTime.parseDateTime(date) case _ => null }, Option(o.get("stime")) match { case Some(date:String) => basicDateTime.parseDateTime(date) case _ => null }, Option(o.get("ltime")) match { case Some(date: String) => basicDateTime.parseDateTime(date) case _ => null }, Option(o.get("mtime")) match { case Some(date: String) => basicDateTime.parseDateTime(date) case _ => null }, Option(o.get("data")) match { case Some(data) => esToScala(data) case _ => null }, Option(o.get("tags")) match { case Some(tags) => esToScala(tags).asInstanceOf[Map[String,Any]] case _ => Map[String,Any]() } ) case other => throw new java.lang.RuntimeException("cannot turn " + other + " into a Record") } } // map "data.foo" key to data -> Map(foo -> ....) 
def esFieldsFixup(obj: java.util.Map[String,SearchHitField]): java.util.Map[String,AnyRef] = { val newObj = new java.util.HashMap[String,AnyRef]() import collection.JavaConverters._ obj.asScala.foreach(kv => { if( kv._1.contains('.') ) { // should only be 2 parts, parts.head and parts.tail.head // we force only 2 parts when calling addFields when searching, more than // 2 fields we cannot tell what data type the field should be val parts = kv._1.split('.') if( !newObj.containsKey(parts.head) ) { newObj.put(parts.head, new java.util.HashMap[String,AnyRef]) } newObj.get(parts.head).asInstanceOf[java.util.Map[String,AnyRef]].put(parts.tail.head, kv._2.getValue) } else newObj.put(kv._1, kv._2.getValue) }) newObj } private val dateTimeNoMillisRx = """^\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\dZ$""".r private val dateTimeRx = """^\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d(?:[.]\\d\\d?\\d?)Z$""".r private[this] val logger = LoggerFactory.getLogger(getClass) /** converts a ElasticSearch java object to a corresponding Scala basic object */ def esToScala(obj: Any): Any = { import collection.JavaConverters._ obj match { case o: java.util.Map[_,_] => { o.keySet.asScala.map(key => (key.asInstanceOf[String] -> esToScala(o.get(key)))).toMap } case o: java.util.Collection[_] => { List.empty[Any] ++ o.asScala.map(esToScala(_)) } case dateTimeNoMillisRx() => basicDateTimeNoMillis.parseDateTime(obj.asInstanceOf[String]) case dateTimeRx() => basicDateTime.parseDateTime(obj.asInstanceOf[String]) case o: Date => new DateTime(o) case o: AnyRef => o case null => null case other => throw new java.lang.RuntimeException("esToScala: don't know how to handle: " + other) } } def esToJson(rec: Record): String = { Utils.toJson(rec.toMap, Utils.dateFormatter) } /** dispatch the match operator to the correct matching routine. 
*/ protected def esFilterOp(key: String, value: Any, op: String): FilterBuilder = { // $eq $ne $gt $lt $gte $lte $exists $in $nin $regex op match { case "$eq" => Option(value) match { case None => FilterBuilders.missingFilter(key).nullValue(true).existence(true) case Some(value) => FilterBuilders.termFilter(key, value) } case "$ne" => FilterBuilders.notFilter(esFilterOp(key, value, "$eq")) case "$gt" => value match { case v: String => FilterBuilders.rangeFilter(key).from(v).includeLower(false) case _ => FilterBuilders.numericRangeFilter(key).from(value).includeLower(false) } case "$gte" => value match { case v: String => FilterBuilders.rangeFilter(key).from(v).includeLower(true) case _ => FilterBuilders.numericRangeFilter(key).from(value).includeLower(true) } case "$lt" => value match { case v: String => FilterBuilders.rangeFilter(key).to(v).includeUpper(false) case _ => FilterBuilders.numericRangeFilter(key).to(value).includeUpper(false) } case "$lte" => value match { case v: String => FilterBuilders.rangeFilter(key).to(v).includeUpper(true) case _ => FilterBuilders.numericRangeFilter(key).to(value).includeUpper(true) } case "$exists" => FilterBuilders.missingFilter(key).existence(true) case "$in" => value.asInstanceOf[Seq[Any]].head match { case _: String => FilterBuilders.inFilter(key, value.asInstanceOf[Seq[String]]:_*) case _: Long => FilterBuilders.inFilter(key, value.asInstanceOf[Seq[Long]]:_*) case _: Int => FilterBuilders.inFilter(key, value.asInstanceOf[Seq[Int]]:_*) case _: Double => FilterBuilders.inFilter(key, value.asInstanceOf[Seq[Double]]:_*) case _: Float => FilterBuilders.inFilter(key, value.asInstanceOf[Seq[Float]]:_*) case _: AnyRef => FilterBuilders.inFilter(key, value.asInstanceOf[Seq[AnyRef]]:_*) } case "$nin" => FilterBuilders.notFilter(esFilterOp(key, value, "$in")) case "$regex" => throw new java.lang.UnsupportedOperationException("$regex query not supported") case unk => throw new java.lang.RuntimeException("uknown match operation: " + 
unk) } } def esFilter(queryMap: Map[String, Any]): FilterBuilder = { val filters = queryMap.map { // { key: { $op1: val, $op2: val } } ==> case (key: String, value: Map[_, _]) => { if( value.size > 1 ) { FilterBuilders.andFilter( value.asInstanceOf[Map[String,Any]].map( kv => esFilter( Map(key -> (kv._1, kv._2)) )).toSeq:_* ) } else { val kv = value.asInstanceOf[Map[String,Any]].head esFilter( Map(key -> (kv._1, kv._2)) ) } } // { $or: [ {key: value}, {key: value} ] } case ("$or", value: Seq[_]) => { val filters = value.asInstanceOf[Seq[Map[String,Any]]].map(esFilter(_)) FilterBuilders.orFilter(filters:_*) } // { $and: [ {key: value}, {key: value} ] } case ("$and", value: Seq[_]) => { val filters = value.asInstanceOf[Seq[Map[String,Any]]].map(esFilter(_)) FilterBuilders.andFilter(filters:_*) } // { key1: { $op1: val }, key2: { $op2: val } } case (key: String, (op: String, value: Any)) => esFilterOp(key, value, op) case (key: String, value: Any) => esFilterOp(key,value,"$eq") case (key: String, null) => esFilterOp(key,null,"$eq") } toSeq if( queryMap.size > 1 ) { FilterBuilders.andFilter(filters.toSeq:_*) } else filters.head } def esQuery(queryMap: Map[String, Any]): QueryBuilder = { if( queryMap.isEmpty ) QueryBuilders.matchAllQuery else QueryBuilders.constantScoreQuery(esFilter(queryMap)) } var clients: Map[String,Client] = Map(); def initClient(name: String): Client = { this.synchronized { val cluster = Utils.getProperty("edda", "elasticsearch.cluster", name, "elasticsearch").get; val addresses = Utils.getProperty("edda", "elasticsearch.address", name, "127.0.0.1:9300").get; if( ! 
clients.contains(cluster + "-" + addresses) ) { val settings: Settings = ImmutableSettings.settingsBuilder().put("cluster.name", cluster).build() clients += cluster + "-" + addresses -> addresses.split(',').fold(new TransportClient(settings))( (client, addr) => { val parts = addr.asInstanceOf[String].split(':') client.asInstanceOf[TransportClient].addTransportAddress(new InetSocketTransportAddress(parts.head, parts.tail.head.toInt)) } ).asInstanceOf[Client] } clients(cluster + "-" + addresses) } } def createIndex(client: Client, name: String, shards: Int, replicas: Int) { val ixClient = client.admin().indices() if( ! ixClient.prepareExists(name).execute().actionGet().isExists() ) { val settings = ImmutableSettings.settingsBuilder(). put("index.number_of_shards", shards). put("index.number_of_replicas",replicas). build() try { ixClient.prepareCreate(name). setSettings(settings). addMapping("_default_", io.Source.fromInputStream(getClass.getResourceAsStream("/elasticsearch/mappings/_default_.json")).mkString). execute. 
actionGet } catch { case e: org.elasticsearch.indices.IndexAlreadyExistsException => Unit // someone already beat us to it, ignore this } } } } /** [[com.netflix.edda.Datastore]] subclass that allows ElasticSearch to be used * * @param name the name of the collection the datastore is for */ class ElasticSearchDatastore(val name: String) extends Datastore { import Collection.RetentionPolicy._ import ElasticSearchDatastore._ lazy val client = initClient(name) private[this] val logger = LoggerFactory.getLogger(getClass) private val lowerName = name.toLowerCase private val aliasName = lowerName.replaceAll("[.]", "_"); private val liveAliasName = aliasName + "_live" private val writeAliasName = aliasName + "_write" private val docType = lowerName.split('.').takeRight(2).mkString("_") private lazy val monitorIndexName = Utils.getProperty("edda", "monitor.collectionName", "elasticsearch", "sys.monitor").get.replaceAll("[.]","_") private lazy val retentionPolicy = Utils.getProperty("edda.collection", "retentionPolicy", name, "ALL") private val writeConsistencyProp = Utils.getProperty("edda", "elasticsearch.writeConsistency", name, "quorum") private def writeConsistency = WriteConsistencyLevel.fromString( writeConsistencyProp.get ) private val replicationTypeProp = Utils.getProperty("edda", "elasticsearch.replicationType", name, "async") private def replicationType = ReplicationType.fromString( replicationTypeProp.get ) private lazy val scanBatchSize = Utils.getProperty("edda", "elasticsearch.scanBatchSize", name, "1000"); private lazy val scanCursorDuration = Utils.getProperty("edda", "elasticsearch.scanCursorDuration", name, "60000"); private lazy val bulkBatchSize = Utils.getProperty("edda", "elasticsearch.bulkBatchSize", name, "0"); def init() { // we create 1 index for each account. 
We version the index (.1) in case we need // to add other indexes in the future (in case we run out of room with the first // indexes) val nameParts = lowerName.split('.') val indexName = if( nameParts.size == 2 ) "edda_1" else { Utils.getProperty("edda", "elasticsearch.index", name, nameParts.take(nameParts.size - 2).mkString("_") + "_1").get } createIndex( client, indexName, Utils.getProperty("edda", "elasticsearch.shards", name, "15").get.toInt, Utils.getProperty("edda", "elasticsearch.replicas", name, "2").get.toInt ) val ixClient = client.admin().indices() val mapping = io.Source.fromInputStream(getClass.getResourceAsStream("/elasticsearch/mappings/default.json")).mkString ixClient.preparePutMapping(indexName).setType(docType).setSource("{\\""+docType+"\\": " + mapping + "}").setIgnoreConflicts(true).execute.actionGet // put new mapping in case it has changed // make sure collection alias exists if( ! ixClient.prepareExists(aliasName).execute().actionGet().isExists() ) { ixClient.prepareAliases().addAlias(indexName, aliasName, FilterBuilders.typeFilter(docType)).execute.actionGet } // make sure live alias exists if( ! ixClient.prepareExists(liveAliasName).execute().actionGet().isExists() ) { ixClient.prepareAliases().addAlias( indexName, liveAliasName, FilterBuilders.andFilter( FilterBuilders.typeFilter(docType), FilterBuilders.missingFilter("ltime").nullValue(true).existence(true) ) ).execute.actionGet } // make sure the write alias exists if( ! 
ixClient.prepareExists(writeAliasName).execute().actionGet().isExists() ) { ixClient.prepareAliases().addAlias(indexName, writeAliasName).execute.actionGet } } /** perform query on data store, see [[com.netflix.edda.Queryable.query()]] */ def query(queryMap: Map[String, Any], limit: Int, keys: Set[String], replicaOk: Boolean)(implicit req: RequestId): Seq[Record] = { // if query is for "null" ltime, then use the .live index alias val (alias, query) = if( queryMap.contains("ltime") && queryMap("ltime") == null ) { (liveAliasName, queryMap - "ltime") } else { (aliasName, queryMap) } val idQuery = keys.size == 1 && keys.contains("id") val builder = if( idQuery ) { val facet = FacetBuilders.termsFacet("f").field("id").size(1000000).facetFilter(esFilter(query)).order(TermsFacet.ComparatorType.TERM) client.prepareSearch().setIndices(alias).setSize(0).addFacet(facet) } else { client.prepareSearch().setIndices(alias).setQuery(esQuery(query)) } queryMap.get("id") match { case Some(id: String) => builder.setRouting(id) case _ => } if( !replicaOk ) builder.setPreference("_primary") if( limit > 0 || idQuery ) fetch(builder, limit, keys) else scan(builder, keys) } /** load records from data store, used at Collection start-up to prime in-memory cache and to refresh * in-memory cache when we are not the leader * * @param replicaOk specify if we can load from a read-replica in the data store when there are * redundant systems running for high-availability. 
*/ def load(replicaOk: Boolean)(implicit req: RequestId): RecordSet = { val builder = client.prepareSearch().setIndices(liveAliasName) if( !replicaOk ) builder.setPreference("_primary") RecordSet(scan(builder), Map("mtime" -> collectionModified() )) } def fetch(search: SearchRequestBuilder, limit: Int, keys: Set[String])(implicit req: RequestId): Seq[Record] = { import collection.JavaConverters.iterableAsScalaIterableConverter import org.elasticsearch.action.search.SearchResponse import org.elasticsearch.action.search.SearchType val t0 = System.nanoTime() try { val idQuery = keys.size == 1 && keys.contains("id") val builder = search.setTypes(docType).setSearchType(SearchType.DFS_QUERY_THEN_FETCH); if( !idQuery ) { builder.addSort("stime", SortOrder.DESC).setFrom(0).setSize(limit) } // add fields, but only 2 deep, beyond that we cannot infer the document structure from the response if( keys.size > 0 ) builder.addFields((keys + "stime").map(s => s.split('.').take(2).mkString(".")).toSet.toSeq:_*) if (logger.isDebugEnabled) { val ix = builder.request.indices.mkString(",") logger.debug(s"$req [$ix] fetch: $builder") } val searchResp = builder.execute().actionGet(); if( searchResp.getFailedShards() > 0 ) { val failures = searchResp.getShardFailures() failures.foreach( failure => { val ix = builder.request.indices.mkString(",") logger.error(s"$req [$ix] search shard failure on ${failure.shardId} reason: ${failure.reason} on query: $builder") }) throw new java.lang.RuntimeException(builder.request.indices.mkString(",") + " shard failures") } if( idQuery ) { val now = DateTime.now searchResp.getFacets().facet[TermsFacet]("f").getEntries().asScala.map(f => f.getTerm().toString()).toSet.map( (id: String) => Record(id,null) ).toSeq } else { searchResp.getHits().asScala.map(r => { try esToRecord(if(keys.size > 0) esFieldsFixup(r.getFields) else r.getSource) catch { case e: Exception => { if (logger.isErrorEnabled) logger.error(s"$req$this failed to parse record: 
${r.getSource}", e) throw e } } }).toSeq.sortWith((a, b) => a.stime.isAfter(b.stime)) } } finally { val t1 = System.nanoTime() val lapse = (t1 - t0) / 1000000; if (logger.isInfoEnabled) logger.info(s"$req$this fetch lapse: ${lapse}ms") } } def scan( search: SearchRequestBuilder, keys: Set[String] = Set())(implicit req: RequestId): Seq[Record] = { import collection.JavaConverters.iterableAsScalaIterableConverter import org.elasticsearch.action.search.SearchResponse import org.elasticsearch.action.search.SearchType import org.elasticsearch.common.unit.TimeValue val t0 = System.nanoTime() val builder = search. setTypes(docType). setSearchType(SearchType.SCAN). setScroll(new TimeValue(scanCursorDuration.get.toInt)). setSize(scanBatchSize.get.toInt) // add fields, but only 2 deep, beyond that we cannot infer the document structure from the response if( keys.size > 0 ) builder.addFields((keys + "stime").map(s => s.split('.').take(2).mkString(".")).toSet.toSeq:_*) if (logger.isDebugEnabled) { val ix = builder.request.indices.mkString(",") logger.debug(s"$req [$ix] scan: $builder") } var scrollResp = builder.execute().actionGet() var keepLooping = true var seq: Seq[Record] = Seq() try { while (keepLooping) { scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(scanCursorDuration.get.toInt)).execute().actionGet() // get shard failures if( scrollResp.getFailedShards() > 0 ) { val failures = scrollResp.getShardFailures() failures.foreach( failure => { val ix = builder.request.indices.mkString(",") logger.error(s"$req [$ix] search shard failure on ${failure.shardId} reason: ${failure.reason} on query: $builder") }) throw new java.lang.RuntimeException(builder.request.indices.mkString(",") + " shard failures") } // check for timeout if( scrollResp.isTimedOut ) { val ix = builder.request.indices.mkString(",") logger.error(s"$req $ix search timed out on query: $builder") throw new 
java.util.concurrent.TimeoutException(builder.request.indices.mkString(",") + " scanning query failed after " + scanCursorDuration.get + "ms timeout") } seq = seq ++ scrollResp.getHits().asScala.map(r => { try esToRecord(if(keys.size > 0) esFieldsFixup(r.getFields) else r.getSource) catch { case e: Exception => { if (logger.isErrorEnabled) logger.error(s"$req$this failed to parse record: ${r.getSource}", e) throw e } } }) //Break condition: No hits are returned if (scrollResp.getHits().hits().length == 0) { keepLooping = false } } seq.sortWith((a, b) => a.stime.isAfter(b.stime)) } finally { val t1 = System.nanoTime() val lapse = (t1 - t0) / 1000000; if (logger.isInfoEnabled) logger.info(s"$req$this scan lapse: ${lapse}ms") } } /** make changes to the data store depending on the Collection delta found after a Crawl result */ def update(d: Collection.Delta)(implicit req: RequestId): Collection.Delta = { var toRemove: Seq[Record] = Seq(); val records = d.removed ++ d.added ++ d.changed.flatMap( pair => { // only update oldRecord if the stime is changed, this allows // for inplace updates when we dont want to create new document // revision, but still want the record updated if (pair.oldRecord.stime == pair.newRecord.stime) { Seq(pair.newRecord) } else if (Collection.RetentionPolicy.withName(retentionPolicy.get) == LAST) { toRemove = pair.oldRecord +: toRemove Seq(pair.newRecord) } else { Seq(pair.oldRecord, pair.newRecord) } }) if( Collection.RetentionPolicy.withName(retentionPolicy.get) == LIVE ) { val purge = records.filter(_.ltime != null) val updating = records.filter(_.ltime == null) upsert(updating) remove(purge ++ toRemove) } else { upsert(records) remove(toRemove) } markCollectionModified d } def collectionModified()(implicit req: RequestId): DateTime = { // if query is for "null" ltime, then use the .live index alias val t0 = System.nanoTime() try { val response = client.prepareGet(monitorIndexName, "collection_mark", name).execute().actionGet() if( response 
== null || !response.isExists ) DateTime.now else { esToRecord(response.getSource).mtime } } finally { val t1 = System.nanoTime() val lapse = (t1 - t0) / 1000000; if (logger.isInfoEnabled) logger.info(s"$req$this get collection_mark: ${lapse}ms") } } def markCollectionModified()(implicit req: RequestId) = { val markRec = Record(name, Map("updated" -> DateTime.now, "id" -> name, "type" -> "collection")) val t0 = System.nanoTime() try { client.prepareIndex(monitorIndexName, "collection_mark"). setId(markRec.id). setSource(esToJson(markRec)). setConsistencyLevel(writeConsistency). setReplicationType(replicationType). execute(). actionGet(); } catch { case e: Exception => { if (logger.isErrorEnabled) logger.error(s"$req failed to index record: $markRec", e) throw e } } finally { val t1 = System.nanoTime() val lapse = (t1 - t0) / 1000000; if (logger.isInfoEnabled) logger.info(s"$req$this update collection_mark: ${lapse}ms") } } protected def upsert(record: Record)(implicit req: RequestId) { val t0 = System.nanoTime() try { client.prepareIndex(writeAliasName, docType). setId(record.toId()). setRouting(record.id). setSource(esToJson(record)). setConsistencyLevel(writeConsistency). setReplicationType(replicationType). execute(). actionGet(); } catch { case e: Exception => { if (logger.isErrorEnabled) logger.error(s"$req failed to index record: $record", e) throw e } } finally { val t1 = System.nanoTime() val lapse = (t1 - t0) / 1000000; if (logger.isInfoEnabled) logger.info(s"$req$this upsert: ${lapse}ms") } } protected def upsert(records: Seq[Record])(implicit req: RequestId) { if( records.size <= 0 ) return val t0 = System.nanoTime() try { val iter: Iterator[Seq[Record]] = if(bulkBatchSize.get.toInt > 0) records.sliding(bulkBatchSize.get.toInt) else List(records).iterator iter.foreach( recs => { val bulk = client.prepareBulk recs.foreach( rec => { bulk.add( client.prepareIndex(writeAliasName, docType). setId(rec.id + "|" + rec.stime.getMillis). setRouting(rec.id). 
setSource(esToJson(rec)). setConsistencyLevel(writeConsistency). setReplicationType(replicationType) ) }) val response = bulk.execute.actionGet if( response.hasFailures() ) { val err = this + " failed to bulk index: " + response.buildFailureMessage() if (logger.isErrorEnabled) logger.error(s"$req $err") throw new java.lang.RuntimeException(err) } }) } catch { case e: Exception => { if (logger.isErrorEnabled) logger.error(s"$req failed to bulk index records", e) } } finally { val t1 = System.nanoTime() val lapse = (t1 - t0) / 1000000; if (logger.isInfoEnabled) logger.info(s"$req$this bulk upsert lapse: ${lapse}ms") } } protected def remove(record: Record)(implicit req: RequestId) { val t0 = System.nanoTime() try { val response = client.prepareDelete(writeAliasName, docType, record.toId()). setRouting(record.id). execute(). actionGet(); if( response.isNotFound() ) { logger.error(s"$req$this failed to delete '${record.toId()}': Not Found") } } catch { case e: Exception => { if (logger.isErrorEnabled) logger.error(s"$req failed to delete record: $record", e) throw e } } finally { val t1 = System.nanoTime() val lapse = (t1 - t0) / 1000000; if (logger.isInfoEnabled) logger.info(s"$req$this remove lapse: ${lapse}ms") } } override def remove(queryMap: Map[String, Any])(implicit req: RequestId) { val t0 = System.nanoTime() try { val response = client.prepareDeleteByQuery(writeAliasName). setTypes(docType). setQuery(esQuery(queryMap)). execute(). 
actionGet() // FIXME need to upgrade elasticsearch so that DeleteByQueryResponse has status() member // if( response.status() != RestStatus.OK ) { // val err = this + " failed to delete with query " + queryMap.toString // if (logger.isErrorEnabled) logger.error(s"$req $err") // throw new java.lang.RuntimeException(err) // } } catch { case e: Exception => { if (logger.isErrorEnabled) logger.error(s"$req failed to delete records: $queryMap", e) throw e } } finally { val t1 = System.nanoTime() val lapse = (t1 - t0) / 1000000; if (logger.isInfoEnabled) logger.info(s"$req$this remove by query lapse: ${lapse}ms") } } protected def remove(records: Seq[Record])(implicit req: RequestId) { if( records.size <= 0 ) return val t0 = System.nanoTime() try { val iter: Iterator[Seq[Record]] = if(bulkBatchSize.get.toInt > 0) records.sliding(bulkBatchSize.get.toInt) else List(records).iterator iter.foreach( recs => { val bulk = client.prepareBulk recs.foreach( rec => { bulk.add( client.prepareDelete(writeAliasName, docType, rec.id + "|" + rec.stime.getMillis).setRouting(rec.id) ) }) val response = bulk.execute.actionGet if( response.hasFailures() ) { if (logger.isErrorEnabled) logger.error(s"$req$this failed to bulk delete: ${response.buildFailureMessage()}") } }) } catch { case e: Exception => { if (logger.isErrorEnabled) logger.error(s"$req failed to bulk index records", e) } } finally { val t1 = System.nanoTime() val lapse = (t1 - t0) / 1000000; if (logger.isInfoEnabled) logger.info(s"$req$this bulk remove lapse: ${lapse}ms") } } override def toString = "[ElasticSearchDatastore " + name + "]" }
gitlon/edda
src/main/scala/com/netflix/edda/elasticsearch/ElasticSearchDatastore.scala
Scala
apache-2.0
29,256
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest.tools import org.scalatest.{FunSuite, Resources, Retries, OptionValues} import sbt.testing.{Framework => _, _} import org.scalatest.SharedHelpers.{EventRecordingReporter, createTempDirectory} import org.scalatest.exceptions.NotAllowedException import org.scalatest.tagobjects.Retryable import java.io.File import Retries._ import OptionValues._ class FrameworkSuite extends FunSuite { override def withFixture(test: NoArgTest) = { if (isRetryable(test)) withRetry { super.withFixture(test) } else super.withFixture(test) } class TestEventHandler extends EventHandler { private var errorEvents = List[Event]() private var failureEvents = List[Event]() private var skippedEvents = List[Event]() private var successEvents = List[Event]() private var ignoredEvents = List[Event]() private var pendingEvents = List[Event]() private var canceledEvents = List[Event]() override def handle(event: Event): Unit = { event.status match { case Status.Success => successEvents ::= event case Status.Error => errorEvents ::= event case Status.Failure => failureEvents ::= event case Status.Skipped => skippedEvents ::= event case Status.Ignored => ignoredEvents ::= event case Status.Pending => pendingEvents ::= event case Status.Canceled => canceledEvents ::= event } } def errorEventsReceived = errorEvents.reverse def failureEventsReceived = failureEvents.reverse def skippedEventsReceived = 
skippedEvents.reverse
    def successEventsReceived = successEvents.reverse
    def ignoredEventsReceived = ignoredEvents.reverse
    def pendingEventsReceived = pendingEvents.reverse
    def canceledEventsReceived = canceledEvents.reverse
  }

  // sbt Logger stub: records each message per level for later inspection.
  class TestLogger extends Logger {

    private var errorList = List[String]()
    private var warnList = List[String]()
    private var infoList = List[String]()
    private var debugList = List[String]()
    private var traceList = List[Throwable]()

    def ansiCodesSupported = false

    def error(msg: String): Unit = {
      errorList ::= msg
    }

    def warn(msg: String): Unit = {
      warnList ::= msg
    }

    def info(msg: String): Unit = {
      infoList ::= msg
    }

    def debug(msg: String): Unit = {
      debugList ::= msg
    }

    def trace(t: Throwable): Unit = {
      traceList ::= t
    }

    // Messages are prepended on receipt; reverse to restore arrival order.
    def errorReceived = errorList.reverse
    def warnReceived = warnList.reverse
    def infoReceived = infoList.reverse
    def debugReceived = debugList.reverse
    def traceReceived = traceList.reverse
  }

  test("framework name") {
    assert(new Framework().name === "ScalaTest")
  }

  test("fingerprints contains 2 test fingerprints, they are SubclassFingerprint for org.scalatest.Suite and AnnotatedFingerprint for org.scalatest.WrapWith") {
    val framework = new Framework
    val fingerprints = framework.fingerprints
    assert(fingerprints.size === 2)

    val testFingerprint =
      fingerprints(0).asInstanceOf[sbt.testing.SubclassFingerprint]
    assert(testFingerprint.isModule === false)
    assert(testFingerprint.superclassName === "org.scalatest.Suite")
    assert(testFingerprint.requireNoArgConstructor === true)

    val annotatedFingerprint =
      fingerprints(1).asInstanceOf[sbt.testing.AnnotatedFingerprint]
    assert(annotatedFingerprint.isModule === false)
    assert(annotatedFingerprint.annotationName === "org.scalatest.WrapWith")
  }

  // Shared fixtures used by the runner/task tests below.
  val testClassLoader = getClass.getClassLoader
  val subClassFingerprint = new sbt.testing.SubclassFingerprint {
    def superclassName = "org.scalatest.Suite"
    def isModule = false
    def requireNoArgConstructor = true
  }

  val framework = new Framework
  val subclassFingerprint =
new SubclassFingerprint {
      def superclassName = "org.scalatest.Suite"
      def isModule = false
      def requireNoArgConstructor = true
    }
  val annotatedFingerprint = new AnnotatedFingerprint {
    def annotationName = "org.scalatest.WrapWith"
    def isModule = false
  }

  // Asserts a successful top-level test event: status, suite name, fingerprint,
  // non-negative duration, no throwable, and a TestSelector with the given test name.
  def assertSuiteSuccessEvent(event: Event, suiteClassName: String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Success === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
    assert(!event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case testSelector: TestSelector =>
        assert(testName === testSelector.testName)
      case _ =>
        fail("Expected to get TestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Same as above, but for a test inside a nested suite: expects a NestedTestSelector
  // carrying both the nested suite id and the test name.
  def assertNestedSuiteSuccessEvent(event: Event, suiteClassName: String, suiteId:String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Success === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
    assert(!event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case nestedTestSelector: NestedTestSelector =>
        assert(suiteId === nestedTestSelector.suiteId)
        assert(testName === nestedTestSelector.testName)
      case _ =>
        fail("Expected to get NestedTestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts a failed top-level test event; unlike success, a throwable must be present.
  def assertSuiteFailureEvent(event: Event, suiteClassName: String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Failure === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
    assert(event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case testSelector: TestSelector =>
        assert(testName === testSelector.testName)
      case _ =>
        fail("Expected to get TestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts a failed test event from a nested suite.
  def assertNestedSuiteFailureEvent(event: Event,
suiteClassName: String, suiteId:String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Failure === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
    assert(event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case nestedTestSelector: NestedTestSelector =>
        assert(suiteId === nestedTestSelector.suiteId)
        assert(testName === nestedTestSelector.testName)
      case _ =>
        fail("Expected to get NestedTestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts a suite-aborted (Error) event for a top-level suite: a SuiteSelector is expected.
  def assertSuiteErrorEvent(event: Event, suiteClassName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Error === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
    assert(event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case suiteSelector: SuiteSelector =>
        // Nothing more to check, just make sure it is SuiteSelector.
      case _ =>
        fail("Expected to get TestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts a suite-aborted (Error) event for a nested suite, identified by suite id.
  def assertNestedSuiteErrorEvent(event: Event, suiteClassName: String, suiteId:String, fingerprint: Fingerprint): Unit = {
    assert(Status.Error === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
    assert(event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case nestedSuiteSelector: NestedSuiteSelector =>
        assert(suiteId === nestedSuiteSelector.suiteId)
      case _ =>
        fail("Expected to get NestedTestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts a skipped top-level test event (no throwable expected).
  def assertSuiteSkippedEvent(event: Event, suiteClassName: String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Skipped === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
    assert(!event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case testSelector: TestSelector =>
        assert(testName === testSelector.testName)
      case _ =>
        fail("Expected to get TestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts an ignored top-level test event; ignored tests never ran, so duration is -1.
  def assertSuiteIgnoredEvent(event: Event, suiteClassName: String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Ignored === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration === -1)
    assert(!event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case testSelector: TestSelector =>
        assert(testName === testSelector.testName)
      case _ =>
        fail("Expected to get TestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts a pending top-level test event.
  def assertSuitePendingEvent(event: Event, suiteClassName: String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Pending === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
assert(!event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case testSelector: TestSelector =>
        assert(testName === testSelector.testName)
      case _ =>
        fail("Expected to get TestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts a canceled top-level test event.
  def assertSuiteCanceledEvent(event: Event, suiteClassName: String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Canceled === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
    assert(!event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case testSelector: TestSelector =>
        assert(testName === testSelector.testName)
      case _ =>
        fail("Expected to get TestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts a skipped test event from a nested suite.
  def assertNestedSuiteSkippedEvent(event: Event, suiteClassName: String, suiteId:String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Skipped === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
    assert(!event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case nestedTestSelector: NestedTestSelector =>
        assert(suiteId === nestedTestSelector.suiteId)
        assert(testName === nestedTestSelector.testName)
      case _ =>
        fail("Expected to get NestedTestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts an ignored test event from a nested suite (duration is -1, as for top-level ignored).
  def assertNestedSuiteIgnoredEvent(event: Event, suiteClassName: String, suiteId:String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Ignored === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration === -1)
    assert(!event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case nestedTestSelector: NestedTestSelector =>
        assert(suiteId === nestedTestSelector.suiteId)
        assert(testName === nestedTestSelector.testName)
      case _ =>
        fail("Expected to get NestedTestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts a pending test event from a nested suite.
  def assertNestedSuitePendingEvent(event: Event, suiteClassName: String, suiteId:String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Pending === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
    assert(!event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case nestedTestSelector: NestedTestSelector =>
        assert(suiteId === nestedTestSelector.suiteId)
        assert(testName === nestedTestSelector.testName)
      case _ =>
        fail("Expected to get NestedTestSelector, but got: " + selector.getClass.getName)
    }
  }

  // Asserts a canceled test event from a nested suite.
  def assertNestedSuiteCanceledEvent(event: Event, suiteClassName: String, suiteId:String, testName: String, fingerprint: Fingerprint): Unit = {
    assert(Status.Canceled === event.status)
    assert(suiteClassName === event.fullyQualifiedName)
    assert(fingerprint === event.fingerprint)
    assert(event.duration >= 0)
    assert(!event.throwable.isDefined)
    val selector = event.selector
    selector match {
      case nestedTestSelector: NestedTestSelector =>
        assert(suiteId === nestedTestSelector.suiteId)
        assert(testName === nestedTestSelector.testName)
      case _ =>
        fail("Expected to get NestedTestSelector, but got: " + selector.getClass.getName)
    }
  }

  test("ScalaTestRunner.task should return task that run whole suite when fullyQualifiedName = valid class name, explicitlySpecified = false and selectors = Array(SuiteSelector)") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector))))
      assert(tasks.size === 1)
      val task = tasks(0)
      task.execute(testEventHandler, Array(new TestLogger))
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length === 3)
      assertSuiteSuccessEvent(successEvents(0),
"org.scalatest.tools.scalasbt.SampleSuite", "test 1", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SampleSuite", "test 2", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(2), "org.scalatest.tools.scalasbt.SampleSuite", "test 3", subclassFingerprint)
      assert(testEventHandler.errorEventsReceived.length === 0)
      assert(testEventHandler.failureEventsReceived.length === 0)
      assert(testEventHandler.skippedEventsReceived.length === 0)
    }
    finally {
      // Runner must always be shut down, even when assertions fail.
      runner.done()
    }
  }

  test("ScalaTestRunner.task should return empty task array when fullyQualifiedName = valid class name, explicitlySpecified = false, selectors = Array(SuiteSelector)" +
       "and the suite class is marked as @DoNotDiscover") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint, false, Array(new SuiteSelector))))
      assert(tasks.size === 0)
    }
    finally {
      runner.done()
    }
  }

  test("When suite is neither subclass of org.scalatest.Suite or annotated with WrapWith and explicitlySpecified is true, IllegalArgumentException will be thrown when task executes") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      intercept[IllegalArgumentException] {
        val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.NotASuite", subclassFingerprint, true, Array(new SuiteSelector))))
        assert(tasks.size === 1)
        val notASuiteTask = tasks(0)
        notASuiteTask.execute(new TestEventHandler, Array(new TestLogger))
      }
    }
    finally {
      runner.done()
    }
  }

  test("When suite is neither subclass of org.scalatest.Suite or annotated with WrapWith and explicitlySpecified is false, no task will be returned") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.NotASuite", subclassFingerprint, false,
Array(new SuiteSelector))))
      assert(tasks.size === 0)
    }
    finally {
      runner.done()
    }
  }

  test("When an invalid suite class name is passed into to task(fullyQualifiedName: String, fingerprint: Fingerprint), IllegalArgumentException " +
       "will be thrown") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      intercept[IllegalArgumentException] {
        val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.DoesNotExist", subclassFingerprint, false, Array(new SuiteSelector))))
        assert(tasks.size === 1)
        val doesNotExistTask = tasks(0)
        doesNotExistTask.execute(new TestEventHandler, Array(new TestLogger))
      }
    }
    finally {
      runner.done()
    }
  }

  test("Nested suites will be executed in task(fullyQualifiedName: String, fingerprint: Fingerprint), no nested task will be returned") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SuiteWithNestedSuites", subclassFingerprint, false, Array(new SuiteSelector))))
      assert(tasks.size === 1)
      val task = tasks(0)
      val nestedTasks = task.execute(testEventHandler, Array(new TestLogger))
      // 2 nested suites x 3 tests + 3 top-level tests = 9 success events, fired in nesting order.
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length === 9)
      assertNestedSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 1", "nested 1 test 1", subclassFingerprint)
      assertNestedSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 1", "nested 1 test 2", subclassFingerprint)
      assertNestedSuiteSuccessEvent(successEvents(2), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 1", "nested 1 test 3", subclassFingerprint)
      assertNestedSuiteSuccessEvent(successEvents(3), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 2", "nested 2 test 1", subclassFingerprint)
      assertNestedSuiteSuccessEvent(successEvents(4), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 2", "nested 2 test 2", subclassFingerprint)
      assertNestedSuiteSuccessEvent(successEvents(5), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 2", "nested 2 test 3", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(6), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "test 1", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(7), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "test 2", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(8), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "test 3", subclassFingerprint)
      assert(testEventHandler.errorEventsReceived.length === 0)
      assert(testEventHandler.failureEventsReceived.length === 0)
      assert(testEventHandler.skippedEventsReceived.length === 0)
      assert(nestedTasks.size === 0)
    }
    finally {
      runner.done()
    }
  }

  test("Ignore, pending, failed, canceled, suite aborted events should be translated and reported correctly for the suite and its nested suites") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", subclassFingerprint, false, Array(new SuiteSelector))))
      assert(tasks.size === 1)
      val task = tasks(0)
      val nestedTasks = task.execute(testEventHandler, Array(new TestLogger))
      assert(nestedTasks.size == 0)
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length == 3)
      assertNestedSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "nested 1", "nested 1 success", subclassFingerprint)
      assertNestedSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "nested 2", "nested 2 success", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(2), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "success",
subclassFingerprint)
      val failureEvents = testEventHandler.failureEventsReceived
      assert(failureEvents.length == 3)
      assertNestedSuiteFailureEvent(failureEvents(0), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "nested 1", "nested 1 failed", subclassFingerprint)
      assertNestedSuiteFailureEvent(failureEvents(1), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "nested 2", "nested 2 failed", subclassFingerprint)
      assertSuiteFailureEvent(failureEvents(2), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "failed", subclassFingerprint)
      // The aborted nested suite ("nested 3") surfaces as a single Error event.
      val errorEvents = testEventHandler.errorEventsReceived
      assert(errorEvents.length == 1)
      assertNestedSuiteErrorEvent(errorEvents(0), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "nested 3", subclassFingerprint)
      val skippedEvents = testEventHandler.skippedEventsReceived
      assert(skippedEvents.length == 0)
      val ignoredEvents = testEventHandler.ignoredEventsReceived
      assert(ignoredEvents.length == 3)
      assertNestedSuiteIgnoredEvent(ignoredEvents(0), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "nested 1", "nested 1 ignored", subclassFingerprint)
      assertNestedSuiteIgnoredEvent(ignoredEvents(1), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "nested 2", "nested 2 ignored", subclassFingerprint)
      assertSuiteIgnoredEvent(ignoredEvents(2), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "ignored", subclassFingerprint)
      val pendingEvents = testEventHandler.pendingEventsReceived
      assert(pendingEvents.length == 3)
      assertNestedSuitePendingEvent(pendingEvents(0), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "nested 1", "nested 1 pending", subclassFingerprint)
      assertNestedSuitePendingEvent(pendingEvents(1), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "nested 2", "nested 2 pending", subclassFingerprint)
      assertSuitePendingEvent(pendingEvents(2), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "pending", subclassFingerprint)
      val canceledEvents = testEventHandler.canceledEventsReceived
      assert(canceledEvents.length == 3)
      assertNestedSuiteCanceledEvent(canceledEvents(0), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "nested 1", "nested 1 canceled", subclassFingerprint)
      assertNestedSuiteCanceledEvent(canceledEvents(1), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "nested 2", "nested 2 canceled", subclassFingerprint)
      assertSuiteCanceledEvent(canceledEvents(2), "org.scalatest.tools.scalasbt.SuiteWithFailedSkippedTests", "canceled", subclassFingerprint)
    }
    finally {
      runner.done()
    }
  }

  test("SuiteSelector should select and run test(s) in selected suite") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector()))))
      val task = tasks(0)
      task.execute(testEventHandler, Array(new TestLogger))
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length === 3)
      assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SampleSuite", "test 1", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SampleSuite", "test 2", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(2), "org.scalatest.tools.scalasbt.SampleSuite", "test 3", subclassFingerprint)
      assert(testEventHandler.errorEventsReceived.length === 0)
      assert(testEventHandler.failureEventsReceived.length === 0)
    }
    finally {
      runner.done()
    }
  }

  test("SuiteSelector should select and run test(s) in selected suite when it is explicitly specified, even when the selected suite is annotated with @DoNotDiscover") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint,
true, Array(new SuiteSelector()))))
      assert(tasks.size === 1)
      val task = tasks(0)
      task.execute(testEventHandler, Array(new TestLogger))
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length === 3)
      assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.DoNotDiscoverSuite", "test 1", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.DoNotDiscoverSuite", "test 2", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(2), "org.scalatest.tools.scalasbt.DoNotDiscoverSuite", "test 3", subclassFingerprint)
      assert(testEventHandler.errorEventsReceived.length === 0)
      assert(testEventHandler.failureEventsReceived.length === 0)
    }
    finally {
      runner.done()
    }
  }

  test("TestSelector should select and run selected test(s) in suite, excluding nested suites") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new TestSelector("test 1"), new TestSelector("test 3"))),
                                     new TaskDef("org.scalatest.tools.scalasbt.SuiteWithNestedSuites", subclassFingerprint, false, Array(new TestSelector("test 2")))))
      assert(tasks.size === 2)
      val task = tasks(0)
      task.execute(testEventHandler, Array(new TestLogger))
      // Only the two selected tests run; "test 2" of SampleSuite must not.
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length === 2)
      assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SampleSuite", "test 1", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SampleSuite", "test 3", subclassFingerprint)
      assert(testEventHandler.errorEventsReceived.length === 0)
      assert(testEventHandler.failureEventsReceived.length === 0)
      assert(testEventHandler.skippedEventsReceived.length === 0)
      val testEventHandler2 = new TestEventHandler
      val task2 = tasks(1)
      val task2NestedSuites = task2.execute(testEventHandler2, Array(new TestLogger))
      val successEvents2 = testEventHandler2.successEventsReceived
      assert(successEvents2.length === 1)
      assertSuiteSuccessEvent(successEvents2(0), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "test 2", subclassFingerprint)
      assert(testEventHandler2.errorEventsReceived.length === 0)
      assert(testEventHandler2.failureEventsReceived.length === 0)
      assert(testEventHandler2.skippedEventsReceived.length === 0)
      assert(task2NestedSuites.size == 0)
    }
    finally {
      runner.done()
    }
  }

  test("TestSelector should select and run selected test(s) in suite when it is explicitly specified, even when the suite is annotated with @DoNotDiscover") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint, true, Array(new TestSelector("test 1"), new TestSelector("test 3")))))
      assert(tasks.size === 1)
      val task = tasks(0)
      task.execute(testEventHandler, Array(new TestLogger))
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length === 2)
      assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.DoNotDiscoverSuite", "test 1", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.DoNotDiscoverSuite", "test 3", subclassFingerprint)
      assert(testEventHandler.errorEventsReceived.length === 0)
      assert(testEventHandler.failureEventsReceived.length === 0)
      assert(testEventHandler.skippedEventsReceived.length === 0)
    }
    finally {
      runner.done()
    }
  }

  test("TestSelector should not select and run selected test(s) in suite when it is not explicitly specified and the suite is annotated with @DoNotDiscover") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new
TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint, false, Array(new TestSelector("test 1"), new TestSelector("test 3")))))
      assert(tasks.size === 0)
    }
    finally {
      runner.done()
    }
  }

  test("TestWildcardSelector should select and run selected test(s) using wildcard in suite, excluding nested suites") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      // Wildcards are substring matches: "est 1" matches "test 1", etc.
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new TestWildcardSelector("est 1"), new TestWildcardSelector("st 3"))),
                                     new TaskDef("org.scalatest.tools.scalasbt.SuiteWithNestedSuites", subclassFingerprint, false, Array(new TestWildcardSelector("t 2")))))
      assert(tasks.size === 2)
      val task = tasks(0)
      task.execute(testEventHandler, Array(new TestLogger))
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length === 2)
      assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SampleSuite", "test 1", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SampleSuite", "test 3", subclassFingerprint)
      assert(testEventHandler.errorEventsReceived.length === 0)
      assert(testEventHandler.failureEventsReceived.length === 0)
      assert(testEventHandler.skippedEventsReceived.length === 0)
      val testEventHandler2 = new TestEventHandler
      val task2 = tasks(1)
      val task2NestedSuites = task2.execute(testEventHandler2, Array(new TestLogger))
      val successEvents2 = testEventHandler2.successEventsReceived
      assert(successEvents2.length === 1)
      assertSuiteSuccessEvent(successEvents2(0), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "test 2", subclassFingerprint)
      assert(testEventHandler2.errorEventsReceived.length === 0)
      assert(testEventHandler2.failureEventsReceived.length === 0)
      assert(testEventHandler2.skippedEventsReceived.length === 0)
      assert(task2NestedSuites.size == 0)
    }
    finally {
      runner.done()
    }
  }

  test("TestWildcardSelector should select and run selected test(s) in suite when it is explicitly specified, even when the suite is annotated with @DoNotDiscover") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint, true, Array(new TestWildcardSelector("st 1"), new TestWildcardSelector("est 3")))))
      assert(tasks.size === 1)
      val task = tasks(0)
      task.execute(testEventHandler, Array(new TestLogger))
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length === 2)
      assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.DoNotDiscoverSuite", "test 1", subclassFingerprint)
      assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.DoNotDiscoverSuite", "test 3", subclassFingerprint)
      assert(testEventHandler.errorEventsReceived.length === 0)
      assert(testEventHandler.failureEventsReceived.length === 0)
      assert(testEventHandler.skippedEventsReceived.length === 0)
    }
    finally {
      runner.done()
    }
  }

  test("TestWildcardSelector should not select and run selected test(s) in suite when it is not explicitly specified and the suite is annotated with @DoNotDiscover") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint, false, Array(new TestWildcardSelector("est 1"), new TestWildcardSelector("t 3")))))
      assert(tasks.size === 0)
    }
    finally {
      runner.done()
    }
  }

  test("NestedSuiteSelector should select and run test(s) in selected nested suite when it is explicitly specified, even if the selected nested suite is annotated with @DoNotDiscover") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks =
runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SuiteWithNestedSuites", subclassFingerprint, true, Array(new NestedSuiteSelector("nested 1")))))
      assert(tasks.size == 1)
      val task = tasks(0)
      val nestedTasks = task.execute(testEventHandler, Array(new TestLogger))
      assert(nestedTasks.size == 0)
      // Only the selected nested suite's three tests run; nothing else from the parent.
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length == 3)
      assertNestedSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 1", "nested 1 test 1", subclassFingerprint)
      assertNestedSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 1", "nested 1 test 2", subclassFingerprint)
      assertNestedSuiteSuccessEvent(successEvents(2), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 1", "nested 1 test 3", subclassFingerprint)
      assert(testEventHandler.errorEventsReceived.length === 0)
      assert(testEventHandler.failureEventsReceived.length === 0)
      assert(testEventHandler.skippedEventsReceived.length === 0)
    }
    finally {
      runner.done()
    }
  }

  test("NestedSuiteSelector should select and run test(s) in selected nested suite when it is not explicitly specified, even if the selected nested suite is annotated with @DoNotDiscover") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SuiteWithNestedSuites", subclassFingerprint, false, Array(new NestedSuiteSelector("nested 1")))))
      assert(tasks.size === 1)
      val task = tasks(0)
      val nestedTasks = task.execute(testEventHandler, Array(new TestLogger))
      assert(nestedTasks.size === 0)
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length === 3)
      assertNestedSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 1", "nested 1 test 1", subclassFingerprint)
      assertNestedSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 1", "nested 1 test 2", subclassFingerprint)
      assertNestedSuiteSuccessEvent(successEvents(2), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 1", "nested 1 test 3", subclassFingerprint)
      assert(testEventHandler.errorEventsReceived.length === 0)
      assert(testEventHandler.failureEventsReceived.length === 0)
      assert(testEventHandler.skippedEventsReceived.length === 0)
    }
    finally {
      runner.done()
    }
  }

  test("NestedTestSelector should select and run selected test(s) in selected nested suite when it is explicitly specified, even if the selected nested suite is annotated with @DoNotDiscover") {
    val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
    try {
      val testEventHandler = new TestEventHandler
      // NOTE(review): the test name says "explicitly specified" but explicitlySpecified is
      // passed as false here — looks intentional upstream; confirm before changing.
      val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SuiteWithNestedSuites", subclassFingerprint, false, Array(new NestedTestSelector("nested 1", "nested 1 test 1"), new NestedTestSelector("nested 2", "nested 2 test 3")))))
      assert(tasks.size == 1)
      val task = tasks(0)
      val nestedTasks = task.execute(testEventHandler, Array(new TestLogger))
      assert(nestedTasks.size == 0)
      val successEvents = testEventHandler.successEventsReceived
      assert(successEvents.length == 2)
      assertNestedSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 1", "nested 1 test 1", subclassFingerprint)
      assertNestedSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 2", "nested 2 test 3", subclassFingerprint)
      assert(testEventHandler.errorEventsReceived.length == 0)
      assert(testEventHandler.failureEventsReceived.length == 0)
      assert(testEventHandler.skippedEventsReceived.length == 0)
    }
    finally {
      runner.done()
    }
  }

  test("NestedTestSelector should select and run selected test(s) in selected nested suite when it is not explicitly specified, even if the selected nested suite is annotated with @DoNotDiscover") {
    val runner =
framework.runner(Array.empty, Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SuiteWithNestedSuites", subclassFingerprint, false, Array(new NestedTestSelector("nested 1", "nested 1 test 1"), new NestedTestSelector("nested 2", "nested 2 test 3")))))
    assert(tasks.size === 1)
    val task = tasks(0)
    val nestedTasks = task.execute(testEventHandler, Array(new TestLogger))
    assert(nestedTasks.size === 0)
    val successEvents = testEventHandler.successEventsReceived
    assert(successEvents.length === 2)
    assertNestedSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 1", "nested 1 test 1", subclassFingerprint)
    assertNestedSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SuiteWithNestedSuites", "nested 2", "nested 2 test 3", subclassFingerprint)
    assert(testEventHandler.errorEventsReceived.length === 0)
    assert(testEventHandler.failureEventsReceived.length === 0)
    assert(testEventHandler.skippedEventsReceived.length === 0)
  } finally {
    runner.done()
  }
}

// done() must produce the run summary exactly once; a second call must throw.
test("ScalaTestRunner should return summary when 'done' is called, and throw IllegalStateException if 'done' method is called twice.") {
  val runner = framework.runner(Array("-oW"), Array.empty, testClassLoader)
  try {
    val testLogger = new TestLogger
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector()))))
    assert(tasks.size === 1)
    val task = tasks(0)
    task.execute(new TestEventHandler, Array(testLogger))
    // Summary lines: elapsed time, test count, suite counts, per-result counts, verdict.
    val summaryText = runner.done.split("\\n")
    assert(summaryText.size === 5)
    assert(summaryText(0).startsWith("Run completed in "))
    assert(summaryText(1) === "Total number of tests run: 3")
    assert(summaryText(2) === "Suites: completed 1, aborted 0")
    assert(summaryText(3) === "Tests: succeeded 3, failed 0, canceled 0, ignored 0, pending 0")
    assert(summaryText(4) === "All tests passed.")
intercept[IllegalStateException] {
      runner.done()
    }
  } finally {
    try {
      // Just to make sure runner.done() has been called to avoid hanging thread
      runner.done()
    } catch {
      case _: IllegalStateException => // Do nothing
    }
  }
}

// -oWI: the summary must include reminders for both the failed and the canceled test.
test("ScalaTestRunner using -oWI should return summary that contains failed and canceled test reminder when 'done' is called") {
  val runner = framework.runner(Array("-oWI"), Array.empty, testClassLoader)
  try {
    val testLogger = new TestLogger
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SuiteWithFailedCanceledTests", subclassFingerprint, false, Array(new SuiteSelector()))))
    assert(tasks.size === 1)
    val task = tasks(0)
    task.execute(new TestEventHandler, Array(testLogger))
    val summaryText = runner.done.split("\\n")
    assert(summaryText.size === 13)
    assert(summaryText(0).startsWith("Run completed in "))
    assert(summaryText(1) === "Total number of tests run: 2")
    assert(summaryText(2) === "Suites: completed 1, aborted 0")
    assert(summaryText(3) === "Tests: succeeded 1, failed 1, canceled 1, ignored 1, pending 1")
    assert(summaryText(4) === "*** 1 TEST FAILED ***")
    assert(summaryText(5) === "SuiteWithFailedCanceledTests:")
    assert(summaryText(6) === "")
    assert(summaryText(7) === "- failed *** FAILED ***")
    assert(summaryText(8) === " org.scalatest.exceptions.TestFailedException was thrown. (SuiteWithFailedCanceledTests.scala:24)")
    assert(summaryText(9) === "SuiteWithFailedCanceledTests:")
    assert(summaryText(10) === "")
    assert(summaryText(11) === "- canceled !!! CANCELED !!!")
    assert(summaryText(12) === " org.scalatest.exceptions.TestCanceledException was thrown. (SuiteWithFailedCanceledTests.scala:25)")
  } finally {
    try {
      // Just to make sure runner.done() has been called to avoid hanging thread
      runner.done()
    } catch {
      case _: IllegalStateException => // Do nothing
    }
  }
}

// -oWIK: K suppresses the canceled-test reminder, so only the failed test appears.
test("ScalaTestRunner using -oWIK should return summary that contains failed test reminder only (without canceled test) when 'done' is called") {
  val runner = framework.runner(Array("-oWIK"), Array.empty, testClassLoader)
  try {
    val testLogger = new TestLogger
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SuiteWithFailedCanceledTests", subclassFingerprint, false, Array(new SuiteSelector()))))
    assert(tasks.size === 1)
    val task = tasks(0)
    task.execute(new TestEventHandler, Array(testLogger))
    val summaryText = runner.done.split("\\n")
    assert(summaryText.size === 9)
    assert(summaryText(0).startsWith("Run completed in "))
    assert(summaryText(1) === "Total number of tests run: 2")
    assert(summaryText(2) === "Suites: completed 1, aborted 0")
    assert(summaryText(3) === "Tests: succeeded 1, failed 1, canceled 1, ignored 1, pending 1")
    assert(summaryText(4) === "*** 1 TEST FAILED ***")
    assert(summaryText(5) === "SuiteWithFailedCanceledTests:")
    assert(summaryText(6) === "")
    assert(summaryText(7) === "- failed *** FAILED ***")
    assert(summaryText(8) === " org.scalatest.exceptions.TestFailedException was thrown. (SuiteWithFailedCanceledTests.scala:24)")
  } finally {
    try {
      // Just to make sure runner.done() has been called to avoid hanging thread
      runner.done()
    } catch {
      case _: IllegalStateException => // Do nothing
    }
  }
}

// A suite annotated @CPU should surface the "cpu" sbt tag on its task.
test("ScalaTest Task's tags method should return 'cpu' when suite class is annotated with @CPU") {
  val runner = framework.runner(Array("-oW"), Array.empty, testClassLoader)
  try {
    // Removed an unused `val testLogger = new TestLogger` — the logger was never passed to anything.
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.CPUTaggedSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    assert(tasks.size === 1)
    val task = tasks(0)
    val taskTags = task.tags
    assert(taskTags.size === 1)
    assert(taskTags(0) === "cpu")
  } finally {
    runner.done()
  }
}

// A suite annotated @Network should surface the "network" sbt tag.
test("ScalaTest Task's tags method should return 'network' when suite class is annotated with @Network") {
  val runner = framework.runner(Array("-oW"), Array.empty, testClassLoader)
  try {
    // Removed an unused `val testLogger = new TestLogger` (dead local).
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.NetworkTaggedSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    assert(tasks.size === 1)
    val task = tasks(0)
    val taskTags = task.tags
    assert(taskTags.size === 1)
    assert(taskTags(0) === "network")
  } finally {
    runner.done()
  }
}

// A suite annotated @Disk should surface the "disk" sbt tag.
test("ScalaTest Task's tags method should return 'disk' when suite class is annotated with @Disk") {
  val runner = framework.runner(Array("-oW"), Array.empty, testClassLoader)
  try {
    // Removed an unused `val testLogger = new TestLogger` (dead local).
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.DiskTaggedSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    assert(tasks.size === 1)
    val task = tasks(0)
    val taskTags = task.tags
    assert(taskTags.size === 1)
    assert(taskTags(0) === "disk")
  } finally {
    runner.done()
  }
}

// @TagAnnotation('custom') should surface a "custom" tag.
test("ScalaTest Task's tags method should return 'custom' when suite class is annotated with @TagAnnotation('custom')") {
  val runner = framework.runner(Array("-oW"), Array.empty, testClassLoader)
  try {
    val testLogger = new
TestLogger
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.CustomTaggedSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    assert(tasks.size === 1)
    val task = tasks(0)
    val taskTags = task.tags
    assert(taskTags.size === 1)
    assert(taskTags(0) === "custom")
  } finally {
    runner.done()
  }
}

// Task.taskDef must echo back the TaskDef the task was created from. Three TaskDefs go in,
// only two tasks come out — the surviving DoNotDiscoverSuite task is the explicitly-specified one
// (see the explicitlySpecified === true assertion on taskDef2 below).
test("ScalaTest Task's taskDef method should return TaskDef that defines the task") {
  val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val suiteSelector = new SuiteSelector();
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array.empty),
      new TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint, false, Array(suiteSelector)),
      new TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint, true, Array(suiteSelector))))
    assert(tasks.length === 2)
    val task1 = tasks(0)
    val taskDef1 = task1.taskDef
    assert(taskDef1.fullyQualifiedName === "org.scalatest.tools.scalasbt.SampleSuite")
    assert(taskDef1.fingerprint === subclassFingerprint)
    assert(taskDef1.explicitlySpecified === false)
    assert(taskDef1.selectors.length === 0)
    val task2 = tasks(1)
    val taskDef2 = task2.taskDef
    assert(taskDef2.fullyQualifiedName === "org.scalatest.tools.scalasbt.DoNotDiscoverSuite")
    assert(taskDef2.fingerprint === subclassFingerprint)
    assert(taskDef2.explicitlySpecified === true)
    val task2Selectors = taskDef2.selectors
    assert(task2Selectors.length === 1)
    assert(task2Selectors(0) === suiteSelector)
  } finally {
    runner.done()
  }
}

// -l excludes tests carrying the given tag.
test("-l argument can be used to exclude test") {
  val runner = framework.runner(Array("-l", "org.scalatest.tools.scalasbt.SampleSuite.SlowTest"), Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector))))
assert(tasks.size === 1)
    val task = tasks(0)
    task.execute(testEventHandler, Array(new TestLogger))
    // With the SlowTest tag excluded, only test 1 and test 3 run.
    val successEvents = testEventHandler.successEventsReceived
    assert(successEvents.length === 2)
    assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SampleSuite", "test 1", subclassFingerprint)
    assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SampleSuite", "test 3", subclassFingerprint)
    assert(testEventHandler.errorEventsReceived.length === 0)
    assert(testEventHandler.failureEventsReceived.length === 0)
    assert(testEventHandler.skippedEventsReceived.length === 0)
  } finally {
    runner.done()
  }
}

// -n runs only the tests carrying the given tag (here, only test 2).
test("-n argument can be used to include test") {
  val runner = framework.runner(Array("-n", "org.scalatest.tools.scalasbt.SampleSuite.SlowTest"), Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    assert(tasks.size === 1)
    val task = tasks(0)
    task.execute(testEventHandler, Array(new TestLogger))
    val successEvents = testEventHandler.successEventsReceived
    assert(successEvents.length === 1)
    assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SampleSuite", "test 2", subclassFingerprint)
    assert(testEventHandler.errorEventsReceived.length === 0)
    assert(testEventHandler.failureEventsReceived.length === 0)
    assert(testEventHandler.skippedEventsReceived.length === 0)
  } finally {
    runner.done()
  }
}

// -w keeps suites in the given package AND its sub packages.
test("-w should execute suites that match the specified package and its sub packages") {
  val runner = framework.runner(Array("-w", "org.scalatest.tools"), Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector)), new TaskDef("org.scalatest.tools.FrameworkSuite", subclassFingerprint, false, Array(new
SuiteSelector)), new TaskDef("org.scalatest.SuiteSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    // org.scalatest.SuiteSuite is outside org.scalatest.tools, so only 2 of 3 tasks survive.
    assert(tasks.size === 2)
  } finally {
    runner.done()
  }
}

// -m keeps suites in exactly the given package (sub packages excluded).
test("-m should execute suites that match the specified package and not its sub packages") {
  val runner = framework.runner(Array("-m", "org.scalatest.tools"), Array.empty, testClassLoader)
  try {
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector)), new TaskDef("org.scalatest.tools.FrameworkSuite", subclassFingerprint, false, Array(new SuiteSelector)), new TaskDef("org.scalatest.SuiteSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    // Only FrameworkSuite lives directly in org.scalatest.tools.
    assert(tasks.size === 1)
    val runner2 = framework.runner(Array("-m", "org.scalatest.concurrent"), Array.empty, testClassLoader)
    val tasks2 = runner2.tasks(Array(new TaskDef("org.scalatest.enablers.NoParamSpec", subclassFingerprint, false, Array(new SuiteSelector))))
    assert(tasks2.size === 0)
    // Fix: runner2 was never done()-ed — the finally below only cleans up `runner`,
    // which can leave a hanging thread (the concern other tests guard against explicitly).
    runner2.done()
  } finally {
    runner.done()
  }
}

// Now in 0.13.0-RC4 when there are 2 TaskDef with same class name different fingerprint, only one of it will be passed in.
// We can't rely on fingerprint for this check anymore.
/*test("a suite should be filtered out when fingerprint is subclassFingerprint and it is not accessible, even though it is annotated with @WrapWith") { val runner = framework.runner(Array.empty, Array.empty, testClassLoader) val tasks = runner.tasks(Array(new TaskDef("org.scalatest.SavesConfigMapSuite", subclassFingerprint, false, Array(new SuiteSelector)))) assert(tasks.size === 0) }*/ test("Framework.runner should throw IllegalArgumentException when -s is passed in") { val iae = intercept[IllegalArgumentException] { framework.runner(Array("-s", "org.scalatest.tools.scalasbt.SampleSuite"), Array.empty, testClassLoader) } assert(iae.getMessage === "Specifying a suite (-s <suite>) or nested suite (-i <nested suite>) is not supported when running ScalaTest from sbt; Please use sbt's test-only instead.") } test("Framework.runner should throw IllegalArgumentException when -j is passed in") { val iae = intercept[IllegalArgumentException] { framework.runner(Array("-j", "org.scalatest.tools.scalasbt.SampleJUnitSuite"), Array.empty, testClassLoader) } assert(iae.getMessage === "Running JUnit tests (-j <junit>) is not supported when running ScalaTest from sbt.") } test("Framework.runner should throw IllegalArgumentException when -b is passed in") { val iae = intercept[IllegalArgumentException] { framework.runner(Array("-b", "org.scalatest.tools.scalasbt.SampleSuite"), Array.empty, testClassLoader) } assert(iae.getMessage === "Running TestNG tests (-b <testng>) is not supported when running ScalaTest from sbt.") } test("Framework.runner should throw IllegalArgumentException when -P is passed in") { val iae = intercept[IllegalArgumentException] { framework.runner(Array("-P"), Array.empty, testClassLoader) } assert(iae.getMessage === "-P without specifying <numthreads> is not supported when running ScalaTest from sbt, please use sbt parallel configuration instead.") } test("Framework.runner accept without problem when -P 4 is passed in") { framework.runner(Array("-P4"), 
Array.empty, testClassLoader)
}

// Zero or negative thread counts are invalid.
test("Framework.runner should throw IllegalArgumentException when -P0 is passed in") {
  val iae = intercept[IllegalArgumentException] {
    framework.runner(Array("-P0"), Array.empty, testClassLoader)
  }
  assert(iae.getMessage === "-P with negative or zero thread number is invalid, please pass in a positive thread number instead.")
}

test("Framework.runner should throw IllegalArgumentException when -P-1 is passed in") {
  val iae = intercept[IllegalArgumentException] {
    framework.runner(Array("-P-1"), Array.empty, testClassLoader)
  }
  assert(iae.getMessage === "-P with negative or zero thread number is invalid, please pass in a positive thread number instead.")
}

test("Framework.runner should throw IllegalArgumentException when -PS is passed in") {
  val iae = intercept[IllegalArgumentException] {
    framework.runner(Array("-PS"), Array.empty, testClassLoader)
  }
  assert(iae.getMessage === "-PS is not supported when running ScalaTest from sbt, please use sbt parallel and logBuffered configuration instead.")
}

test("Framework.runner should throw IllegalArgumentException when -R is passed in") {
  val iae = intercept[IllegalArgumentException] {
    framework.runner(Array("-R"), Array.empty, testClassLoader)
  }
  assert(iae.getMessage === "Specifying a runpath (-R <runpath>) is not supported when running ScalaTest from sbt.")
}

test("Framework.runner should throw IllegalArgumentException when -A is passed in") {
  val iae = intercept[IllegalArgumentException] {
    framework.runner(Array("-A", "again.txt"), Array.empty, testClassLoader)
  }
  assert(iae.getMessage === "Run again (-A) is not supported when running ScalaTest from sbt; Please use sbt's test-quick instead.")
}

test("Framework.runner should throw IllegalArgumentException when -q is passed in") {
  val iae = intercept[IllegalArgumentException] {
    framework.runner(Array("-q", "Spec"), Array.empty, testClassLoader)
  }
  assert(iae.getMessage === "Discovery suffixes (-q) is not supported when running ScalaTest from sbt; Please use sbt's test-only or test filter instead.")
}

// -T (sorting timeout) is supported and must not throw.
test("Framework.runner should be able to pass in test sorting timeout with -T") {
  framework.runner(Array("-T", "100"), Array.empty, testClassLoader)
}

// Runs `fun`, then best-effort done()s every runner so no runner thread is left hanging.
// Swallowing Throwable here is deliberate cleanup behavior (errors from done() must not mask `fun`'s result).
private def makeSureDone(runners: Runner*)(fun: => Unit): Unit = {
  try {
    fun
  } finally {
    runners.foreach { r =>
      try {
        r.done()
      } catch {
        case e: Throwable => // Just do nothing
      }
    }
  }
}

// -C installs a user-supplied Reporter; the recording reporter should see all 3 successes.
test("Framework.runner should be able to pass in custom reporter via -C", Retryable) {
  val runner = framework.runner(Array("-C", classOf[EventRecordingReporter].getName), Array.empty, testClassLoader)
  makeSureDone(runner) {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    assert(tasks.size === 1)
    val task = tasks(0)
    task.execute(testEventHandler, Array(new TestLogger))
    assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner])
    val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]
    scalatestRunner.done()
    scalatestRunner.dispatchReporter.reporters.find(_.isInstanceOf[EventRecordingReporter]) match {
      case Some(recordingRep : EventRecordingReporter) =>
        assert(recordingRep.testSucceededEventsReceived.size === 3)
      case _ => fail("Expected to find EventRecordingReporter, but not found.")
    }
  }
}

// -y (chosen style) should have no effect when the suite IS written in the chosen style.
test("-y should do nothing when the task to execute is a chosen style", Retryable) {
  val runner = framework.runner(Array("-y", "org.scalatest.funsuite.AnyFunSuite", "-C", classOf[EventRecordingReporter].getName), Array.empty, testClassLoader)
  makeSureDone(runner) {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    val task = tasks(0)
    task.execute(testEventHandler, Array(new TestLogger))
    assert(testEventHandler.successEventsReceived.size === 3)
assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner])
    val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]
    scalatestRunner.done()
    scalatestRunner.dispatchReporter.reporters.find(_.isInstanceOf[EventRecordingReporter]) match {
      case Some(recordingRep : EventRecordingReporter) =>
        assert(recordingRep.testSucceededEventsReceived.size === 3)
        assert(recordingRep.suiteCompletedEventsReceived.size === 1)
      case _ => fail("Expected to find EventRecordingReporter, but not found.")
    }
  }
}

// When the suite is NOT in the chosen style, the run must abort with NotAllowedException.
test("-y should get SuiteAborted event with NotAllowedException when the task to execute is not a chosen style") {
  val runner = framework.runner(Array("-y", "org.scalatest.funspec.AnyFunSpec", "-C", classOf[EventRecordingReporter].getName), Array.empty, testClassLoader)
  makeSureDone(runner) {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    val task = tasks(0)
    task.execute(testEventHandler, Array(new TestLogger))
    assert(testEventHandler.successEventsReceived.size === 0)
    assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner])
    val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]
    scalatestRunner.done()
    scalatestRunner.dispatchReporter.reporters.find(_.isInstanceOf[EventRecordingReporter]) match {
      case Some(recordingRep : EventRecordingReporter) =>
        assert(recordingRep.testSucceededEventsReceived.size === 0)
        val suiteAbortedEvents = recordingRep.suiteAbortedEventsReceived
        assert(suiteAbortedEvents.size === 1)
        suiteAbortedEvents(0).throwable match {
          case Some(e: NotAllowedException) =>
            assert(e.getMessage === Resources.notTheChosenStyle("org.scalatest.funsuite.AnyFunSuite", "org.scalatest.funspec.AnyFunSpec"))
          case _ => fail("Expected SuiteAborted to carry NotAllowedException, but it did not.")
        }
      case _ => fail("Expected to find EventRecordingReporter, but not found.")
    }
  }
}

// A RuntimeException from BeforeAndAfter's `after` should abort the suite (no SuiteCompleted).
test("should fire SuiteAborted event when after function in BeforeAndAfter throws RuntimeException") {
  val runner = framework.runner(Array("-C", classOf[EventRecordingReporter].getName), Array.empty, testClassLoader)
  makeSureDone(runner) {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.FaulthyBeforeAndAfterSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    val task = tasks(0)
    task.execute(testEventHandler, Array(new TestLogger))
    assert(testEventHandler.successEventsReceived.size === 1)
    assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner])
    val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]
    scalatestRunner.done()
    scalatestRunner.dispatchReporter.reporters.find(_.isInstanceOf[EventRecordingReporter]) match {
      case Some(recordingRep : EventRecordingReporter) =>
        assert(recordingRep.testSucceededEventsReceived.size === 1)
        assert(recordingRep.suiteCompletedEventsReceived.size === 0)
        assert(recordingRep.suiteAbortedEventsReceived.size === 1)
      case _ => fail("Expected to find EventRecordingReporter, but not found.")
    }
  }
}

// Same contract for BeforeAndAfterAll's afterAll.
test("should fire SuiteAborted event when afterAll function in BeforeAndAfterAll throws RuntimeException") {
  val runner = framework.runner(Array("-C", classOf[EventRecordingReporter].getName), Array.empty, testClassLoader)
  makeSureDone(runner) {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.FaulthyBeforeAndAfterAllSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    val task = tasks(0)
    task.execute(testEventHandler, Array(new TestLogger))
    assert(testEventHandler.successEventsReceived.size === 1)
    assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner])
    val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]
    scalatestRunner.done()
scalatestRunner.dispatchReporter.reporters.find(_.isInstanceOf[EventRecordingReporter]) match {
      case Some(recordingRep : EventRecordingReporter) =>
        assert(recordingRep.testSucceededEventsReceived.size === 1)
        assert(recordingRep.suiteCompletedEventsReceived.size === 0)
        assert(recordingRep.suiteAbortedEventsReceived.size === 1)
      case _ => fail("Expected to find EventRecordingReporter, but not found.")
    }
  }
}

// Same contract for BeforeAndAfterAllConfigMap's afterAll.
test("should fire SuiteAborted event when afterAll function in BeforeAndAfterAllConfigMap throws RuntimeException") {
  val runner = framework.runner(Array("-C", classOf[EventRecordingReporter].getName), Array.empty, testClassLoader)
  makeSureDone(runner) {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.FaulthyBeforeAndAfterAllConfigMapSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    val task = tasks(0)
    task.execute(testEventHandler, Array(new TestLogger))
    assert(testEventHandler.successEventsReceived.size === 1)
    assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner])
    val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]
    scalatestRunner.done()
    scalatestRunner.dispatchReporter.reporters.find(_.isInstanceOf[EventRecordingReporter]) match {
      case Some(recordingRep : EventRecordingReporter) =>
        assert(recordingRep.testSucceededEventsReceived.size === 1)
        assert(recordingRep.suiteCompletedEventsReceived.size === 0)
        assert(recordingRep.suiteAbortedEventsReceived.size === 1)
      case _ => fail("Expected to find EventRecordingReporter, but not found.")
    }
  }
}

// Same contract for BeforeAndAfterEach's afterEach.
test("should fire SuiteAborted event when afterEach function in BeforeAndAfterEach throws RuntimeException") {
  val runner = framework.runner(Array("-C", classOf[EventRecordingReporter].getName), Array.empty, testClassLoader)
  makeSureDone(runner) {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new
TaskDef("org.scalatest.tools.scalasbt.FaulthyBeforeAndAfterEachSuite", subclassFingerprint, false, Array(new SuiteSelector)))) val task = tasks(0) task.execute(testEventHandler, Array(new TestLogger)) assert(testEventHandler.successEventsReceived.size === 1) assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]) val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner] scalatestRunner.done() scalatestRunner.dispatchReporter.reporters.find(_.isInstanceOf[EventRecordingReporter]) match { case Some(recordingRep : EventRecordingReporter) => assert(recordingRep.testSucceededEventsReceived.size === 1) assert(recordingRep.suiteCompletedEventsReceived.size === 0) assert(recordingRep.suiteAbortedEventsReceived.size === 1) case _ => fail("Expected to find EventRecordingReporter, but not found.") } } } test("should fire SuiteAborted event when afterEach function in BeforeAndAfterEachTestData throws RuntimeException") { val runner = framework.runner(Array("-C", classOf[EventRecordingReporter].getName), Array.empty, testClassLoader) makeSureDone(runner) { val testEventHandler = new TestEventHandler val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.FaulthyBeforeAndAfterEachTestDataSuite", subclassFingerprint, false, Array(new SuiteSelector)))) val task = tasks(0) task.execute(testEventHandler, Array(new TestLogger)) assert(testEventHandler.successEventsReceived.size === 1) assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]) val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner] scalatestRunner.done() scalatestRunner.dispatchReporter.reporters.find(_.isInstanceOf[EventRecordingReporter]) match { case Some(recordingRep : EventRecordingReporter) => assert(recordingRep.testSucceededEventsReceived.size === 1) assert(recordingRep.suiteCompletedEventsReceived.size === 0) assert(recordingRep.suiteAbortedEventsReceived.size === 1) case _ => fail("Expected to 
find EventRecordingReporter, but not found.") } } } test("-W should cause AlertProvided to be fired") { val runner = framework.runner(Array("-W", "1", "1", "-C", classOf[EventRecordingReporter].getName), Array.empty, testClassLoader) makeSureDone(runner) { val testEventHandler = new TestEventHandler val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SlowSampleSuite", subclassFingerprint, false, Array(new SuiteSelector)))) val task = tasks(0) task.execute(testEventHandler, Array(new TestLogger)) assert(testEventHandler.successEventsReceived.size === 1) assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]) val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner] scalatestRunner.done() scalatestRunner.dispatchReporter.reporters.find(_.isInstanceOf[EventRecordingReporter]) match { case Some(recordingRep : EventRecordingReporter) => assert(recordingRep.testSucceededEventsReceived.size === 1) assert(recordingRep.alertProvidedEventsReceived.size > 0) case _ => fail("Expected to find EventRecordingReporter, but not found.") } } } test("Framework should work correctly with fork mode", Retryable) { val mainRunner = framework.runner(Array("-C", classOf[EventRecordingReporter].getName), Array.empty, testClassLoader) val remoteArgs = mainRunner.remoteArgs() val subRunner = framework.runner(Array("-C", classOf[EventRecordingReporter].getName), remoteArgs, testClassLoader) makeSureDone(mainRunner, subRunner) { val testEventHandler = new TestEventHandler val tasks = subRunner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector)))) assert(tasks.size === 1) val task = tasks(0) task.execute(testEventHandler, Array(new TestLogger)) val successEvents = testEventHandler.successEventsReceived assert(successEvents.length === 3) assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SampleSuite", "test 1", subclassFingerprint) 
assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SampleSuite", "test 2", subclassFingerprint)
    assertSuiteSuccessEvent(successEvents(2), "org.scalatest.tools.scalasbt.SampleSuite", "test 3", subclassFingerprint)
    assert(testEventHandler.errorEventsReceived.length === 0)
    assert(testEventHandler.failureEventsReceived.length === 0)
    assert(testEventHandler.skippedEventsReceived.length === 0)
    assert(mainRunner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner])
    val mainScalatestRunner = mainRunner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]
    assert(subRunner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner])
    val subScalatestRunner = subRunner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]
    // Finish the sub runner first so its events flush to the main runner before it is closed.
    subScalatestRunner.done()
    mainScalatestRunner.done()
    mainScalatestRunner.dispatchReporter.reporters.find(_.isInstanceOf[EventRecordingReporter]) match {
      case Some(recordingRep : EventRecordingReporter) =>
        assert(recordingRep.testSucceededEventsReceived.size === 3)
        assert(recordingRep.alertProvidedEventsReceived.size === 1)
        assert(recordingRep.noteProvidedEventsReceived.size === 1)
      case _ => fail("Expected to find EventRecordingReporter, but not found.")
    }
  }
}

// A SuiteAborted from a nested suite must surface to sbt as a single error event.
// (NOTE(review): "SuitedAbored" typo is in the original test name; left unchanged since test names are runtime strings.)
test("SuitedAbored fired from nested suite should be reported as error correctly") {
  val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.AbortedSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    assert(tasks.size === 1)
    val task = tasks(0)
    val nestedTasks = task.execute(testEventHandler, Array(new TestLogger))
    assert(nestedTasks.size == 0)
    val successEvents = testEventHandler.successEventsReceived
    assert(successEvents.length === 0)
    val failureEvents = testEventHandler.failureEventsReceived
    assert(failureEvents.length === 0)
    val errorEvents = testEventHandler.errorEventsReceived
    assert(errorEvents.length === 1)
val skippedEvents = testEventHandler.skippedEventsReceived
    assert(skippedEvents.length === 0)
    val ignoredEvents = testEventHandler.ignoredEventsReceived
    assert(ignoredEvents.length === 0)
    val pendingEvents = testEventHandler.pendingEventsReceived
    assert(pendingEvents.length === 0)
    val canceledEvents = testEventHandler.canceledEventsReceived
    assert(canceledEvents.length === 0)
  } finally {
    runner.done()
  }
}

// The configMap passed through Args must reach nested suites (exactly one success, nothing else).
test("Nested suite should use passed-in args.configMap") {
  val runner = framework.runner(Array.empty, Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.NestedConfigMapSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    assert(tasks.size === 1)
    val task = tasks(0)
    val nestedTasks = task.execute(testEventHandler, Array(new TestLogger))
    assert(nestedTasks.size == 0)
    val successEvents = testEventHandler.successEventsReceived
    assert(successEvents.length === 1)
    val failureEvents = testEventHandler.failureEventsReceived
    assert(failureEvents.length === 0)
    val errorEvents = testEventHandler.errorEventsReceived
    assert(errorEvents.length === 0)
    val skippedEvents = testEventHandler.skippedEventsReceived
    assert(skippedEvents.length === 0)
    val ignoredEvents = testEventHandler.ignoredEventsReceived
    assert(ignoredEvents.length === 0)
    val pendingEvents = testEventHandler.pendingEventsReceived
    assert(pendingEvents.length === 0)
    val canceledEvents = testEventHandler.canceledEventsReceived
    assert(canceledEvents.length === 0)
  } finally {
    runner.done()
  }
}

// -z substring selectors become TestWildcardSelectors: tests whose names contain the fragments run.
test("should accept -z and added it as TestWildcardSelector, which then select and run selected test(s) using wildcard in suite, excluding nested suites") {
  val runner = framework.runner(Array("-z", "est 1", "-z", "st 3"), Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false,
Array.empty)))
    assert(tasks.size === 1)
    val task = tasks(0)
    task.execute(testEventHandler, Array(new TestLogger))
    // "est 1" matches "test 1", "st 3" matches "test 3".
    val successEvents = testEventHandler.successEventsReceived
    assert(successEvents.length === 2)
    assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SampleSuite", "test 1", subclassFingerprint)
    assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SampleSuite", "test 3", subclassFingerprint)
    assert(testEventHandler.errorEventsReceived.length === 0)
    assert(testEventHandler.failureEventsReceived.length === 0)
    assert(testEventHandler.skippedEventsReceived.length === 0)
  } finally {
    runner.done()
  }
}

// -z on an explicitly-specified @DoNotDiscover suite: the selected tests still run.
test("should accept -z and added it as TestWildcardSelector, which then select and run selected test(s) in suite when it is explicitly specified, even when the suite is annotated with @DoNotDiscover") {
  val runner = framework.runner(Array("-z", "st 1", "-z", "est 3"), Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint, true, Array(new SuiteSelector))))
    assert(tasks.size === 1)
    val task = tasks(0)
    task.execute(testEventHandler, Array(new TestLogger))
    val successEvents = testEventHandler.successEventsReceived
    assert(successEvents.length === 2)
    assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.DoNotDiscoverSuite", "test 1", subclassFingerprint)
    assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.DoNotDiscoverSuite", "test 3", subclassFingerprint)
    assert(testEventHandler.errorEventsReceived.length === 0)
    assert(testEventHandler.failureEventsReceived.length === 0)
    assert(testEventHandler.skippedEventsReceived.length === 0)
  } finally {
    runner.done()
  }
}

// -z on a NON-explicitly-specified @DoNotDiscover suite: no task at all.
test("should accept -z and added it as TestWildcardSelector, which then do not select and run selected test(s) in suite when it is not explicitly specified and the suite is annotated with @DoNotDiscover") {
  val runner = framework.runner(Array("-z", "est 1", "-z", "t 3"), Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint, false, Array(new SuiteSelector))))
    assert(tasks.size === 0)
  } finally {
    runner.done()
  }
}

// #######

// -t exact-name selectors become TestSelectors: only the exactly-named tests run.
test("should accept -t and added it as TestSelector, which then select and run selected test(s) in suite, excluding nested suites") {
  val runner = framework.runner(Array("-t", "test 1", "-t", "test 3"), Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array.empty)))
    assert(tasks.size === 1)
    val task = tasks(0)
    task.execute(testEventHandler, Array(new TestLogger))
    val successEvents = testEventHandler.successEventsReceived
    assert(successEvents.length === 2)
    assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.SampleSuite", "test 1", subclassFingerprint)
    assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.SampleSuite", "test 3", subclassFingerprint)
    assert(testEventHandler.errorEventsReceived.length === 0)
    assert(testEventHandler.failureEventsReceived.length === 0)
    assert(testEventHandler.skippedEventsReceived.length === 0)
  } finally {
    runner.done()
  }
}

// -t on an explicitly-specified @DoNotDiscover suite: the named tests still run.
test("should accept -t and added it as TestSelector, which then select and run selected test(s) in suite when it is explicitly specified, even when the suite is annotated with @DoNotDiscover") {
  val runner = framework.runner(Array("-t", "test 1", "-t", "test 3"), Array.empty, testClassLoader)
  try {
    val testEventHandler = new TestEventHandler
    val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint, true, Array(new SuiteSelector))))
    assert(tasks.size === 1)
    val task = tasks(0)
    task.execute(testEventHandler,
Array(new TestLogger)) val successEvents = testEventHandler.successEventsReceived assert(successEvents.length === 2) assertSuiteSuccessEvent(successEvents(0), "org.scalatest.tools.scalasbt.DoNotDiscoverSuite", "test 1", subclassFingerprint) assertSuiteSuccessEvent(successEvents(1), "org.scalatest.tools.scalasbt.DoNotDiscoverSuite", "test 3", subclassFingerprint) assert(testEventHandler.errorEventsReceived.length === 0) assert(testEventHandler.failureEventsReceived.length === 0) assert(testEventHandler.skippedEventsReceived.length === 0) } finally { runner.done() } } test("should accept -t and added it as TestSelector, which then do not select and run selected test(s) in suite when it is not explicitly specified and the suite is annotated with @DoNotDiscover") { val runner = framework.runner(Array("-t", "test 1", "-t", "test 3"), Array.empty, testClassLoader) try { val testEventHandler = new TestEventHandler val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.DoNotDiscoverSuite", subclassFingerprint, false, Array(new SuiteSelector)))) assert(tasks.size === 0) } finally { runner.done() } } test("ScalaTestRunner.task should return task and its nested task that tags method returns collection that contains tag inherited from superclass") { val runner = framework.runner(Array.empty, Array.empty, testClassLoader) try { val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleInheritedSuite", subclassFingerprint, false, Array(new SuiteSelector)))) assert(tasks.size === 1) val task = tasks(0) val resultTags = task.tags assert(resultTags.contains("cpu")) val testEventHandler = new TestEventHandler val nestedTasks = task.execute(testEventHandler, Array(new TestLogger)) assert(nestedTasks.size == 0) } finally { runner.done() } } test("Framework.runner should use passed in -o config in sub-process") { val mainRunner = framework.runner(Array("-oDF"), Array.empty, testClassLoader) makeSureDone(mainRunner) { val remoteArgs = 
mainRunner.remoteArgs val subRunner = framework.runner(Array("-oDF"), remoteArgs, testClassLoader) makeSureDone(subRunner) { assert(subRunner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]) val scalatestRunner = subRunner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner] assert(scalatestRunner.useSbtLogInfoReporter) assert(scalatestRunner.presentAllDurations) assert(scalatestRunner.presentInColor) assert(scalatestRunner.presentShortStackTraces) assert(scalatestRunner.presentFullStackTraces) assert(!scalatestRunner.presentUnformatted) assert(!scalatestRunner.presentReminder) assert(!scalatestRunner.presentReminderWithShortStackTraces) assert(!scalatestRunner.presentReminderWithFullStackTraces) assert(!scalatestRunner.presentReminderWithoutCanceledTests) } } } test("Framework.runner should use the first -o when multiple -o is passed in") { val runner = framework.runner(Array("-oW", "-oDF"), Array.empty, testClassLoader) makeSureDone(runner) { assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]) val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner] assert(scalatestRunner.useSbtLogInfoReporter) assert(!scalatestRunner.presentAllDurations) assert(!scalatestRunner.presentInColor) assert(!scalatestRunner.presentShortStackTraces) assert(!scalatestRunner.presentFullStackTraces) assert(!scalatestRunner.presentUnformatted) assert(!scalatestRunner.presentReminder) assert(!scalatestRunner.presentReminderWithShortStackTraces) assert(!scalatestRunner.presentReminderWithFullStackTraces) assert(!scalatestRunner.presentReminderWithoutCanceledTests) } } test("Framework.runner should use the first -o when multiple -e is passed in") { val runner = framework.runner(Array("-eW", "-eDF"), Array.empty, testClassLoader) makeSureDone(runner) { assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]) val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner] val 
repConfig = scalatestRunner.repConfig val configSet = repConfig.standardErrReporterConfiguration.value.configSet assert(configSet.size == 1) assert(configSet.head == PresentWithoutColor) } } test("Framework.runner should not print out test succeeded event when -oC is passed") { val runner = framework.runner(Array("-oC"), Array.empty, testClassLoader) try { val testEventHandler = new TestEventHandler val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.SampleSuite", subclassFingerprint, false, Array(new SuiteSelector)))) assert(tasks.size === 1) val task = tasks(0) val logger = new TestLogger task.execute(testEventHandler, Array(logger)) assert(logger.infoReceived.length == 3) assert(logger.infoReceived(0) == "SampleSuite:") assert(logger.infoReceived(1) == " + This is an alert! ") assert(logger.infoReceived(2) == " + This is an update! ") } finally { runner.done() } } test("Framework.runner should fire SuiteStarting and SuiteAborted when error raised during construction") { val runner = framework.runner(Array("-C", classOf[EventRecordingReporter].getName), Array.empty, testClassLoader) makeSureDone(runner) { val testEventHandler = new TestEventHandler val tasks = runner.tasks(Array(new TaskDef("org.scalatest.tools.scalasbt.AbortedSuite2", subclassFingerprint, false, Array(new SuiteSelector)))) assert(tasks.size === 1) val task = tasks(0) intercept[VirtualMachineError] { task.execute(testEventHandler, Array(new TestLogger)) } assert(runner.isInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner]) val scalatestRunner = runner.asInstanceOf[org.scalatest.tools.Framework#ScalaTestRunner] scalatestRunner.done() scalatestRunner.dispatchReporter.reporters.find(_.isInstanceOf[EventRecordingReporter]) match { case Some(recordingRep : EventRecordingReporter) => assert(recordingRep.suiteStartingEventsReceived.size == 1) assert(recordingRep.suiteAbortedEventsReceived.size == 1) case _ => fail("Expected to find EventRecordingReporter, but not found.") } } 
} }
dotty-staging/scalatest
scalatest-test/src/test/scala/org/scalatest/tools/FrameworkSuite.scala
Scala
apache-2.0
85,238
package com.catinthedark.ld36.network

import java.util.concurrent.ConcurrentLinkedQueue

import com.badlogic.gdx.math.Vector2
import com.catinthedark.common.Const
import com.catinthedark.lib.Pipe
import com.catinthedark.lib.network.Message
import com.catinthedark.models._

/**
 * Client-side network glue: coalesces outbound game messages between fixed
 * network ticks and buffers inbound events so they can be replayed on the
 * caller's thread via [[processIn]]. Concrete transports implement
 * [[processOut]].
 */
trait NetworkControl extends Runnable {
  // None when not connected; cleared by dispose().
  // NOTE(review): set to Some elsewhere (not visible in this file) — confirm.
  var isConnected: Option[Unit] = None
  // NOTE(review): "main" peer semantics are not visible here — confirm at call sites.
  var isMain: Boolean = false

  // Pipes fanning decoded network events out to game-side subscribers.
  val onMovePipe = new Pipe[(Vector2, Float, Boolean)]()
  val onShootPipe = new Pipe[(Vector2, String)]()
  val onJumpPipe = new Pipe[(Vector2, Float, Float)]()
  val onEnemyDisconnected = new Pipe[Unit]()
  val onServerHello = new Pipe[Unit]()
  val onGameStatePipe = new Pipe[GameStateModel]()
  val onRoundEndsPipe = new Pipe[GameStateModel]()

  // Pending outbound messages, coalesced between ticks (null = nothing pending).
  var moveMessage: MoveMessage = _
  var throwBrickMessage: ThrowBrickMessage = _

  private var lastSyncTime: Long = System.nanoTime
  private var _deltaTime: Float = 0

  // Seconds elapsed between the two most recent tick() invocations.
  def deltaTime = _deltaTime

  // Flush any coalesced outbound messages, then clear the pending slots.
  private def sync(): Unit = {
    if (moveMessage != null) {
      processOut(moveMessage)
      moveMessage = null
    }
    if (throwBrickMessage != null) {
      processOut(throwBrickMessage)
      throwBrickMessage = null
    }
  }

  // Call once per frame. Sends pending messages at most once per tickDelay;
  // tickDelay is divided by 1000 before comparing against a delta in seconds,
  // so it is presumably expressed in milliseconds — TODO confirm in Const.
  def tick(): Unit = {
    val time = System.nanoTime()
    // nanoseconds -> seconds
    _deltaTime = (time - lastSyncTime) / 1000000000.0f
    if (deltaTime >= Const.Networking.tickDelay / 1000f) {
      lastSyncTime = time
      sync()
    }
  }

  // Introduce ourselves to the server. Sent immediately, not coalesced.
  def hello(name: String): Unit = {
    processOut(HelloMessage(name))
  }

  // Queue a movement update. Speed components accumulate across calls made
  // within one tick; angle and state always take the latest value.
  def move(speed: Vector2, angle: Float, state: State): Unit = {
    if (moveMessage == null) {
      moveMessage = MoveMessage(
        speed.x,
        speed.y,
        angle,
        MessageConverter.stateToString(state))
    } else {
      moveMessage = moveMessage.copy(
        moveMessage.speedX + speed.x,
        moveMessage.speedY + speed.y,
        angle,
        MessageConverter.stateToString(state))
    }
  }

  // Queue a brick throw; a second call before the next tick overwrites the first.
  def throwBrick(pos: Vector2, force: Float, angle: Float): Unit = {
    throwBrickMessage = ThrowBrickMessage(pos.x, pos.y, force, angle)
  }

  // Drain and run every callback queued by the protected on* handlers below.
  def processIn() = {
    while(!bufferIn.isEmpty) bufferIn.poll()()
  }

  // Implemented by the concrete transport: actually send one message.
  def processOut(message: Message)

  def dispose(): Unit = {
    isConnected = None
  }

  // Thunks queued by network-side handlers, executed later via processIn().
  protected val bufferIn = new ConcurrentLinkedQueue[() => Unit]()

  protected def onGameStarted(msg: (String)) =
    println(s"Received GameStart package $msg")

  protected def onGameState(gameState: (GameStateModel)) =
    bufferIn.add(() => onGameStatePipe(gameState))

  protected def onRoundEnds(gameState: (GameStateModel)) =
    bufferIn.add(() => onRoundEndsPipe(gameState))
}
cat-in-the-dark/old48_36_game
client/src/main/scala/com/catinthedark/ld36/network/NetworkControl.scala
Scala
mit
2,562
package org.fedoraproject.mobile

import scala.language.existentials

import Implicits._

import android.app.{ Activity, Fragment }
import android.content.{ Context, Intent }
import android.os.Bundle
import android.preference.PreferenceManager
import android.support.v4.app.ActionBarDrawerToggle
import android.support.v4.view.GravityCompat
import android.util.Log
import android.view.{ LayoutInflater, MenuItem, View, ViewGroup }
import android.widget.{ AdapterView, ArrayAdapter, TextView, Toast }

import scalaz._, Scalaz._
import scalaz.effect.IO

/** A destination reachable from the navigation drawer: a target class,
  * its drawer icon, and its display name. */
sealed trait Delegation {
  def d: Class[_]
  def icon: Int
  def name: String
}

// Drawer entry that launches a full Activity.
sealed case class ActivityDelegation(
  override val d: Class[_ <: Activity],
  override val icon: Int,
  override val name: String) extends Delegation

// Drawer entry that swaps a Fragment into the content frame.
sealed case class FragmentDelegation(
  override val d: Class[_ <: Fragment],
  override val icon: Int,
  override val name: String) extends Delegation

/** List adapter that renders one drawer row (icon + name) per Delegation. */
sealed class NavAdapter(
  context: Context,
  resource: Int,
  delegations: NonEmptyList[Delegation])
  extends ArrayAdapter[String](context, resource, delegations.map(_.name).list.toArray) {

  override def getView(position: Int, convertView: View, parent: ViewGroup): View = {
    // index() is total: out-of-range positions yield None and an empty row.
    val delegation: Option[Delegation] = delegations.index(position)
    val layout = LayoutInflater.from(context)
      .inflate(R.layout.drawer_list_item, parent, false)
      .asInstanceOf[TextView] // TODO: asInstanceOf
    // TODO: purify this.
    delegation.cata(
      delegation => {
        layout.setCompoundDrawablesWithIntrinsicBounds(delegation.icon, 0, 0, 0)
        layout.setText(delegation.name)
        layout
      },
      {
        // The delegation doesn't exist, so return the empty TextView.
        layout
      })
  }
}

/** Main activity: hosts the navigation drawer, dispatches drawer clicks to
  * Activities/Fragments, and kicks off an optional update check. */
class MainActivity extends util.Views {
  // Blech - can we do something about this?
  private var drawerToggle: Option[ActionBarDrawerToggle] = None

  override def onPostCreate(bundle: Bundle): Unit = {
    super.onPostCreate(bundle)
    setContentView(R.layout.navdrawer)
    // Static list of drawer destinations, in display order.
    val navMap: NonEmptyList[Delegation] = NonEmptyList(
      FragmentDelegation(
        classOf[FedmsgNewsfeedFragment],
        R.drawable.ic_status,
        "Newsfeed"),
      FragmentDelegation(
        classOf[StatusFragment],
        R.drawable.ic_status,
        getString(R.string.infrastructure_status)),
      ActivityDelegation(
        classOf[PackageSearchActivity],
        R.drawable.ic_search,
        getString(R.string.package_search)),
      FragmentDelegation(
        classOf[BadgesLeaderboardFragment],
        R.drawable.ic_badges,
        getString(R.string.badges_leaderboard)),
      ActivityDelegation(
        classOf[FedmsgRegisterActivity],
        R.drawable.ic_fedmsg,
        getString(R.string.register_fmn)),
      ActivityDelegation(
        classOf[UserActivity],
        R.drawable.ic_preferences,
        "Profile UI Demo"),
      FragmentDelegation(
        classOf[PreferencesFragment],
        R.drawable.ic_preferences,
        getString(R.string.preferences))
    )
    val title = getTitle
    val drawerLayout = findView(TR.drawer_layout)
    // Restore the activity title when the drawer closes; show it while open too.
    drawerToggle = Some(
      new ActionBarDrawerToggle(
        this,
        drawerLayout,
        R.drawable.ic_drawer,
        R.string.open,
        R.string.close) {
        override def onDrawerClosed(view: View): Unit = {
          super.onDrawerClosed(view)
          getActionBar.setTitle(title)
          invalidateOptionsMenu
        }
        override def onDrawerOpened(view: View): Unit = {
          super.onDrawerOpened(view)
          getActionBar.setTitle(title)
          invalidateOptionsMenu
        }
      }
    )
    val drawerList = findView(TR.left_drawer)
    getActionBar.setDisplayHomeAsUpEnabled(true)
    getActionBar.setHomeButtonEnabled(true)
    drawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START)
    drawerToggle.map(drawerLayout.setDrawerListener)
    drawerToggle.map(_.syncState)
    drawerList.setAdapter(
      new NavAdapter(
        this,
        android.R.layout.simple_list_item_1,
        navMap))
    drawerList.setOnItemClickListener(new AdapterView.OnItemClickListener {
      def onItemClick(parent: AdapterView[_], view: View, position: Int, id: Long) {
        spawn(navMap.index(position)).unsafePerformIO // TODO
        drawerList.setItemChecked(position, true)
        drawerLayout.closeDrawer(drawerList)
      }
    })

    // Default fragment
    spawn(Some(navMap.head)).unsafePerformIO // TODO

    val sharedPref = PreferenceManager.getDefaultSharedPreferences(this)
    val checkUpdates = sharedPref.getBoolean("check_updates", true)

    // If the user hasn't disabled updates...
    if (checkUpdates) {
      // If the most recent build failed, there's no point in doing anything
      // else.
      Updates.getJenkinsLastBuildStatus.runAsync(_.fold(
        // Error getting build status
        err => {
          Log.e("MainActivity", err.toString)
          ()
        },
        res => res.fold(
          // Error parsing JSON from Jenkins
          err => {
            Log.e("MainActivity", err.toString)
            ()
          },
          x => x match {
            case Updates.JenkinsFailure => {
              Log.v("MainActivity", "Last Jenkins build failed. Skipping update.")
              ()
            }
            case Updates.JenkinsSuccess => {
              Log.v("MainActivity", "Last Jenkins build was successful.")
              // Build is green: compare our version against the latest release.
              Updates.compareVersion(this).runAsync(_.fold(
                // Error getting GitHub info
                err => {
                  Log.e("MainActivity", err.toString)
                  ()
                },
                res => res.fold(
                  t => {
                    Log.v("MainActivity", "Already up to date")
                    ()
                  },
                  f => {
                    // A newer version exists: prompt the user on the UI thread.
                    runOnUiThread(Updates.presentDialog(MainActivity.this))
                    ()
                  }
                )
              ))
            }
          }
        )
      ))
    }
  }

  // TODO: Task instead of IO?
  // Navigate to the given delegation: start its Activity, or swap its
  // Fragment into the content frame; a missing delegation shows an error toast.
  private def spawn(x: Option[Delegation]): IO[Unit] = IO {
    x match {
      case Some(ActivityDelegation(c, i, s)) => {
        val intent = new Intent(MainActivity.this, c)
        getActionBar.setTitle(s)
        startActivity(intent)
      }
      case Some(FragmentDelegation(c, i, s)) => {
        getActionBar.setTitle(s)
        val fragment = c.newInstance
        val fragmentManager = getFragmentManager
        fragmentManager.beginTransaction
          .replace(R.id.content_frame, fragment)
          .commit()
      }
      case _ => {
        Toast.makeText(
          MainActivity.this,
          R.string.android_getview_error,
          Toast.LENGTH_LONG).show
      }
    }
    ()
  }

  // Give the drawer toggle first shot at the home/up button; otherwise defer.
  override def onOptionsItemSelected(item: MenuItem): Boolean =
    drawerToggle.cata(
      t => if (t.onOptionsItemSelected(item)) true else super.onOptionsItemSelected(item),
      super.onOptionsItemSelected(item))
}
fedora-infra/mobile
src/main/scala/activity/MainActivity.scala
Scala
mpl-2.0
7,187
import akka.actor._
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.ws.{Message, TextMessage}
import akka.http.scaladsl.server.Directives
import akka.stream.scaladsl.{Flow, Sink, Source}
import akka.stream.stage.{Context, PushStage, SyncDirective, TerminationDirective}
import akka.stream.{ActorMaterializer, OverflowStrategy}
import upickle._

import scala.io.StdIn

/** A single chat line, serialized to JSON (via upickle) for the clients. */
case class ChatMessage(sender: String, message: String)

/** Events understood by the chat-room actor. */
sealed trait ChatEvent
case class NewParticipant(name: String, subscriber: ActorRef) extends ChatEvent
case class ParticipantLeft(name: String) extends ChatEvent
case class ReceivedMessage(sender: String, message: String) extends ChatEvent {
  def toChatMessage: ChatMessage = ChatMessage(sender, message)
}

/** WebSocket chat server: every connected participant's messages are fanned
  * out to all participants through one chat-room actor. */
object ChatExample extends App {
  import Directives._

  implicit val system = ActorSystem()
  implicit val materializer = ActorMaterializer()

  // Chat-room actor: tracks subscriber actors and broadcasts every message.
  val chatActor = system.actorOf(Props(new Actor {
    var subscribers = Set.empty[ActorRef]

    def receive: Receive = {
      case NewParticipant(name, subscriber) ⇒
        context.watch(subscriber)
        subscribers += subscriber
        sendAdminMessage(s"$name joined!")
      case msg: ReceivedMessage ⇒
        dispatch(msg.toChatMessage)
      case ParticipantLeft(person) ⇒
        sendAdminMessage(s"$person left!")
      case Terminated(sub) ⇒
        // NOTE: no "left" broadcast here — the name is not available from the
        // Terminated message, only the subscriber ref.
        subscribers -= sub
    }

    def sendAdminMessage(msg: String): Unit = dispatch(ChatMessage("admin", msg))
    def dispatch(msg: ChatMessage): Unit = subscribers.foreach(_ ! msg)
  }))

  // Sink side of a participant: forwards strings as ReceivedMessage, and
  // emits ParticipantLeft to the room when the stream completes.
  def chatInSink(sender: String) = Sink.actorRef[ChatEvent](chatActor, ParticipantLeft(sender))

  // One participant's bidirectional flow: incoming text -> chat room,
  // chat room broadcasts -> outgoing ChatMessage elements.
  def chatFlow(sender: String): Flow[String, ChatMessage, Unit] = {
    val in = Flow[String]
      .map(ReceivedMessage(sender, _))
      .to(chatInSink(sender))
    // Buffer of 1; a slow client fails its own stream rather than the room.
    val out = Source.actorRef[ChatMessage](1, OverflowStrategy.fail)
      .mapMaterializedValue(chatActor ! NewParticipant(sender, _))
    Flow.fromSinkAndSource(in, out)
  }

  // FIX: this was a byte-for-byte duplicate of websocketChatFlow; it now
  // delegates so the two cannot drift apart. Kept for source compatibility.
  def greeterWebsocketService(sender: String): Flow[Message, Message, Unit] =
    websocketChatFlow(sender)

  // Adapt the string-based chat flow to WebSocket frames.
  def websocketChatFlow(sender: String): Flow[Message, Message, Unit] =
    Flow[Message]
      .collect { case TextMessage.Strict(msg) ⇒ msg } // non-strict/binary frames are dropped
      .via(chatFlow(sender))
      .map(c ⇒ TextMessage.Strict(write(c))) // ChatMessage -> JSON text frame
      .via(reportErrorsFlow)

  // Pass-through stage that logs upstream failures before propagating them.
  def reportErrorsFlow[T]: Flow[T, T, Unit] =
    Flow[T]
      .transform(() ⇒ new PushStage[T, T] {
        def onPush(elem: T, ctx: Context[T]): SyncDirective = ctx.push(elem)

        override def onUpstreamFailure(cause: Throwable, ctx: Context[T]): TerminationDirective = {
          println(s"WS stream failed with $cause")
          super.onUpstreamFailure(cause, ctx)
        }
      })

  // GET /park?chat=<name> upgrades to a chat WebSocket for <name>.
  val route =
    path("park") {
      get {
        parameter("chat") { name =>
          handleWebsocketMessages(greeterWebsocketService(name))
        }
      }
    }

  import system.dispatcher

  val binding = Http().bindAndHandle(route, "localhost", 8080)

  // BUG FIX: the original unbound immediately after binding, so the server
  // shut down before serving a single connection. Block until RETURN is
  // pressed, then release the port and terminate the actor system.
  println("Server online at http://localhost:8080/\nPress RETURN to stop...")
  StdIn.readLine()
  binding
    .flatMap(_.unbind()) // trigger unbinding from the port
    .onComplete(_ ⇒ system.terminate())
}
focusj/rest-api-with-akka-http
src/main/scala/ChatExample.scala
Scala
mit
3,452
/* * Copyright 2021 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package models import org.joda.time.LocalDateTime import play.api.libs.json.{JsString, Json, Reads, Writes} case class BulkPreviousRequest(uploadReference: String, reference: String, timestamp: LocalDateTime, processedDateTime: LocalDateTime) object BulkPreviousRequest { implicit val timestampReads = Reads[LocalDateTime](js => js.validate[String].map[LocalDateTime](dtString => LocalDateTime.parse(dtString) ) ) implicit val timestampWrites = new Writes[LocalDateTime]{ def writes(localDateTime: LocalDateTime) = JsString(localDateTime.toString) } implicit val formats = Json.format[BulkPreviousRequest] implicit def defaultOrdering: Ordering[BulkPreviousRequest] = Ordering.fromLessThan(_.processedDateTime isAfter _.processedDateTime) }
hmrc/gmp-frontend
app/models/BulkPreviousRequest.scala
Scala
apache-2.0
1,383
/* * Copyright (c) 2014-2021 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.execution.schedulers import monix.execution.Scheduler /** Forces a real asynchronous boundary before executing the * given [[TrampolinedRunnable]]. * * Sometimes you want to execute multiple [[TrampolinedRunnable]] * instances as a batch, with the functionality provided by * schedulers implementing [[BatchingScheduler]], * however you might need the very first execution to force an * asynchronous boundary. * * @param start is the [[TrampolinedRunnable]] instance that will get * executed and that is supposed to trigger the execution of * other trampolined runnables * * @param s is the scheduler that gets used for execution. */ final case class StartAsyncBatchRunnable(start: TrampolinedRunnable, s: Scheduler) extends Runnable with Serializable { def run(): Unit = { // Scheduler might not be an actual `BatchingScheduler`, in which case // we don't want to create an extra asynchronous boundary. if (s.features.contains(Scheduler.BATCHING)) s.execute(start) else start.run() } }
monix/monix
monix-execution/shared/src/main/scala/monix/execution/schedulers/StartAsyncBatchRunnable.scala
Scala
apache-2.0
1,752
package org.pliu.iot.sim

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._
import org.apache.spark.sql.streaming.{Trigger, OutputMode}
import scala.concurrent.duration._
import com.typesafe.config._

/** Spark Structured Streaming job: reads device telemetry from Kafka,
  * computes windowed averages, writes them to partitioned parquet and
  * pushes a JSON copy to a "bi" Kafka topic for visualization. */
object streaming {
  def main(args: Array[String]): Unit = {
    // All tunables come from the iotsim config (iotsim.conf on the classpath).
    val appconf = ConfigFactory.load("iotsim")
    val kafkaBrokers = appconf.getString("iotsim.kafkaBrokers")
    val kafkaTopic = appconf.getString("iotsim.kafkaTopic")
    val maxOffsetsPerTrigger = appconf.getInt("iotsim.maxOffsetsPerTrigger")
    val watermark = appconf.getString("iotsim.watermark")
    val tumblingWindow = appconf.getString("iotsim.tumblingWindow")
    val triggerInterval = appconf.getString("iotsim.triggerInterval")
    val workingDir = appconf.getString("iotsim.devicelogWorkingDir")
    // NOTE(review): devicelogDir is read but never used below — confirm intent.
    val devicelogDir = appconf.getString("iotsim.devicelogDir")
    val devicelogCheckpointDir = appconf.getString("iotsim.devicelogCheckpointDir")
    val biCheckpointDir = appconf.getString("iotsim.biCheckpointDir")
    val messageFormat = appconf.getString("iotsim.messageFormat")

    val spark = SparkSession
      .builder
      .appName("iotsim")
      .getOrCreate
    import spark.implicits._

    //for testing, use file source instead of Kafka
    //val dfraw = spark.readStream.schema(devicelogSchema).option("header", "true").csv("/user/pliu/iotinput")
    val dfraw = spark.readStream.
      format("kafka").
      option("kafka.bootstrap.servers", kafkaBrokers).
      option("subscribe", kafkaTopic).
      // this is ignored when checkpoint passes in offsets
      option("startingOffsets", "earliest").
      // this controls how many messages to read per trigger
      option("maxOffsetsPerTrigger", maxOffsetsPerTrigger).
      load

    // Decode the raw Kafka value column according to the configured format.
    val dftyped = if (messageFormat == "csv") toTypedDF.fromCSV(dfraw, spark) else toTypedDF.fromJSON(dfraw, spark)

    // if the events include a timestamp field
    //val df = dftyped.withColumn("ts", from_unixtime($"ts" /1000, "YYYY-MM-dd HH:mm:ss").cast(TimestampType))
    // else we add a timestamp field just to show how to use the windowing functions below
    val df = dftyped.withColumn("ts", current_timestamp)

    /* aggregation: per-device average of sensor9 over tumbling windows,
       plus year/month columns used for output partitioning */
    val dfagg = df.
      withWatermark("ts", watermark).
      groupBy(window($"ts", tumblingWindow), $"deviceid").
      agg(avg($"sensor9").alias("sensor9avg")).
      select($"window.start", $"window.end", lower($"deviceid").alias("deviceid"), $"sensor9avg").
      withColumn("year", year($"start")).
      withColumn("month", month($"start"))

    /* alerting - not used in this example */
    //spark.conf.get("spark.sql.caseSensitive") by default its false
    val dfalert = df.
      filter($"endofcycle" === 1 && $"sensor11" > 600).
      withColumn("message", concat(lit("temperature too high "),$"sensor11"))

    /* storing output */
    // the working folder is partitioned by year and month, so after the month ends,
    // run the compaction job to compact the files in that month of the year to its destination folder
    val query = dfagg.
      writeStream.
      queryName("storeinfile").
      format("parquet").
      partitionBy("year", "month").
      // trigger controls how often to read from Kafka
      trigger(Trigger.ProcessingTime(Duration(triggerInterval))).
      option("path", workingDir).
      // checkpoint controls offset to read from
      option("checkpointLocation",devicelogCheckpointDir).
      start

    /* also push to another topic for visualization; the JSON payload is
       assembled by string concatenation into the Kafka value column */
    val querybi = dfagg.
      select($"deviceid".alias("key"), concat(lit("{\"deviceid\":\""), $"deviceid", lit("\",\"readat\":\""), $"start", lit("\",\"sensor9\":"), $"sensor9avg", lit("}")).alias("value")).
      selectExpr("CAST(key as STRING)", "CAST(value AS STRING)").
      writeStream.
      queryName("push2kafkabi").
      format("kafka").
      option("kafka.bootstrap.servers", kafkaBrokers).
      option("topic", "bi").
      option("checkpointLocation", biCheckpointDir).
      start

    // Append mode only works with watermark, and will only produce outputs when
    // max seen event time - watermark > end of the evaluated time window. Append mode is the default.
    // If you don't see results in console sink, try Update or Complete mode
    // val query = dfagg.writeStream.format("console").outputMode(OutputMode.Update).option("truncate", false).start

    // Block the driver until either streaming query terminates.
    query.awaitTermination
    querybi.awaitTermination
  }
}
liupeirong/Azure
IoTKafkaSpark/2.Streaming/src/main/scala/main.scala
Scala
mit
4,527
package com.socrata.thirdparty.geojson

import com.rojoma.json.v3.ast._
import com.rojoma.json.v3.codec.{DecodeError, JsonDecode}
import com.rojoma.json.v3.io.JsonReader
import com.socrata.thirdparty.geojson.JtsCodecs._
import com.socrata.thirdparty.EitherCompat._
import com.vividsolutions.jts.geom._
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
import org.scalatest.{FunSpec, Matchers}

/** Tests for the GeoJSON <-> JTS geometry codecs: each case decodes a literal
  * GeoJSON document and/or property-checks that encode-then-decode is the
  * identity for generated geometries. */
class JtsCodecsTest extends FunSpec with Matchers with ScalaCheckPropertyChecks with GeoTest {
  // Shared raw-JSON coordinate fixtures.
  val pointCoords = JArray(Seq(JNumber(6.0), JNumber(1.2)))
  val point2Coords = JArray(Seq(JNumber(3.4), JNumber(-2.7)))
  val lineCoords = JArray(Seq(pointCoords, point2Coords))

  // Parse a GeoJSON string and decode it into a JTS Geometry.
  def decodeString(str: String): JsonDecode.DecodeResult[Geometry] = geoCodec.decode(JsonReader.fromString(str))

  // Encode a JTS Geometry back to JSON.
  def encode(geom: Geometry): JValue = geoCodec.encode(geom)

  describe("GeometryCodec") {
    it("should convert geometry JSON of type Point correctly") {
      val body = """{
                   | "type": "Point",
                   | "coordinates": [6.0, 1.2]
                   |}""".stripMargin
      val pt = decodeString(body).asInstanceOf[JsonDecode.DecodeResult[Point]].rightProjection.getOrThrow
      (pt.getX, pt.getY) should equal ((6.0, 1.2))
      // uses implicit arbitrary.
      forAll { (point: Point) =>
        geoCodec.decode(encode(point)) should equal (Right(point))
      }
    }

    it("should convert geometry JSON of type Polygon - exterior and interior ring(s)") {
      val body = """{
                   | "type": "Polygon",
                   | "coordinates": [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
                   | [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]
                   |}""".stripMargin
      val p = decodeString(body).asInstanceOf[JsonDecode.DecodeResult[Polygon]].rightProjection.getOrThrow
      // First ring is the shell, second is a hole.
      p should equal (polygon(Seq((100.0, 0.0), (101.0, 0.0), (101.0, 1.0), (100.0, 1.0), (100.0, 0.0)),
        Seq(Seq((100.2, 0.2), (100.8, 0.2), (100.8, 0.8), (100.2, 0.8), (100.2, 0.2)))))
      forAll{(poly: Polygon) =>
        geoCodec.decode(encode(poly)) should equal (Right(poly))
      }
    }

    it("should convert geometry JSON of type Polygon - exterior ring only") {
      val body = """{
                   | "type": "Polygon",
                   | "coordinates": [[[0.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.0, 0.0]]]
                   |}""".stripMargin
      val p = decodeString(body).asInstanceOf[JsonDecode.DecodeResult[Polygon]].rightProjection.getOrThrow
      p should equal (polygon((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (0.0, 0.0)))
      // Round-trip triangles built from generated coordinates.
      // NOTE(review): c3 is generated but never used.
      forAll { (c: Coordinate, c1: Coordinate, c2: Coordinate, c3: Coordinate) =>
        val extPolygon = polygon((c.x, c.y), (c1.x, c1.y), (c2.x, c2.y), (c.x, c.y))
        geoCodec.decode(encode(extPolygon)) should equal (Right(extPolygon))
      }
    }

    it("should convert empty geometry JSON of type Polygon to a Left") {
      val body = """{
                   | "type": "Polygon",
                   | "coordinates": []
                   |}""".stripMargin
      val decoded = decodeString(body)
      decoded should be (Symbol("left"))
      decoded.left.getOrThrow should be (a[DecodeError.InvalidValue])
    }

    it("should convert geometry JSON of MultiLineString") {
      val body = """{
                   | "type": "MultiLineString",
                   | "coordinates": [[[0.0, 0.0], [0.0, 1.0]], [[1.0, 0.0], [1.0, 1.0]]]
                   |}""".stripMargin
      val mls = factory.createMultiLineString(Array(
        linestring((0.0, 0.0), (0.0, 1.0)),
        linestring((1.0, 0.0), (1.0, 1.0))
      ))
      decodeString(body).rightProjection.getOrThrow should equal (mls)
      forAll{(ml: MultiLineString) =>
        geoCodec.decode(encode(ml)) should equal (Right(ml))
      }
    }

    it("should convert geometry JSON of type MultiPolygon") {
      val body = """{
                   | "type": "MultiPolygon",
                   | "coordinates": [[[[0.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.0, 0.0]]], [[[1.0, 1.0], [1.0, 2.0], [2.0, 2.0], [1.0, 1.0]]]]
                   |}""".stripMargin
      val mp = factory.createMultiPolygon(Array(
        polygon((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (0.0, 0.0)),
        polygon((1.0, 1.0), (1.0, 2.0), (2.0, 2.0), (1.0, 1.0))
      ))
      decodeString(body) should be (Right(mp))
      forAll{(mp: MultiPolygon) =>
        geoCodec.decode(encode(mp)) should equal (Right(mp))
      }
    }

    it("should convert geometry JSON of type MultiPoint") {
      val body = """{ "type": "MultiPoint",
                   |"coordinates": [ [0, 0], [10.1, 10.1] ]
                   |}""".stripMargin
      val mps = factory.createMultiPoint(Array(coord(0.0, 0.0), coord(10.10, 10.10)))
      decodeString(body) should be (Right(mps))
      geoCodec.decode(encode(mps)) should equal (Right(mps))
      forAll{(mps: MultiPoint) =>
        geoCodec.decode(encode(mps)) should equal(Right(mps))
      }
    }

    it("should not convert non-GeoJSON or unsupported types") {
      // Unknown "type" value must be rejected.
      val body = JObject(Map("type" -> JString("foo"), "coordinates" -> pointCoords))
      geoCodec.decode(body) should be (Symbol("left"))
      // Non-object JSON must be rejected.
      val body2 = JArray(Seq(JString("totally not"), JNumber(5.6)))
      geoCodec.decode(body2) should be (Symbol("left"))
    }
  }

  describe("coordinates") {
    it("should convert Points correctly") {
      val pt = PointCodec.decode(pointCoords).rightProjection.getOrThrow
      (pt.getX, pt.getY) should equal ((6.0, 1.2))
    }

    it("should not convert non-Points") {
      // A single number is not an [x, y] pair; a line is not a point.
      PointCodec.decode(JArray(Seq(JNumber(-1)))) should be (Symbol("left"))
      PointCodec.decode(lineCoords) should be (Symbol("left"))
    }
  }
}
socrata-platform/socrata-thirdparty-utils
core/src/test/scala/com/socrata/thirdparty/geojson/JtsCodecsTest.scala
Scala
apache-2.0
5,840
/* * Copyright (c) 2012-2017 Snowplow Analytics Ltd. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0. * You may obtain a copy of the Apache License Version 2.0 at * http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, * software distributed under the Apache License Version 2.0 is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and * limitations there under. */ package com.snowplowanalytics.snowplow.enrich.spark import org.specs2.mutable.Specification object MasterCljTomcatSpec { // Concatenate ALL lines from ALL other jobs val lines = good.CljTomcatTp1SingleEventSpec.lines.lines ++ // 1 good good.CljTomcatCallrailEventSpec.lines.lines ++ // 1 good good.CljTomcatTp2MultiEventsSpec.lines.lines ++ // 3 good good.CljTomcatTp2MegaEventsSpec.lines.lines // 7,500 good = 7,505 GOOD object expected { val goodCount = 7505 } } /** Master test which runs using all of the individual good, bad and misc tests */ class MasterCljTomcatSpec extends Specification with EnrichJobSpec { import EnrichJobSpec._ override def appName = "master-clj-tomcat" sequential "A job which processes a Clojure-Tomcat file containing 7,505 valid events, 0 bad lines and " + "3 discardable lines" should { runEnrichJob(Lines(MasterCljTomcatSpec.lines: _*), "clj-tomcat", "1", false, List("geo")) "write 7,505 events" in { val Some(goods) = readPartFile(dirs.output) goods.size must_== MasterCljTomcatSpec.expected.goodCount } "write 0 bad rows" in { dirs.badRows must beEmptyDir } } }
acgray/snowplow
3-enrich/spark-enrich/src/test/scala/com.snowplowanalytics.snowplow.enrich.spark/MasterCljTomcatSpec.scala
Scala
apache-2.0
1,929
package org.tejo.model.google import java.io.File case class AccountConfig( serviceAccountId: String, p12PrivateKey: File )
tomaszym/izabela
actor/src/main/scala/org/tejo/model/google/AccountConfig.scala
Scala
gpl-2.0
136
package com.arcusys.learn.liferay.services import com.liferay.counter.kernel.service.CounterLocalServiceUtil object CounterLocalServiceHelper { def increment: Long = CounterLocalServiceUtil.increment() }
arcusys/Valamis
learn-liferay700-services/src/main/scala/com/arcusys/learn/liferay/services/CounterLocalServiceHelper.scala
Scala
gpl-3.0
209
package play.api.templates { trait Template0[Result] { def render(): Result } trait Template1[A, Result] { def render(a: A): Result } trait Template2[A, B, Result] { def render(a: A, b: B): Result } trait Template3[A, B, C, Result] { def render(a: A, b: B, c: C): Result } trait Template4[A, B, C, D, Result] { def render(a: A, b: B, c: C, d: D): Result } trait Template5[A, B, C, D, E, Result] { def render(a: A, b: B, c: C, d: D, e: E): Result } trait Template6[A, B, C, D, E, F, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F): Result } trait Template7[A, B, C, D, E, F, G, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G): Result } trait Template8[A, B, C, D, E, F, G, H, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H): Result } trait Template9[A, B, C, D, E, F, G, H, I, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I): Result } trait Template10[A, B, C, D, E, F, G, H, I, J, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J): Result } trait Template11[A, B, C, D, E, F, G, H, I, J, K, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K): Result } trait Template12[A, B, C, D, E, F, G, H, I, J, K, L, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L): Result } trait Template13[A, B, C, D, E, F, G, H, I, J, K, L, M, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M): Result } trait Template14[A, B, C, D, E, F, G, H, I, J, K, L, M, N, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N): Result } trait Template15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O): Result } trait Template16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: 
H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P): Result } trait Template17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q): Result } trait Template18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R): Result } trait Template19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S): Result } trait Template20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T): Result } trait Template21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T, u: U): Result } trait Template22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, Result] { def render(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L, m: M, n: N, o: O, p: P, q: Q, r: R, s: S, t: T, u: U, v: V): Result } } package play.templates { trait Appendable[T] { def +(other: T): T override def equals(x: Any): Boolean = super.equals(x) override def hashCode() = super.hashCode() } trait Format[T <: Appendable[T]] { def raw(text: String): T def escape(text: String): T } case class BaseScalaTemplate[T <: Appendable[T], F <: Format[T]](format: F) { def _display_(o: Any)(implicit m: Manifest[T]): T = { o match { case escaped if escaped != null && escaped.getClass == m.erasure => escaped.asInstanceOf[T] case () => format.raw("") case None => format.raw("") case Some(v) => _display_(v) case xml: 
scala.xml.NodeSeq => format.raw(xml.toString) case escapeds: TraversableOnce[_] => escapeds.foldLeft(format.raw(""))(_ + _display_(_)) case escapeds: Array[_] => escapeds.foldLeft(format.raw(""))(_ + _display_(_)) case string: String => format.escape(string) case v if v != null => _display_(v.toString) case _ => format.raw("") } } } /* ------ */ object TemplateMagic { // --- UTILS def defining[T](t: T)(handler: T => Any) = { handler(t) } def using[T](t: T) = t // --- IF implicit def iterableToBoolean(x: Iterable[_]) = x != null && !x.isEmpty implicit def optionToBoolean(x: Option[_]) = x != null && x.isDefined implicit def stringToBoolean(x: String) = x != null && !x.isEmpty // --- JAVA implicit def javaCollectionToScala[T](x: java.lang.Iterable[T]) = { import scala.collection.JavaConverters._ x.asScala } // --- DEFAULT case class Default(default: Any) { def ?:(x: Any) = x match { case "" => default case Nil => default case false => default case 0 => default case None => default case _ => x } } implicit def anyToDefault(x: Any) = Default(x) // --- DATE class RichDate(date: java.util.Date) { def format(pattern: String) = { new java.text.SimpleDateFormat(pattern).format(date) } } implicit def richDate(date: java.util.Date) = new RichDate(date) // --- STRING class RichString(string: String) { def when(predicate: => Boolean) = { predicate match { case true => string case false => "" } } } implicit def richString(string: String) = new RichString(string) } }
noel-yap/setter-for-catan
play-2.1.1/framework/src/templates-compiler/src/test/scala/FakeRuntime.scala
Scala
apache-2.0
5,918
/* * Copyright 2010 Data Fueled * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datafueled.trace.core.attributes abstract class IntAttribute(initialValue: Int = 0) extends Attribute[Int](initialValue)
waywardmonkeys/trace
trace-core/src/main/scala/com/datafueled/trace/core/attributes/IntAttribute.scala
Scala
apache-2.0
731
package org.tejo.iza.rules.facts import pl.pej.trelloilaro.api.model.ListJson import scala.beans.BeanInfo import scala.util.Try @BeanInfo case class ListFact(id: String, name: String) object ListFact { def apply(json: ListJson): ListFact = { Try(ListFact(json.id, json.name.get)).get } }
tomaszym/izabela
rules/src/main/scala/org/tejo/iza/rules/facts/ListFact.scala
Scala
gpl-2.0
302
/* * Wire * Copyright (C) 2016 Wire Swiss GmbH * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.waz.model import com.google.protobuf.nano.MessageNano import com.waz.model.AssetMetaData.Image.Tag import com.waz.model.AssetMetaData.Loudness import com.waz.model.AssetStatus.{DownloadFailed, UploadCancelled, UploadDone, UploadFailed, UploadInProgress, UploadNotStarted} import com.waz.model.nano.Messages import com.waz.model.nano.Messages.MessageEdit import com.waz.service.assets2.Asset.{Audio, General, Image, Video} import com.waz.service.assets2.{AES_CBC_Encryption, UploadAsset, UploadAssetStatus, Asset => Asset2} import com.waz.utils._ import com.waz.utils.crypto.AESUtils import com.waz.utils.wrappers.URI import org.json.JSONObject import org.threeten.bp.{Duration => Dur} import scala.collection.breakOut import scala.concurrent.duration._ trait GenericContent[-T] { def set(msg: GenericMessage): T => GenericMessage } object GenericContent { trait EphemeralContent[-T] { def set(eph: Ephemeral): T => Ephemeral } type Asset = Messages.Asset implicit object Asset extends GenericContent[Asset] { override def set(msg: GenericMessage) = msg.setAsset type Original = Messages.Asset.Original object Original { def apply(asset: AssetData): Original = returning(new Messages.Asset.Original) { o => o.mimeType = asset.mime.str o.size = asset.size asset.name foreach {o.name = _} asset.metaData 
match { case Some(video: AssetMetaData.Video) => o.setVideo(VideoMetaData(video)) case Some(image: AssetMetaData.Image) => o.setImage(ImageMetaData(image)) case Some(audio: AssetMetaData.Audio) => o.setAudio(AudioMetaData(audio)) case _ => } //TODO Dean giphy source and caption } def apply(asset: UploadAsset): Original = returning(new Messages.Asset.Original) { o => o.mimeType = asset.mime.str o.size = asset.size o.name = asset.name asset.details match { case image: Image => o.setImage(ImageMetaData(image)) case video: Video => o.setVideo(VideoMetaData(video)) case audio: Audio => o.setAudio(AudioMetaData(audio)) case _ => } } def apply(asset: Asset2): Original = returning(new Messages.Asset.Original) { o => o.mimeType = asset.mime.str o.size = asset.size o.name = asset.name asset.details match { case image: Image => o.setImage(ImageMetaData(image)) case video: Video => o.setVideo(VideoMetaData(video)) case audio: Audio => o.setAudio(AudioMetaData(audio)) case _ => } } def unapply(proto: Original): Option[(Mime, Long, Option[String], Option[AssetMetaData])] = Option(proto) map { orig => ( Option(orig.mimeType).filter(_.nonEmpty).map(Mime(_)).getOrElse(Mime.Unknown), orig.size, Option(orig.name).filter(_.nonEmpty), orig.getMetaDataCase match { case Messages.Asset.Original.IMAGE_FIELD_NUMBER => ImageMetaData.unapply(orig.getImage) case Messages.Asset.Original.VIDEO_FIELD_NUMBER => VideoMetaData.unapply(orig.getVideo) case Messages.Asset.Original.AUDIO_FIELD_NUMBER => AudioMetaData.unapply(orig.getAudio) case _ => None }) } } type ImageMetaData = Messages.Asset.ImageMetaData object ImageMetaData { def apply(md: AssetMetaData.Image): ImageMetaData = returning(new Messages.Asset.ImageMetaData) { p => p.tag = md.tag.toString p.width = md.dimensions.width p.height = md.dimensions.height } def apply(details: Image): ImageMetaData = returning(new Messages.Asset.ImageMetaData) { p => p.width = details.dimensions.width p.height = details.dimensions.height } def unapply(proto: 
ImageMetaData): Option[AssetMetaData.Image] = Some(AssetMetaData.Image(Dim2(proto.width, proto.height), Tag(proto.tag))) } type VideoMetaData = Messages.Asset.VideoMetaData object VideoMetaData { def apply(md: AssetMetaData.Video): VideoMetaData = returning(new Messages.Asset.VideoMetaData) { p => p.width = md.dimensions.width p.height = md.dimensions.height p.durationInMillis = md.duration.toMillis } def apply(details: Video): VideoMetaData = returning(new Messages.Asset.VideoMetaData) { p => p.width = details.dimensions.width p.height = details.dimensions.height p.durationInMillis = details.duration.toMillis } def unapply(proto: VideoMetaData): Option[AssetMetaData.Video] = Some(AssetMetaData.Video(Dim2(proto.width, proto.height), Dur.ofMillis(proto.durationInMillis))) } type AudioMetaData = Messages.Asset.AudioMetaData object AudioMetaData { def apply(md: AssetMetaData.Audio): AudioMetaData = returning(new Messages.Asset.AudioMetaData) { p => p.durationInMillis = md.duration.toMillis md.loudness.foreach(l => p.normalizedLoudness = bytify(l.levels)) } def apply(details: Audio): AudioMetaData = returning(new Messages.Asset.AudioMetaData) { p => p.durationInMillis = details.duration.toMillis p.normalizedLoudness = bytify(details.loudness.levels.map(_.toFloat)) } def unapply(p: AudioMetaData): Option[AssetMetaData.Audio] = Some(AssetMetaData.Audio(Dur.ofMillis(p.durationInMillis), Some(Loudness(floatify(p.normalizedLoudness))))) def bytify(ls: Iterable[Float]): Array[Byte] = ls.map(l => (l * 255f).toByte)(breakOut) def floatify(bs: Array[Byte]): Vector[Float] = bs.map(b => (b & 255) / 255f)(breakOut) } type Preview = Messages.Asset.Preview object Preview { def apply(preview: AssetData): Preview = returning(new Messages.Asset.Preview()) { p => p.mimeType = preview.mime.str p.size = preview.size //remote preview.remoteData.foreach(ak => p.remote = RemoteData.apply(ak)) //image meta preview.metaData.foreach { case meta@AssetMetaData.Image(_, _) => 
p.setImage(ImageMetaData(meta)) case _ => //other meta data types not supported } } def apply(asset: Asset2): Preview = returning(new Messages.Asset.Preview()) { p => p.mimeType = asset.mime.str p.size = asset.size p.remote = RemoteData(asset) //image meta asset.details match { case image: Image => p.setImage(ImageMetaData(image)) case _ => } } def unapply(preview: Preview): Option[AssetData] = Option(preview) map { prev => val remoteData = RemoteData.unapply(prev.remote) AssetData( mime = Mime(prev.mimeType), sizeInBytes = prev.size, status = remoteData.map(_ => UploadDone).getOrElse(UploadNotStarted), remoteId = remoteData.flatMap(_.remoteId), token = remoteData.flatMap(_.token), otrKey = remoteData.flatMap(_.otrKey), sha = remoteData.flatMap(_.sha256), metaData = Option(prev.getImage).flatMap(ImageMetaData.unapply) ) } } type RemoteData = Messages.Asset.RemoteData object RemoteData { def apply(ak: AssetData.RemoteData): RemoteData = returning(new Messages.Asset.RemoteData) { rData => ak.remoteId.foreach(v => rData.assetId = v.str) ak.token.foreach(v => rData.assetToken = v.str) ak.otrKey.foreach(v => rData.otrKey = v.bytes) ak.sha256.foreach(v => rData.sha256 = v.bytes) ak.encryption.foreach(v => rData.encryption = v.value) } def apply(asset: Asset2): RemoteData = returning(new Messages.Asset.RemoteData) { rData => rData.assetId = asset.id.str asset.token.foreach(token => rData.assetToken = token.str) rData.sha256 = asset.sha.bytes asset.encryption match { case AES_CBC_Encryption(key) => rData.encryption = Messages.AES_CBC rData.otrKey = key.bytes case _ => } } def unapply(remoteData: RemoteData): Option[AssetData.RemoteData] = Option(remoteData) map { rData => AssetData.RemoteData( Option(rData.assetId).filter(_.nonEmpty).map(RAssetId), Option(rData.assetToken).filter(_.nonEmpty).map(AssetToken), Some(AESKey(rData.otrKey)).filter(_ != AESKey.Empty), Some(Sha256(rData.sha256)).filter(_ != Sha256.Empty), Some(EncryptionAlgorithm(rData.encryption))) } } def 
apply(asset: AssetData, preview: Option[AssetData] = None, expectsReadConfirmation: Boolean): Messages.Asset = returning(new Messages.Asset) { proto => proto.original = Original(asset) preview.foreach(p => proto.preview = Preview(p)) (asset.status, asset.remoteData) match { case (UploadCancelled, _) => proto.setNotUploaded(Messages.Asset.CANCELLED) case (UploadFailed, _) => proto.setNotUploaded(Messages.Asset.FAILED) case (UploadDone, Some(data)) => proto.setUploaded(RemoteData(data)) case (DownloadFailed, Some(data)) => proto.setUploaded(RemoteData(data)) case _ => } proto.expectsReadConfirmation = expectsReadConfirmation } def apply(asset: UploadAsset, preview: Option[Asset2], expectsReadConfirmation: Boolean): Messages.Asset = returning(new Messages.Asset) { proto => proto.original = Original(asset) preview.foreach(p => proto.preview = Preview(p)) asset.status match { case UploadAssetStatus.Cancelled => proto.setNotUploaded(Messages.Asset.CANCELLED) case UploadAssetStatus.Failed => proto.setNotUploaded(Messages.Asset.FAILED) case _ => } proto.expectsReadConfirmation = expectsReadConfirmation } def apply(asset: Asset2, preview: Option[Asset2], expectsReadConfirmation: Boolean): Messages.Asset = returning(new Messages.Asset) { proto => proto.original = Original(asset) preview.foreach(p => proto.preview = Preview(p)) proto.setUploaded(RemoteData(asset)) proto.expectsReadConfirmation = expectsReadConfirmation } def unapply(a: Messages.Asset): Option[(AssetData, Option[AssetData])] = { //TODO Dean - think of better way to handle when only one part of asset proto appears without original val (mime, size, name, meta) = Original.unapply(a.original).getOrElse(Mime.Unknown, 0L, None, None) val preview = Preview.unapply(a.preview) val remoteData = RemoteData.unapply(a.getUploaded) val status = a.getStatusCase match { case Messages.Asset.UPLOADED_FIELD_NUMBER => remoteData.map(_ => UploadDone).getOrElse(UploadFailed) case Messages.Asset.NOT_UPLOADED_FIELD_NUMBER => 
a.getNotUploaded match { case Messages.Asset.CANCELLED => UploadCancelled case Messages.Asset.FAILED => UploadFailed case _ => UploadInProgress } case _ => UploadInProgress } val asset = AssetData( mime = mime, sizeInBytes = size, name = name, metaData = meta, status = status, remoteId = remoteData.flatMap(_.remoteId), token = remoteData.flatMap(_.token), otrKey = remoteData.flatMap(_.otrKey), sha = remoteData.flatMap(_.sha256), previewId = preview.map(_.id) ) Some((asset, preview)) } } implicit object EphemeralAsset extends EphemeralContent[Messages.Asset] { override def set(eph: Ephemeral): Messages.Asset => Ephemeral = eph.setAsset } type ImageAsset = Messages.ImageAsset implicit object ImageAsset extends GenericContent[ImageAsset] { override def set(msg: GenericMessage) = msg.setImage def unapply(proto: ImageAsset): Option[AssetData] = { Some(AssetData( status = UploadDone, otrKey = Option(proto.otrKey).map(AESKey(_)), sha = Option(proto.sha256).map(Sha256(_)), sizeInBytes = proto.size, mime = Mime(proto.mimeType), metaData = Some(AssetMetaData.Image(Dim2(proto.width, proto.height), Tag(proto.tag))) )) } def apply(asset: AssetData): ImageAsset = returning(new Messages.ImageAsset) { proto => asset.metaData.foreach { case AssetMetaData.Image(Dim2(w, h), tag) => proto.tag = tag.toString proto.width = w proto.height = h proto.originalWidth = w proto.originalHeight = h case _ => throw new Exception("Trying to create image proto from non image asset data") } proto.mimeType = asset.mime.str proto.size = asset.size.toInt asset.otrKey.foreach(v => proto.otrKey = v.bytes) asset.sha.foreach(v => proto.sha256 = v.bytes) } } implicit object EphemeralImageAsset extends EphemeralContent[ImageAsset] { override def set(eph: Ephemeral): ImageAsset => Ephemeral = eph.setImage } type Mention = Messages.Mention object Mention { def apply(userId: Option[UserId], start: Int, length: Int) = returning(new Messages.Mention) { m => userId.map(id => m.setUserId(id.str)) m.start = start 
m.length = length } } type Quote = Messages.Quote object Quote { def apply(id: MessageId, sha256: Option[Sha256]) = returning(new Messages.Quote) { q => q.quotedMessageId = id.str sha256.foreach(sha => if (sha.bytes.nonEmpty) q.quotedMessageSha256 = sha.bytes) } def unapply(quote: Quote): Option[(MessageId, Option[Sha256])] = Some(MessageId(quote.quotedMessageId), Option(quote.quotedMessageSha256).collect { case bytes if bytes.nonEmpty => Sha256.calculate(bytes) }) } type LinkPreview = Messages.LinkPreview object LinkPreview { trait PreviewMeta[A] { def apply(preview: LinkPreview, meta: A): LinkPreview } implicit object TweetMeta extends PreviewMeta[Tweet] { override def apply(preview: LinkPreview, meta: Tweet): LinkPreview = returning(preview) {_.setTweet(meta)} } def apply(uri: URI, offset: Int): LinkPreview = returning(new Messages.LinkPreview) { p => p.url = uri.toString p.urlOffset = offset } def apply(uri: URI, offset: Int, title: String, summary: String, image: Option[Messages.Asset], permanentUrl: Option[URI]): LinkPreview = returning(new Messages.LinkPreview) { p => p.url = uri.toString p.urlOffset = offset p.title = title p.summary = summary permanentUrl foreach { u => p.permanentUrl = u.toString } image foreach {p.image = _} // set article for backward compatibility, we will stop sending it once all platforms switch to using LinkPreview properties p.setArticle(article(title, summary, image, permanentUrl)) } def apply[Meta: PreviewMeta](uri: URI, offset: Int, title: String, summary: String, image: Option[Messages.Asset], permanentUrl: Option[URI], meta: Meta): LinkPreview = returning(apply(uri, offset, title, summary, image, permanentUrl)) { p => implicitly[PreviewMeta[Meta]].apply(p, meta) } type Tweet = Messages.Tweet object Tweet { } private def article(title: String, summary: String, image: Option[Messages.Asset], uri: Option[URI]) = returning(new Messages.Article) { p => p.title = title p.summary = summary uri foreach { u => p.permanentUrl = 
u.toString } image foreach {p.image = _} } implicit object JsDecoder extends JsonDecoder[LinkPreview] { override def apply(implicit js: JSONObject): LinkPreview = Messages.LinkPreview.parseFrom(AESUtils.base64(js.getString("proto"))) } implicit object JsEncoder extends JsonEncoder[LinkPreview] { override def apply(v: LinkPreview): JSONObject = JsonEncoder { o => o.put("proto", AESUtils.base64(MessageNano.toByteArray(v))) } } object WithAsset { def unapply(lp: LinkPreview): Option[AssetData] = (Option(lp.image) orElse {if (lp.hasArticle) Option(lp.getArticle.image) else None}).flatMap { a => Asset.unapply(a).map { case (asset, _) => asset}} } object WithDescription { def unapply(lp: LinkPreview): Option[(String, String)] = if (lp.hasArticle) Some((lp.getArticle.title, lp.getArticle.summary)) else Some((lp.title, lp.summary)) } } type Reaction = Messages.Reaction implicit object Reaction extends GenericContent[Reaction] { override def set(msg: GenericMessage) = msg.setReaction val HeavyBlackHeart = "\\u2764\\uFE0F" def apply(msg: MessageId, action: Liking.Action): Reaction = returning(new Messages.Reaction) { proto => proto.emoji = action match { case Liking.Action.Like => HeavyBlackHeart case Liking.Action.Unlike => "" } proto.messageId = msg.str } def unapply(proto: Messages.Reaction): Option[(MessageId, Liking.Action)] = Some((MessageId(proto.messageId), proto.emoji match { case HeavyBlackHeart => Liking.Action.Like case _ => Liking.Action.Unlike })) } type Knock = Messages.Knock implicit object Knock extends GenericContent[Knock] { override def set(msg: GenericMessage) = msg.setKnock def apply(expectsReadConfirmation: Boolean) = returning(new Messages.Knock())(_.expectsReadConfirmation = expectsReadConfirmation) def unapply(arg: Knock): Boolean = true } implicit object EphemeralKnock extends EphemeralContent[Knock] { override def set(eph: Ephemeral): (Knock) => Ephemeral = eph.setKnock } type Text = Messages.Text implicit object Text extends GenericContent[Text] 
{ override def set(msg: GenericMessage) = msg.setText def apply(content: String): Text = apply(content, Nil, Nil, None, expectsReadConfirmation = false) def apply(content: String, links: Seq[LinkPreview], expectsReadConfirmation: Boolean): Text = apply(content, Nil, links, None, expectsReadConfirmation) def apply(content: String, mentions: Seq[com.waz.model.Mention], links: Seq[LinkPreview], expectsReadConfirmation: Boolean): Text = apply(content, mentions, links, None, expectsReadConfirmation) def apply(content: String, mentions: Seq[com.waz.model.Mention], links: Seq[LinkPreview], quote: Quote, expectsReadConfirmation: Boolean): Text = apply(content, mentions, links, Some(quote), expectsReadConfirmation) def apply(content: String, mentions: Seq[com.waz.model.Mention], links: Seq[LinkPreview], quote: Option[Quote], expectsReadConfirmation: Boolean): Text = returning(new Messages.Text()) { t => t.content = content t.mentions = mentions.map { case com.waz.model.Mention(userId, start, length) => GenericContent.Mention(userId, start, length) }(breakOut).toArray t.linkPreview = links.toArray t.expectsReadConfirmation = expectsReadConfirmation quote.foreach(q => t.quote = q) } def unapply(proto: Text): Option[(String, Seq[com.waz.model.Mention], Seq[LinkPreview], Option[Quote])] = { val mentions = proto.mentions.map { m => val userId = m.getUserId match { case id: String if id.nonEmpty => Option(UserId(id)) case _ => None } com.waz.model.Mention(userId, m.start, m.length) }.toSeq Option((proto.content, mentions, proto.linkPreview.toSeq, Option(proto.quote))) } } implicit object EphemeralText extends EphemeralContent[Text] { override def set(eph: Ephemeral): (Text) => Ephemeral = eph.setText } type MsgEdit = Messages.MessageEdit implicit object MsgEdit extends GenericContent[MsgEdit] { override def set(msg: GenericMessage) = msg.setEdited def apply(ref: MessageId, content: Text) = returning(new MessageEdit) { c => c.replacingMessageId = ref.str c.setText(content) } def 
unapply(arg: MsgEdit): Option[(MessageId, Text)] = arg.getContentCase match { case Messages.MessageEdit.TEXT_FIELD_NUMBER => Some((MessageId(arg.replacingMessageId), arg.getText)) case _ => None } } type Cleared = Messages.Cleared implicit object Cleared extends GenericContent[Cleared] { override def set(msg: GenericMessage) = msg.setCleared def apply(conv: RConvId, time: RemoteInstant) = returning(new Messages.Cleared) { c => c.conversationId = conv.str c.clearedTimestamp = time.toEpochMilli } def unapply(arg: Cleared): Option[(RConvId, RemoteInstant)] = for { conv <- Option(arg.conversationId) time <- Option(arg.clearedTimestamp) } yield (RConvId(conv), RemoteInstant.ofEpochMilli(time)) } type LastRead = Messages.LastRead implicit object LastRead extends GenericContent[LastRead] { override def set(msg: GenericMessage) = msg.setLastRead def apply(conv: RConvId, time: RemoteInstant) = returning(new Messages.LastRead) { l => l.conversationId = conv.str l.lastReadTimestamp = time.toEpochMilli } def unapply(arg: LastRead): Option[(RConvId, RemoteInstant)] = Some((RConvId(arg.conversationId), RemoteInstant.ofEpochMilli(arg.lastReadTimestamp))) } type MsgDeleted = Messages.MessageHide implicit object MsgDeleted extends GenericContent[MsgDeleted] { override def set(msg: GenericMessage) = msg.setHidden def apply(conv: RConvId, msg: MessageId) = returning(new Messages.MessageHide) { d => d.conversationId = conv.str d.messageId = msg.str } def unapply(proto: MsgDeleted): Option[(RConvId, MessageId)] = Some((RConvId(proto.conversationId), MessageId(proto.messageId))) } type MsgRecall = Messages.MessageDelete implicit object MsgRecall extends GenericContent[MsgRecall] { override def set(msg: GenericMessage) = msg.setDeleted def apply(msg: MessageId) = returning(new Messages.MessageDelete) { d => d.messageId = msg.str } def unapply(proto: MsgRecall): Option[MessageId] = Some(MessageId(proto.messageId)) } type Location = Messages.Location implicit object Location extends 
GenericContent[Location] { override def set(msg: GenericMessage): Location => GenericMessage = msg.setLocation def apply(lon: Float, lat: Float, name: String, zoom: Int, expectsReadConfirmation: Boolean) = returning(new Messages.Location) { p => p.longitude = lon p.latitude = lat p.name = name p.zoom = zoom p.expectsReadConfirmation = expectsReadConfirmation } def unapply(l: Location): Option[(Float, Float, Option[String], Option[Int])] = Some((l.longitude, l.latitude, Option(l.name).filter(_.nonEmpty), Option(l.zoom).filter(_ != 0))) } implicit object EphemeralLocation extends EphemeralContent[Location] { override def set(eph: Ephemeral): Location => Ephemeral = eph.setLocation } type Receipt = Messages.Confirmation object DeliveryReceipt extends GenericContent[Receipt] { override def set(msg: GenericMessage) = msg.setConfirmation def apply(msg: MessageId) = returning(new Messages.Confirmation) { c => c.firstMessageId = msg.str c.`type` = Messages.Confirmation.DELIVERED } def apply(msgs: Seq[MessageId]) = returning(new Messages.Confirmation) { c => c.firstMessageId = msgs.head.str c.moreMessageIds = msgs.map(_.str).tail.toArray c.`type` = Messages.Confirmation.DELIVERED } def unapply(proto: Receipt): Option[Seq[MessageId]] = if (proto.`type` == Messages.Confirmation.DELIVERED) Some(Seq(MessageId(proto.firstMessageId)) ++ proto.moreMessageIds.map(MessageId(_)).toSeq) else None } object ReadReceipt extends GenericContent[Receipt] { override def set(msg: GenericMessage) = msg.setConfirmation def apply(msg: MessageId) = returning(new Messages.Confirmation) { c => c.firstMessageId = msg.str c.`type` = Messages.Confirmation.READ } def apply(msgs: Seq[MessageId]) = returning(new Messages.Confirmation) { c => c.firstMessageId = msgs.head.str c.moreMessageIds = msgs.map(_.str).tail.toArray c.`type` = Messages.Confirmation.READ } def unapply(proto: Receipt): Option[Seq[MessageId]] = if (proto.`type` == Messages.Confirmation.READ) Some(Seq(MessageId(proto.firstMessageId)) ++ 
proto.moreMessageIds.map(MessageId(_)).toSeq) else None } type External = Messages.External implicit object External extends GenericContent[External] { override def set(msg: GenericMessage) = msg.setExternal def apply(key: AESKey, sha: Sha256) = returning(new Messages.External) { e => e.otrKey = key.bytes e.sha256 = sha.bytes } def unapply(e: External): Option[(AESKey, Sha256)] = for { key <- Option(e.otrKey) sha <- Option(e.sha256) } yield (AESKey(key), Sha256(sha)) } type Ephemeral = Messages.Ephemeral implicit object Ephemeral extends GenericContent[Ephemeral] { override def set(msg: GenericMessage): Ephemeral => GenericMessage = msg.setEphemeral def apply[Content: EphemeralContent](expiry: Option[FiniteDuration], content: Content) = returning(new Messages.Ephemeral) { proto => proto.expireAfterMillis = expiry.getOrElse(Duration.Zero).toMillis implicitly[EphemeralContent[Content]].set(proto)(content) } def unapply(proto: Ephemeral): Option[(Option[FiniteDuration], Any)] = proto.expireAfterMillis match { case 0 => Some((None, content(proto))) case _ => Some(Some(EphemeralDuration(proto.expireAfterMillis)), content(proto)) } def content(e: Ephemeral) = e.getContentCase match { case Messages.Ephemeral.TEXT_FIELD_NUMBER => e.getText case Messages.Ephemeral.ASSET_FIELD_NUMBER => e.getAsset case Messages.Ephemeral.IMAGE_FIELD_NUMBER => e.getImage case Messages.Ephemeral.KNOCK_FIELD_NUMBER => e.getKnock case Messages.Ephemeral.LOCATION_FIELD_NUMBER => e.getLocation case _ => Unknown } } type AvailabilityStatus = Messages.Availability implicit object AvailabilityStatus extends GenericContent[AvailabilityStatus] { import Messages.Availability._ override def set(msg: GenericMessage): AvailabilityStatus => GenericMessage = msg.setAvailability def apply(activity: Availability): AvailabilityStatus = returning(new Messages.Availability) { _.`type` = activity match { case Availability.None => NONE case Availability.Available => AVAILABLE case Availability.Away => AWAY case 
Availability.Busy => BUSY } } def unapply(availability: AvailabilityStatus): Option[Availability] = availability.`type` match { case NONE => Some(Availability.None) case AVAILABLE => Some(Availability.Available) case AWAY => Some(Availability.Away) case BUSY => Some(Availability.Busy) case _ => None } } case object Unknown implicit object UnknownContent extends GenericContent[Unknown.type] { override def set(msg: GenericMessage) = { _ => msg } } sealed trait ClientAction { val value: Int } implicit object ClientAction extends GenericContent[ClientAction] { case object SessionReset extends ClientAction { override val value: Int = Messages.RESET_SESSION } case class UnknownAction(value: Int) extends ClientAction def apply(v: Int) = v match { case Messages.RESET_SESSION => SessionReset case other => UnknownAction(other) } override def set(msg: GenericMessage) = { action => msg.setClientAction(action.value) } } type Calling = Messages.Calling implicit object Calling extends GenericContent[Calling] { override def set(msg: GenericMessage): Calling => GenericMessage = msg.setCalling def apply(content: String): Calling = returning(new Calling) { c => c.content = content } def unapply(calling: Calling): Option[String] = Option(calling.content) } sealed trait EncryptionAlgorithm { val value: Int } implicit object EncryptionAlgorithm { case object AES_CBC extends EncryptionAlgorithm { override val value: Int = Messages.AES_CBC } case object AES_GCM extends EncryptionAlgorithm { override val value: Int = Messages.AES_GCM } def apply(v: Int) = v match { case Messages.AES_GCM => AES_GCM case other => AES_CBC } def unapply(encryption: EncryptionAlgorithm): Option[Int] = encryption match { case AES_GCM => Some(Messages.AES_GCM) case _ => Some(Messages.AES_CBC) } } }
wireapp/wire-android-sync-engine
zmessaging/src/main/scala/com/waz/model/GenericContent.scala
Scala
gpl-3.0
29,505
package pictureshow

object IO {
  import java.net.URL

  /** Reads the entire contents of `url` as a UTF-8 string.
   *
   *  @param url the resource to read
   *  @return Some(contents) on success, or None when the resource does not
   *          exist (a java.io.FileNotFoundException is raised on open)
   */
  def slurp(url: URL): Option[String] =
    try {
      val source = scala.io.Source.fromURL(url, "utf-8")
      // Close the underlying stream/connection even if reading throws.
      // The original implementation never closed the Source, leaking a
      // file handle (or HTTP connection) per call.
      try Some(source.mkString("")) finally source.close()
    } catch {
      case e: java.io.FileNotFoundException => None
    }
}
softprops/picture-show
core/src/main/scala/IO.scala
Scala
mit
214
object Problem {

  /** Project Euler #9: print a*b*c for every Pythagorean triple (a, b, c)
   *  with a + b + c == 1000 found in the searched ranges.
   */
  def main(args: Array[String]) {
    val products =
      for {
        a <- 100 to 400
        b <- a to 500
        c <- b to 600
        if a + b + c == 1000 && a * a + b * b == c * c
      } yield a * b * c
    println(products)
  }
}
Jiri-Kremser/euler
009/Problem.scala
Scala
gpl-2.0
174
package io.github.mandar2812.dynaml.algebra

import breeze.generic.UFunc
import breeze.linalg.sum
import breeze.numerics.{abs, pow}

/**
 * Created by mandar on 13/10/2016.
 *
 * L_p norms for distributed vector types, exposed as breeze [[UFunc]]s.
 * Usage: normDist(vector, p) / normBDist(vector, p).
 */
object normDist extends UFunc {

  // L_p norm of a SparkVector: (sum_i |x_i|^p)^(1/p), computed element-wise
  // over the underlying RDD of values.
  implicit object implDV extends Impl2[SparkVector, Double, Double] {
    def apply(a: SparkVector, p: Double) = {
      assert(p >= 1.0, "For an L_p norm to be computed p >= 1.0")
      math.pow(a._vector.values.map(x => math.pow(math.abs(x), p)).sum(), 1.0/p)
    }
  }
}

object normBDist extends UFunc {

  // L_p norm for a blocked vector: each block contributes sum(|x|^p),
  // then the per-block sums are combined before taking the p-th root.
  implicit object implBlockedDV extends Impl2[SparkBlockedVector, Double, Double] {
    def apply(a: SparkBlockedVector, p: Double) = {
      assert(p >= 1.0, "For an L_p norm to be computed p >= 1.0")
      math.pow(a._vector.values.map(x => sum(pow(abs(x), p))).sum(), 1.0/p)
    }
  }

  // Same computation for a partitioned vector; `_data` is a sequence of
  // (index, block) pairs, so the block is extracted with `_._2`.
  implicit object implPartitionedDV extends Impl2[PartitionedVector, Double, Double] {
    def apply(a: PartitionedVector, p: Double) = {
      assert(p >= 1.0, "For an L_p norm to be computed p >= 1.0")
      math.pow(a._data.map(_._2).map(x => sum(pow(abs(x), p))).sum, 1.0/p)
    }
  }
}
transcendent-ai-labs/DynaML
dynaml-core/src/main/scala/io/github/mandar2812/dynaml/algebra/normDist.scala
Scala
apache-2.0
1,101
import numbers.finite.Complex import numbers.finite.PolarComplex import numbers.finite.RectComplex import scala.math.sin import scala.math.min import scala.math.max import scala.math.floor import scala.math.ceil import scala.math.Pi println("Generating data for butterworth filters") def beta(k : Int, m : Int) = new PolarComplex(1, Pi/2*(1+(2.0*k-1.0)/m)) def lambda(m: Int, f : Double) = Complex.one / (1 to m).foldLeft(Complex.one){ (prod, k) => prod*(new RectComplex(0,f) - beta(k,m)) } def mod2pi(x : Double) = x - floor(x/2/Pi)*2*Pi val fmin = 0.00001 val fmax=3.1 /// Format Doubles string to a reasonable number of decimal places def fmt(x : Double) = "%f" format x for(m <- 1 to 4) { val filetfun = new java.io.FileWriter("data" + m + ".csv") (fmin to fmax by 0.05) foreach { f => filetfun.write(fmt(f) + "\\t" + fmt(mod2pi(lambda(m,f).angle)) + "\\n") } filetfun.close } println("Scala finished")
robbymckilliam/testablelinearsystems
data/butterworth/butterworth.scala
Scala
agpl-3.0
914
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.sql.execution

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation}
import org.apache.spark.sql.test.SharedSQLContext

// Verifies that QueryExecution.toString degrades gracefully: AnalysisException
// raised during planning is rendered into the string, while fatal Errors
// propagate to the caller instead of being swallowed.
class QueryExecutionSuite extends SharedSQLContext {
  test("toString() exception/error handling") {
    // A no-op strategy: planning succeeds, producing a plain plan string.
    spark.experimental.extraStrategies = Seq(
      new SparkStrategy {
        override def apply(plan: LogicalPlan): Seq[SparkPlan] = Nil
      })

    // `def` (not val) so each access builds a fresh QueryExecution under the
    // currently-installed strategies.
    def qe: QueryExecution = new QueryExecution(spark, OneRowRelation())

    // Nothing!
    assert(qe.toString.contains("OneRowRelation"))

    // Throw an AnalysisException - this should be captured.
    spark.experimental.extraStrategies = Seq(
      new SparkStrategy {
        override def apply(plan: LogicalPlan): Seq[SparkPlan] =
          throw new AnalysisException("exception")
      })
    assert(qe.toString.contains("org.apache.spark.sql.AnalysisException"))

    // Throw an Error - this should not be captured.
    spark.experimental.extraStrategies = Seq(
      new SparkStrategy {
        override def apply(plan: LogicalPlan): Seq[SparkPlan] =
          throw new Error("error")
      })
    val error = intercept[Error](qe.toString)
    assert(error.getMessage.contains("error"))
  }
}
esi-mineset/spark
sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
Scala
apache-2.0
2,083
package lila.security

import lila.user.{ User, UserRepo }

// Command-line administration commands for the security module.
// Each command is of the form: security <verb> <username> [args...].
private[security] final class Cli extends lila.common.Cli {

  def process = {

    // Re-enable a disabled user account.
    case "security" :: "enable" :: uid :: Nil => perform(uid, u => UserRepo enable u.id)

    // Disable the account and drop any live sessions for it.
    case "security" :: "disable" :: uid :: Nil =>
      perform(uid, u => (UserRepo disable u.id) >> (Store disconnect u.id))

    // Set a new password for the user.
    case "security" :: "passwd" :: uid :: pwd :: Nil =>
      perform(uid, user => UserRepo.passwd(user.id, pwd))

    // Print the user's current roles, space-separated.
    case "security" :: "roles" :: uid :: Nil => UserRepo named uid map {
      _.fold("User %s not found" format uid)(_.roles mkString " ")
    }

    // Replace the user's roles with the given list (upper-cased).
    case "security" :: "grant" :: uid :: roles =>
      perform(uid, user =>
        UserRepo.setRoles(user.id, roles map (_.toUpperCase))
      )
  }

  // Looks up the user by name and applies `op`; yields a human-readable
  // status string either way (not-found is a failed future).
  private def perform(username: String, op: User => Funit): Fu[String] =
    UserRepo named username flatMap { userOption =>
      userOption.fold(fufail[String]("User %s not found" format username)) { u =>
        op(u) inject "User %s successfully updated".format(username)
      }
    }
}
Happy0/lila
modules/security/src/main/Cli.scala
Scala
mit
1,079
/* * Copyright (c) 2012-2019 Snowplow Analytics Ltd. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0. * You may obtain a copy of the Apache License Version 2.0 at * http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, * software distributed under the Apache License Version 2.0 is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and * limitations there under. */ package com.snowplowanalytics.snowplow.enrich.spark package good import org.specs2.mutable.Specification object CljTomcatSendgridEventSpec { import EnrichJobSpec._ val lines = Lines( "2014-10-09 16:28:31 - 13 255.255.255.255 POST 255.255.255.255 /com.sendgrid/v3 404 - - aid=email&cv=clj-0.6.0-tom-0.0.4&nuid=- - - - application%2Fjson W3siZW1haWwiOiJleGFtcGxlQHRlc3QuY29tIiwidGltZXN0YW1wIjoxNDQ2NTQ5NjE1LCJzbXRwLWlkIjoiXHUwMDNjMTRjNWQ3NWNlOTMuZGZkLjY0YjQ2OUBpc210cGQtNTU1XHUwMDNlIiwiZXZlbnQiOiJwcm9jZXNzZWQiLCJjYXRlZ29yeSI6ImNhdCBmYWN0cyIsInNnX2V2ZW50X2lkIjoic1pST3dNR01hZ0Znbk9FbVNkdmhpZz09Iiwic2dfbWVzc2FnZV9pZCI6IjE0YzVkNzVjZTkzLmRmZC42NGI0NjkuZmlsdGVyMDAwMS4xNjY0OC41NTE1RTBCODguMCIsIm1hcmtldGluZ19jYW1wYWlnbl9pZCI6MTIzNDUsIm1hcmtldGluZ19jYW1wYWlnbl9uYW1lIjoiY2FtcGFpZ24gbmFtZSIsIm1hcmtldGluZ19jYW1wYWlnbl92ZXJzaW9uIjoiQiIsIm1hcmtldGluZ19jYW1wYWlnbl9zcGxpdF9pZCI6MTM0NzF9XQ==" ) val expected = List( "email", "srv", etlTimestamp, "2014-10-09 16:28:31.000", null, "unstruct", null, // We can't predict the event_id null, null, // No tracker namespace "com.sendgrid-v3", "clj-0.6.0-tom-0.0.4", etlVersion, null, // No user_id set "79398dd7e78a8998b6e58e380e7168d8766f1644", null, null, null, "-", // TODO: fix this, https://github.com/snowplow/snowplow/issues/1133 null, // No geo-location for this 
IP address null, null, null, null, null, null, null, // No additional MaxMind databases used null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, // Marketing campaign fields empty null, // null, // null, // null, // null, // No custom contexts null, // Structured event fields empty null, // null, // null, // null, // """{"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.sendgrid/processed/jsonschema/2-0-0","data":{"email":"example@test.com","timestamp":"2015-11-03T11:20:15.000Z","smtp-id":"\\u003c14c5d75ce93.dfd.64b469@ismtpd-555\\u003e","category":"cat facts","sg_event_id":"sZROwMGMagFgnOEmSdvhig==","sg_message_id":"14c5d75ce93.dfd.64b469.filter0001.16648.5515E0B88.0","marketing_campaign_id":12345,"marketing_campaign_name":"campaign name","marketing_campaign_version":"B","marketing_campaign_split_id":13471}}}""", null, // Transaction fields empty null, // null, // null, // null, // null, // null, // null, // null, // Transaction item fields empty null, // null, // null, // null, // null, // null, // Page ping fields empty null, // null, // null, // null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null ) } class CljTomcatSendgridEventSpec extends Specification with EnrichJobSpec { import EnrichJobSpec._ override def appName = "clj-tomcat-sendgrid-event" sequential "A job which processes a Clojure-Tomcat file containing a Sendgrid POST raw event representing " + "1 valid completed call" should { runEnrichJob(CljTomcatSendgridEventSpec.lines, "clj-tomcat", "2", true, List("geo")) "correctly output 1 completed call" in { val Some(goods) = readPartFile(dirs.output) goods.size must_== 1 val actual = goods.head.split("\\t").map(s => if (s.isEmpty()) null else s) for (idx <- 
CljTomcatSendgridEventSpec.expected.indices) { actual(idx) must BeFieldEqualTo(CljTomcatSendgridEventSpec.expected(idx), idx) } } "not write any bad rows" in { dirs.badRows must beEmptyDir } } }
RetentionGrid/snowplow
3-enrich/spark-enrich/src/test/scala/com.snowplowanalytics.snowplow.enrich.spark/good/CljTomcatSendgridEventSpec.scala
Scala
apache-2.0
4,728
package scalaz.stream.mongodb.query

import scalaz.stream.mongodb.collectionSyntax._
import scalaz.syntax.Ops

/**
 * Syntax to allow creation of OrderPairs from string.
 *
 * Mixes into a String (via scalaz Ops, where `self` is the wrapped field
 * name) so sort orders can be written as e.g. `"field".Ascending`.
 */
trait OrderingOps extends Ops[String]{

  // Pair this field name with ascending sort order.
  def Ascending: OrderPair = OrderPair(self,Order.Ascending)

  // Pair this field name with descending sort order.
  def Descending: OrderPair = OrderPair(self,Order.Descending)
}
Spinoco/scalaz-stream-mongodb
core/src/main/scala/scalaz/stream/mongodb/query/OrderingOps.scala
Scala
mit
343
package org.denigma.kappa

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}

/**
 * Created by antonkulaga on 3/29/16.
 *
 * Minimal HTTP helper for tests: fires single requests against a path with
 * query parameters and exposes a blocking `wait` to resolve the futures.
 */
class Tester(implicit system: ActorSystem, materializer: ActorMaterializer) {

  implicit def dispatcher = system.dispatcher

  /** Blocks up to 2 seconds for the future's result — test code only. */
  def wait[T](fut: Future[T]): T = Await.result(fut, 2 seconds)

  /**
   * Issues a single request with the given HTTP method.
   *
   * BUG FIX: the original built `HttpRequest(uri = ...)` without passing
   * `method`, so every request (including delete) was actually sent as GET
   * (HttpRequest's default method).
   */
  def open(method: HttpMethod, path: String, params: (String, String)*): Future[HttpResponse] =
    Http().singleRequest(HttpRequest(method = method, uri = Uri(path).withQuery(Query(params: _*))))

  def get(path: String, params: (String, String)*): Future[HttpResponse] =
    open(HttpMethods.GET, path, params: _*)

  // BUG FIX: `post` previously delegated with HttpMethods.GET.
  def post(path: String, params: (String, String)*): Future[HttpResponse] =
    open(HttpMethods.POST, path, params: _*)

  def delete(path: String, params: (String, String)*): Future[HttpResponse] =
    open(HttpMethods.DELETE, path, params: _*)
}
antonkulaga/kappa-notebook
websim/jvm/src/test/scala/org.denigma.kappa/Tester.scala
Scala
mpl-2.0
1,059
import scala.deriving.Mirror
import scala.compiletime.{constValue, error}
import scala.quoted.*

// Exercises Type.valueOfTuple across tuple arities, including the >22 case,
// by generating println calls for each recovered value at macro-expansion time.
object TestMacro {

  // Entry point: expands to a block of printlns built by `code`.
  inline def test1: Unit = ${ code() }

  // Builds the expression at compile time; each Type.valueOfTuple result is
  // stringified during expansion and spliced in as a constant.
  def code()(using Quotes): Expr[Unit] = '{
    println(${Expr(Type.valueOfTuple[EmptyTuple].toString)})
    println(${Expr(Type.valueOfTuple[1 *: EmptyTuple].toString)})
    println(${Expr(Type.valueOfTuple[(1, 2)].toString)})
    println(${Expr(Type.valueOfTuple[(1, 2, 3)].toString)})
    println(${Expr(Type.valueOfTuple[(1, 2, 3, 4)].toString)})
    println(${Expr(Type.valueOfTuple[(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26)].toString)})
  }
}
dotty-staging/dotty
tests/run-macros/i12417b/Macros_1.scala
Scala
apache-2.0
681
/*
 * SendRepliesAs.scala
 *
 * Updated: Jan 29, 2015
 *
 * Copyright (c) 2015, CodeMettle
 */
package com.codemettle.reactivemq.connection

import akka.actor.{Actor, ActorRef}
import akka.pattern.pipe

import scala.concurrent.Future

/**
 * @author steven
 *
 * Mixin for actors that must send replies from a different sender reference
 * than `self`; all outgoing messages carry `sendRepliesAs` as their sender.
 */
private[connection] trait SendRepliesAs {
  this: Actor =>
  import context.dispatcher

  // The ActorRef to present as the sender of every reply.
  protected def sendRepliesAs: ActorRef

  // Pipes the future's eventual result to `to`, with the sender set to
  // `sendRepliesAs` rather than this actor.
  protected def routeFutureFromSRA[T](to: ActorRef)(f: => Future[T]) = {
    f.pipeTo(to)(sendRepliesAs)
  }

  // Enables `target tellFromSRA msg` as a tell whose sender is sendRepliesAs.
  protected implicit class SendReply(val u: ActorRef) {
    def tellFromSRA(msg: Any) = u.tell(msg, sendRepliesAs)
  }
}
CodeMettle/reactivemq
src/main/scala/com/codemettle/reactivemq/connection/SendRepliesAs.scala
Scala
apache-2.0
645
package org.hammerlab.guacamole.filters.somatic

import org.apache.spark.rdd.RDD
import org.hammerlab.guacamole.variants.CalledSomaticAllele

// Filters somatic genotype calls by average quality in both tumor and
// normal evidence.
object SomaticAverageBaseQualityFilter {

  // NOTE(review): despite the filter's name, this compares
  // `meanMappingQuality` (not a mean *base* quality) against the threshold
  // — verify whether a meanBaseQuality field was intended here.
  def hasMinimumAverageBaseQuality(somaticGenotype: CalledSomaticAllele,
                                   minAverageBaseQuality: Int): Boolean = {

    somaticGenotype.tumorVariantEvidence.meanMappingQuality >= minAverageBaseQuality &&
      somaticGenotype.normalReferenceEvidence.meanMappingQuality >= minAverageBaseQuality
  }

  /**
   *
   * @param genotypes RDD of genotypes to filter
   * @param minAverageBaseQuality minimum mean quality (inclusive) required in
   *                              both tumor and normal evidence
   * @param debug if true, compute the count of genotypes after filtering
   * @return Genotypes with mean average base quality >= minAverageBaseQuality
   */
  def apply(genotypes: RDD[CalledSomaticAllele],
            minAverageBaseQuality: Int,
            debug: Boolean = false): RDD[CalledSomaticAllele] = {
    val filteredGenotypes = genotypes.filter(hasMinimumAverageBaseQuality(_, minAverageBaseQuality))
    // Counting forces evaluation, so only do it when debugging.
    if (debug) SomaticGenotypeFilter.printFilterProgress(filteredGenotypes)
    filteredGenotypes
  }
}
hammerlab/guacamole
src/main/scala/org/hammerlab/guacamole/filters/somatic/SomaticAverageBaseQualityFilter.scala
Scala
apache-2.0
1,131
package jp.co.cyberagent.aeromock.template.thymeleaf

import jp.co.cyberagent.aeromock.core.bootstrap.Bootstrap

/**
 * Bootstrap hook for the Thymeleaf template module.
 *
 * @author stormcat24
 */
class ThymeleafBootstrap extends Bootstrap {

  /**
   * @inheritdoc
   *
   * Thymeleaf needs no eager initialization, so this is intentionally a no-op.
   */
  def process(): Unit = {
    // nothing to do.
  }
}
CyberAgent/aeromock
aeromock-thymeleaf/src/main/scala/jp/co/cyberagent/aeromock/template/thymeleaf/ThymeleafBootstrap.scala
Scala
mit
275
package scife.enumeration
package lzy

import scife.util.RunnableJUnit

import org.scalatest._

// Tests for ConcatFinite in its three construction modes: `equal`
// (round-robin over equally-sized enums), `fixed` (binary search over
// cumulative sizes), and `buffer` (appendable).
class ConcatFiniteTest extends FunSuite with Matchers with RunnableJUnit {

  test("Simple accesses, equal") {
    val rr = ConcatFinite.equal(Array[Finite[Int]](
      WrapArray(1, 2, 3),
      WrapArray(4, 5, 6),
      WrapArray(7, 8, 9)
    ))

    // `equal` interleaves round-robin: index i maps to enum (i mod 3).
    (0 until 9).map(
      rr(_)
    ) should be ( List(1, 4, 7, 2, 5, 8, 3, 6, 9) )
  }

  test("Simple accesses, fixed") {
    val arrays: Array[Finite[Int]] = Array(
      WrapArray(1, 2, 3),
      WrapArray(4, 5, 6),
      WrapArray(7, 8, 9)
    )

    arrays.size should be (3)

    val rr = ConcatFinite.fixed[Int](arrays)

    // `fixed` concatenates: binarySearch locates the owning sub-enum.
    for (target <- 0 until 9) {
      rr.binarySearch(target) should be (target / 3)
    }

    (0 until 9).map(
      rr.apply(_)
    ) should be ( 1 to 9 )
  }

  test("Simple accesses, fixed, one-element Enums") {
    val arrays: Array[Finite[Int]] = Array(
      WrapArray(1),
      WrapArray(4),
      WrapArray(7)
    )

    arrays.size should be (3)

    val rr = ConcatFinite.fixed[Int](arrays)

    rr.size should be (3)

    // With singleton enums, each index maps to the enum of the same index.
    for (target <- 0 until 3) {
      rr.binarySearch(target) should be (target)
    }

//    (0 until 3).map(
//      rr.apply(_)
//    ) should be ( List(1, 4, 7) )
  }

  test("Simple accesses and appending, buffer") {
    val rr = ConcatFinite.buffer(Array(
      WrapArray(1 to 30),
      WrapArray(31 to 60),
      WrapArray(61 to 90)
    ))

    for (target <- 0 to 89) {
      rr.binarySearch(target) should be (target / 30)
      rr(target) should be (target + 1)
    }

    // Appending a fourth enum extends the index range without disturbing
    // existing indices.
    rr.append(WrapArray(91 to 120))

    for (target <- 0 to 119) {
      rr.binarySearch(target) should be (target / 30)
      rr(target) should be (target + 1)
    }
  }

  test("RoundRobbin with empty array") {
    val rr = ConcatFinite.fixed[Int](Array(
    ))

    rr.size should be (0)
  }
}
kaptoxic/SciFe
src/test/scala/scife/enumeration/lzy/ConcatFiniteTest.scala
Scala
gpl-2.0
1,884
package mesosphere.marathon
package state

import org.apache.mesos.{Protos => mesos}

/**
 * Defines an IPC mode for linux tasks
 */
sealed trait IpcMode {
  // RAML string value for this mode.
  val value: String
  // Marathon protobuf representation.
  def toProto: Protos.ExtendedContainerInfo.LinuxInfo.IpcInfo.IpcMode
  // Mesos protobuf representation.
  def toMesos: mesos.LinuxInfo.IpcMode
}

object IpcMode {

  case object Private extends IpcMode {
    override val value = raml.IPCMode.Private.value
    override val toProto = Protos.ExtendedContainerInfo.LinuxInfo.IpcInfo.IpcMode.PRIVATE
    override val toMesos = mesos.LinuxInfo.IpcMode.PRIVATE
  }
  case object ShareParent extends IpcMode {
    override val value = raml.IPCMode.ShareParent.value
    override val toProto = Protos.ExtendedContainerInfo.LinuxInfo.IpcInfo.IpcMode.SHARE_PARENT
    override val toMesos = mesos.LinuxInfo.IpcMode.SHARE_PARENT
  }

  // Reverse lookup tables for the two protobuf representations. These are
  // total over the currently-defined enum values; an unknown proto value
  // would throw NoSuchElementException.
  private[this] val proto2Model: Map[Protos.ExtendedContainerInfo.LinuxInfo.IpcInfo.IpcMode, IpcMode] = Map(
    Protos.ExtendedContainerInfo.LinuxInfo.IpcInfo.IpcMode.PRIVATE -> Private,
    Protos.ExtendedContainerInfo.LinuxInfo.IpcInfo.IpcMode.SHARE_PARENT -> ShareParent
  )

  private[this] val mesos2Model: Map[mesos.LinuxInfo.IpcMode, IpcMode] = Map(
    mesos.LinuxInfo.IpcMode.PRIVATE -> Private,
    mesos.LinuxInfo.IpcMode.SHARE_PARENT -> ShareParent
  )

  def fromProto(proto: Protos.ExtendedContainerInfo.LinuxInfo.IpcInfo.IpcMode): IpcMode = proto2Model(proto)
  def fromMesos(proto: mesos.LinuxInfo.IpcMode): IpcMode = mesos2Model(proto)
}
mesosphere/marathon
src/main/scala/mesosphere/marathon/state/IpcMode.scala
Scala
apache-2.0
1,475
/*******************************************************************************
 * Copyright (c) 2019. Carl Minden
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 ******************************************************************************/
package com.anathema_roguelike
package stats.characterstats.attributes

import com.anathema_roguelike.entities.characters.Character

// The Strength attribute for a character; all behavior is inherited from
// Attribute, which is bound to the owning character.
class Strength(character: Character) extends Attribute(character) {

}
carlminden/anathema-roguelike
src/com/anathema_roguelike/stats/characterstats/attributes/Strength.scala
Scala
gpl-3.0
1,049
package com.rklaehn.abc import algebra.Eq import cats.kernel.instances.all._ import ichi.bench.Thyme import ichi.bench.Thyme.HowWarm object BinarySearchBench extends App { val th = Thyme.warmed(verbose = println, warmth = HowWarm.BenchOff) def block(f: ⇒ Unit) = f block { val as = (0 until 1000).map(_.toByte).toArray val bs = (0 until 1000).map(_.toByte).toArray def eq = Eq[Array[Byte]] def hash = Hash[Array[Byte]] th.pbenchOffWarm( "java.util.Arrays.binarySearch vs. Order[Byte]" )( th.Warm(java.util.Arrays.binarySearch(as, 0, as.length, as(17))) )( th.Warm(Searching.search(as, 0, as.length, as(17))) ) // th.pbenchOffWarm( // "java.util.Arrays.equals vs. Eq[Array[Byte]]" // )( // th.Warm(java.util.Arrays.equals(as, bs)) // )( // th.Warm(eq.eqv(as, bs)) // ) // // th.pbenchOffWarm( // "java.util.Arrays.hashCode vs. Hash[Array[Byte]]" // )( // th.Warm(java.util.Arrays.hashCode(as)) // )( // th.Warm(hash.hash(as)) // ) } block { val as = (0 until 1000).map(_.toShort).toArray val bs = (0 until 1000).map(_.toShort).toArray def eq = Eq[Array[Short]] def hash = Hash[Array[Short]] th.pbenchOffWarm( "java.util.Arrays.binarySearch vs. Order[Short]" )( th.Warm(java.util.Arrays.binarySearch(as, 0, as.length, as(17))) )( th.Warm(Searching.search(as, 0, as.length, as(17))) ) // th.pbenchOffWarm( // "java.util.Arrays.equals vs. Eq[Array[Short]]" // )( // th.Warm(java.util.Arrays.equals(as, bs)) // )( // th.Warm(eq.eqv(as, bs)) // ) // // th.pbenchOffWarm( // "java.util.Arrays.hashCode vs. Hash[Array[Short]]" // )( // th.Warm(java.util.Arrays.hashCode(as)) // )( // th.Warm(hash.hash(as)) // ) } block { val as = (0 until 1000).map(_.toInt).toArray val bs = (0 until 1000).map(_.toInt).toArray def eq = Eq[Array[Int]] def hash = Hash[Array[Int]] th.pbenchOffWarm( "java.util.Arrays.binarySearch vs. Order[Int]" )( th.Warm(java.util.Arrays.binarySearch(as, 0, as.length, as(17))) )( th.Warm(Searching.search(as, 0, as.length, as(17))) ) // th.pbenchOffWarm( // "java.util.Arrays.equals vs. 
Eq[Array[Int]]" // )( // th.Warm(java.util.Arrays.equals(as, bs)) // )( // th.Warm(eq.eqv(as, bs)) // ) // // th.pbenchOffWarm( // "java.util.Arrays.hashCode vs. Hash[Array[Int]]" // )( // th.Warm(java.util.Arrays.hashCode(as)) // )( // th.Warm(hash.hash(as)) // ) } block { val as = (0 until 1000).map(_.toLong).toArray val bs = (0 until 1000).map(_.toLong).toArray def eq = Eq[Array[Long]] def hash = Hash[Array[Long]] th.pbenchOffWarm( "java.util.Arrays.binarySearch vs. Order[Long]" )( th.Warm(java.util.Arrays.binarySearch(as, 0, as.length, as(17))) )( th.Warm(Searching.search(as, 0, as.length, as(17))) ) // th.pbenchOffWarm( // "java.util.Arrays.equals vs. Eq[Array[Long]]" // )( // th.Warm(java.util.Arrays.equals(as, bs)) // )( // th.Warm(eq.eqv(as, bs)) // ) // // th.pbenchOffWarm( // "java.util.Arrays.hashCode vs. Hash[Array[Long]]" // )( // th.Warm(java.util.Arrays.hashCode(as)) // )( // th.Warm(hash.hash(as)) // ) } }
rklaehn/abc
thymeBenchmarks/src/main/scala/com/rklaehn/abc/BinarySearchBench.scala
Scala
apache-2.0
3,478
/* * ****************************************************************************** * Copyright 2012-2013 SpotRight * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ****************************************************************************** */ package com.spotright.polidoro import org.specs2.mutable._ import com.spotright.polidoro.model._ import com.spotright.polidoro.serialization.CompStr2 class TestGet extends SpecificationWithJUnit with AstyanaxTestPool { sequential import CompositeFactory.CF import SpotAsty.DemoFG._ "Polidoro" should { "get" in { (cfUsers \\ "abc" \\ "age").execute{_.putValue(3, null)} (cfUsers \\ "abc" \\ "age").get must beSome } "not get" in { (cfUsers \\ "xyz" \\ "age").get must beNone } "get composite" in { SpotAsty.batch( List( Insert(scfClusters \\ "Alpha" \\ (CF("State", "CO"), "")) ) ) val got = (scfClusters \\ "Alpha" \\ CF("State", "CO")) getWith CompStr2.serdes got must beSome } "not get composite" in { val got = (scfClusters \\ "Alpha" \\ CF("State", "TX")) getWith CompStr2.serdes got must beNone } } }
SpotRight/Polidoro
src/test/scala/com/spotright/polidoro/TestGet.scala
Scala
apache-2.0
1,722
package com.phosphene.kafkastorm.storm import backtype.storm.topology.{BasicOutputCollector, OutputFieldsDeclarer} import backtype.storm.tuple.{Fields, Tuple, Values} import com.phosphene.avro.Stashy import com.twitter.bijection.avro.SpecificAvroCodecs import com.twitter.bijection.Injection import org.apache.avro.specific.SpecificRecordBase import org.mockito.Matchers._ import org.mockito.Mockito.{when => mwhen, _} import org.scalatest.{FunSpec, GivenWhenThen, Matchers} import org.scalatest.mock.MockitoSugar import scala.concurrent.duration._ class StashyTypeFilterBoltSpec extends FunSpec with Matchers with GivenWhenThen with MockitoSugar { private val AnyStashy = new Stashy("ANY_message_1", "ANY_version_1",1234.seconds.toSeconds, "ANYstring", "ANYstring") describe("A StashyTypeFilterBolt") { it("should read incoming tuples") { Given("no bolt") When("I create a StashyTypeFilterBolt bolt ") val bolt = new StashyTypeFilterBolt And("the bolt receives a tuple") val tuple = mock[Tuple] mwhen(tuple.getValueByField(anyString)).thenReturn(AnyStashy, Nil: _*) val collector = mock[BasicOutputCollector] bolt.execute(tuple, collector) Then("the bolt should read the field 'pojo' from the tuple") verify(tuple, times(1)).getValueByField("pojo") } it("should receive pojos and send new pojos to downstream bolts") { Given("a bolt of type StashyTypeFilterBolt") val bolt = new StashyTypeFilterBolt And("a Stashy pojo") val tuple = mock[Tuple] mwhen(tuple.getValueByField(anyString)).thenReturn(AnyStashy, Nil: _*) When("the bolt receives the Stashy pojo") val collector = mock[BasicOutputCollector] bolt.execute(tuple, collector) Then("the bolt should send the a Stashy to downstream bolts") verify(collector, times(1)).emit(new Values(AnyStashy)) } it("should skip over tuples for which reading fails") { Given("a bolt") val bolt = new StashyTypeFilterBolt And("a tuple from which one cannot read") val tuple = mock[Tuple] mwhen(tuple.getValueByField(anyString)).thenReturn(null, Nil: _*) 
When("the bolt receives the tuple") val collector = mock[BasicOutputCollector] bolt.execute(tuple, collector) Then("the bolt should not send any data to downstream bolts") verifyZeroInteractions(collector) } } }
phosphene/kafka-storm-test-demo
src/test/scala/com/phosphene/kafkastorm/storm/StashyTypeFilterBoltSpec.scala
Scala
apache-2.0
2,433
/*
 * Copyright 2007-2010 WorldWide Conferencing, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.liftweb {
package mapper {

import _root_.org.specs._
import _root_.org.specs.runner.JUnit3
import _root_.org.specs.runner.ConsoleRunner
import _root_.net.liftweb.common._
import _root_.net.liftweb.util._
import _root_.net.liftweb.http.{S,LiftSession,LiftRules}
import Helpers._

// JUnit3/console adapters so the spec can run under either harness.
class DBSpecsAsTest extends JUnit3(DBSpecs)
object DBSpecsRunner extends ConsoleRunner(DBSpecs)

// Verifies that DB query logging collects executed statements when the
// queryCollector log function is installed.
object DBSpecs extends Specification {
  val provider = DBProviders.H2MemoryProvider
  val logF = Schemifier.infoF _

  // Resets the in-memory H2 schema so each example starts from a clean
  // User table.
  def cleanup() {
    provider.setupDB
    Schemifier.destroyTables_!!(DefaultConnectionIdentifier, logF , User)
    Schemifier.schemify(true, logF, DefaultConnectionIdentifier, User)
  }

  "DB" should {
    "collect queries when queryCollector is added as logFunc" in {
      cleanup()
      DB.addLogFunc(DB.queryCollector)

      // The analyzer receives the collected (statement, duration) pairs
      // at the end of the S-scoped request.
      var statements: List[(String, Long)] = Nil
      S.addAnalyzer((r,t,ss) => statements=ss)

      val session = new LiftSession("hello", "", Empty)

      val elwood = S.initIfUninitted(session) {
        val r = User.find(By(User.firstName, "Elwood")).open_!
        S.queryLog.size must_== 1
        r
      }
      statements.size must_==1
      elwood.firstName.is must_== "Elwood"
    }
  }
}
}
}
wsaccaco/lift
framework/lift-persistence/lift-mapper/src/test/scala/net/liftweb/mapper/DBSpecs.scala
Scala
apache-2.0
1,903
package mil.nga.giat.geowave.analytics.spark

import mil.nga.giat.geowave.core.store.config.ConfigUtils
import mil.nga.giat.geowave.core.store.operations.remote.options.DataStorePluginOptions

/**
 * Carries the GeoWave connection settings needed by the Spark analytics
 * layer: the raw store parameter map plus the store name and table namespace.
 */
class GeoWaveContext(
    val storeParameters: java.util.Map[String, String],
    val dataStoreName: String,
    val tableNameSpace: String) {
}

object GeoWaveContext {

  /** Builds a context from plugin options, flattening them to a parameter map. */
  def apply(dataStoreOptions: DataStorePluginOptions,
            dataStoreName: String,
            tableNameSpace: String): GeoWaveContext = {
    val parameters = dataStoreOptions.getOptionsAsMap
    new GeoWaveContext(parameters, dataStoreName, tableNameSpace)
  }
}
dcy2003/geowave
analytics/spark/src/main/scala/mil/nga/giat/geowave/analytics/spark/GeoWaveContext.scala
Scala
apache-2.0
593
/*
 * Copyright 2014 Treode, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.treode.async.misc

import org.scalatest.FlatSpec

// Covers parseUnsignedLong: hex with "0x" or "#" prefixes, octal via a
// leading zero, plain decimal, full 64-bit wraparound to -1, and rejection
// of values wider than 64 bits.
class PackageSpec extends FlatSpec {

  "parseUnsignedLong" should "work" in {
    assertResult (Some (0xFECDB005DA3B9A60L)) (parseUnsignedLong ("0xFECDB005DA3B9A60"))
    assertResult (Some (0xFECDB005DA3B9A60L)) (parseUnsignedLong ("#FECDB005DA3B9A60"))
    // Leading zero selects octal: 0100 (octal) == 64.
    assertResult (Some (64)) (parseUnsignedLong ("0100"))
    assertResult (Some (10L)) (parseUnsignedLong ("10"))
    // All-ones unsigned value maps onto the signed representation -1.
    assertResult (Some (-1)) (parseUnsignedLong ("0xFFFFFFFFFFFFFFFF"))
    // 65-bit input must be rejected.
    assertResult (None) (parseUnsignedLong ("0x1FFFFFFFFFFFFFFFF"))
  }}
Treode/store
core/test/com/treode/async/misc/PackageSpec.scala
Scala
apache-2.0
1,175
package com.github.swwjf.errorhandling

import com.fasterxml.jackson.annotation.JsonProperty

// JSON error payload; serialized by Jackson with the field exposed as
// "error_message".
private[errorhandling] case class ErrorDTO(@JsonProperty("error_message") errorMessage: String)
andrei-l/scala-webapp-with-java-frameworks
webservices/src/main/scala/com/github/swwjf/errorhandling/ErrorDTO.scala
Scala
mit
189
import scala.collection.mutable.{HashMap, ListBuffer} import DataPipeline._ import Tree._ import scala.util.Random object Driver{ val dirPath = "C:/Users/Brent/Documents/School/DataPrac/FinalData/15_min/" val tournamentSize = 3 val ran = new Random(System.currentTimeMillis()) val populationSize = 20 val maxDepth = 5 val maxGenerations = 50 def importFile(year: Int): List[HashMap[String, Double]] = { DataPipeline.readFile(dirPath + year.toString + "-1.csv") ++: DataPipeline.readFile(dirPath + year.toString + "-2.csv") } def importData(start: Int, end: Int): List[HashMap[String, Double]] = { if (start <= end) { importFile(start) ++: importData(start+1, end) } else List[HashMap[String, Double]]() } def findBest(pop: ListBuffer[Tree], data: List[HashMap[String, Double]]): Tree = { pop.foldLeft(pop.head) { (t1: Tree, t2: Tree) => val f1 = findAverageFitness(t1, data) val f2 = findAverageFitness(t2, data) if ((f1 < f2 && !f1.isNaN) || f2.isNaN) t1 //println("\\nt1: " ++ Tree.toString(t1)) //println("f1: " ++ f1.toString) //println("t2: " ++ Tree.toString(t2)) //println("f2: " ++ f2.toString ++ "\\n") //println("f1 NaN: " ++ (f1.isNaN).toString) //println("f2 NaN: " ++ (f2.isNaN).toString) if ((f1 < f2 && !f1.isNaN && f1 != 0.0) || f2.isNaN) t1 else t2 } } def runAux(pop: ListBuffer[Tree], data: List[HashMap[String, Double]], tourSize: Int, maxGen: Int, curGen: Int, ran: Random): Tree = { println("\\n" ++ curGen.toString) /*pop.foldLeft(pop.head)( (t1, t2) => { println(Tree.toString(t1)) println(Tree.findAverageFitness(t1, data)) t2})*/ if (curGen <= maxGen) { if (curGen % 10 == 0) { val bb = findBest(pop, data) println(Tree.toString(bb)) println(Tree.findAverageFitness(bb, data)) } //if val nextGen: ListBuffer[Tree] = nextGeneration(pop, tourSize, data, ran) runAux(nextGen, data, tourSize, maxGen, curGen+1, ran) } else findBest(pop, data) } def run(popSize: Int, maxDepth: Int, data: List[HashMap[String, Double]], tourSize: Int, maxGen: Int, ran: Random): Tree = { val pop 
= initializePopulation(popSize, maxDepth, ran) runAux(pop, data, tourSize, maxGen, 0, ran) } def main(args:Array[String]):Unit={ val data = importData(2016, 2016).filter( inst => !inst.isEmpty ) val result = run(populationSize, maxDepth, data, tournamentSize, maxGenerations, ran) println(Tree.toString(result)) println(findAverageFitness(result, data)) } }
snakes-in-the-box/galapagos
src/main/scala/Driver.scala
Scala
mit
2,702
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources.parquet import org.apache.hadoop.fs.{FileSystem, Path} import org.apache.spark.SparkException import org.apache.spark.sql.QueryTest import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession class ParquetFileFormatSuite extends QueryTest with ParquetTest with SharedSparkSession { test("read parquet footers in parallel") { def testReadFooters(ignoreCorruptFiles: Boolean): Unit = { withTempDir { dir => val fs = FileSystem.get(spark.sessionState.newHadoopConf()) val basePath = dir.getCanonicalPath val path1 = new Path(basePath, "first") val path2 = new Path(basePath, "second") val path3 = new Path(basePath, "third") spark.range(1).toDF("a").coalesce(1).write.parquet(path1.toString) spark.range(1, 2).toDF("a").coalesce(1).write.parquet(path2.toString) spark.range(2, 3).toDF("a").coalesce(1).write.json(path3.toString) val fileStatuses = Seq(fs.listStatus(path1), fs.listStatus(path2), fs.listStatus(path3)).flatten val footers = ParquetFileFormat.readParquetFootersInParallel( spark.sessionState.newHadoopConf(), fileStatuses, ignoreCorruptFiles) assert(footers.size == 2) } } testReadFooters(true) val exception = 
intercept[SparkException] { testReadFooters(false) }.getCause assert(exception.getMessage().contains("Could not read footer for file")) } }
jkbradley/spark
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormatSuite.scala
Scala
apache-2.0
2,317
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.runtime.batch.sql import org.apache.calcite.runtime.SqlFunctions.{internalToTimestamp => toTimestamp} import org.apache.flink.api.scala._ import org.apache.flink.api.scala.util.CollectionDataSets import org.apache.flink.table.api.scala._ import org.apache.flink.table.functions.aggfunctions.CountAggFunction import org.apache.flink.table.runtime.utils.JavaUserDefinedAggFunctions.WeightedAvgWithMergeAndReset import org.apache.flink.table.runtime.utils.TableProgramsCollectionTestBase import org.apache.flink.table.runtime.utils.TableProgramsTestBase.TableConfigMode import org.apache.flink.table.utils.NonMergableCount import org.apache.flink.test.util.TestBaseUtils import org.apache.flink.types.Row import org.junit._ import org.junit.runner.RunWith import org.junit.runners.Parameterized import scala.collection.JavaConverters._ @RunWith(classOf[Parameterized]) class AggregateITCase( configMode: TableConfigMode) extends TableProgramsCollectionTestBase(configMode) { @Test def testAggregationTypes(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT sum(_1), min(_1), max(_1), count(_1), 
avg(_1) FROM MyTable" val ds = CollectionDataSets.get3TupleDataSet(env) tEnv.createTemporaryView("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery) val expected = "231,1,21,21,11" val results = result.toDataSet[Row].collect() TestBaseUtils.compareResultAsText(results.asJava, expected) } @Test def testTableAggregation(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT sum(_1) FROM MyTable" val ds = CollectionDataSets.get3TupleDataSet(env).toTable(tEnv) tEnv.registerTable("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery) val expected = "231" val results = result.toDataSet[Row].collect() TestBaseUtils.compareResultAsText(results.asJava, expected) } @Test def testDataSetAggregation(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT sum(_1) FROM MyTable" val ds = CollectionDataSets.get3TupleDataSet(env) tEnv.createTemporaryView("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery) val expected = "231" val results = result.toDataSet[Row].collect() TestBaseUtils.compareResultAsText(results.asJava, expected) } @Test def testAggregationDataTypes(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT avg(a), avg(b), avg(c), avg(d), avg(e), avg(f), count(g), " + "min(g), min('Ciao'), max(g), max('Ciao'), sum(CAST(f AS DECIMAL)) FROM MyTable" val ds = env.fromElements( (1: Byte, 1: Short, 1, 1L, 1.0f, 1.0d, "Hello"), (2: Byte, 2: Short, 2, 2L, 2.0f, 2.0d, "Ciao")).toTable(tEnv, 'a, 'b, 'c, 'd, 'e, 'f, 'g) tEnv.registerTable("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery) val expected = "1,1,1,1,1.5,1.5,2,Ciao,Ciao,Hello,Ciao,3.0" val results = result.toDataSet[Row].collect() TestBaseUtils.compareResultAsText(results.asJava, expected) } @Test def testTableProjection(): Unit = { val env = 
ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT avg(a), sum(a), count(a), avg(b), sum(b) " + "FROM MyTable" val ds = env.fromElements((1: Byte, 1: Short), (2: Byte, 2: Short)).toTable(tEnv, 'a, 'b) tEnv.registerTable("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery) val expected = "1,3,2,1,3" val results = result.toDataSet[Row].collect() TestBaseUtils.compareResultAsText(results.asJava, expected) } @Test def testTableAggregationWithArithmetic(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT avg(a + 2) + 2, count(b) + 5 " + "FROM MyTable" val ds = env.fromElements((1f, "Hello"), (2f, "Ciao")).toTable(tEnv, 'a, 'b) tEnv.registerTable("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery) val expected = "5.5,7" val results = result.toDataSet[Row].collect() TestBaseUtils.compareResultAsText(results.asJava, expected) } @Test def testAggregationWithTwoCount(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT count(_1), count(_2) FROM MyTable" val ds = env.fromElements((1f, "Hello"), (2f, "Ciao")).toTable(tEnv) tEnv.registerTable("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery) val expected = "2,2" val results = result.toDataSet[Row].collect() TestBaseUtils.compareResultAsText(results.asJava, expected) } @Test def testAggregationAfterProjection(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT avg(a), sum(b), count(c) FROM " + "(SELECT _1 as a, _2 as b, _3 as c FROM MyTable)" val ds = env.fromElements( (1: Byte, 1: Short, 1, 1L, 1.0f, 1.0d, "Hello"), (2: Byte, 2: Short, 2, 2L, 2.0f, 2.0d, "Ciao")).toTable(tEnv) tEnv.registerTable("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery) val expected = "1,3,2" val results = 
result.toDataSet[Row].collect() TestBaseUtils.compareResultAsText(results.asJava, expected) } @Test def testDistinctAggregate(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT sum(_1) as a, count(distinct _3) as b FROM MyTable" val ds = CollectionDataSets.get3TupleDataSet(env).toTable(tEnv) tEnv.registerTable("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery) val expected = "231,21" val results = result.toDataSet[Row].collect() TestBaseUtils.compareResultAsText(results.asJava, expected) } @Test def testGroupedDistinctAggregate(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT _2, avg(distinct _1) as a, count(_3) as b FROM MyTable GROUP BY _2" val ds = CollectionDataSets.get3TupleDataSet(env).toTable(tEnv) tEnv.registerTable("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery) val expected = "6,18,6\\n5,13,5\\n4,8,4\\n3,5,3\\n2,2,2\\n1,1,1" val results = result.toDataSet[Row].collect() TestBaseUtils.compareResultAsText(results.asJava, expected) } @Test def testGroupingSetAggregate(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT _2, _3, avg(_1) as a, GROUP_ID() as g FROM MyTable GROUP BY GROUPING SETS (_2, _3)" val ds = CollectionDataSets.get3TupleDataSet(env) tEnv.createTemporaryView("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery).toDataSet[Row].collect() val expected = "1,Hi,1,0\\n" + "2,Hello world,3,0\\n" + "2,Hello,2,0\\n" + "3,Hello world, how are you?,4,0\\n" + "3,I am fine.,5,0\\n" + "3,Luke Skywalker,6,0\\n" + "4,Comment#1,7,0\\n" + "4,Comment#2,8,0\\n" + "4,Comment#3,9,0\\n" + "4,Comment#4,10,0\\n" + "5,Comment#5,11,0\\n" + "5,Comment#6,12,0\\n" + "5,Comment#7,13,0\\n" + "5,Comment#8,14,0\\n" + "5,Comment#9,15,0\\n" + "6,Comment#10,16,0\\n" + "6,Comment#11,17,0\\n" + 
"6,Comment#12,18,0\\n" + "6,Comment#13,19,0\\n" + "6,Comment#14,20,0\\n" + "6,Comment#15,21,0" TestBaseUtils.compareResultAsText(result.asJava, expected) } @Test def testAggregateEmptyDataSets(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val myAgg = new NonMergableCount tEnv.registerFunction("myAgg", myAgg) val sqlQuery = "SELECT avg(a), sum(a), count(b) " + "FROM MyTable where a = 4 group by a" val sqlQuery2 = "SELECT avg(a), sum(a), count(b) " + "FROM MyTable where a = 4" val sqlQuery3 = "SELECT avg(a), sum(a), count(b) " + "FROM MyTable" val sqlQuery4 = "SELECT avg(a), sum(a), count(b), myAgg(b)" + "FROM MyTable where a = 4" val ds = env.fromElements( (1: Byte, 1: Short), (2: Byte, 2: Short)) .toTable(tEnv, 'a, 'b) tEnv.registerTable("MyTable", ds) val result = tEnv.sqlQuery(sqlQuery) val result2 = tEnv.sqlQuery(sqlQuery2) val result3 = tEnv.sqlQuery(sqlQuery3) val result4 = tEnv.sqlQuery(sqlQuery4) val results = result.toDataSet[Row].collect() val expected = Seq.empty val results2 = result2.toDataSet[Row].collect() val expected2 = "null,null,0" val results3 = result3.toDataSet[Row].collect() val expected3 = "1,3,2" val results4 = result4.toDataSet[Row].collect() val expected4 = "null,null,0,0" assert(results.equals(expected), "Empty result is expected for grouped set, but actual: " + results) TestBaseUtils.compareResultAsText(results2.asJava, expected2) TestBaseUtils.compareResultAsText(results3.asJava, expected3) TestBaseUtils.compareResultAsText(results4.asJava, expected4) } @Test def testTumbleWindowAggregate(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) tEnv.registerFunction("countFun", new CountAggFunction) tEnv.registerFunction("wAvgWithMergeAndReset", new WeightedAvgWithMergeAndReset) val sqlQuery = "SELECT b, SUM(a), countFun(c), wAvgWithMergeAndReset(b, a), wAvgWithMergeAndReset(a, a)" + "FROM T " + "GROUP 
BY b, TUMBLE(ts, INTERVAL '3' SECOND)" val ds = CollectionDataSets.get3TupleDataSet(env) // create timestamps .map(x => (x._1, x._2, x._3, toTimestamp(x._1 * 1000))) tEnv.createTemporaryView("T", ds, 'a, 'b, 'c, 'ts) val result = tEnv.sqlQuery(sqlQuery).toDataSet[Row].collect() val expected = Seq( "1,1,1,1,1", "2,2,1,2,2", "2,3,1,2,3", "3,9,2,3,4", "3,6,1,3,6", "4,15,2,4,7", "4,19,2,4,9", "5,11,1,5,11", "5,39,3,5,13", "5,15,1,5,15", "6,33,2,6,16", "6,57,3,6,19", "6,21,1,6,21" ).mkString("\\n") TestBaseUtils.compareResultAsText(result.asJava, expected) } @Test def testTumbleWindowAggregateWithCollect(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT b, COLLECT(b)" + "FROM T " + "GROUP BY b, TUMBLE(ts, INTERVAL '3' SECOND)" val ds = CollectionDataSets.get3TupleDataSet(env) // create timestamps .map(x => (x._1, x._2, x._3, toTimestamp(x._1 * 1000))) tEnv.createTemporaryView("T", ds, 'a, 'b, 'c, 'ts) val result = tEnv.sqlQuery(sqlQuery).toDataSet[Row].collect() val expected = Seq( "1,{1=1}", "2,{2=1}", "2,{2=1}", "3,{3=1}", "3,{3=2}", "4,{4=2}", "4,{4=2}", "5,{5=1}", "5,{5=1}", "5,{5=3}", "6,{6=1}", "6,{6=2}", "6,{6=3}" ).mkString("\\n") TestBaseUtils.compareResultAsText(result.asJava, expected) } @Test def testTumbleWindowAggregateWithCollectUnnest(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val ds = CollectionDataSets.get3TupleDataSet(env) // create timestamps .map(x => (x._1, x._2, x._3, toTimestamp(x._1 * 1000))) tEnv.createTemporaryView("t1", ds, 'a, 'b, 'c, 'ts) val t2 = tEnv.sqlQuery("SELECT b, COLLECT(b) as `set`" + "FROM t1 " + "GROUP BY b, TUMBLE(ts, INTERVAL '3' SECOND)") tEnv.registerTable("t2", t2) val result = tEnv.sqlQuery("SELECT b, s FROM t2, UNNEST(t2.`set`) AS A(s) where b < 3") .toDataSet[Row] .collect() val expected = Seq( "1,1", "2,2", "2,2" ).mkString("\\n") 
TestBaseUtils.compareResultAsText(result.asJava, expected) } @Test def testTumbleWindowWithProperties(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT b, COUNT(a), " + "TUMBLE_START(ts, INTERVAL '5' SECOND), " + "TUMBLE_END(ts, INTERVAL '5' SECOND), " + "TUMBLE_ROWTIME(ts, INTERVAL '5' SECOND)" + "FROM T " + "GROUP BY b, TUMBLE(ts, INTERVAL '5' SECOND)" val ds = CollectionDataSets.get3TupleDataSet(env) // min time unit is seconds .map(x => (x._1, x._2, x._3, toTimestamp(x._1 * 1000))) tEnv.createTemporaryView("T", ds, 'a, 'b, 'c, 'ts) val result = tEnv.sqlQuery(sqlQuery).toDataSet[Row].collect() val expected = Seq( "1,1,1970-01-01 00:00:00.0,1970-01-01 00:00:05.0,1970-01-01 00:00:04.999", "2,2,1970-01-01 00:00:00.0,1970-01-01 00:00:05.0,1970-01-01 00:00:04.999", "3,1,1970-01-01 00:00:00.0,1970-01-01 00:00:05.0,1970-01-01 00:00:04.999", "3,2,1970-01-01 00:00:05.0,1970-01-01 00:00:10.0,1970-01-01 00:00:09.999", "4,3,1970-01-01 00:00:05.0,1970-01-01 00:00:10.0,1970-01-01 00:00:09.999", "4,1,1970-01-01 00:00:10.0,1970-01-01 00:00:15.0,1970-01-01 00:00:14.999", "5,4,1970-01-01 00:00:10.0,1970-01-01 00:00:15.0,1970-01-01 00:00:14.999", "5,1,1970-01-01 00:00:15.0,1970-01-01 00:00:20.0,1970-01-01 00:00:19.999", "6,4,1970-01-01 00:00:15.0,1970-01-01 00:00:20.0,1970-01-01 00:00:19.999", "6,2,1970-01-01 00:00:20.0,1970-01-01 00:00:25.0,1970-01-01 00:00:24.999" ).mkString("\\n") TestBaseUtils.compareResultAsText(result.asJava, expected) } @Test def testHopWindowAggregate(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) tEnv.registerFunction("countFun", new CountAggFunction) tEnv.registerFunction("wAvgWithMergeAndReset", new WeightedAvgWithMergeAndReset) val sqlQuery = "SELECT b, SUM(a), countFun(c), wAvgWithMergeAndReset(b, a), wAvgWithMergeAndReset(a, a)" + "FROM T " + "GROUP BY b, HOP(ts, INTERVAL '2' SECOND, 
INTERVAL '4' SECOND)" val ds = CollectionDataSets.get3TupleDataSet(env) // create timestamps .map(x => (x._1, x._2, x._3, toTimestamp(x._1 * 1000))) tEnv.createTemporaryView("T", ds, 'a, 'b, 'c, 'ts) val result = tEnv.sqlQuery(sqlQuery).toDataSet[Row].collect() val expected = Seq( "1,1,1,1,1","1,1,1,1,1", "2,5,2,2,2","2,5,2,2,2", "3,9,2,3,4", "3,15,3,3,5", "3,6,1,3,6", "4,7,1,4,7", "4,24,3,4,8", "4,27,3,4,9", "4,10,1,4,10", "5,11,1,5,11", "5,36,3,5,12", "5,54,4,5,13", "5,29,2,5,14", "6,33,2,6,16", "6,70,4,6,17", "6,78,4,6,19", "6,41,2,6,20" ).mkString("\\n") TestBaseUtils.compareResultAsText(result.asJava, expected) } @Test def testHopWindowWithProperties(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT b, COUNT(a), " + "HOP_START(ts, INTERVAL '5' SECOND, INTERVAL '10' SECOND), " + "HOP_END(ts, INTERVAL '5' SECOND, INTERVAL '10' SECOND), " + "HOP_ROWTIME(ts, INTERVAL '5' SECOND, INTERVAL '10' SECOND) " + "FROM T " + "GROUP BY b, HOP(ts, INTERVAL '5' SECOND, INTERVAL '10' SECOND)" val ds = CollectionDataSets.get3TupleDataSet(env) // create timestamps .map(x => (x._1, x._2, x._3, toTimestamp(x._1 * 1000))) tEnv.createTemporaryView("T", ds, 'a, 'b, 'c, 'ts) val result = tEnv.sqlQuery(sqlQuery).toDataSet[Row].collect() val expected = Seq( "1,1,1969-12-31 23:59:55.0,1970-01-01 00:00:05.0,1970-01-01 00:00:04.999", "2,2,1969-12-31 23:59:55.0,1970-01-01 00:00:05.0,1970-01-01 00:00:04.999", "3,1,1969-12-31 23:59:55.0,1970-01-01 00:00:05.0,1970-01-01 00:00:04.999", "1,1,1970-01-01 00:00:00.0,1970-01-01 00:00:10.0,1970-01-01 00:00:09.999", "2,2,1970-01-01 00:00:00.0,1970-01-01 00:00:10.0,1970-01-01 00:00:09.999", "3,3,1970-01-01 00:00:00.0,1970-01-01 00:00:10.0,1970-01-01 00:00:09.999", "4,3,1970-01-01 00:00:00.0,1970-01-01 00:00:10.0,1970-01-01 00:00:09.999", "3,2,1970-01-01 00:00:05.0,1970-01-01 00:00:15.0,1970-01-01 00:00:14.999", "4,4,1970-01-01 00:00:05.0,1970-01-01 
00:00:15.0,1970-01-01 00:00:14.999", "5,4,1970-01-01 00:00:05.0,1970-01-01 00:00:15.0,1970-01-01 00:00:14.999", "4,1,1970-01-01 00:00:10.0,1970-01-01 00:00:20.0,1970-01-01 00:00:19.999", "5,5,1970-01-01 00:00:10.0,1970-01-01 00:00:20.0,1970-01-01 00:00:19.999", "6,4,1970-01-01 00:00:10.0,1970-01-01 00:00:20.0,1970-01-01 00:00:19.999", "5,1,1970-01-01 00:00:15.0,1970-01-01 00:00:25.0,1970-01-01 00:00:24.999", "6,6,1970-01-01 00:00:15.0,1970-01-01 00:00:25.0,1970-01-01 00:00:24.999", "6,2,1970-01-01 00:00:20.0,1970-01-01 00:00:30.0,1970-01-01 00:00:29.999" ).mkString("\\n") TestBaseUtils.compareResultAsText(result.asJava, expected) } @Test def testSessionWindowAggregate(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) tEnv.registerFunction("countFun", new CountAggFunction) tEnv.registerFunction("wAvgWithMergeAndReset", new WeightedAvgWithMergeAndReset) val sqlQuery = "SELECT MIN(a), MAX(a), SUM(a), countFun(c), wAvgWithMergeAndReset(b, a), " + "wAvgWithMergeAndReset(a, a)" + "FROM T " + "GROUP BY SESSION(ts, INTERVAL '4' SECOND)" val ds = CollectionDataSets.get3TupleDataSet(env) // create timestamps .filter(x => (x._2 % 2) == 0) .map(x => (x._1, x._2, x._3, toTimestamp(x._1 * 1000))) tEnv.createTemporaryView("T", ds, 'a, 'b, 'c, 'ts) val result = tEnv.sqlQuery(sqlQuery).toDataSet[Row].collect() val expected = Seq( "2,10,39,6,3,7", "16,21,111,6,6,18" ).mkString("\\n") TestBaseUtils.compareResultAsText(result.asJava, expected) } @Test def testSessionWindowWithProperties(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlQuery = "SELECT COUNT(a), " + "SESSION_START(ts, INTERVAL '4' SECOND), " + "SESSION_END(ts, INTERVAL '4' SECOND), " + "SESSION_ROWTIME(ts, INTERVAL '4' SECOND) " + "FROM T " + "GROUP BY SESSION(ts, INTERVAL '4' SECOND)" val ds = CollectionDataSets.get3TupleDataSet(env) // create timestamps .filter(x => (x._2 
% 2) == 0) .map(x => (x._1, x._2, x._3, toTimestamp(x._1 * 1000))) tEnv.createTemporaryView("T", ds, 'a, 'b, 'c, 'ts) val result = tEnv.sqlQuery(sqlQuery).toDataSet[Row].collect() val expected = Seq( "6,1970-01-01 00:00:02.0,1970-01-01 00:00:14.0,1970-01-01 00:00:13.999", "6,1970-01-01 00:00:16.0,1970-01-01 00:00:25.0,1970-01-01 00:00:24.999" ).mkString("\\n") TestBaseUtils.compareResultAsText(result.asJava, expected) } @Test def testMultipleDistinctWithDiffParams(): Unit = { val env = ExecutionEnvironment.getExecutionEnvironment val tEnv = BatchTableEnvironment.create(env, config) val sqlWithNull = "SELECT a, " + " CASE WHEN b = 2 THEN null ELSE b END AS b, " + " c FROM MyTable" val sqlQuery = "SELECT b, " + " COUNT(DISTINCT b), " + " SUM(DISTINCT (a / 3)), " + " COUNT(DISTINCT SUBSTRING(c FROM 1 FOR 2))," + " COUNT(DISTINCT c) " + "FROM (" + sqlWithNull + ") GROUP BY b " + "ORDER BY b" val t = CollectionDataSets.get3TupleDataSet(env).toTable(tEnv).as('a, 'b, 'c) tEnv.registerTable("MyTable", t) val result = tEnv.sqlQuery(sqlQuery).toDataSet[Row].collect() val expected = Seq( "1,1,0,1,1", "3,1,3,3,3", "4,1,5,1,4", "5,1,12,1,5", "6,1,18,1,6", "null,0,1,1,2" ).mkString("\\n") TestBaseUtils.compareResultAsText(result.asJava, expected) } }
bowenli86/flink
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/AggregateITCase.scala
Scala
apache-2.0
21,331
package com.twitter.finagle.thrift /** * ThriftChannel decoder: this simply converts the underlying * ChannelBuffers (which have been deframed) into byte arrays. */ import org.jboss.netty.channel.{ChannelHandlerContext, Channel} import org.jboss.netty.buffer.ChannelBuffer import org.jboss.netty.handler.codec.oneone.OneToOneDecoder private[thrift] class ThriftChannelBufferDecoder extends OneToOneDecoder { def decode(ctx: ChannelHandlerContext, ch: Channel, message: Object) = { message match { case buffer: ChannelBuffer => buffer.array() // is this kosher? case _ => throw new IllegalArgumentException("no byte buffer") } } }
enachb/finagle_2.9_durgh
finagle-thrift/src/main/scala/com/twitter/finagle/thrift/ThriftChannelBufferDecoder.scala
Scala
apache-2.0
661
/* * Odessa State environmental University * Copyright (C) 2014 */ package ua.edu.odeku.ceem.mapRadar.tools.radar.airspace import gov.nasa.worldwind.render.airspaces.editor.SphereAirspaceEditor /** * * Created by Aleo on 23.04.2014. */ class RadarAirspaceEditor(controlPointRender: RadarAirspaceControlPointRenderer) extends SphereAirspaceEditor(controlPointRender)
aleo72/ww-ceem-radar
src/main/scala/ua/edu/odeku/ceem/mapRadar/tools/radar/airspace/RadarAirspaceEditor.scala
Scala
apache-2.0
375
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.hive.orc import java.io.File import org.apache.spark.sql.{AnalysisException, Row} import org.apache.spark.sql.TestingUDT.{IntervalData, IntervalUDT} import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.execution.datasources.orc.OrcSuite import org.apache.spark.sql.hive.test.TestHiveSingleton import org.apache.spark.sql.internal.HiveSerDe import org.apache.spark.sql.types._ import org.apache.spark.util.Utils class HiveOrcSourceSuite extends OrcSuite with TestHiveSingleton { override val orcImp: String = "hive" override def beforeAll(): Unit = { super.beforeAll() sql( s"""CREATE EXTERNAL TABLE normal_orc( | intField INT, | stringField STRING |) |STORED AS ORC |LOCATION '${orcTableAsDir.toURI}' """.stripMargin) sql( s"""INSERT INTO TABLE normal_orc |SELECT intField, stringField FROM orc_temp_table """.stripMargin) spark.sql( s"""CREATE TEMPORARY VIEW normal_orc_source |USING org.apache.spark.sql.hive.orc |OPTIONS ( | PATH '${new File(orcTableAsDir.getAbsolutePath).toURI}' |) """.stripMargin) spark.sql( s"""CREATE TEMPORARY VIEW normal_orc_as_source |USING org.apache.spark.sql.hive.orc |OPTIONS ( | PATH '${new 
File(orcTableAsDir.getAbsolutePath).toURI}' |) """.stripMargin) } test("SPARK-22972: hive orc source") { val tableName = "normal_orc_as_source_hive" withTable(tableName) { sql( s""" |CREATE TABLE $tableName |USING org.apache.spark.sql.hive.orc |OPTIONS ( | PATH '${new File(orcTableAsDir.getAbsolutePath).toURI}' |) """.stripMargin) val tableMetadata = spark.sessionState.catalog.getTableMetadata( TableIdentifier(tableName)) assert(tableMetadata.storage.inputFormat == Option("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat")) assert(tableMetadata.storage.outputFormat == Option("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat")) assert(tableMetadata.storage.serde == Option("org.apache.hadoop.hive.ql.io.orc.OrcSerde")) assert(HiveSerDe.sourceToSerDe("org.apache.spark.sql.hive.orc") .equals(HiveSerDe.sourceToSerDe("orc"))) assert(HiveSerDe.sourceToSerDe("org.apache.spark.sql.orc") .equals(HiveSerDe.sourceToSerDe("orc"))) } } test("SPARK-19459/SPARK-18220: read char/varchar column written by Hive") { val location = Utils.createTempDir() val uri = location.toURI try { hiveClient.runSqlHive("USE default") hiveClient.runSqlHive( """ |CREATE EXTERNAL TABLE hive_orc( | a STRING, | b CHAR(10), | c VARCHAR(10), | d ARRAY<CHAR(3)>) |STORED AS orc""".stripMargin) // Hive throws an exception if I assign the location in the create table statement. hiveClient.runSqlHive( s"ALTER TABLE hive_orc SET LOCATION '$uri'") hiveClient.runSqlHive( """ |INSERT INTO TABLE hive_orc |SELECT 'a', 'b', 'c', ARRAY(CAST('d' AS CHAR(3))) |FROM (SELECT 1) t""".stripMargin) // We create a different table in Spark using the same schema which points to // the same location. 
spark.sql( s""" |CREATE EXTERNAL TABLE spark_orc( | a STRING, | b CHAR(10), | c VARCHAR(10), | d ARRAY<CHAR(3)>) |STORED AS orc |LOCATION '$uri'""".stripMargin) val result = Row("a", "b ", "c", Seq("d ")) checkAnswer(spark.table("hive_orc"), result) checkAnswer(spark.table("spark_orc"), result) } finally { hiveClient.runSqlHive("DROP TABLE IF EXISTS hive_orc") hiveClient.runSqlHive("DROP TABLE IF EXISTS spark_orc") Utils.deleteRecursively(location) } } test("SPARK-24204 error handling for unsupported data types") { withTempDir { dir => val orcDir = new File(dir, "orc").getCanonicalPath // write path var msg = intercept[AnalysisException] { sql("select interval 1 days").write.mode("overwrite").orc(orcDir) }.getMessage assert(msg.contains("Cannot save interval data type into external storage.")) msg = intercept[UnsupportedOperationException] { sql("select null").write.mode("overwrite").orc(orcDir) }.getMessage assert(msg.contains("ORC data source does not support null data type.")) msg = intercept[UnsupportedOperationException] { spark.udf.register("testType", () => new IntervalData()) sql("select testType()").write.mode("overwrite").orc(orcDir) }.getMessage assert(msg.contains("ORC data source does not support calendarinterval data type.")) // read path msg = intercept[UnsupportedOperationException] { val schema = StructType(StructField("a", CalendarIntervalType, true) :: Nil) spark.range(1).write.mode("overwrite").orc(orcDir) spark.read.schema(schema).orc(orcDir).collect() }.getMessage assert(msg.contains("ORC data source does not support calendarinterval data type.")) msg = intercept[UnsupportedOperationException] { val schema = StructType(StructField("a", NullType, true) :: Nil) spark.range(1).write.mode("overwrite").orc(orcDir) spark.read.schema(schema).orc(orcDir).collect() }.getMessage assert(msg.contains("ORC data source does not support null data type.")) msg = intercept[UnsupportedOperationException] { val schema = StructType(StructField("a", new 
IntervalUDT(), true) :: Nil) spark.range(1).write.mode("overwrite").orc(orcDir) spark.read.schema(schema).orc(orcDir).collect() }.getMessage assert(msg.contains("ORC data source does not support calendarinterval data type.")) } } }
bravo-zhang/spark
sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/HiveOrcSourceSuite.scala
Scala
apache-2.0
6,833
import scalaz.Id.Id package object dielectric { type SparkOp[S, A] = SparkOpT[Id, S, A] }
adelbertc/dielectric
src/main/scala/dielectric/package.scala
Scala
apache-2.0
93
package actors

import java.util.concurrent.Executor

import actors.Scheduler.NotificationUrlChange
import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import com.cave.metrics.data._
import init.AwsWrapper
import init.AwsWrapper.WorkItem
import org.apache.commons.logging.LogFactory
import play.api.libs.json._

import scala.collection.mutable
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._

/**
 * Messages, type aliases and JSON writers for the Coordinator actor.
 */
object Coordinator {

  // Key is AlertId, Value is Alert Scheduler Actor
  type SchedulesMap = mutable.Map[String, ActorRef]

  // Key is OrgName, value is a list of Alert IDs for this Org
  type SchedulesCache = mutable.Map[String, List[String]]

  // Self-sent tick telling the Coordinator to poll the work queue.
  object CheckQueue

  // Ask the Coordinator for a snapshot of its cache and schedulers.
  object StatusRequest
  case class StatusResponse(cache: CoordinatorCache, schedulers: SchedulesMap)

  // Renders the cache as a JSON array of {organization, alerts} objects.
  implicit val cacheWrites = new Writes[CoordinatorCache] {
    def writes(obj: CoordinatorCache): JsValue = Json.arr(
      obj.schedulesByOrganization.map { case (orgName, alertList) =>
        Json.obj(
          "organization" -> orgName,
          "alerts" -> Json.toJson(alertList)
        )
      }
    )
  }

  // Only the alert IDs (map keys) are exposed; actor refs are not serializable.
  implicit val mapWrites = new Writes[SchedulesMap] {
    def writes(obj: SchedulesMap): JsValue = Json.toJson(obj.keysIterator.toList)
  }

  implicit val responseWrites = new Writes[StatusResponse] {
    def writes(obj: StatusResponse): JsValue = Json.obj(
      "cache" -> Json.toJson(obj.cache),
      "schedulers" -> Json.toJson(obj.schedulers)
    )
  }
}

import actors.Coordinator._

/**
 * Callbacks the CoordinatorCache invokes so that scheduler lifecycle and
 * queue acknowledgement stay under the Coordinator actor's control.
 */
trait CoordinatorCacheListener {
  def createScheduler(schedule: Schedule)
  def stopScheduler(scheduleId: String)
  def deleteMessage(receiptHandle: String)
  def notifyUrlChange(scheduleId: String, newUrl: String)
}

/**
 * In-memory view of enabled alerts per organization, kept in sync with
 * create/update/delete notifications arriving as WorkItems.
 *
 * Seeded eagerly at construction time from the data manager; failure to
 * load the initial alert set aborts startup via sys.error.
 */
class CoordinatorCache(cacheManager: CacheDataManager, listener: CoordinatorCacheListener) {

  private final val Log = LogFactory.getLog(this.getClass)

  // Initial population: one scheduler per enabled alert, and a map from
  // org name to that org's alert IDs.
  // NOTE(review): `_.alert.id.get` assumes every enabled alert already has
  // an ID — confirm the data layer guarantees this.
  val schedulesByOrganization: SchedulesCache = {
    val scheduleMap = cacheManager.getEnabledAlerts().getOrElse(
      sys.error("Failed to retrieve alerts from data source. Quitting..."))
    mutable.Map.empty[String, List[String]] ++= scheduleMap map { case (orgName, orgSchedules) =>
      orgSchedules foreach listener.createScheduler
      orgName -> orgSchedules.map(_.alert.id.get)
    }
  }

  Log.warn("Initial data: " + schedulesByOrganization)

  /**
   * Dispatches one queue message to the matching entity handler, then
   * acknowledges (deletes) the message regardless of whether the entity
   * type was recognised.
   */
  private[actors] def updateCache(message: WorkItem) = {
    message.update.entityType match {
      case Entity.Organization =>
        Log.warn("Entity: Org")
        updateOrganization(message.update)
      case Entity.Alert =>
        Log.warn("Entity: Alert")
        updateSchedule(message.update)
      case x =>
        Log.warn(s"Ignoring update of unsupported entity type $x")
    }
    listener.deleteMessage(message.receiptHandle)
  }

  /**
   * Applies an organization-level change: create registers an empty alert
   * list, update propagates the new notification URL to every scheduler of
   * that org (update.extra carries the URL), delete stops all its schedulers.
   */
  private[actors] def updateOrganization(update: Update): Unit = {
    val orgName = update.id
    update.operation match {
      case Operation.Create =>
        schedulesByOrganization.get(orgName) match {
          case None => schedulesByOrganization += orgName -> List.empty[String]
          case Some(_) => Log.warn(s"Unexpected create received for known Organization $orgName.")
        }
      case Operation.Update =>
        schedulesByOrganization.get(orgName) match {
          case Some(schedules) => schedules foreach (id => listener.notifyUrlChange(id, update.extra))
          case None => Log.warn(s"Unexpected update received for unknown Organization $orgName.")
        }
      case Operation.Delete =>
        schedulesByOrganization.remove(orgName) match {
          case Some(schedules) => schedules foreach listener.stopScheduler
          case None => Log.warn(s"Unexpected delete received for unknown Organization $orgName.")
        }
    }
  }

  /**
   * Applies an alert-level change. For Create/Update the Schedule itself is
   * carried as JSON in update.extra; for Delete, extra carries the org name.
   * Disabled alerts never get a scheduler.
   */
  private[actors] def updateSchedule(update: Update): Unit = update.operation match {
    case Operation.Create =>
      getSchedule(update.extra) foreach { schedule =>
        if (schedule.alert.enabled) {
          schedulesByOrganization.get(schedule.orgName) foreach { schedules =>
            schedules.find(_ == update.id) match {
              case None =>
                listener.createScheduler(schedule)
                schedulesByOrganization.update(schedule.orgName, update.id :: schedules)
              case Some(_) =>
                Log.warn(s"Unexpected create received for existing alert with id ${update.id}.")
            }
          }
        } else {
          Log.debug("Alert is disabled. Not creating a scheduler for it.")
        }
      }
    case Operation.Update =>
      getSchedule(update.extra) foreach { schedule =>
        schedulesByOrganization.get(schedule.orgName) foreach { schedules =>
          // Always stop any existing scheduler first; a fresh one is created
          // below only if the alert is (still) enabled.
          schedules.filter(_ == update.id).foreach(listener.stopScheduler)
          if (schedule.alert.enabled) {
            // if we just enabled an alert, we need to add it to the map
            schedulesByOrganization.update(schedule.orgName, update.id :: schedules.filterNot(_ == update.id))
            listener.createScheduler(schedule)
          } else {
            // if we just disabled an alert, we need to remove it from the map
            schedulesByOrganization.update(schedule.orgName, schedules.filterNot(_ == update.id))
            Log.debug("Alert is disabled. Not creating a new scheduler for it.")
          }
        }
      }
    case Operation.Delete =>
      val orgName = update.extra
      schedulesByOrganization.get(orgName) foreach { schedules =>
        schedules.find(_ == update.id) match {
          case Some(_) =>
            schedulesByOrganization.update(orgName, schedules.filterNot(_ == update.id))
            listener.stopScheduler(update.id)
          case None =>
            Log.warn(s"Unexpected delete received for unknown alert with id ${update.id}.")
        }
      }
  }

  /** Parses a Schedule from its JSON form; logs and returns None on bad input. */
  private[actors] def getSchedule(extra: String): Option[Schedule] =
    try {
      Some(Json.parse(extra).as[Schedule])
    } catch {
      case e: Exception =>
        Log.warn("Received a bad extra: " + extra + ". Expected a Schedule entity in JSON format.")
        None
    }
}

/**
 * Actor that polls the AWS work queue every WorkPeriod, routes each message
 * through the CoordinatorCache, and owns the per-alert Scheduler actors.
 * Implements CoordinatorCacheListener so the cache can delegate all
 * scheduler lifecycle operations back to this actor.
 */
class Coordinator(awsWrapper: AwsWrapper, cacheManager: CacheDataManager)
  extends Actor with ActorLogging with CoordinatorCacheListener {

  private[actors] def WorkPeriod = 10.seconds

  implicit val executor = context.dispatcher.asInstanceOf[Executor with ExecutionContext]

  private[actors] val schedulers: SchedulesMap = mutable.Map.empty[String, ActorRef]
  private[actors] val cache = new CoordinatorCache(cacheManager, this)

  // Periodic self-tick driving the queue poll; cancelled in postStop.
  private val queueCheckSchedule =
    context.system.scheduler.schedule(0.minutes, WorkPeriod, self, Coordinator.CheckQueue)

  override def receive = {
    case Coordinator.CheckQueue =>
      awsWrapper.receiveMessages() foreach { messageList =>
        log.warning("Messages: " + messageList)
        messageList foreach cache.updateCache
      }
    case Coordinator.StatusRequest =>
      sender ! StatusResponse(cache, schedulers)
  }

  override def postStop(): Unit = queueCheckSchedule.cancel()

  override def createScheduler(schedule: Schedule) =
    schedulers += schedule.alert.id.get -> context.actorOf(Props(new Scheduler(schedule, awsWrapper)))

  override def stopScheduler(scheduleId: String) =
    schedulers.remove(scheduleId) foreach(_ ! Scheduler.Die)

  override def deleteMessage(receiptHandle: String) =
    awsWrapper.deleteMessage(receiptHandle)

  override def notifyUrlChange(scheduleId: String, newUrl: String) =
    schedulers.get(scheduleId) foreach(_ ! NotificationUrlChange(newUrl))
}
gilt/cave
scheduler/app/actors/Coordinator.scala
Scala
mit
7,765
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql

package object matfast {
  // Within the matfast package, "DataFrame" refers to matfast's own Dataset
  // of Rows (mirroring Spark SQL's `type DataFrame = Dataset[Row]` alias),
  // intentionally shadowing org.apache.spark.sql.DataFrame.
  type DataFrame = matfast.Dataset[Row]
}
yuyongyang800/SparkDistributedMatrix
src/main/scala/org/apache/spark/sql/matfast/package.scala
Scala
apache-2.0
900
import test._
import org.specs2.mutable.Specification

/**
 * Round-trip serialization tests for generated 2-arity case classes whose
 * two fields share the same ("homogeneous") type. Each example writes two
 * records and reads them back via SpecificTestUtil.verifyWriteAndRead.
 *
 * Fixed: several spec descriptions were inaccurate — the Map tests claimed
 * `Map[Int, Int]` / `Map[Int, String]` while the fixtures are String-keyed,
 * and "an `Long`"-style articles were ungrammatical.
 */
class Specific2ArityHomoSpec extends Specification {

  "A case class with an `Int` field in the second position" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTest14(1, 2)
      val record2 = AvroTypeProviderTest14(3, 4)
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with a `Float` field in the second position" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTest15(1F, 2F)
      val record2 = AvroTypeProviderTest15(3F, 4F)
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with a `Long` field in the second position" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTest16(1L, 2L)
      val record2 = AvroTypeProviderTest16(3L, 4L)
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with a `Double` field in the second position" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTest17(1D, 2D)
      val record2 = AvroTypeProviderTest17(3D, 4D)
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with a `Boolean` field in the second position" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTest18(true, false)
      val record2 = AvroTypeProviderTest18(false, true)
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with a `String` field in the second position" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTest19("1", "2")
      val record2 = AvroTypeProviderTest19("1", "2")
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with a `Null` field in the second position" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTest20(null, null)
      val record2 = AvroTypeProviderTest20(null, null)
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with a `List[String]` field in the second position" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTest24(List("mekka.lekka.hi"), List("mekka.hiney.ho"))
      val record2 = AvroTypeProviderTest24(List("time"), List("travel"))
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with a `List[Int]` field in the second position" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTest25(List(1, 2), List(3,4))
      val record2 = AvroTypeProviderTest25(List(5, 6), List(7,8))
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with an `Option[String]` field in the second position" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTest26(Some("sun"), Some("moon"))
      val record2 = AvroTypeProviderTest26(Some("day"), Some("night"))
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with an `Option[Int]` field in the second position" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTest27(Some(1), Some(2))
      val record2 = AvroTypeProviderTest27(Some(3), Some(4))
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with two Map[String, Int] fields" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTestMap04(Map("Gorgonzola"->2), Map("Cheddar"->4))
      val record2 = AvroTypeProviderTestMap04(Map("Gouda"->5), Map("Swiss"->6))
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with two Map[String, String] fields" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTestMap05(Map("Havana"->"Cuba"), Map("World"->"series"))
      val record2 = AvroTypeProviderTestMap05(Map("Bogota"->"Colombia"), Map("time"->"series"))
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }

  "A case class with two Map[String, Option[List[Int]]] fields" should {
    "serialize and deserialize correctly" in {
      val record1 = AvroTypeProviderTestMap06(Map("Olala"->Some(List(1,4))), Map("Rumpole"->None))
      val record2 = AvroTypeProviderTestMap06(Map("Cran"->Some(List(3,5))), Map("Doc"->None))
      val records = List(record1, record2)
      SpecificTestUtil.verifyWriteAndRead(records)
    }
  }
}
julianpeeters/sbt-avrohugger
src/sbt-test/avrohugger/SpecificSerializationTests/src/test/scala/specific/Specific2ArityHomoSpec.scala
Scala
apache-2.0
5,073
package yiris.core.ratios

import yiris.core.valueunit.ValueUnit

/**
 * A trait contains all default ratio values and units.
 *
 * Each ratio is a small case class pairing an optional numeric value with a
 * unit string; defaults come from the mixed-in Ratios (values) and Units
 * (unit lists) traits, falling back to "" when no unit is defined.
 */
trait ValueUnits extends Ratios with Units {

  /**
   * BOD,,5,,/cBOD,,5,,.
   * @param value Default value is '''1.10'''.
   * @param unit Default is '''No unit'''.
   */
  case class BOD5CBOD5Ratio(value: Option[Double] = Some(bod5cBOD5Ratio),
                            unit: String = bod5cBOD5RatioUnits.headOption.getOrElse("")) extends ValueUnit

  /**
   * COD/BOD.
   * @param value Default value is '''1.60'''.
   * @param unit Default is '''No unit'''.
   */
  case class CODBODRatio(value: Option[Double] = Some(codBODRatio),
                         unit: String = codBODRatioUnits.headOption.getOrElse("")) extends ValueUnit

  /**
   * COD/VSS.
   * @param value Default value is '''1.42'''.
   * @param unit Default is '''No unit'''.
   */
  case class CODVSSRatio(value: Option[Double] = Some(codVSSRatio),
                         unit: String = codVSSRatioUnits.headOption.getOrElse("")) extends ValueUnit

  /**
   * VSS/TSS.
   * @param value Default value is '''0.80'''.
   * @param unit Default is '''No unit'''.
   */
  case class VSSTSSRatio(value: Option[Double] = Some(vssTSSRatio),
                         unit: String = vssTSSRatioUnits.headOption.getOrElse("")) extends ValueUnit

  /**
   * Biodegradable VSS (bVSS/VSS).
   * @param value Default value is '''0.80'''.
   * @param unit Default is '''No unit'''.
   */
  case class BVSSVSSRatio(value: Option[Double] = Some(bvssVSSRatio),
                          unit: String = bvssVSSRatioUnits.headOption.getOrElse("")) extends ValueUnit

  /**
   * Non biodegradable VSS (nbVSS/VSS).
   * @param value Default value is '''0.20'''.
   * @param unit Default is '''No unit'''.
   */
  case class NBVSSVSSRatio(value: Option[Double] = Some(nbvssVSSRatio),
                           unit: String = nbvssVSSRatioUnits.headOption.getOrElse("")) extends ValueUnit

  /**
   * NTU/TSS.
   * @param value Default value is '''0.50'''.
   * @param unit Default is '''No unit'''.
   */
  case class NTUTSSRatio(value: Option[Double] = Some(ntuTSSRatio),
                         unit: String = ntuTSSRatioUnits.headOption.getOrElse("")) extends ValueUnit

  /**
   * P/VSS.
   * @param value Default value is '''0.02'''.
   * @param unit Default is '''No unit'''.
   */
  case class PVSSRatio(value: Option[Double] = Some(pVSSRatio),
                       unit: String = pVSSRatioUnits.headOption.getOrElse("")) extends ValueUnit

  /**
   * Miscellaneous ratios
   * @param bod5CBOD5 the value of BOD,,5,,/cBOD,,5,,. Default value and units are 1.10.
   * @param codBOD the value of COD/BOD. Default value and units are 1.60.
   * @param codVSS the value of COD/VSS. Default value and units are 1.42.
   * @param vssTSS the value of VSS/TSS. Default value and units are 0.80.
   * @param bvssVSS the value of bVSS/VSS. Default value and units are 0.80.
   * @param nbvssVSS the value of nbVSS/VSS. Default value and units are 0.20.
   * @param ntuTSS the value of NTU/TSS. Default value and units are 0.50.
   * @param pVSS the value of P/VSS. Default value and units are 0.02.
   */
  case class MiscellaneousRatios(bod5CBOD5: BOD5CBOD5Ratio = BOD5CBOD5Ratio(),
                                 codBOD: CODBODRatio = CODBODRatio(),
                                 codVSS: CODVSSRatio = CODVSSRatio(),
                                 vssTSS: VSSTSSRatio = VSSTSSRatio(),
                                 bvssVSS: BVSSVSSRatio = BVSSVSSRatio(),
                                 nbvssVSS: NBVSSVSSRatio = NBVSSVSSRatio(),
                                 ntuTSS: NTUTSSRatio = NTUTSSRatio(),
                                 pVSS: PVSSRatio = PVSSRatio())
}
kasonchan/yiris
core/src/main/scala/yiris/core/ratios/ValueUnits.scala
Scala
apache-2.0
3,760
/** This file is part of TextCompose, a program for producing PDF from text files.
  * Copyright 2014 Jesper S Villadsen <jeschvi@gmail.com>
  * License: GNU Affero General Public License version 3 or later.
  * For full license text see LICENSE.txt or <http://www.gnu.org/licenses/>.
  */

package textcompose.core

import scala.collection.mutable.ArrayBuffer
import scala.collection.immutable.List
import com.itextpdf.text._
import com.itextpdf.text.pdf._

/**
 * Accumulates a sequence of move-to / line-to drawing commands and renders
 * them as a single stroked path on an iText PdfContentByte. The command
 * buffer is cleared after each draw, so a sequence must always (re)start
 * with a move-to.
 */
class DrawingSequence(pdfDocument: PDFDocument) {

  private val commands = new ArrayBuffer[DrawingCommand]

  def isEmpty = commands.isEmpty

  /** Queue a move-to using decorated (possibly relative/percent) coordinates. */
  def drawingMoveTo(x: DecoratedNumber, y: DecoratedNumber) {
    commands += DrawingCommand.fromDecNums(pdfDocument, "move", List((x, y)))
  }

  /** Queue a move-to using absolute coordinates. */
  def drawingMoveTo(x: Double, y: Double) {
    commands += new DrawingCommand(pdfDocument, "move", List((x, y)))
  }

  // A line-to is only meaningful once the path has a current point,
  // i.e. after at least one move-to.
  def checkNonEmpty() {
    if (isEmpty) throw new TagError("To draw, you must first use 'move-to'.")
  }

  /** Queue a line-to using decorated coordinates; requires a prior move-to. */
  def drawingLineTo(x: DecoratedNumber, y: DecoratedNumber) {
    checkNonEmpty()
    commands += DrawingCommand.fromDecNums(pdfDocument, "line", List((x, y)))
  }

  /** Queue a line-to using absolute coordinates; requires a prior move-to. */
  def drawingLineTo(x: Double, y: Double) {
    checkNonEmpty()
    commands += new DrawingCommand(pdfDocument, "line", List((x, y)))
  }

  /** Stroke the buffered path using the document state's line settings (dashing enabled). */
  def draw(contentByte: PdfContentByte, state: State) {
    draw(contentByte, state.strokeOpacity, state.actualBlendMode, state.lineWidth,
      state.lineCap, state.actualLineColor, true, state.lineDashPattern, state.lineDashPhase)
  }

  /**
   * Stroke the buffered path with explicit opacity/width and the image-border
   * color — square line caps, no dashing.
   */
  def draw(contentByte: PdfContentByte, state: State, opacity: Float, lineWidth: Float) {
    draw(contentByte, opacity, state.actualBlendMode, lineWidth,
      PdfContentByte.LINE_CAP_PROJECTING_SQUARE, state.actualImgBdrColor, false, null, 0)
  }

  /**
   * Shared rendering: sets up graphics state (blend mode, stroke opacity as a
   * percentage, width, cap, color, optional dash pattern), replays the queued
   * move/line commands, strokes once, restores state, and clears the buffer.
   */
  private def draw(
    contentByte: PdfContentByte,
    opacity: Float,
    actualBlendMode: PdfName,
    lineWidth: Float,
    lineCap: Int,
    lineColor: BaseColor,
    useDashing: Boolean,
    lineDashPattern: ArrayBuffer[Float],
    lineDashPhase: Float) {

    val gState = new PdfGState
    gState.setBlendMode(actualBlendMode)
    // opacity is expressed 0-100; iText expects 0.0-1.0.
    gState.setStrokeOpacity(opacity / 100f)
    contentByte.saveState()
    contentByte.setGState(gState)
    contentByte.setLineWidth(lineWidth)
    contentByte.setLineCap(lineCap)
    contentByte.setColorStroke(lineColor)
    if (useDashing && !lineDashPattern.isEmpty) {
      contentByte.setLineDash(lineDashPattern.toArray, lineDashPhase)
    }
    for (d <- commands) {
      d.command match {
        case "move" => contentByte.moveTo(d.arguments(0)._1.toFloat, d.arguments(0)._2.toFloat)
        case "line" => contentByte.lineTo(d.arguments(0)._1.toFloat, d.arguments(0)._2.toFloat)
      }
    }
    contentByte.stroke()
    contentByte.restoreState()
    // One-shot: the sequence is consumed by drawing it.
    commands.clear()
  }
}
jvilladsen/TextCompose
src/main/scala/core/DrawingSequence.scala
Scala
agpl-3.0
2,938
package org.suecarter.websocket

import akka.actor._
import akka.dispatch._
import akka.io.Tcp
import com.typesafe.config.Config
import java.util.concurrent.LinkedBlockingDeque
import spray.can.websocket.FrameCommandFailed

/**
 * When using `context.become` to wait for an `Ack`, then `Ack` will
 * normally be placed at the end of the queue. This custom mailbox
 * will prioritise `Ack` messages so that they are always placed at
 * the front of the queue.
 *
 * This showed a performance improvement of 1 hour to 2 minutes when
 * sending about 100,000 messages, as the client actor was spending
 * the vast majority of its time traversing the work queue and
 * re-stashing messages.
 */
case class HighPriorityAckMailbox(settings: ActorSystem.Settings, config: Config)
  extends PriorityUnboundedDequeMailbox(settings, config) {
  // High-priority: write acknowledgements, write failures, and TCP
  // back-pressure resume notifications jump the queue.
  override def priority(e: Envelope): Boolean = e.message match {
    case Ack => true
    case fail: FrameCommandFailed => true
    case Tcp.WritingResumed => true
    case _ => false
  }
}

/**
 * Specialist priority (user provides the rules), unbounded, deque
 * (can be used for Stashing) mailbox.
 *
 * Very useful for messages of high priority, such as `Ack`s in I/O
 * situations.
 *
 * Based on UnboundedDequeBasedMailbox from Akka.
 */
abstract class PriorityUnboundedDequeMailbox extends MailboxType
  with ProducesMessageQueue[UnboundedDequeBasedMailbox.MessageQueue] {

  // Akka instantiates mailboxes reflectively via a (Settings, Config)
  // constructor; the arguments themselves are not needed here.
  def this(settings: ActorSystem.Settings, config: Config) = this()

  final override def create(owner: Option[ActorRef], system: Option[ActorSystem]): MessageQueue =
    new PriorityUnboundedDequeMailbox.MessageQueue(priority)

  /**
   * When true, the queue will place this envelope at the front of the
   * queue (as if it was just stashed).
   */
  def priority(e: Envelope): Boolean
}

object PriorityUnboundedDequeMailbox {
  /** Deque-backed queue that routes priority envelopes to the front. */
  class MessageQueue(priority: Envelope => Boolean) extends LinkedBlockingDeque[Envelope]
    with UnboundedDequeBasedMessageQueue {
    final val queue = this
    override def enqueue(receiver: ActorRef, handle: Envelope): Unit =
      if (priority(handle)) super.enqueueFirst(receiver, handle)
      else super.enqueue(receiver, handle)
  }
}
smootoo/simple-spray-websockets
src/main/scala/org/suecarter/websocket/PriorityUnboundedDequeMailbox.scala
Scala
unlicense
2,192
/*
 * Sources.scala
 * An example of dependent universe reasoning.
 *
 * Created By:      Avi Pfeffer (apfeffer@cra.com)
 * Creation Date:   Jan 1, 2009
 *
 * Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
 * See http://www.cra.com or email figaro@cra.com for information.
 *
 * See http://www.github.com/p2t2/figaro for a copy of the software license.
 */

package com.cra.figaro.example

import com.cra.figaro.algorithm._
import com.cra.figaro.algorithm.sampling._
import com.cra.figaro.algorithm.factored._
import com.cra.figaro.library.compound.{ If, ^^ }
import com.cra.figaro.library.atomic.continuous._
import com.cra.figaro.language._
import com.cra.figaro.util._

/**
 * An example of dependent universe reasoning.
 *
 * Samples may come from one of several sources; each (source, sample) pair
 * gets its own dependent universe whose "distance" element depends on
 * whether the sample really came from that source. Evidence on the
 * distances is combined via probability-of-evidence sampling inside
 * variable elimination.
 */
object Sources {

  private class Source(val name: String) {
    override val toString = name
  }

  // fromSource is abstract: each concrete sample supplies its own prior
  // over which source produced it.
  private abstract class Sample(val name: String) {
    val fromSource: Element[Source]
    override val toString = name
  }

  /**
   * One hypothesis "sample came from source". Lives in its own dependent
   * universe (parented by the sample's fromSource): distance is normally
   * distributed around 0 if the source is right, uniform on [0,10] otherwise.
   */
  private class Pair(val source: Source, val sample: Sample) {
    val universe = new Universe(List(sample.fromSource))
    val isTheRightSource = Apply(sample.fromSource, (s: Source) => s == source)("isTheRightSource", universe)
    val rightSourceDistance = Normal(0.0, 1.0)("rightSourceDistance", universe)
    val wrongSourceDistance = Uniform(0.0, 10.0)("wrongSourceDistance", universe)
    val distance = If(isTheRightSource, rightSourceDistance, wrongSourceDistance)("distance", universe)
  }

  private val source1 = new Source("Source 1")
  private val source2 = new Source("Source 2")
  private val source3 = new Source("Source 3")

  private val sample1 = new Sample("Sample 1") { val fromSource = Select(0.5 -> source1, 0.5 -> source2) }
  private val sample2 = new Sample("Sample 2") { val fromSource = Select(0.9 -> source1, 0.1 -> source3) }

  private val pair1 = new Pair(source1, sample1)
  private val pair2 = new Pair(source2, sample1)
  private val pair3 = new Pair(source1, sample2)
  private val pair4 = new Pair(source3, sample2)

  private val values = Values()
  private val samples = List(sample1, sample2)

  // Mutual exclusion constraint: two different samples whose possible
  // sources overlap cannot both have come from the same source.
  for {
    (firstSample, secondSample) <- upperTriangle(samples)
    sources1 = values(firstSample.fromSource)
    sources2 = values(secondSample.fromSource)
    if sources1.intersect(sources2).nonEmpty
  } {
    println("First sample: " + firstSample + ", Second sample: " + secondSample)
    ^^(firstSample.fromSource, secondSample.fromSource).addCondition((p: (Source, Source)) => p._1 != p._2)
  }

  def main(args: Array[String]) {
    // Interval evidence on each pair's observed distance.
    val evidence1 = NamedEvidence("distance", Condition((d: Double) => d > 0.5 && d < 0.6))
    val evidence2 = NamedEvidence("distance", Condition((d: Double) => d > 1.5 && d < 1.6))
    val evidence3 = NamedEvidence("distance", Condition((d: Double) => d > 2.5 && d < 2.6))
    val evidence4 = NamedEvidence("distance", Condition((d: Double) => d > 0.5 && d < 0.6))
    val ue1 = (pair1.universe, List(evidence1))
    val ue2 = (pair2.universe, List(evidence2))
    val ue3 = (pair3.universe, List(evidence3))
    val ue4 = (pair4.universe, List(evidence4))
    // Dependent universes are scored by sampling their probability of evidence.
    def peAlg(universe: Universe, evidence: List[NamedEvidence[_]]) =
      () => ProbEvidenceSampler.computeProbEvidence(100000, evidence)(universe)
    val alg = VariableElimination(List(ue1, ue2, ue3, ue4), peAlg _, sample1.fromSource)
    alg.start()
    val result = alg.probability(sample1.fromSource)(_ == source1)
    println("Probability of Source 1: " + result)
    alg.kill()
  }
}
scottcb/figaro
FigaroExamples/src/main/scala/com/cra/figaro/example/Sources.scala
Scala
bsd-3-clause
3,493
package com.wincom.dcim.domain

import akka.event.NoLogging
import com.wincom.dcim.message.alarm.AlarmLevel.LEVEL_1
import com.wincom.dcim.message.alarm._
import com.wincom.dcim.signal.FunctionRegistry

import scala.collection.mutable

/**
 * Created by wangxy on 17-8-31.
 *
 * Demo driver: partitions a set of alarm conditions into layers using the
 * subset relation (a condition sorts before another when it is a strict
 * subset of it), printing one partition per iteration until the set is
 * exhausted.
 */
object Main extends App {
  implicit val registry: FunctionRegistry = new FunctionRegistry(NoLogging).initialize()

  // "Less than" here means strict subset; equal conditions compare as
  // unordered, so a TreeSet built with this ordering collapses duplicates.
  // NOTE(review): this is a partial order, not a total order — relies on
  // TreeSet's tolerance of it; confirm intended.
  val ordering = Ordering.fromLessThan[AlarmCondition]((x, y) => x != y && x.subsetOf(y))
  var set2: mutable.Set[AlarmCondition] = mutable.Set()

  // Six conditions: duplicates (ac0/ac1), differing thresholds, and both
  // LessThan and GreaterThan function kinds.
  val ac0 = new AlarmConditionVo(ThresholdFunctionVo("LessThan", Map("threshold" -> "1.0", "insensitivity-zone" -> "0.1")), LEVEL_1, "Critical", "Normal")
  val ac1 = new AlarmConditionVo(ThresholdFunctionVo("LessThan", Map("threshold" -> "1.0", "insensitivity-zone" -> "0.1")), LEVEL_1, "Critical", "Normal")
  val ac2 = new AlarmConditionVo(ThresholdFunctionVo("LessThan", Map("threshold" -> "2.0", "insensitivity-zone" -> "0.1")), LEVEL_1, "Critical", "Normal")
  val ac3 = new AlarmConditionVo(ThresholdFunctionVo("LessThan", Map("threshold" -> "-2.0", "insensitivity-zone" -> "0.1")), LEVEL_1, "Critical", "Normal")
  val ac4 = new AlarmConditionVo(ThresholdFunctionVo("GreaterThan", Map("threshold" -> "-2.0", "insensitivity-zone" -> "0.1")), LEVEL_1, "Critical", "Normal")
  val ac5 = new AlarmConditionVo(ThresholdFunctionVo("GreaterThan", Map("threshold" -> "2.0", "insensitivity-zone" -> "0.1")), LEVEL_1, "Critical", "Normal")

  set2 += AlarmCondition(ac0)
  set2 += AlarmCondition(ac1)
  set2 += AlarmCondition(ac2)
  set2 += AlarmCondition(ac3)
  set2 += AlarmCondition(ac4)
  set2 += AlarmCondition(ac5)

  // Repeatedly peel off one partition (the conditions the TreeSet keeps
  // under the subset ordering) and remove it from the working set.
  while (!set2.isEmpty) {
    var s = mutable.TreeSet.empty(ordering)
    set2.foreach(x => s += x)
    println("partition:")
    s.foreach(println(_))
    set2 --= s
  }
  // Expected to print nothing: the loop above drains set2.
  set2.foreach(println(_))
}
xtwxy/mysc
dcim-cluster/domain/src/main/scala/com/wincom/dcim/domain/Main.scala
Scala
apache-2.0
1,861
package sbtmarathon

import scala.reflect.api._
import scala.reflect.ManifestFactory
import scala.reflect.runtime.universe._

import org.json4sbt._
import org.json4sbt.jackson.JsonMethods._
import org.json4sbt.jackson.Serialization

/**
 * One-way json4s serializer for TemplateDriver: reflects over the wrapped
 * instance's public terms and emits, per member, its runtime Manifest plus
 * the member's value serialized to a JSON string. Deserialization is not
 * supported.
 */
object TemplateDriverSerializer extends Serializer[TemplateDriver] {

  private val mirror = runtimeMirror(getClass.getClassLoader)

  // Write-only serializer: reading a TemplateDriver back is intentionally
  // unsupported.
  def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), TemplateDriver] = {
    throw new UnsupportedOperationException
  }

  def serialize(implicit formats: Formats): PartialFunction[Any, JValue] = {
    case TemplateDriver(instance) =>
      val manifests = manifestsByName(instance)
      val jfields = valuesByName(instance).toSeq.map { case (name, value) =>
        name -> JObject(
          "manifest" -> ManifestSerialization.emit(manifests(name)),
          // Each value is double-encoded: serialized to a JSON string that
          // is itself embedded as a JString.
          "value" -> JString(Serialization.write(value.asInstanceOf[AnyRef])(formats))
        )
      }
      JObject(jfields: _*)
  }

  /**
   * Reflected term symbols of the instance, keyed by decoded name,
   * excluding members inherited from AnyRef and compiler-synthesized terms.
   */
  def termSymbolsByName(instance: AnyRef): Map[String, TermSymbol] = {
    val instanceType = mirror.classSymbol(instance.getClass).toType
    val reservedNames = typeOf[AnyRef].members.map(_.name.decoded).toSet
    instanceType.members.map { symbol =>
      symbol.name.decoded -> symbol
    }.collect {
      case (name, symbol) if !reservedNames.contains(name) && symbol.isTerm && !symbol.isSynthetic =>
        name -> symbol.asTerm
    }(scala.collection.breakOut)
  }

  /**
   * Runtime Manifest per member: methods contribute their return type,
   * fields their declared type.
   */
  def manifestsByName(instance: AnyRef): Map[String, Manifest[_]] = {
    val typesByName: Map[String, Type] = termSymbolsByName(instance).map {
      case (name, symbol) if symbol.isMethod => name -> symbol.asMethod.returnType
      case (name, symbol) => name -> symbol.typeSignature
    }
    typesByName.mapValues(typeToManifest)
  }

  /**
   * Current value per member: methods are invoked (zero-arg assumed),
   * fields are read directly.
   */
  def valuesByName(instance: AnyRef): Map[String, Any] = {
    termSymbolsByName(instance).mapValues {
      case symbol if symbol.isMethod => mirror.reflect(instance).reflectMethod(symbol.asMethod).apply()
      case symbol => mirror.reflect(instance).reflectField(symbol).get
    }
  }

  /**
   * Recursively converts a reflection Type into a Manifest, preserving
   * type arguments (e.g. List[Int] -> Manifest[List[Int]]).
   */
  def typeToManifest(tpe: Type): Manifest[_] = {
    val runtimeClass = mirror.runtimeClass(tpe)
    val typeArguments = tpe.asInstanceOf[TypeRefApi].args.map(typeToManifest)
    typeArguments match {
      case Nil => ManifestFactory.classType(runtimeClass)
      case head :: tail => ManifestFactory.classType(runtimeClass, head, tail: _*)
    }
  }
}
Tapad/sbt-marathon
templating-lib/src/main/scala/sbtmarathon/TemplateDriverSerializer.scala
Scala
bsd-3-clause
2,458
package gitbucket.core.servlet import java.io.File import java.sql.{DriverManager, Connection} import gitbucket.core.plugin.PluginRegistry import gitbucket.core.service.SystemSettingsService import gitbucket.core.util._ import org.apache.commons.io.FileUtils import javax.servlet.{ServletContextListener, ServletContextEvent} import org.slf4j.LoggerFactory import Directory._ import ControlUtil._ import JDBCUtil._ import org.eclipse.jgit.api.Git import gitbucket.core.util.Versions import gitbucket.core.util.Directory object AutoUpdate { /** * The history of versions. A head of this sequence is the current BitBucket version. */ val versions = Seq( new Version(3, 5), new Version(3, 4), new Version(3, 3), new Version(3, 2), new Version(3, 1), new Version(3, 0), new Version(2, 8), new Version(2, 7) { override def update(conn: Connection, cl: ClassLoader): Unit = { super.update(conn, cl) conn.select("SELECT * FROM REPOSITORY"){ rs => // Rename attached files directory from /issues to /comments val userName = rs.getString("USER_NAME") val repoName = rs.getString("REPOSITORY_NAME") defining(Directory.getAttachedDir(userName, repoName)){ newDir => val oldDir = new File(newDir.getParentFile, "issues") if(oldDir.exists && oldDir.isDirectory){ oldDir.renameTo(newDir) } } // Update ORIGIN_USER_NAME and ORIGIN_REPOSITORY_NAME if it does not exist val originalUserName = rs.getString("ORIGIN_USER_NAME") val originalRepoName = rs.getString("ORIGIN_REPOSITORY_NAME") if(originalUserName != null && originalRepoName != null){ if(conn.selectInt("SELECT COUNT(*) FROM REPOSITORY WHERE USER_NAME = ? AND REPOSITORY_NAME = ?", originalUserName, originalRepoName) == 0){ conn.update("UPDATE REPOSITORY SET ORIGIN_USER_NAME = NULL, ORIGIN_REPOSITORY_NAME = NULL " + "WHERE USER_NAME = ? 
AND REPOSITORY_NAME = ?", userName, repoName) } } // Update PARENT_USER_NAME and PARENT_REPOSITORY_NAME if it does not exist val parentUserName = rs.getString("PARENT_USER_NAME") val parentRepoName = rs.getString("PARENT_REPOSITORY_NAME") if(parentUserName != null && parentRepoName != null){ if(conn.selectInt("SELECT COUNT(*) FROM REPOSITORY WHERE USER_NAME = ? AND REPOSITORY_NAME = ?", parentUserName, parentRepoName) == 0){ conn.update("UPDATE REPOSITORY SET PARENT_USER_NAME = NULL, PARENT_REPOSITORY_NAME = NULL " + "WHERE USER_NAME = ? AND REPOSITORY_NAME = ?", userName, repoName) } } } } }, new Version(2, 6), new Version(2, 5), new Version(2, 4), new Version(2, 3) { override def update(conn: Connection, cl: ClassLoader): Unit = { super.update(conn, cl) conn.select("SELECT ACTIVITY_ID, ADDITIONAL_INFO FROM ACTIVITY WHERE ACTIVITY_TYPE='push'"){ rs => val curInfo = rs.getString("ADDITIONAL_INFO") val newInfo = curInfo.split("\n").filter(_ matches "^[0-9a-z]{40}:.*").mkString("\n") if (curInfo != newInfo) { conn.update("UPDATE ACTIVITY SET ADDITIONAL_INFO = ? 
WHERE ACTIVITY_ID = ?", newInfo, rs.getInt("ACTIVITY_ID")) } } ignore { FileUtils.deleteDirectory(Directory.getPluginCacheDir()) //FileUtils.deleteDirectory(new File(Directory.PluginHome)) } } }, new Version(2, 2), new Version(2, 1), new Version(2, 0){ override def update(conn: Connection, cl: ClassLoader): Unit = { import eu.medsea.mimeutil.{MimeUtil2, MimeType} val mimeUtil = new MimeUtil2() mimeUtil.registerMimeDetector("eu.medsea.mimeutil.detector.MagicMimeMimeDetector") super.update(conn, cl) conn.select("SELECT USER_NAME, REPOSITORY_NAME FROM REPOSITORY"){ rs => defining(Directory.getAttachedDir(rs.getString("USER_NAME"), rs.getString("REPOSITORY_NAME"))){ dir => if(dir.exists && dir.isDirectory){ dir.listFiles.foreach { file => if(file.getName.indexOf('.') < 0){ val mimeType = MimeUtil2.getMostSpecificMimeType(mimeUtil.getMimeTypes(file, new MimeType("application/octet-stream"))).toString if(mimeType.startsWith("image/")){ file.renameTo(new File(file.getParent, file.getName + "." + mimeType.split("/")(1))) } } } } } } } }, Version(1, 13), Version(1, 12), Version(1, 11), Version(1, 10), Version(1, 9), Version(1, 8), Version(1, 7), Version(1, 6), Version(1, 5), Version(1, 4), new Version(1, 3){ override def update(conn: Connection, cl: ClassLoader): Unit = { super.update(conn, cl) // Fix wiki repository configuration conn.select("SELECT USER_NAME, REPOSITORY_NAME FROM REPOSITORY"){ rs => using(Git.open(getWikiRepositoryDir(rs.getString("USER_NAME"), rs.getString("REPOSITORY_NAME")))){ git => defining(git.getRepository.getConfig){ config => if(!config.getBoolean("http", "receivepack", false)){ config.setBoolean("http", null, "receivepack", true) config.save } } } } } }, Version(1, 2), Version(1, 1), Version(1, 0), Version(0, 0) ) /** * The head version of BitBucket. */ val headVersion = versions.head /** * The version file (GITBUCKET_HOME/version). 
*/ lazy val versionFile = new File(GitBucketHome, "version") /** * Returns the current version from the version file. */ def getCurrentVersion(): Version = { if(versionFile.exists){ FileUtils.readFileToString(versionFile, "UTF-8").trim.split("\\.") match { case Array(majorVersion, minorVersion) => { versions.find { v => v.majorVersion == majorVersion.toInt && v.minorVersion == minorVersion.toInt }.getOrElse(Version(0, 0)) } case _ => Version(0, 0) } } else Version(0, 0) } }
ssogabe/gitbucket
src/main/scala/gitbucket/core/servlet/AutoUpdate.scala
Scala
apache-2.0
6,371
/* * Copyright 2014 Kevin Herron * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.digitalpetri.modbus.master import io.netty.channel.{ChannelFuture, Channel} import io.netty.util.concurrent.GenericFutureListener import java.util.concurrent.atomic.AtomicReference import scala.concurrent._ import scala.util.{Failure, Success} object AbstractChannelManager { private sealed trait State private case object Idle extends State private case class Connecting(p: Promise[Channel]) extends State private case class Connected(channel: Channel) extends State } abstract class AbstractChannelManager { import AbstractChannelManager._ private[this] val state = new AtomicReference[State](Idle) def getChannel: Either[Future[Channel], Channel] = { state.get match { case s@Idle => val p: Promise[Channel] = promise() val nextState = Connecting(p) if (state.compareAndSet(s, nextState)) Left(connect(nextState, p)) else getChannel case s@Connecting(p) => Left(p.future) case s@Connected(channel) => Right(channel) } } private[this] def connect(expectedState: State, channelPromise: Promise[Channel]): Future[Channel] = { channelPromise.future.onComplete { case Success(ch) => if (state.compareAndSet(expectedState, Connected(ch))) { ch.closeFuture().addListener(new GenericFutureListener[ChannelFuture] { def operationComplete(future: ChannelFuture) { state.set(Idle) } }) } case Failure(ex) => state.compareAndSet(expectedState, Idle) } connect(channelPromise) channelPromise.future } /** * Make a 
connection, completing the Promise with the resulting Channel. */ def connect(channelPromise: Promise[Channel]): Unit /** * ExecutionContext to run completion callbacks on. */ protected implicit val executionContext: ExecutionContext def getStatus: String = state.get match { case s@Idle => "Idle" case s@Connecting(_) => "Connecting" case s@Connected(_) => "Connected" } def disconnect(): Unit = { state.get match { case s@Connecting(p) => p.future.onSuccess { case ch => ch.close() } case s@Connected(ch) => ch.close() case s@Idle => // No-op } } }
digitalpetri/scala-modbus-tcp
modbus-master/src/main/scala/com/digitalpetri/modbus/master/AbstractChannelManager.scala
Scala
apache-2.0
2,849
package nl.rabobank.oss.rules.dsl.core import LijstBewerkingenGlossary$._ import nl.rabobank.oss.rules.dsl.nl.grammar._ import nl.rabobank.oss.rules.dsl.nl.grammar.DslCondition._ import nl.rabobank.oss.rules.finance.nl._ import nl.rabobank.oss.rules.engine._ import scala.language.implicitConversions class NietLijstBewerkingen extends Berekening ( Gegeven (altijd) Bereken GetalC is GetalA * GetalB en GetalD is GetalA + GetalB en GetalE is GetalA - GetalB en GetalF is GetalA / GetalB ) class LijstOptellingen extends Berekening ( Gegeven (altijd) Bereken OptellingLijstEnLijst is InvoerLijstA + InvoerLijstB en OptellingLijstEnGetal is InvoerLijstA + GetalA en OptellingGetalEnLijst is GetalA + InvoerLijstA en OptellingEnkeleLijstEnEnkeleLijst is InvoerLijstEnkeleC + InvoerLijstEnkeleD en OptellingEnkeleLijstEnGetal is InvoerLijstEnkeleC + GetalA en OptellingGetalEnEnkeleLijst is GetalA + InvoerLijstEnkeleD ) class LijstSubtracties extends Berekening ( Gegeven (altijd) Bereken SubtractieLijstEnLijst is InvoerLijstA - InvoerLijstB en SubtractieLijstEnGetal is InvoerLijstA - GetalA en SubtractieGetalEnLijst is GetalA - InvoerLijstA en SubtractieEnkeleLijstEnEnkeleLijst is InvoerLijstEnkeleC - InvoerLijstEnkeleD en SubtractieEnkeleLijstEnGetal is InvoerLijstEnkeleC - GetalA en SubtractieGetalEnEnkeleLijst is GetalA - InvoerLijstEnkeleD en SubtractieLijstVanLijstResultaat is (substractie van SubtractieLijstVanLijst) ) class LijstVermenigvuldigingen extends Berekening ( Gegeven (altijd) Bereken VermenigvuldigingLijstEnLijst is InvoerLijstA * InvoerLijstB en VermenigvuldigingLijstEnGetal is InvoerLijstA * GetalA en VermenigvuldigingGetalEnLijst is GetalA * InvoerLijstA en VermenigvuldigingEnkeleLijstEnEnkeleLijst is InvoerLijstEnkeleC * InvoerLijstEnkeleD en VermenigvuldigingEnkeleLijstEnGetal is InvoerLijstEnkeleC * GetalA en VermenigvuldigingGetalEnEnkeleLijst is GetalA * InvoerLijstEnkeleD ) class LijstDelingen extends Berekening ( Gegeven (altijd) Bereken 
DelingLijstEnLijst is InvoerLijstA / InvoerLijstB en DelingLijstEnGetal is InvoerLijstA / GetalA en DelingGetalEnLijst is GetalA / InvoerLijstA en DelingEnkeleLijstEnEnkeleLijst is InvoerLijstEnkeleC / InvoerLijstEnkeleD en DelingEnkeleLijstEnGetal is InvoerLijstEnkeleC / GetalA en DelingGetalEnEnkeleLijst is GetalA / InvoerLijstEnkeleD ) class LijstElementKeuzes extends Berekening ( Gegeven (altijd) Bereken EersteElementVan is (element(0) van InvoerLijstA) en LaagsteElementA is (laagste van InvoerLijstB) en LaagsteElementB is (laagste van InvoerLijstSerieE) en LaagsteElementC is (laagste van InvoerLijstF) en HoogsteElementA is (hoogste van InvoerLijstB) en HoogsteElementB is (hoogste van InvoerLijstSerieE) en HoogsteElementC is (hoogste van InvoerLijstF) ) class LijstGemiddelden extends Berekening ( Gegeven (altijd) Bereken GemiddeldeA is (gemiddelde van InvoerLijstB) en GemiddeldeB is (gemiddelde van InvoerLijstSerieE) en GemiddeldeC is (gemiddelde van InvoerLijstF) en GemiddeldeD is (gemiddelde van InvoerLijstEnkeleC) en GemiddeldeE is (gemiddelde van InvoerLijstLeegG) en GemiddeldeListA is (gemiddelde van InvoerLijstVanLijstA) ) class LijstSommaties extends Berekening ( Gegeven (altijd) Bereken SommatieA is (totaal van InvoerLijstA) en SommatieB is (totaal van InvoerLijstEnkeleC) en SommatieC is (totaal van InvoerLijstLeegG) en SommatieListA is (totaal van InvoerLijstVanLijstA) ) class LijstConditionals extends Berekening ( Gegeven (altijd) Bereken AlsDanPerElementA is DummyFunction(InvoerLijstBedragen, 33.procent, 100.procent) ) class LijstInLijstOptelling extends Berekening ( Gegeven (altijd) Bereken LijstInLijstC is LijstInLijstA + LijstInLijstB en LijstInLijstOptellingA is (totaal van LijstInLijstA) en LijstInLijstBO is (totaal van LijstInLijstInLijstA) ) object DummyFunction { def apply[T](input: DslEvaluation[List[Bedrag]], wegingPositiefInkomen: DslEvaluation[Percentage], wegingNegatiefInkomen: DslEvaluation[Percentage]): DslEvaluation[List[Percentage]] 
= { val newCondition = andCombineConditions(input.condition, wegingPositiefInkomen.condition, wegingNegatiefInkomen.condition) DslEvaluation(newCondition, new Evaluation[List[Percentage]] { override def apply(c: Context): Option[List[Percentage]] = { val wegingsFactorP: Percentage = wegingPositiefInkomen.evaluation(c).get val wegingsFactorN: Percentage = wegingNegatiefInkomen.evaluation(c).get input.evaluation(c) match { case Some(x) => Some(x.map ( a => if (a < 0.euro) wegingsFactorN else wegingsFactorP )) case _ => None } } }) } }
rabobank-nederland/rule-engine
engine/src/test/scala/nl/rabobank/oss/rules/dsl/core/LijstBewerkingen.scala
Scala
mit
4,855
package uk.gov.gds.ier.transaction.crown.applicationFormVote import uk.gov.gds.ier.transaction.crown.CrownControllers import com.google.inject.{Inject, Singleton} import uk.gov.gds.ier.serialiser.JsonSerialiser import uk.gov.gds.ier.config.Config import uk.gov.gds.ier.security.EncryptionService import uk.gov.gds.ier.step.CrownStep import uk.gov.gds.ier.step.Routes import uk.gov.gds.ier.model.{WaysToVoteType} import uk.gov.gds.ier.validation.ErrorTransformForm import uk.gov.gds.ier.transaction.crown.InprogressCrown import uk.gov.gds.ier.assets.RemoteAssets @Singleton class PostalVoteStep @Inject ()( val serialiser: JsonSerialiser, val config: Config, val encryptionService : EncryptionService, val remoteAssets: RemoteAssets, val crown: CrownControllers ) extends CrownStep with PostalOrProxyVoteForms with PostalOrProxyVoteMustache { val wayToVote = WaysToVoteType.ByPost val validation = postalOrProxyVoteForm val routing = Routes( get = routes.PostalVoteStep.get, post = routes.PostalVoteStep.post, editGet = routes.PostalVoteStep.editGet, editPost = routes.PostalVoteStep.editPost ) def nextStep(currentState: InprogressCrown) = { crown.ContactStep } }
michaeldfallen/ier-frontend
app/uk/gov/gds/ier/transaction/crown/applicationFormVote/PostalVoteStep.scala
Scala
mit
1,228
package de.zalando.beard.ast import scala.collection.immutable.Seq sealed trait Statement sealed trait Interpolation extends Statement case class AttrInterpolation(identifier: Identifier, attributes: Seq[Attribute] = Seq.empty) extends Interpolation { def attributeMap = attributes.map(attr => attr.key -> attr.stringValue).toMap } case class IdInterpolation(identifier: CompoundIdentifier) extends Interpolation case class YieldStatement() extends Statement case class ExtendsStatement(template: String) extends Statement case class RenderStatement(template: String, localValues: Seq[Attribute] = Seq.empty) extends Statement case class BlockStatement(identifier: Identifier, statements: Seq[Statement] = Seq.empty) extends Statement case class ContentForStatement(identifier: Identifier, statements: Seq[Statement] = Seq.empty) extends Statement case class IfStatement(condition: CompoundIdentifier, ifStatements: Seq[Statement], elseStatements: Seq[Statement] = Seq.empty) extends Statement case class ForStatement(iterator: Identifier, collection: CompoundIdentifier, statements: Seq[Statement] = Seq.empty) extends Statement sealed trait Attribute { def key: String def stringValue: Option[String] def identifier: Option[CompoundIdentifier] } case class AttributeWithValue(key: String, value: String) extends Attribute { def identifier = None def stringValue = Some(value) } case class AttributeWithIdentifier(key: String, id: CompoundIdentifier) extends Attribute { def identifier = Some(id) def stringValue = None } trait HasText { def text: String } case class Text(text: String) extends Statement with HasText case class NewLine(times: Int) extends Statement with HasText { override def text: String = (1 to times).foldLeft("")((s, time) => s + "\\n") } case class White(times: Int) extends Statement with HasText { override def text: String = (1 to times).foldLeft("")((s, time) => s + " ") } case class BeardTemplate(statements: Seq[Statement], extended: Option[ExtendsStatement] = None, 
renderStatements: Seq[RenderStatement] = Seq.empty, contentForStatements: Seq[ContentForStatement] = Seq.empty) object EmptyBeardTemplate extends BeardTemplate(Seq.empty) case class CompoundIdentifier(identifierPart: String, identifierParts: Seq[String] = Seq.empty) case class Identifier(identifier: String)
slavaschmidt/beard
src/main/scala/de/zalando/beard/ast/Nodes.scala
Scala
apache-2.0
2,429
package isabelle.eclipse.ui.editors import scala.collection.JavaConverters._ import isabelle.Document.Snapshot import isabelle.Session import isabelle.eclipse.ui.annotations.{ IsabelleAnnotationConstants, IsabelleAnnotations, TheoryViewerAnnotations } import isabelle.eclipse.ui.preferences.IsabelleUIPreferences import org.eclipse.jface.preference.{IPreferenceStore, PreferenceConverter} import org.eclipse.jface.resource.{JFaceResources, LocalResourceManager, ResourceManager} import org.eclipse.jface.text.source.{ Annotation, AnnotationRulerColumn, CompositeRuler, IAnnotationAccess, IOverviewRuler, ISharedTextColors, IVerticalRuler, OverviewRuler, SourceViewer } import org.eclipse.jface.util.{IPropertyChangeListener, PropertyChangeEvent} import org.eclipse.swt.graphics.Color import org.eclipse.swt.widgets.Composite import org.eclipse.ui.texteditor.{ AnnotationPreference, DefaultMarkerAnnotationAccess, MarkerAnnotationPreferences, SourceViewerDecorationSupport } import org.eclipse.ui.texteditor.AbstractDecoratedTextEditorPreferenceConstants import org.eclipse.ui.texteditor.AbstractTextEditor._ /** * An extension of SourceViewer with Isabelle theory specific customisations. * * Most adapted from AbstractDecoratedTextEditor and AbstractTextEditor. 
* * @author Andrius Velykis */ class IsabelleTheorySourceViewer private ( parent: Composite, session: => Option[Session], snapshot: => Option[Snapshot], style: Int, resourceManager: ResourceManager, sharedColors: ISharedTextColors, annotationAccess: IAnnotationAccess, annotationPrefs: MarkerAnnotationPreferences, verticalRuler: IVerticalRuler, overviewRuler: IOverviewRuler, targetEditor: => Option[TheoryEditor]) extends SourceViewer(parent, verticalRuler, overviewRuler, true, style) with FontUpdates { private val configuration = new IsabelleTheoryViewerConfiguration( session, snapshot, targetEditor, resourceManager) configure(configuration) private val decorationSupport = configureDecorationSupport() private val annotations = new TheoryViewerAnnotations(snapshot, annotationModel) def annotationModel: Option[IsabelleAnnotations] = Option(getAnnotationModel) match { case Some(isa: IsabelleAnnotations) => Some(isa) case _ => None } def fontKey = IsabelleUIPreferences.ISABELLE_FONT val preferenceListener = new IPropertyChangeListener { override def propertyChange(event: PropertyChangeEvent) = handlePreferenceStoreChanged(event) } configuration.preferenceStore.addPropertyChangeListener(preferenceListener) initializeViewerColors() private def configureDecorationSupport(): SourceViewerDecorationSupport = { import IsabelleTheorySourceViewer.AnnotationPrefsScala import AbstractDecoratedTextEditorPreferenceConstants._ val decorationSupport = new SourceViewerDecorationSupport( this, overviewRuler, annotationAccess, sharedColors) annotationPrefs.iterator foreach (pref => decorationSupport.setAnnotationPreference(pref)) decorationSupport.setCursorLinePainterPreferenceKeys( EDITOR_CURRENT_LINE, EDITOR_CURRENT_LINE_COLOR) decorationSupport.setMarginPainterPreferenceKeys( EDITOR_PRINT_MARGIN, EDITOR_PRINT_MARGIN_COLOR, EDITOR_PRINT_MARGIN_COLUMN) decorationSupport.setSymbolicFontName(fontKey) decorationSupport.install(configuration.preferenceStore) decorationSupport } def 
updateAnnotations() = annotations.updateAnnotations() def dispose() { configuration.preferenceStore.removePropertyChangeListener(preferenceListener) resourceManager.dispose() decorationSupport.dispose() } private def handlePreferenceStoreChanged(event: PropertyChangeEvent) { // notify configuration to update syntax highlighting configuration.handlePropertyChangeEvent(event) if (viewerColorPrefs(event.getProperty)) { initializeViewerColors() } // invalidate text presentation, otherwise the syntax highlighting does not get changed // TODO investigate a more precise refresh (not on every preference change) invalidateTextPresentation() // TODO support other preference changes (see TextEditor#handlePreferenceStoreChanged // and its parents) - issue #37 } private val viewerColorPrefs = Set( PREFERENCE_COLOR_FOREGROUND, PREFERENCE_COLOR_FOREGROUND_SYSTEM_DEFAULT, PREFERENCE_COLOR_BACKGROUND, PREFERENCE_COLOR_BACKGROUND_SYSTEM_DEFAULT, PREFERENCE_COLOR_SELECTION_FOREGROUND, PREFERENCE_COLOR_SELECTION_FOREGROUND_SYSTEM_DEFAULT, PREFERENCE_COLOR_SELECTION_BACKGROUND, PREFERENCE_COLOR_SELECTION_BACKGROUND_SYSTEM_DEFAULT) /** * Initializes the fore- and background colors of the source viewer for both * normal and selected text. 
*/ // adapted from AbstractTextEditor.initializeViewerColors private def initializeViewerColors() { val store = configuration.preferenceStore val styledText = getTextWidget def setColor(setter: Color => Unit, defaultKey: String, prefKey: String) { val col = if (store.getBoolean(defaultKey)) None else color(store, prefKey) setter(col.orNull) } setColor(styledText.setForeground, PREFERENCE_COLOR_FOREGROUND_SYSTEM_DEFAULT, PREFERENCE_COLOR_FOREGROUND) setColor(styledText.setBackground, PREFERENCE_COLOR_BACKGROUND_SYSTEM_DEFAULT, PREFERENCE_COLOR_BACKGROUND) setColor(styledText.setSelectionForeground, PREFERENCE_COLOR_SELECTION_FOREGROUND_SYSTEM_DEFAULT, PREFERENCE_COLOR_SELECTION_FOREGROUND) setColor(styledText.setSelectionBackground, PREFERENCE_COLOR_SELECTION_BACKGROUND_SYSTEM_DEFAULT, PREFERENCE_COLOR_SELECTION_BACKGROUND) } /** * Creates a color from the information stored in the given preference store. */ private def color(store: IPreferenceStore, key: String): Option[Color] = if (store.contains(key)) { val rgb = if (store.isDefault(key)) { PreferenceConverter.getDefaultColor(store, key) } else { PreferenceConverter.getColor(store, key) } val color = resourceManager.createColor(rgb) Some(color) } else { None } } /** * @author Andrius Velykis */ object IsabelleTheorySourceViewer { private implicit class AnnotationPrefsScala(prefs: MarkerAnnotationPreferences) { def iterator: Iterator[AnnotationPreference] = prefs.getAnnotationPreferences.iterator. 
asInstanceOf[java.util.Iterator[AnnotationPreference]].asScala } private def createViewerAnnotations(sharedColors: ISharedTextColors) : (IAnnotationAccess, MarkerAnnotationPreferences, IOverviewRuler) = { val annotationAccess = new DefaultMarkerAnnotationAccess val annotationPreferences = new MarkerAnnotationPreferences val verticalRulerWidth = 12 val ruler = new OverviewRuler(annotationAccess, verticalRulerWidth, sharedColors) annotationPreferences.iterator foreach { pref => if (pref.contributesToHeader) { ruler.addHeaderAnnotationType(pref.getAnnotationType) } } (annotationAccess, annotationPreferences, ruler) } private def createVerticalRuler(annotationAccess: IAnnotationAccess): IVerticalRuler = { val ruler = new CompositeRuler val verticalRulerWidth = 12 val anotationRulerColumn = new AnnotationRulerColumn(verticalRulerWidth, annotationAccess) // add types explicitly // TODO load from preferences? anotationRulerColumn.addAnnotationType(Annotation.TYPE_UNKNOWN) val annotationTypes = IsabelleAnnotationConstants.annotationTypes.keySet annotationTypes foreach anotationRulerColumn.addAnnotationType // temporarily remove Info annotations // TODO review this anotationRulerColumn.removeAnnotationType("isabelle.eclipse.ui.annotation.info") ruler.addDecorator(0, anotationRulerColumn) ruler } def apply(parent: Composite, session: => Option[Session], snapshot: => Option[Snapshot], targetEditor: => Option[TheoryEditor], style: Int): IsabelleTheorySourceViewer = { val resourceManager = new LocalResourceManager(JFaceResources.getResources) val sharedColors = new ManagedTextColors(resourceManager) val (annotationAccess, annotationPrefs, overviewRuler) = createViewerAnnotations(sharedColors) val verticalRuler = createVerticalRuler(annotationAccess) new IsabelleTheorySourceViewer(parent, session, snapshot, style, resourceManager, sharedColors, annotationAccess, annotationPrefs, verticalRuler, overviewRuler, targetEditor) } }
andriusvelykis/isabelle-eclipse
isabelle.eclipse.ui/src/isabelle/eclipse/ui/editors/IsabelleTheorySourceViewer.scala
Scala
epl-1.0
8,714
/* * ServerImpl.scala * (ScalaCollider) * * Copyright (c) 2008-2021 Hanns Holger Rutz. All rights reserved. * * This software is published under the GNU Affero General Public License v3+ * * * For further information, please contact Hanns Holger Rutz at * contact@sciss.de */ package de.sciss.synth package impl import de.sciss.model.impl.ModelImpl import de.sciss.osc import de.sciss.processor.Processor import de.sciss.processor.impl.ProcessorImpl import de.sciss.synth.message.StatusReply import java.io.{File, IOException} import java.util.{Timer, TimerTask} import scala.concurrent.duration.Duration import scala.concurrent.{Future, Promise} import scala.sys.process.{Process, ProcessLogger} import scala.util.control.NonFatal private[synth] object ServerImpl { @volatile private var _default: Server = _ def default: Server = { val res = _default if (res == null) throw new IllegalStateException("There is no default Server yet") res } private[impl] def add(s: Server): Unit = this.synchronized { if (_default == null) _default = s } private[impl] def remove(s: Server): Unit = this.synchronized { if (_default == s) _default = null } } private[synth] final class NRTImpl(dur: Double, sCfg: Server.Config) extends ProcessorImpl[Int, Processor[Int]] with Processor[Int] { protected def body(): Int = { val procArgs = sCfg.toNonRealtimeArgs val procBuilder = Process(procArgs, Some(new File(sCfg.program).getParentFile)) val log: ProcessLogger = new ProcessLogger { def buffer[A](f: => A): A = f def out(lineL: => String): Unit = { val line: String = lineL if (line.startsWith("nextOSCPacket")) { val time = line.substring(14).toFloat val prog = time / dur progress = prog } else { // ignore the 'start time <num>' message, and also the 'Found <num> LADSPA plugins' on Linux if (!line.startsWith("start time ") && !line.endsWith(" LADSPA plugins")) { Console.out.println(line) } } } def err(line: => String): Unit = Console.err.println(line) } val _proc = procBuilder.run(log) checkAborted() 
await(Processor.fromProcess("scsynth -N", _proc)) } } private[synth] final class OnlineServerImpl(val name: String, c: osc.Client, val addr: Server.Address, val config: Server.Config, val clientConfig: Client.Config, var countsVar: message.StatusReply, timeOutTimer: java.util.Timer) extends ServerImpl { server => import clientConfig.executionContext private[this] val condSync = new AnyRef @volatile private[this] var _condition : Server.Condition = Server.Running private[this] var pendingCondition: Server.Condition = Server.NoPending private[this] var aliveThread : Option[StatusWatcher] = None // ---- constructor ---- // OSCReceiverActor.start() c.action = OSCReceiverActor receive _ ServerImpl.add(server) def isConnected: Boolean = c.isConnected def condition: Server.Condition = _condition def !(p: osc.Packet): Unit = c ! p def !![A](p: osc.Packet, timeout: Duration)(handler: PartialFunction[osc.Message, A]): Future[A] = { val promise = Promise[A]() val res = promise.future val oh = new OSCTimeOutHandler(handler, promise) OSCReceiverActor.addHandler(oh) server ! p // only after addHandler! 
val tt = new TimerTask { override def run(): Unit = promise.tryFailure(message.Timeout()) } if (timeout.isFinite) { timeOutTimer.schedule(tt, timeout.toMillis) res.andThen { case _ => tt.cancel() } } else { res } } def serverOffline(): Unit = condSync.synchronized { stopAliveThread() condition_=(Server.Offline) } def counts: StatusReply = countsVar private[synth] def counts_=(newCounts: message.StatusReply): Unit = { countsVar = newCounts dispatch(Server.Counts(newCounts)) } def dumpInOSC(mode: osc.Dump, filter: osc.Packet => Boolean): Unit = c.dumpIn(mode, filter = { case _: message.StatusReply => false case p => filter(p) }) def dumpOutOSC(mode: osc.Dump, filter: osc.Packet => Boolean): Unit = c.dumpOut(mode, filter = { case message.Status => false case p => filter(p) }) private def disposeImpl(): Unit = { nodeManager .clear() bufManager .clear() // OSCReceiverActor.clear() ServerImpl.remove(this) OSCReceiverActor.dispose() try { c.close() } catch { case NonFatal(e) => e.printStackTrace() } } def isRunning: Boolean = _condition == Server.Running def isOffline: Boolean = _condition == Server.Offline def addResponder (resp: message.Responder): Unit = OSCReceiverActor.addHandler (resp) def removeResponder(resp: message.Responder): Unit = OSCReceiverActor.removeHandler(resp) def quit(): Unit = { this ! quitMsg dispose() } def dispose(): Unit = condSync.synchronized { serverOffline() } def initTree(): Unit = { nodeManager.register(defaultGroup) server ! 
defaultGroup.newMsg(rootNode, addToHead) } private[synth] def condition_=(newCondition: Server.Condition): Unit = condSync.synchronized { if (newCondition != _condition) { _condition = newCondition if (newCondition == Server.Offline) { pendingCondition = Server.NoPending disposeImpl() } // else if( newCondition == Running ) { // if( pendingCondition == Booting ) { // pendingCondition = NoPending // collBootCompletion.foreach( action => try { // action.apply( this ) // } // catch { case e => e.printStackTrace() } // ) // collBootCompletion = Queue.empty // } // } dispatch(newCondition) } } def startAliveThread(delay: Float = 0.25f, period: Float = 0.25f, deathBounces: Int = 25): Unit = condSync.synchronized { if (aliveThread.isEmpty) { val statusWatcher = new StatusWatcher(delay, period, deathBounces) aliveThread = Some(statusWatcher) statusWatcher.start() } } def stopAliveThread(): Unit = condSync.synchronized { aliveThread.foreach(_.stop()) aliveThread = None } private object OSCReceiverActor { // import scala.concurrent._ private val sync = new AnyRef @volatile private var handlers = Set.empty[message.Handler] private def checkHandlers(m: osc.Message): Unit = handlers.foreach { h => if (h.handle(m)) sync.synchronized(handlers -= h) } // note: deferring to clientConfig.executionContext is dangerous, // because it can produce non-deterministic message delivery. I'm not // sure why we want to decouple this from `c.action` in the first place. // For now deliver on the OSC receiver thread! // Future { blocking { ... 
}} val receive: osc.Packet => Unit = { case m: osc.Message with message.NodeChange => // println(s"---- NodeChange: $nodeMsg") nodeManager.nodeChange(m) checkHandlers(m) case m: message.BufferInfo => bufManager.bufferInfo(m) checkHandlers(m) case m: message.StatusReply => aliveThread.foreach(_.statusReply(m)) checkHandlers(m) case m: osc.Message => checkHandlers(m) case _ => // ignore bundles } def clear(): Unit = { val h = sync.synchronized { val res = handlers handlers = Set.empty res } h.foreach(_.removed()) } def dispose(): Unit = clear() def addHandler(h: message.Handler): Unit = sync.synchronized(handlers += h) def removeHandler(h: message.Handler): Unit = { val seen = sync.synchronized { val res = handlers.contains(h) if (res) handlers -= h res } if (seen) h.removed() } } private final class OSCTimeOutHandler[A](fun: PartialFunction[osc.Message, A], promise: Promise[A]) extends message.Handler { def handle(msg: osc.Message): Boolean = { if (promise.isCompleted) return true val handled = fun.isDefinedAt(msg) if (handled) try { promise.trySuccess(fun(msg)) } catch { case NonFatal(e) => promise.tryFailure(e) } handled } def removed(): Unit = () } // -------- internal class StatusWatcher -------- private final class StatusWatcher(delay: Float, period: Float, deathBounces: Int) extends Runnable { watcher => private[this] var alive = deathBounces private[this] val delayMillis = (delay * 1000).toInt private[this] val periodMillis = (period * 1000).toInt private[this] var timer = Option.empty[Timer] private[this] var callServerContacted = true private[this] val sync = new AnyRef def start(): Unit = { stop() val t = new Timer("StatusWatcher", true) t.schedule(new TimerTask { def run(): Unit = watcher.run() }, delayMillis, periodMillis) timer = Some(t) } def stop(): Unit = { timer.foreach { t => t.cancel() timer = None } } def run(): Unit = { sync.synchronized { alive -= 1 if (alive < 0) { callServerContacted = true condition = Server.Offline } } try { queryCounts() } 
catch { case e: IOException => Server.printError("Server.status", e) } } def statusReply(msg: message.StatusReply): Unit = sync.synchronized { alive = deathBounces // note: put the counts before running // because that way e.g. the sampleRate // is instantly available counts = msg if (!isRunning && callServerContacted) { callServerContacted = false // serverContacted condition = Server.Running } } } } private[synth] final class OfflineServerImpl(val name: String, val config: Server.Config, val clientConfig: Client.Config, val counts: message.StatusReply) extends ServerImpl { def isConnected = false def isRunning = true def isOffline = false private def offlineException() = new Exception("Server is not connected") def !(p: osc.Packet): Unit = throw offlineException() def !![A](packet: osc.Packet, timeout: Duration)(handler: PartialFunction[osc.Message, A]): Future[A] = Future.failed(offlineException()) def condition: Server.Condition = Server.Running def startAliveThread(delay: Float, period: Float, deathBounces: Int): Unit = () def stopAliveThread(): Unit = () def dumpInOSC (mode: osc.Dump, filter: osc.Packet => Boolean): Unit = () def dumpOutOSC(mode: osc.Dump, filter: osc.Packet => Boolean): Unit = () private[synth] def addResponder (resp: message.Responder): Unit = () private[synth] def removeResponder(resp: message.Responder): Unit = () def dispose(): Unit = () def quit (): Unit = () def addr: Server.Address = Server.mkAddress(config) } private[synth] abstract class ServerImpl extends Server with ModelImpl[Server.Update] { server => final val rootNode = Group(this, 0) final val defaultGroup = Group(this, 1) final val nodeManager = new NodeManager (this) final val bufManager = new BufferManager(this) private[this] val nodeAllocator = new NodeIdAllocator(clientConfig.clientId, clientConfig.nodeIdOffset) private[this] val controlBusAllocator = new ContiguousBlockAllocator(config.controlBusChannels) private[this] val audioBusAllocator = new 
ContiguousBlockAllocator(config.audioBusChannels, config.internalBusIndex) private[this] val bufferAllocator = new ContiguousBlockAllocator(config.audioBuffers) private[this] var uniqueId = 0 private[this] val uniqueSync = new AnyRef final def isLocal: Boolean = Server.isLocal(addr) final def nextNodeId(): Int = nodeAllocator.alloc() final def allocControlBus(numChannels: Int): Int = controlBusAllocator.alloc(numChannels) final def allocAudioBus (numChannels: Int): Int = audioBusAllocator .alloc(numChannels) final def allocBuffer (numChannels: Int): Int = bufferAllocator .alloc(numChannels) final def freeControlBus(index: Int): Unit = controlBusAllocator.free(index) final def freeAudioBus (index: Int): Unit = audioBusAllocator .free(index) final def freeBuffer (index: Int): Unit = bufferAllocator .free(index) final def nextSyncId(): Int = uniqueSync.synchronized { val res = uniqueId; uniqueId += 1; res } final def sampleRate: Double = counts.sampleRate final def queryCounts(): Unit = this ! message.Status final def dumpOSC(mode: osc.Dump, filter: osc.Packet => Boolean): Unit = { dumpInOSC (mode, filter) dumpOutOSC(mode, filter) } final def syncMsg(): message.Sync = message.Sync(nextSyncId()) final def quitMsg: message.ServerQuit.type = message.ServerQuit }
Sciss/ScalaCollider
shared/src/main/scala/de/sciss/synth/impl/ServerImpl.scala
Scala
lgpl-2.1
13,531
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources.parquet import java.math.{BigDecimal, BigInteger} import java.nio.ByteOrder import java.time.{ZoneId, ZoneOffset} import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer import org.apache.parquet.column.Dictionary import org.apache.parquet.io.api.{Binary, Converter, GroupConverter, PrimitiveConverter} import org.apache.parquet.schema.{GroupType, OriginalType, Type} import org.apache.parquet.schema.OriginalType.LIST import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.{BINARY, FIXED_LEN_BYTE_ARRAY, INT32, INT64, INT96} import org.apache.spark.internal.Logging import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, CaseInsensitiveMap, DateTimeUtils, GenericArrayData} import org.apache.spark.sql.execution.datasources.DataSourceUtils import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.UTF8String /** * A [[ParentContainerUpdater]] is used by a Parquet converter to set converted 
values to some * corresponding parent container. For example, a converter for a `StructType` field may set * converted values to a [[InternalRow]]; or a converter for array elements may append converted * values to an [[ArrayBuffer]]. */ private[parquet] trait ParentContainerUpdater { /** Called before a record field is being converted */ def start(): Unit = () /** Called after a record field is being converted */ def end(): Unit = () def set(value: Any): Unit = () def setBoolean(value: Boolean): Unit = set(value) def setByte(value: Byte): Unit = set(value) def setShort(value: Short): Unit = set(value) def setInt(value: Int): Unit = set(value) def setLong(value: Long): Unit = set(value) def setFloat(value: Float): Unit = set(value) def setDouble(value: Double): Unit = set(value) } /** A no-op updater used for root converter (who doesn't have a parent). */ private[parquet] object NoopUpdater extends ParentContainerUpdater private[parquet] trait HasParentContainerUpdater { def updater: ParentContainerUpdater } /** * A convenient converter class for Parquet group types with a [[HasParentContainerUpdater]]. */ private[parquet] abstract class ParquetGroupConverter(val updater: ParentContainerUpdater) extends GroupConverter with HasParentContainerUpdater /** * Parquet converter for Parquet primitive types. Note that not all Spark SQL atomic types * are handled by this converter. Parquet primitive types are only a subset of those of Spark * SQL. For example, BYTE, SHORT, and INT in Spark SQL are all covered by INT32 in Parquet. 
*/ private[parquet] class ParquetPrimitiveConverter(val updater: ParentContainerUpdater) extends PrimitiveConverter with HasParentContainerUpdater { override def addBoolean(value: Boolean): Unit = updater.setBoolean(value) override def addInt(value: Int): Unit = updater.setInt(value) override def addLong(value: Long): Unit = updater.setLong(value) override def addFloat(value: Float): Unit = updater.setFloat(value) override def addDouble(value: Double): Unit = updater.setDouble(value) override def addBinary(value: Binary): Unit = updater.set(value.getBytes) } /** * A [[ParquetRowConverter]] is used to convert Parquet records into Catalyst [[InternalRow]]s. * Since Catalyst `StructType` is also a Parquet record, this converter can be used as root * converter. Take the following Parquet type as an example: * {{{ * message root { * required int32 f1; * optional group f2 { * required double f21; * optional binary f22 (utf8); * } * } * }}} * 5 converters will be created: * * - a root [[ParquetRowConverter]] for [[org.apache.parquet.schema.MessageType]] `root`, * which contains: * - a [[ParquetPrimitiveConverter]] for required * [[org.apache.parquet.schema.OriginalType.INT_32]] field `f1`, and * - a nested [[ParquetRowConverter]] for optional [[GroupType]] `f2`, which contains: * - a [[ParquetPrimitiveConverter]] for required * [[org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.DOUBLE]] field `f21`, and * - a [[ParquetStringConverter]] for optional [[org.apache.parquet.schema.OriginalType.UTF8]] * string field `f22` * * When used as a root converter, [[NoopUpdater]] should be used since root converters don't have * any "parent" container. * * @param schemaConverter A utility converter used to convert Parquet types to Catalyst types. * @param parquetType Parquet schema of Parquet records * @param catalystType Spark SQL schema that corresponds to the Parquet record type. User-defined * types should have been expanded. 
* @param convertTz the optional time zone to convert to int96 data * @param datetimeRebaseMode the mode of rebasing date/timestamp from Julian to Proleptic Gregorian * calendar * @param int96RebaseMode the mode of rebasing INT96 timestamp from Julian to Proleptic Gregorian * calendar * @param updater An updater which propagates converted field values to the parent container */ private[parquet] class ParquetRowConverter( schemaConverter: ParquetToSparkSchemaConverter, parquetType: GroupType, catalystType: StructType, convertTz: Option[ZoneId], datetimeRebaseMode: LegacyBehaviorPolicy.Value, int96RebaseMode: LegacyBehaviorPolicy.Value, updater: ParentContainerUpdater) extends ParquetGroupConverter(updater) with Logging { assert( parquetType.getFieldCount <= catalystType.length, s"""Field count of the Parquet schema is greater than the field count of the Catalyst schema: | |Parquet schema: |$parquetType |Catalyst schema: |${catalystType.prettyJson} """.stripMargin) assert( !catalystType.existsRecursively(_.isInstanceOf[UserDefinedType[_]]), s"""User-defined types in Catalyst schema should have already been expanded: |${catalystType.prettyJson} """.stripMargin) logDebug( s"""Building row converter for the following schema: | |Parquet form: |$parquetType |Catalyst form: |${catalystType.prettyJson} """.stripMargin) /** * Updater used together with field converters within a [[ParquetRowConverter]]. It propagates * converted filed values to the `ordinal`-th cell in `currentRow`. 
*/ private final class RowUpdater(row: InternalRow, ordinal: Int) extends ParentContainerUpdater { override def set(value: Any): Unit = row(ordinal) = value override def setBoolean(value: Boolean): Unit = row.setBoolean(ordinal, value) override def setByte(value: Byte): Unit = row.setByte(ordinal, value) override def setShort(value: Short): Unit = row.setShort(ordinal, value) override def setInt(value: Int): Unit = row.setInt(ordinal, value) override def setLong(value: Long): Unit = row.setLong(ordinal, value) override def setDouble(value: Double): Unit = row.setDouble(ordinal, value) override def setFloat(value: Float): Unit = row.setFloat(ordinal, value) } private[this] val currentRow = new SpecificInternalRow(catalystType.map(_.dataType)) /** * The [[InternalRow]] converted from an entire Parquet record. */ def currentRecord: InternalRow = currentRow private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInRead( datetimeRebaseMode, "Parquet") private val timestampRebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInRead( datetimeRebaseMode, "Parquet") private val int96RebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInRead( int96RebaseMode, "Parquet INT96") // Converters for each field. 
private[this] val fieldConverters: Array[Converter with HasParentContainerUpdater] = { // (SPARK-31116) Use case insensitive map if spark.sql.caseSensitive is false // to prevent throwing IllegalArgumentException when searching catalyst type's field index val catalystFieldNameToIndex = if (SQLConf.get.caseSensitiveAnalysis) { catalystType.fieldNames.zipWithIndex.toMap } else { CaseInsensitiveMap(catalystType.fieldNames.zipWithIndex.toMap) } parquetType.getFields.asScala.map { parquetField => val fieldIndex = catalystFieldNameToIndex(parquetField.getName) val catalystField = catalystType(fieldIndex) // Converted field value should be set to the `fieldIndex`-th cell of `currentRow` newConverter(parquetField, catalystField.dataType, new RowUpdater(currentRow, fieldIndex)) }.toArray } // Updaters for each field. private[this] val fieldUpdaters: Array[ParentContainerUpdater] = fieldConverters.map(_.updater) override def getConverter(fieldIndex: Int): Converter = fieldConverters(fieldIndex) override def end(): Unit = { var i = 0 while (i < fieldUpdaters.length) { fieldUpdaters(i).end() i += 1 } updater.set(currentRow) } override def start(): Unit = { var i = 0 val numFields = currentRow.numFields while (i < numFields) { currentRow.setNullAt(i) i += 1 } i = 0 while (i < fieldUpdaters.length) { fieldUpdaters(i).start() i += 1 } } /** * Creates a converter for the given Parquet type `parquetType` and Spark SQL data type * `catalystType`. Converted values are handled by `updater`. 
*/ private def newConverter( parquetType: Type, catalystType: DataType, updater: ParentContainerUpdater): Converter with HasParentContainerUpdater = { catalystType match { case LongType if parquetType.getOriginalType == OriginalType.UINT_32 => new ParquetPrimitiveConverter(updater) { override def addInt(value: Int): Unit = updater.setLong(Integer.toUnsignedLong(value)) } case BooleanType | IntegerType | LongType | FloatType | DoubleType | BinaryType => new ParquetPrimitiveConverter(updater) case ByteType => new ParquetPrimitiveConverter(updater) { override def addInt(value: Int): Unit = updater.setByte(value.asInstanceOf[ByteType#InternalType]) } case ShortType => new ParquetPrimitiveConverter(updater) { override def addInt(value: Int): Unit = updater.setShort(value.asInstanceOf[ShortType#InternalType]) } // For INT32 backed decimals case t: DecimalType if parquetType.asPrimitiveType().getPrimitiveTypeName == INT32 => val metadata = parquetType.asPrimitiveType().getDecimalMetadata if (metadata == null) { // If the column is a plain INT32, we should pick the precision that can host the largest // INT32 value. new ParquetIntDictionaryAwareDecimalConverter( DecimalType.IntDecimal.precision, 0, updater) } else { new ParquetIntDictionaryAwareDecimalConverter( metadata.getPrecision, metadata.getScale, updater) } // For unsigned int64 case _: DecimalType if parquetType.getOriginalType == OriginalType.UINT_64 => new ParquetPrimitiveConverter(updater) { override def addLong(value: Long): Unit = { updater.set(Decimal(java.lang.Long.toUnsignedString(value))) } } // For INT64 backed decimals case t: DecimalType if parquetType.asPrimitiveType().getPrimitiveTypeName == INT64 => val metadata = parquetType.asPrimitiveType().getDecimalMetadata if (metadata == null) { // If the column is a plain INT64, we should pick the precision that can host the largest // INT64 value. 
new ParquetLongDictionaryAwareDecimalConverter( DecimalType.LongDecimal.precision, 0, updater) } else { new ParquetLongDictionaryAwareDecimalConverter( metadata.getPrecision, metadata.getScale, updater) } // For BINARY and FIXED_LEN_BYTE_ARRAY backed decimals case t: DecimalType if parquetType.asPrimitiveType().getPrimitiveTypeName == FIXED_LEN_BYTE_ARRAY || parquetType.asPrimitiveType().getPrimitiveTypeName == BINARY => val metadata = parquetType.asPrimitiveType().getDecimalMetadata if (metadata == null) { throw new RuntimeException(s"Unable to create Parquet converter for ${t.typeName} " + s"whose Parquet type is $parquetType without decimal metadata. Please read this " + "column/field as Spark BINARY type." ) } else { new ParquetBinaryDictionaryAwareDecimalConverter( metadata.getPrecision, metadata.getScale, updater) } case t: DecimalType => throw new RuntimeException( s"Unable to create Parquet converter for decimal type ${t.json} whose Parquet type is " + s"$parquetType. Parquet DECIMAL type can only be backed by INT32, INT64, " + "FIXED_LEN_BYTE_ARRAY, or BINARY.") case StringType => new ParquetStringConverter(updater) case TimestampType if parquetType.getOriginalType == OriginalType.TIMESTAMP_MICROS => new ParquetPrimitiveConverter(updater) { override def addLong(value: Long): Unit = { updater.setLong(timestampRebaseFunc(value)) } } case TimestampType if parquetType.getOriginalType == OriginalType.TIMESTAMP_MILLIS => new ParquetPrimitiveConverter(updater) { override def addLong(value: Long): Unit = { val micros = DateTimeUtils.millisToMicros(value) updater.setLong(timestampRebaseFunc(micros)) } } // INT96 timestamp doesn't have a logical type, here we check the physical type instead. 
case TimestampType if parquetType.asPrimitiveType().getPrimitiveTypeName == INT96 => new ParquetPrimitiveConverter(updater) { // Converts nanosecond timestamps stored as INT96 override def addBinary(value: Binary): Unit = { val julianMicros = ParquetRowConverter.binaryToSQLTimestamp(value) val gregorianMicros = int96RebaseFunc(julianMicros) val adjTime = convertTz.map(DateTimeUtils.convertTz(gregorianMicros, _, ZoneOffset.UTC)) .getOrElse(gregorianMicros) updater.setLong(adjTime) } } case DateType => new ParquetPrimitiveConverter(updater) { override def addInt(value: Int): Unit = { updater.set(dateRebaseFunc(value)) } } // A repeated field that is neither contained by a `LIST`- or `MAP`-annotated group nor // annotated by `LIST` or `MAP` should be interpreted as a required list of required // elements where the element type is the type of the field. case t: ArrayType if parquetType.getOriginalType != LIST => if (parquetType.isPrimitive) { new RepeatedPrimitiveConverter(parquetType, t.elementType, updater) } else { new RepeatedGroupConverter(parquetType, t.elementType, updater) } case t: ArrayType => new ParquetArrayConverter(parquetType.asGroupType(), t, updater) case t: MapType => new ParquetMapConverter(parquetType.asGroupType(), t, updater) case t: StructType => val wrappedUpdater = { // SPARK-30338: avoid unnecessary InternalRow copying for nested structs: // There are two cases to handle here: // // 1. Parent container is a map or array: we must make a deep copy of the mutable row // because this converter may be invoked multiple times per Parquet input record // (if the map or array contains multiple elements). // // 2. 
Parent container is a struct: we don't need to copy the row here because either: // // (a) all ancestors are structs and therefore no copying is required because this // converter will only be invoked once per Parquet input record, or // (b) some ancestor is struct that is nested in a map or array and that ancestor's // converter will perform deep-copying (which will recursively copy this row). if (updater.isInstanceOf[RowUpdater]) { // `updater` is a RowUpdater, implying that the parent container is a struct. updater } else { // `updater` is NOT a RowUpdater, implying that the parent container a map or array. new ParentContainerUpdater { override def set(value: Any): Unit = { updater.set(value.asInstanceOf[SpecificInternalRow].copy()) // deep copy } } } } new ParquetRowConverter( schemaConverter, parquetType.asGroupType(), t, convertTz, datetimeRebaseMode, int96RebaseMode, wrappedUpdater) case t => throw new RuntimeException( s"Unable to create Parquet converter for data type ${t.json} " + s"whose Parquet type is $parquetType") } } /** * Parquet converter for strings. A dictionary is used to minimize string decoding cost. */ private final class ParquetStringConverter(updater: ParentContainerUpdater) extends ParquetPrimitiveConverter(updater) { private var expandedDictionary: Array[UTF8String] = null override def hasDictionarySupport: Boolean = true override def setDictionary(dictionary: Dictionary): Unit = { this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { i => UTF8String.fromBytes(dictionary.decodeToBinary(i).getBytes) } } override def addValueFromDictionary(dictionaryId: Int): Unit = { updater.set(expandedDictionary(dictionaryId)) } override def addBinary(value: Binary): Unit = { // The underlying `ByteBuffer` implementation is guaranteed to be `HeapByteBuffer`, so here we // are using `Binary.toByteBuffer.array()` to steal the underlying byte array without copying // it. 
val buffer = value.toByteBuffer val offset = buffer.arrayOffset() + buffer.position() val numBytes = buffer.remaining() updater.set(UTF8String.fromBytes(buffer.array(), offset, numBytes)) } } /** * Parquet converter for fixed-precision decimals. */ private abstract class ParquetDecimalConverter( precision: Int, scale: Int, updater: ParentContainerUpdater) extends ParquetPrimitiveConverter(updater) { protected var expandedDictionary: Array[Decimal] = _ override def hasDictionarySupport: Boolean = true override def addValueFromDictionary(dictionaryId: Int): Unit = { updater.set(expandedDictionary(dictionaryId)) } // Converts decimals stored as INT32 override def addInt(value: Int): Unit = { addLong(value: Long) } // Converts decimals stored as INT64 override def addLong(value: Long): Unit = { updater.set(decimalFromLong(value)) } // Converts decimals stored as either FIXED_LENGTH_BYTE_ARRAY or BINARY override def addBinary(value: Binary): Unit = { updater.set(decimalFromBinary(value)) } protected def decimalFromLong(value: Long): Decimal = { Decimal(value, precision, scale) } protected def decimalFromBinary(value: Binary): Decimal = { if (precision <= Decimal.MAX_LONG_DIGITS) { // Constructs a `Decimal` with an unscaled `Long` value if possible. val unscaled = ParquetRowConverter.binaryToUnscaledLong(value) Decimal(unscaled, precision, scale) } else { // Otherwise, resorts to an unscaled `BigInteger` instead. 
Decimal(new BigDecimal(new BigInteger(value.getBytes), scale), precision, scale) } } } private class ParquetIntDictionaryAwareDecimalConverter( precision: Int, scale: Int, updater: ParentContainerUpdater) extends ParquetDecimalConverter(precision, scale, updater) { override def setDictionary(dictionary: Dictionary): Unit = { this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { id => decimalFromLong(dictionary.decodeToInt(id).toLong) } } } private class ParquetLongDictionaryAwareDecimalConverter( precision: Int, scale: Int, updater: ParentContainerUpdater) extends ParquetDecimalConverter(precision, scale, updater) { override def setDictionary(dictionary: Dictionary): Unit = { this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { id => decimalFromLong(dictionary.decodeToLong(id)) } } } private class ParquetBinaryDictionaryAwareDecimalConverter( precision: Int, scale: Int, updater: ParentContainerUpdater) extends ParquetDecimalConverter(precision, scale, updater) { override def setDictionary(dictionary: Dictionary): Unit = { this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { id => decimalFromBinary(dictionary.decodeToBinary(id)) } } } /** * Parquet converter for arrays. Spark SQL arrays are represented as Parquet lists. Standard * Parquet lists are represented as a 3-level group annotated by `LIST`: * {{{ * <list-repetition> group <name> (LIST) { <-- parquetSchema points here * repeated group list { * <element-repetition> <element-type> element; * } * } * }}} * The `parquetSchema` constructor argument points to the outermost group. * * However, before this representation is standardized, some Parquet libraries/tools also use some * non-standard formats to represent list-like structures. Backwards-compatibility rules for * handling these cases are described in Parquet format spec. 
* * @see https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#lists */ private final class ParquetArrayConverter( parquetSchema: GroupType, catalystSchema: ArrayType, updater: ParentContainerUpdater) extends ParquetGroupConverter(updater) { private[this] val currentArray = ArrayBuffer.empty[Any] private[this] val elementConverter: Converter = { val repeatedType = parquetSchema.getType(0) val elementType = catalystSchema.elementType // At this stage, we're not sure whether the repeated field maps to the element type or is // just the syntactic repeated group of the 3-level standard LIST layout. Take the following // Parquet LIST-annotated group type as an example: // // optional group f (LIST) { // repeated group list { // optional group element { // optional int32 element; // } // } // } // // This type is ambiguous: // // 1. When interpreted as a standard 3-level layout, the `list` field is just the syntactic // group, and the entire type should be translated to: // // ARRAY<STRUCT<element: INT>> // // 2. On the other hand, when interpreted as a non-standard 2-level layout, the `list` field // represents the element type, and the entire type should be translated to: // // ARRAY<STRUCT<element: STRUCT<element: INT>>> // // Here we try to convert field `list` into a Catalyst type to see whether the converted type // matches the Catalyst array element type. If it doesn't match, then it's case 1; otherwise, // it's case 2. val guessedElementType = schemaConverter.convertField(repeatedType) if (DataType.equalsIgnoreCompatibleNullability(guessedElementType, elementType)) { // If the repeated field corresponds to the element type, creates a new converter using the // type of the repeated field. 
newConverter(repeatedType, elementType, new ParentContainerUpdater { override def set(value: Any): Unit = currentArray += value }) } else { // If the repeated field corresponds to the syntactic group in the standard 3-level Parquet // LIST layout, creates a new converter using the only child field of the repeated field. assert(!repeatedType.isPrimitive && repeatedType.asGroupType().getFieldCount == 1) new ElementConverter(repeatedType.asGroupType().getType(0), elementType) } } override def getConverter(fieldIndex: Int): Converter = elementConverter override def end(): Unit = updater.set(new GenericArrayData(currentArray.toArray)) override def start(): Unit = currentArray.clear() /** Array element converter */ private final class ElementConverter(parquetType: Type, catalystType: DataType) extends GroupConverter { private var currentElement: Any = _ private[this] val converter = newConverter(parquetType, catalystType, new ParentContainerUpdater { override def set(value: Any): Unit = currentElement = value }) override def getConverter(fieldIndex: Int): Converter = converter override def end(): Unit = currentArray += currentElement override def start(): Unit = currentElement = null } } /** Parquet converter for maps */ private final class ParquetMapConverter( parquetType: GroupType, catalystType: MapType, updater: ParentContainerUpdater) extends ParquetGroupConverter(updater) { private[this] val currentKeys = ArrayBuffer.empty[Any] private[this] val currentValues = ArrayBuffer.empty[Any] private[this] val keyValueConverter = { val repeatedType = parquetType.getType(0).asGroupType() new KeyValueConverter( repeatedType.getType(0), repeatedType.getType(1), catalystType.keyType, catalystType.valueType) } override def getConverter(fieldIndex: Int): Converter = keyValueConverter override def end(): Unit = { // The parquet map may contains null or duplicated map keys. When it happens, the behavior is // undefined. // TODO (SPARK-26174): disallow it with a config. 
updater.set( new ArrayBasedMapData( new GenericArrayData(currentKeys.toArray), new GenericArrayData(currentValues.toArray))) } override def start(): Unit = { currentKeys.clear() currentValues.clear() } /** Parquet converter for key-value pairs within the map. */ private final class KeyValueConverter( parquetKeyType: Type, parquetValueType: Type, catalystKeyType: DataType, catalystValueType: DataType) extends GroupConverter { private var currentKey: Any = _ private var currentValue: Any = _ private[this] val converters = Array( // Converter for keys newConverter(parquetKeyType, catalystKeyType, new ParentContainerUpdater { override def set(value: Any): Unit = currentKey = value }), // Converter for values newConverter(parquetValueType, catalystValueType, new ParentContainerUpdater { override def set(value: Any): Unit = currentValue = value })) override def getConverter(fieldIndex: Int): Converter = converters(fieldIndex) override def end(): Unit = { currentKeys += currentKey currentValues += currentValue } override def start(): Unit = { currentKey = null currentValue = null } } } private trait RepeatedConverter { private[this] val currentArray = ArrayBuffer.empty[Any] protected def newArrayUpdater(updater: ParentContainerUpdater) = new ParentContainerUpdater { override def start(): Unit = currentArray.clear() override def end(): Unit = updater.set(new GenericArrayData(currentArray.toArray)) override def set(value: Any): Unit = currentArray += value } } /** * A primitive converter for converting unannotated repeated primitive values to required arrays * of required primitives values. 
*/ private final class RepeatedPrimitiveConverter( parquetType: Type, catalystType: DataType, parentUpdater: ParentContainerUpdater) extends PrimitiveConverter with RepeatedConverter with HasParentContainerUpdater { val updater: ParentContainerUpdater = newArrayUpdater(parentUpdater) private[this] val elementConverter: PrimitiveConverter = newConverter(parquetType, catalystType, updater).asPrimitiveConverter() override def addBoolean(value: Boolean): Unit = elementConverter.addBoolean(value) override def addInt(value: Int): Unit = elementConverter.addInt(value) override def addLong(value: Long): Unit = elementConverter.addLong(value) override def addFloat(value: Float): Unit = elementConverter.addFloat(value) override def addDouble(value: Double): Unit = elementConverter.addDouble(value) override def addBinary(value: Binary): Unit = elementConverter.addBinary(value) override def setDictionary(dict: Dictionary): Unit = elementConverter.setDictionary(dict) override def hasDictionarySupport: Boolean = elementConverter.hasDictionarySupport override def addValueFromDictionary(id: Int): Unit = elementConverter.addValueFromDictionary(id) } /** * A group converter for converting unannotated repeated group values to required arrays of * required struct values. 
*/ private final class RepeatedGroupConverter( parquetType: Type, catalystType: DataType, parentUpdater: ParentContainerUpdater) extends GroupConverter with HasParentContainerUpdater with RepeatedConverter { val updater: ParentContainerUpdater = newArrayUpdater(parentUpdater) private[this] val elementConverter: GroupConverter = newConverter(parquetType, catalystType, updater).asGroupConverter() override def getConverter(field: Int): Converter = elementConverter.getConverter(field) override def end(): Unit = elementConverter.end() override def start(): Unit = elementConverter.start() } } private[parquet] object ParquetRowConverter { def binaryToUnscaledLong(binary: Binary): Long = { // The underlying `ByteBuffer` implementation is guaranteed to be `HeapByteBuffer`, so here // we are using `Binary.toByteBuffer.array()` to steal the underlying byte array without // copying it. val buffer = binary.toByteBuffer val bytes = buffer.array() val start = buffer.arrayOffset() + buffer.position() val end = buffer.arrayOffset() + buffer.limit() var unscaled = 0L var i = start while (i < end) { unscaled = (unscaled << 8) | (bytes(i) & 0xff) i += 1 } val bits = 8 * (end - start) unscaled = (unscaled << (64 - bits)) >> (64 - bits) unscaled } def binaryToSQLTimestamp(binary: Binary): Long = { assert(binary.length() == 12, s"Timestamps (with nanoseconds) are expected to be stored in" + s" 12-byte long binaries. Found a ${binary.length()}-byte binary instead.") val buffer = binary.toByteBuffer.order(ByteOrder.LITTLE_ENDIAN) val timeOfDayNanos = buffer.getLong val julianDay = buffer.getInt DateTimeUtils.fromJulianDay(julianDay, timeOfDayNanos) } }
BryanCutler/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
Scala
apache-2.0
32,251
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.server

import akka.event.slf4j.SLF4JLogging

import org.ensime.util.ensimefile.Implicits.DefaultCharset
import org.ensime.util.file._

/** Persists and retrieves the server's listening port via a file in the cache directory. */
object PortUtil extends SLF4JLogging {

  /** Reads the port previously stored under `name` in `cacheDir`, if such a file exists. */
  def port(cacheDir: File, name: String): Option[Int] = {
    val f = cacheDir / name
    if (!f.exists()) None
    else Some(f.readString().trim.toInt)
  }

  /** Writes `port` to the file `name` in `cacheDir`, creating the file when missing. */
  def writePort(cacheDir: File, port: Int, name: String): Unit = {
    val f = cacheDir / name
    if (!f.exists()) f.createNewFile()
    f.deleteOnExit() // doesn't work on Windows
    f.writeString(port.toString)
    // Some clients grep the log waiting for this file to be written - so always write the log message.
    log.info("creating port file: " + f)
  }
}
espinhogr/ensime-server
server/src/main/scala/org/ensime/server/PortUtil.scala
Scala
gpl-3.0
936
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.s2graph.core.parsers

import org.apache.s2graph.core.GraphExceptions.{LabelNotExistException, WhereParserException}
import org.apache.s2graph.core.mysqls.{Label, LabelMeta}
import org.apache.s2graph.core.types.InnerValLike
import org.apache.s2graph.core.{S2Edge, GraphUtil}
import org.apache.s2graph.core.JSONParser._
import org.apache.s2graph.core.utils.logger

import scala.annotation.tailrec
import scala.util.Try
import scala.util.parsing.combinator.JavaTokenParsers

/**
 * Helpers for resolving property keys (possibly prefixed with "_parent." one or more times)
 * against an edge or one of its ancestor edges, and for coercing string literals from the
 * where-clause into `InnerValLike` values for comparison.
 */
trait ExtractValue {
  // Prefix marking that a key refers to a parent edge; may repeat for deeper ancestors.
  val parent = "_parent."

  /**
   * Resolves `key` against `edge` (walking up parent edges for each "_parent." prefix) and
   * returns the property's inner value. "_from"/"_to" map to the source/target vertex ids.
   * Throws WhereParserException when the property does not exist on the label.
   */
  def propToInnerVal(edge: S2Edge, key: String) = {
    val (propKey, parentEdge) = findParentEdge(edge, key)
    val label = parentEdge.innerLabel
    val metaPropInvMap = label.metaPropsInvMap
    val labelMeta = metaPropInvMap.getOrElse(propKey, throw WhereParserException(s"Where clause contains not existing property name: $propKey"))

    labelMeta match {
      case LabelMeta.from => parentEdge.srcVertex.innerId
      case LabelMeta.to => parentEdge.tgtVertex.innerId
      case _ => parentEdge.propertyValueInner(labelMeta).innerVal
    }
  }

  /**
   * Converts the right-hand side `value` of a predicate into an `InnerValLike`. If `value` is
   * itself a property reference (parent-prefixed or a known property name), it is resolved
   * like a key; otherwise it is parsed as a literal using the data type of `key`'s property.
   */
  def valueToCompare(edge: S2Edge, key: String, value: String) = {
    val label = edge.innerLabel
    if (value.startsWith(parent) || label.metaPropsInvMap.contains(value)) propToInnerVal(edge, value)
    else {
      // NOTE(review): propKey is resolved against this edge's own label even when `key` is
      // parent-prefixed; assumes parent edges share the label's property schema — TODO confirm.
      val (propKey, _) = findParentEdge(edge, key)
      val labelMeta = label.metaPropsInvMap.getOrElse(propKey, throw WhereParserException(s"Where clause contains not existing property name: $propKey"))
      val (srcColumn, tgtColumn) = label.srcTgtColumn(edge.labelWithDir.dir)
      // "_from"/"_to" use the column types of the endpoints; other keys use the meta data type.
      val dataType = propKey match {
        case "_to" | "to" => tgtColumn.columnType
        case "_from" | "from" => srcColumn.columnType
        case _ => labelMeta.dataType
      }
      toInnerVal(value, dataType, label.schemaVersion)
    }
  }

  // Walks `depth` steps up the parent-edge chain. Assumes each level has at least one
  // parent edge (head) — TODO confirm callers guarantee this.
  @tailrec
  private def findParent(edge: S2Edge, depth: Int): S2Edge =
    if (depth > 0) findParent(edge.parentEdges.head.edge, depth - 1)
    else edge

  // Splits off the "_parent." prefixes from `key` and returns the bare property key together
  // with the ancestor edge it should be evaluated on.
  private def findParentEdge(edge: S2Edge, key: String): (String, S2Edge) = {
    if (!key.startsWith(parent)) (key, edge)
    else {
      val split = key.split(parent)
      val depth = split.length - 1
      val propKey = split.last
      val parentEdge = findParent(edge, depth)
      (propKey, parentEdge)
    }
  }
}

/** A node of the parsed where-clause tree; evaluates to a boolean for a given edge. */
trait Clause extends ExtractValue {
  def and(otherField: Clause): Clause = And(this, otherField)

  def or(otherField: Clause): Clause = Or(this, otherField)

  /** True when this clause accepts `edge`. */
  def filter(edge: S2Edge): Boolean

  // Shared evaluation for binary comparison clauses: resolve both sides, then apply binOp.
  def binaryOp(binOp: (InnerValLike, InnerValLike) => Boolean)(propKey: String, value: String)(edge: S2Edge): Boolean = {
    val propValue = propToInnerVal(edge, propKey)
    val compValue = valueToCompare(edge, propKey, value)
    binOp(propValue, compValue)
  }
}

object Where {
  /** Parses `sql` against the named label's schema. Fails if the label does not exist. */
  def apply(labelName: String, sql: String): Try[Where] = {
    val label = Label.findByName(labelName).getOrElse(throw new LabelNotExistException(labelName))
    val parser = new WhereParser(label)
    parser.parse(sql)
  }
}

/** The parsed where-clause: an edge passes when every top-level clause accepts it. */
case class Where(clauses: Seq[Clause] = Seq.empty[Clause]) {
  def filter(edge: S2Edge) =
    if (clauses.isEmpty) true else clauses.map(_.filter(edge)).forall(identity)
}

/** propKey > value */
case class Gt(propKey: String, value: String) extends Clause {
  override def filter(edge: S2Edge): Boolean = binaryOp(_ > _)(propKey, value)(edge)
}

/** propKey < value */
case class Lt(propKey: String, value: String) extends Clause {
  override def filter(edge: S2Edge): Boolean = binaryOp(_ < _)(propKey, value)(edge)
}

/** propKey = value */
case class Eq(propKey: String, value: String) extends Clause {
  override def filter(edge: S2Edge): Boolean = binaryOp(_ == _)(propKey, value)(edge)
}

/**
 * IN-clause for non-parent keys: the candidate values are pre-converted once (lazily) per
 * direction, since "_from"/"_to" swap source/target column types depending on edge direction.
 */
case class InWithoutParent(label: Label, propKey: String, values: Set[String]) extends Clause {
  // Values converted with out-direction column types.
  lazy val innerValLikeLsOut = values.map { value =>
    val labelMeta = label.metaPropsInvMap.getOrElse(propKey, throw WhereParserException(s"Where clause contains not existing property name: $propKey"))
    val dataType = propKey match {
      case "_to" | "to" => label.tgtColumn.columnType
      case "_from" | "from" => label.srcColumn.columnType
      case _ => labelMeta.dataType
    }
    toInnerVal(value, dataType, label.schemaVersion)
  }

  // Values converted with in-direction column types (src/tgt swapped relative to Out).
  lazy val innerValLikeLsIn = values.map { value =>
    val labelMeta = label.metaPropsInvMap.getOrElse(propKey, throw WhereParserException(s"Where clause contains not existing property name: $propKey"))
    val dataType = propKey match {
      case "_to" | "to" => label.srcColumn.columnType
      case "_from" | "from" => label.tgtColumn.columnType
      case _ => labelMeta.dataType
    }
    toInnerVal(value, dataType, label.schemaVersion)
  }

  override def filter(edge: S2Edge): Boolean = {
    if (edge.dir == GraphUtil.directions("in")) {
      val propVal = propToInnerVal(edge, propKey)
      innerValLikeLsIn.contains(propVal)
    } else {
      val propVal = propToInnerVal(edge, propKey)
      innerValLikeLsOut.contains(propVal)
    }
  }
}

/**
 * IN-clause for parent-prefixed keys: values are converted per edge at filter time, because
 * the relevant label is only known once the ancestor edge is resolved.
 */
case class IN(propKey: String, values: Set[String]) extends Clause {
  override def filter(edge: S2Edge): Boolean = {
    val propVal = propToInnerVal(edge, propKey)
    values.exists { value =>
      valueToCompare(edge, propKey, value) == propVal
    }
  }
}

/** minValue <= propKey <= maxValue (inclusive on both ends). */
case class Between(propKey: String, minValue: String, maxValue: String) extends Clause {
  override def filter(edge: S2Edge): Boolean = {
    val propVal = propToInnerVal(edge, propKey)
    val minVal = valueToCompare(edge, propKey, minValue)
    val maxVal = valueToCompare(edge, propKey, maxValue)
    minVal <= propVal && propVal <= maxVal
  }
}

/** Logical negation of the wrapped clause. */
case class Not(self: Clause) extends Clause {
  override def filter(edge: S2Edge) = !self.filter(edge)
}

/** Logical conjunction. */
case class And(left: Clause, right: Clause) extends Clause {
  override def filter(edge: S2Edge) = left.filter(edge) && right.filter(edge)
}

/** Logical disjunction. */
case class Or(left: Clause, right: Clause) extends Clause {
  override def filter(edge: S2Edge) = left.filter(edge) || right.filter(edge)
}

object WhereParser {
  // An always-true Where (no clauses).
  val success = Where()
}

/**
 * Parser-combinator grammar for where-clauses: predicates (=, !=, <, <=, >, >=, BETWEEN,
 * IN / NOT IN) combined with AND/OR and parentheses. Built per label so IN-clauses can
 * pre-resolve value types.
 */
case class WhereParser(label: Label) extends JavaTokenParsers {

  // A single-quoted string (with escaped quotes unescaped) or a bare token.
  override val stringLiteral = (("'" ~> "(\\\\\\\\'|[^'])*".r <~ "'" ) ^^ (_.replace("\\\\'", "'"))) | anyStr

  val anyStr = "[^\\\\s(),']+".r

  val and = "and|AND".r

  val or = "or|OR".r

  val between = "between|BETWEEN".r

  val in = "in|IN".r

  val notIn = "not in|NOT IN".r

  // A where-clause is a sequence of clauses, all of which must hold.
  def where: Parser[Where] = rep(clause) ^^ (Where(_))

  def paren: Parser[Clause] = "(" ~> clause <~ ")"

  // Left-associative chaining of predicates/parenthesized groups with AND/OR.
  def clause: Parser[Clause] = (predicate | paren) * (and ^^^ { (a: Clause, b: Clause) => And(a, b) } | or ^^^ { (a: Clause, b: Clause) => Or(a, b) })

  // Identifiers possibly joined with dots (e.g. "_parent._to").
  def identWithDot: Parser[String] = repsep(ident, ".") ^^ { case values => values.mkString(".") }

  val _eq = identWithDot ~ ("!=" | "=") ~ stringLiteral ^^ {
    case f ~ op ~ s => if (op == "=") Eq(f, s) else Not(Eq(f, s))
  }

  val _ltGt = identWithDot ~ (">=" | "<=" | ">" | "<") ~ stringLiteral ^^ {
    case f ~ op ~ s => op match {
      case ">" => Gt(f, s)
      case ">=" => Or(Gt(f, s), Eq(f, s))
      case "<" => Lt(f, s)
      case "<=" => Or(Lt(f, s), Eq(f, s))
    }
  }

  val _between = identWithDot ~ (between ~> stringLiteral <~ and) ~ stringLiteral ^^ {
    case f ~ minV ~ maxV => Between(f, minV, maxV)
  }

  // Parent-prefixed keys use the per-edge IN clause; others use the precomputed variant.
  val _in = identWithDot ~ (notIn | in) ~ ("(" ~> repsep(stringLiteral, ",") <~ ")") ^^ {
    case f ~ op ~ values =>
      val inClause = if (f.startsWith("_parent")) IN(f, values.toSet)
      else InWithoutParent(label, f, values.toSet)
      if (op.toLowerCase == "in") inClause
      else Not(inClause)
  }

  def predicate = _eq | _ltGt | _between | _in

  /** Parses `sql` into a Where; parse failures surface as WhereParserException inside Try. */
  def parse(sql: String): Try[Where] = Try {
    parseAll(where, sql) match {
      case Success(r, q) => r
      case fail => throw WhereParserException(s"Where parsing error: ${fail.toString}")
    }
  }
}
daewon/incubator-s2graph
s2core/src/main/scala/org/apache/s2graph/core/parsers/WhereParser.scala
Scala
apache-2.0
8,643
/*
 * Copyright 2006-2008 Tom Adams
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.googlecode.furnace.sequence

/** A single DNA nucleotide, identified by its one-letter code. */
sealed trait Base {
  /** The canonical upper-case single-character code for this base. */
  def code: Char

  override def toString = code.toString
}

private case object A extends Base {
  override def code = 'A'
}

private case object C extends Base {
  override def code = 'C'
}

private case object G extends Base {
  override def code = 'G'
}

private case object T extends Base {
  override def code = 'T'
}

/** Conversions between bases and their character/byte representations. */
object Base {
  import Character._

  /** All four bases, in alphabetical order of their codes. */
  def bases = List(A, C, G, T)

  /** A base converts implicitly to its character code. */
  implicit def baseToChar(b: Base) = b.code

  /** A character (either case) converts implicitly to a base; unknown codes fail fast. */
  implicit def charToBase(c: Char): Base = {
    val upper = c.toUpperCase
    bases.find(_.code == upper).getOrElse(error("Unknown base: '" + upper + "'"))
  }

  /** A byte converts implicitly to a base via its character value. */
  implicit def byteToBase(b: Byte): Base = b.toChar
}
tomjadams/furnace
src/main/scala/com/googlecode/furnace/sequence/Base.scala
Scala
apache-2.0
1,349
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.sources

import org.scalatest.BeforeAndAfterAll

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, Cast, Expression, Literal}
import org.apache.spark.sql.execution.datasources.DataSourceAnalysis
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{DataType, IntegerType, StructType}

/**
 * Tests for `DataSourceAnalysis.convertStaticPartitions`: validation of provided static/dynamic
 * partition specs against a target schema, and construction of the resulting project list.
 * Every test is run twice, once with case-sensitive and once with case-insensitive resolution.
 */
class DataSourceAnalysisSuite extends SparkFunSuite with BeforeAndAfterAll {

  // Target table columns: a, d (data) and b, c (partition columns).
  private var targetAttributes: Seq[Attribute] = _
  // Partition schema of the target table (columns b and c).
  private var targetPartitionSchema: StructType = _

  override def beforeAll(): Unit = {
    targetAttributes = Seq('a.int, 'd.int, 'b.int, 'c.int)
    targetPartitionSchema = new StructType()
      .add("b", IntegerType)
      .add("c", IntegerType)
  }

  private def checkProjectList(actual: Seq[Expression], expected: Seq[Expression]): Unit = {
    // Remove aliases since we have no control on their exprId.
    val withoutAliases = actual.map {
      case alias: Alias => alias.child
      case other => other
    }
    assert(withoutAliases === expected)
  }

  Seq(true, false).foreach { caseSensitive =>
    val conf = new SQLConf().copy(SQLConf.CASE_SENSITIVE -> caseSensitive)
    // Static partition values are strings; expected project lists cast them to the column type.
    def cast(e: Expression, dt: DataType): Expression = {
      Cast(e, dt, Option(conf.sessionLocalTimeZone))
    }
    val rule = DataSourceAnalysis(conf)
    test(
      s"convertStaticPartitions only handle INSERT having at least static partitions " +
        s"(caseSensitive: $caseSensitive)") {
      // All-dynamic partition specs are a precondition violation, hence AssertionError.
      intercept[AssertionError] {
        rule.convertStaticPartitions(
          sourceAttributes = Seq('e.int, 'f.int),
          providedPartitions = Map("b" -> None, "c" -> None),
          targetAttributes = targetAttributes,
          targetPartitionSchema = targetPartitionSchema)
      }
    }

    test(s"Missing columns (caseSensitive: $caseSensitive)") {
      // Missing columns.
      intercept[AnalysisException] {
        rule.convertStaticPartitions(
          sourceAttributes = Seq('e.int),
          providedPartitions = Map("b" -> Some("1"), "c" -> None),
          targetAttributes = targetAttributes,
          targetPartitionSchema = targetPartitionSchema)
      }
    }

    test(s"Missing partitioning columns (caseSensitive: $caseSensitive)") {
      // Missing partitioning columns.
      intercept[AnalysisException] {
        rule.convertStaticPartitions(
          sourceAttributes = Seq('e.int, 'f.int),
          providedPartitions = Map("b" -> Some("1")),
          targetAttributes = targetAttributes,
          targetPartitionSchema = targetPartitionSchema)
      }

      // Missing partitioning columns.
      intercept[AnalysisException] {
        rule.convertStaticPartitions(
          sourceAttributes = Seq('e.int, 'f.int, 'g.int),
          providedPartitions = Map("b" -> Some("1")),
          targetAttributes = targetAttributes,
          targetPartitionSchema = targetPartitionSchema)
      }

      // Wrong partitioning columns.
      intercept[AnalysisException] {
        rule.convertStaticPartitions(
          sourceAttributes = Seq('e.int, 'f.int),
          providedPartitions = Map("b" -> Some("1"), "d" -> None),
          targetAttributes = targetAttributes,
          targetPartitionSchema = targetPartitionSchema)
      }
    }

    test(s"Wrong partitioning columns (caseSensitive: $caseSensitive)") {
      // Wrong partitioning columns.
      intercept[AnalysisException] {
        rule.convertStaticPartitions(
          sourceAttributes = Seq('e.int, 'f.int),
          providedPartitions = Map("b" -> Some("1"), "d" -> Some("2")),
          targetAttributes = targetAttributes,
          targetPartitionSchema = targetPartitionSchema)
      }

      // Wrong partitioning columns.
      intercept[AnalysisException] {
        rule.convertStaticPartitions(
          sourceAttributes = Seq('e.int),
          providedPartitions = Map("b" -> Some("1"), "c" -> Some("3"), "d" -> Some("2")),
          targetAttributes = targetAttributes,
          targetPartitionSchema = targetPartitionSchema)
      }

      if (caseSensitive) {
        // Wrong partitioning columns: "C" does not resolve to "c" under case sensitivity.
        intercept[AnalysisException] {
          rule.convertStaticPartitions(
            sourceAttributes = Seq('e.int, 'f.int),
            providedPartitions = Map("b" -> Some("1"), "C" -> Some("3")),
            targetAttributes = targetAttributes,
            targetPartitionSchema = targetPartitionSchema)
        }
      }
    }

    test(
      s"Static partitions need to appear before dynamic partitions" +
        s" (caseSensitive: $caseSensitive)") {
      // Static partitions need to appear before dynamic partitions.
      intercept[AnalysisException] {
        rule.convertStaticPartitions(
          sourceAttributes = Seq('e.int, 'f.int),
          providedPartitions = Map("b" -> None, "c" -> Some("3")),
          targetAttributes = targetAttributes,
          targetPartitionSchema = targetPartitionSchema)
      }
    }

    test(s"All static partitions (caseSensitive: $caseSensitive)") {
      if (!caseSensitive) {
        // Case-insensitive: "C" resolves to partition column "c".
        val nonPartitionedAttributes = Seq('e.int, 'f.int)
        val expected = nonPartitionedAttributes ++
          Seq(cast(Literal("1"), IntegerType), cast(Literal("3"), IntegerType))
        val actual = rule.convertStaticPartitions(
          sourceAttributes = nonPartitionedAttributes,
          providedPartitions = Map("b" -> Some("1"), "C" -> Some("3")),
          targetAttributes = targetAttributes,
          targetPartitionSchema = targetPartitionSchema)
        checkProjectList(actual, expected)
      }

      {
        val nonPartitionedAttributes = Seq('e.int, 'f.int)
        val expected = nonPartitionedAttributes ++
          Seq(cast(Literal("1"), IntegerType), cast(Literal("3"), IntegerType))
        val actual = rule.convertStaticPartitions(
          sourceAttributes = nonPartitionedAttributes,
          providedPartitions = Map("b" -> Some("1"), "c" -> Some("3")),
          targetAttributes = targetAttributes,
          targetPartitionSchema = targetPartitionSchema)
        checkProjectList(actual, expected)
      }

      // Test the case having a single static partition column.
      {
        val nonPartitionedAttributes = Seq('e.int, 'f.int)
        val expected = nonPartitionedAttributes ++ Seq(cast(Literal("1"), IntegerType))
        val actual = rule.convertStaticPartitions(
          sourceAttributes = nonPartitionedAttributes,
          providedPartitions = Map("b" -> Some("1")),
          targetAttributes = Seq('a.int, 'd.int, 'b.int),
          targetPartitionSchema = new StructType().add("b", IntegerType))
        checkProjectList(actual, expected)
      }
    }

    test(s"Static partition and dynamic partition (caseSensitive: $caseSensitive)") {
      val nonPartitionedAttributes = Seq('e.int, 'f.int)
      val dynamicPartitionAttributes = Seq('g.int)
      // Static value is cast and inserted between data columns and dynamic partition columns.
      val expected =
        nonPartitionedAttributes ++
          Seq(cast(Literal("1"), IntegerType)) ++
          dynamicPartitionAttributes
      val actual = rule.convertStaticPartitions(
        sourceAttributes = nonPartitionedAttributes ++ dynamicPartitionAttributes,
        providedPartitions = Map("b" -> Some("1"), "c" -> None),
        targetAttributes = targetAttributes,
        targetPartitionSchema = targetPartitionSchema)
      checkProjectList(actual, expected)
    }
  }
}
mike0sv/spark
sql/core/src/test/scala/org/apache/spark/sql/sources/DataSourceAnalysisSuite.scala
Scala
apache-2.0
8,305
/** * Generated by Scrooge * version: 4.7.0 * rev: d9d56174937f524a1981b38ebd6280eef7eeda4a * built at: 20160427-121531 */ package com.komanov.serialization.domain.thriftscala import com.twitter.scrooge.{ LazyTProtocol, TFieldBlob, ThriftException, ThriftStruct, ThriftStructCodec3, ThriftStructFieldInfo, ThriftStructMetaData, ThriftUtil} import org.apache.thrift.protocol._ import org.apache.thrift.transport.{TMemoryBuffer, TTransport} import java.nio.ByteBuffer import java.util.Arrays import scala.collection.immutable.{Map => immutable$Map} import scala.collection.mutable.Builder import scala.collection.mutable.{ ArrayBuffer => mutable$ArrayBuffer, Buffer => mutable$Buffer, HashMap => mutable$HashMap, HashSet => mutable$HashSet} import scala.collection.{Map, Set} object PageComponentPositionSetPb extends ThriftStructCodec3[PageComponentPositionSetPb] { private val NoPassthroughFields = immutable$Map.empty[Short, TFieldBlob] val Struct = new TStruct("PageComponentPositionSetPb") val IdField = new TField("id", TType.STRING, 1) val IdFieldManifest = implicitly[Manifest[ByteBuffer]] val XField = new TField("x", TType.I32, 2) val XFieldManifest = implicitly[Manifest[Int]] val YField = new TField("y", TType.I32, 3) val YFieldManifest = implicitly[Manifest[Int]] /** * Field information in declaration order. 
*/ lazy val fieldInfos: scala.List[ThriftStructFieldInfo] = scala.List[ThriftStructFieldInfo]( new ThriftStructFieldInfo( IdField, true, false, IdFieldManifest, _root_.scala.None, _root_.scala.None, immutable$Map.empty[String, String], immutable$Map.empty[String, String] ), new ThriftStructFieldInfo( XField, true, false, XFieldManifest, _root_.scala.None, _root_.scala.None, immutable$Map.empty[String, String], immutable$Map.empty[String, String] ), new ThriftStructFieldInfo( YField, true, false, YFieldManifest, _root_.scala.None, _root_.scala.None, immutable$Map.empty[String, String], immutable$Map.empty[String, String] ) ) lazy val structAnnotations: immutable$Map[String, String] = immutable$Map.empty[String, String] /** * Checks that all required fields are non-null. */ def validate(_item: PageComponentPositionSetPb): Unit = { } def withoutPassthroughFields(original: PageComponentPositionSetPb): PageComponentPositionSetPb = new Immutable( id = { val field = original.id field.map { field => field } }, x = { val field = original.x field.map { field => field } }, y = { val field = original.y field.map { field => field } } ) override def encode(_item: PageComponentPositionSetPb, _oproto: TProtocol): Unit = { _item.write(_oproto) } private[this] def lazyDecode(_iprot: LazyTProtocol): PageComponentPositionSetPb = { var id: Option[ByteBuffer] = None var xOffset: Int = -1 var yOffset: Int = -1 var _passthroughFields: Builder[(Short, TFieldBlob), immutable$Map[Short, TFieldBlob]] = null var _done = false val _start_offset = _iprot.offset _iprot.readStructBegin() while (!_done) { val _field = _iprot.readFieldBegin() if (_field.`type` == TType.STOP) { _done = true } else { _field.id match { case 1 => _field.`type` match { case TType.STRING => id = Some(readIdValue(_iprot)) case _actualType => val _expectedType = TType.STRING throw new TProtocolException( "Received wrong type for field 'id' (expected=%s, actual=%s).".format( ttypeToString(_expectedType), 
ttypeToString(_actualType) ) ) } case 2 => _field.`type` match { case TType.I32 => xOffset = _iprot.offsetSkipI32 case _actualType => val _expectedType = TType.I32 throw new TProtocolException( "Received wrong type for field 'x' (expected=%s, actual=%s).".format( ttypeToString(_expectedType), ttypeToString(_actualType) ) ) } case 3 => _field.`type` match { case TType.I32 => yOffset = _iprot.offsetSkipI32 case _actualType => val _expectedType = TType.I32 throw new TProtocolException( "Received wrong type for field 'y' (expected=%s, actual=%s).".format( ttypeToString(_expectedType), ttypeToString(_actualType) ) ) } case _ => if (_passthroughFields == null) _passthroughFields = immutable$Map.newBuilder[Short, TFieldBlob] _passthroughFields += (_field.id -> TFieldBlob.read(_field, _iprot)) } _iprot.readFieldEnd() } } _iprot.readStructEnd() new LazyImmutable( _iprot, _iprot.buffer, _start_offset, _iprot.offset, id, xOffset, yOffset, if (_passthroughFields == null) NoPassthroughFields else _passthroughFields.result() ) } override def decode(_iprot: TProtocol): PageComponentPositionSetPb = _iprot match { case i: LazyTProtocol => lazyDecode(i) case i => eagerDecode(i) } private[this] def eagerDecode(_iprot: TProtocol): PageComponentPositionSetPb = { var id: _root_.scala.Option[ByteBuffer] = _root_.scala.None var x: _root_.scala.Option[Int] = _root_.scala.None var y: _root_.scala.Option[Int] = _root_.scala.None var _passthroughFields: Builder[(Short, TFieldBlob), immutable$Map[Short, TFieldBlob]] = null var _done = false _iprot.readStructBegin() while (!_done) { val _field = _iprot.readFieldBegin() if (_field.`type` == TType.STOP) { _done = true } else { _field.id match { case 1 => _field.`type` match { case TType.STRING => id = _root_.scala.Some(readIdValue(_iprot)) case _actualType => val _expectedType = TType.STRING throw new TProtocolException( "Received wrong type for field 'id' (expected=%s, actual=%s).".format( ttypeToString(_expectedType), ttypeToString(_actualType) 
) ) } case 2 => _field.`type` match { case TType.I32 => x = _root_.scala.Some(readXValue(_iprot)) case _actualType => val _expectedType = TType.I32 throw new TProtocolException( "Received wrong type for field 'x' (expected=%s, actual=%s).".format( ttypeToString(_expectedType), ttypeToString(_actualType) ) ) } case 3 => _field.`type` match { case TType.I32 => y = _root_.scala.Some(readYValue(_iprot)) case _actualType => val _expectedType = TType.I32 throw new TProtocolException( "Received wrong type for field 'y' (expected=%s, actual=%s).".format( ttypeToString(_expectedType), ttypeToString(_actualType) ) ) } case _ => if (_passthroughFields == null) _passthroughFields = immutable$Map.newBuilder[Short, TFieldBlob] _passthroughFields += (_field.id -> TFieldBlob.read(_field, _iprot)) } _iprot.readFieldEnd() } } _iprot.readStructEnd() new Immutable( id, x, y, if (_passthroughFields == null) NoPassthroughFields else _passthroughFields.result() ) } def apply( id: _root_.scala.Option[ByteBuffer] = _root_.scala.None, x: _root_.scala.Option[Int] = _root_.scala.None, y: _root_.scala.Option[Int] = _root_.scala.None ): PageComponentPositionSetPb = new Immutable( id, x, y ) def unapply(_item: PageComponentPositionSetPb): _root_.scala.Option[scala.Product3[Option[ByteBuffer], Option[Int], Option[Int]]] = _root_.scala.Some(_item) @inline private def readIdValue(_iprot: TProtocol): ByteBuffer = { _iprot.readBinary() } @inline private def writeIdField(id_item: ByteBuffer, _oprot: TProtocol): Unit = { _oprot.writeFieldBegin(IdField) writeIdValue(id_item, _oprot) _oprot.writeFieldEnd() } @inline private def writeIdValue(id_item: ByteBuffer, _oprot: TProtocol): Unit = { _oprot.writeBinary(id_item) } @inline private def readXValue(_iprot: TProtocol): Int = { _iprot.readI32() } @inline private def writeXField(x_item: Int, _oprot: TProtocol): Unit = { _oprot.writeFieldBegin(XField) writeXValue(x_item, _oprot) _oprot.writeFieldEnd() } @inline private def writeXValue(x_item: Int, _oprot: 
TProtocol): Unit = { _oprot.writeI32(x_item) } @inline private def readYValue(_iprot: TProtocol): Int = { _iprot.readI32() } @inline private def writeYField(y_item: Int, _oprot: TProtocol): Unit = { _oprot.writeFieldBegin(YField) writeYValue(y_item, _oprot) _oprot.writeFieldEnd() } @inline private def writeYValue(y_item: Int, _oprot: TProtocol): Unit = { _oprot.writeI32(y_item) } object Immutable extends ThriftStructCodec3[PageComponentPositionSetPb] { override def encode(_item: PageComponentPositionSetPb, _oproto: TProtocol): Unit = { _item.write(_oproto) } override def decode(_iprot: TProtocol): PageComponentPositionSetPb = PageComponentPositionSetPb.decode(_iprot) override lazy val metaData: ThriftStructMetaData[PageComponentPositionSetPb] = PageComponentPositionSetPb.metaData } /** * The default read-only implementation of PageComponentPositionSetPb. You typically should not need to * directly reference this class; instead, use the PageComponentPositionSetPb.apply method to construct * new instances. */ class Immutable( val id: _root_.scala.Option[ByteBuffer], val x: _root_.scala.Option[Int], val y: _root_.scala.Option[Int], override val _passthroughFields: immutable$Map[Short, TFieldBlob]) extends PageComponentPositionSetPb { def this( id: _root_.scala.Option[ByteBuffer] = _root_.scala.None, x: _root_.scala.Option[Int] = _root_.scala.None, y: _root_.scala.Option[Int] = _root_.scala.None ) = this( id, x, y, Map.empty ) } /** * This is another Immutable, this however keeps strings as lazy values that are lazily decoded from the backing * array byte on read. 
*/ private[this] class LazyImmutable( _proto: LazyTProtocol, _buf: Array[Byte], _start_offset: Int, _end_offset: Int, val id: _root_.scala.Option[ByteBuffer], xOffset: Int, yOffset: Int, override val _passthroughFields: immutable$Map[Short, TFieldBlob]) extends PageComponentPositionSetPb { override def write(_oprot: TProtocol): Unit = { _oprot match { case i: LazyTProtocol => i.writeRaw(_buf, _start_offset, _end_offset - _start_offset) case _ => super.write(_oprot) } } lazy val x: _root_.scala.Option[Int] = if (xOffset == -1) None else { Some(_proto.decodeI32(_buf, xOffset)) } lazy val y: _root_.scala.Option[Int] = if (yOffset == -1) None else { Some(_proto.decodeI32(_buf, yOffset)) } /** * Override the super hash code to make it a lazy val rather than def. * * Calculating the hash code can be expensive, caching it where possible * can provide significant performance wins. (Key in a hash map for instance) * Usually not safe since the normal constructor will accept a mutable map or * set as an arg * Here however we control how the class is generated from serialized data. * With the class private and the contract that we throw away our mutable references * having the hash code lazy here is safe. */ override lazy val hashCode = super.hashCode } /** * This Proxy trait allows you to extend the PageComponentPositionSetPb trait with additional state or * behavior and implement the read-only methods from PageComponentPositionSetPb using an underlying * instance. 
*/ trait Proxy extends PageComponentPositionSetPb { protected def _underlying_PageComponentPositionSetPb: PageComponentPositionSetPb override def id: _root_.scala.Option[ByteBuffer] = _underlying_PageComponentPositionSetPb.id override def x: _root_.scala.Option[Int] = _underlying_PageComponentPositionSetPb.x override def y: _root_.scala.Option[Int] = _underlying_PageComponentPositionSetPb.y override def _passthroughFields = _underlying_PageComponentPositionSetPb._passthroughFields } } trait PageComponentPositionSetPb extends ThriftStruct with scala.Product3[Option[ByteBuffer], Option[Int], Option[Int]] with java.io.Serializable { import PageComponentPositionSetPb._ def id: _root_.scala.Option[ByteBuffer] def x: _root_.scala.Option[Int] def y: _root_.scala.Option[Int] def _passthroughFields: immutable$Map[Short, TFieldBlob] = immutable$Map.empty def _1 = id def _2 = x def _3 = y /** * Gets a field value encoded as a binary blob using TCompactProtocol. If the specified field * is present in the passthrough map, that value is returned. Otherwise, if the specified field * is known and not optional and set to None, then the field is serialized and returned. 
*/ def getFieldBlob(_fieldId: Short): _root_.scala.Option[TFieldBlob] = { lazy val _buff = new TMemoryBuffer(32) lazy val _oprot = new TCompactProtocol(_buff) _passthroughFields.get(_fieldId) match { case blob: _root_.scala.Some[TFieldBlob] => blob case _root_.scala.None => { val _fieldOpt: _root_.scala.Option[TField] = _fieldId match { case 1 => if (id.isDefined) { writeIdValue(id.get, _oprot) _root_.scala.Some(PageComponentPositionSetPb.IdField) } else { _root_.scala.None } case 2 => if (x.isDefined) { writeXValue(x.get, _oprot) _root_.scala.Some(PageComponentPositionSetPb.XField) } else { _root_.scala.None } case 3 => if (y.isDefined) { writeYValue(y.get, _oprot) _root_.scala.Some(PageComponentPositionSetPb.YField) } else { _root_.scala.None } case _ => _root_.scala.None } _fieldOpt match { case _root_.scala.Some(_field) => val _data = Arrays.copyOfRange(_buff.getArray, 0, _buff.length) _root_.scala.Some(TFieldBlob(_field, _data)) case _root_.scala.None => _root_.scala.None } } } } /** * Collects TCompactProtocol-encoded field values according to `getFieldBlob` into a map. */ def getFieldBlobs(ids: TraversableOnce[Short]): immutable$Map[Short, TFieldBlob] = (ids flatMap { id => getFieldBlob(id) map { id -> _ } }).toMap /** * Sets a field using a TCompactProtocol-encoded binary blob. If the field is a known * field, the blob is decoded and the field is set to the decoded value. If the field * is unknown and passthrough fields are enabled, then the blob will be stored in * _passthroughFields. 
*/ def setField(_blob: TFieldBlob): PageComponentPositionSetPb = { var id: _root_.scala.Option[ByteBuffer] = this.id var x: _root_.scala.Option[Int] = this.x var y: _root_.scala.Option[Int] = this.y var _passthroughFields = this._passthroughFields _blob.id match { case 1 => id = _root_.scala.Some(readIdValue(_blob.read)) case 2 => x = _root_.scala.Some(readXValue(_blob.read)) case 3 => y = _root_.scala.Some(readYValue(_blob.read)) case _ => _passthroughFields += (_blob.id -> _blob) } new Immutable( id, x, y, _passthroughFields ) } /** * If the specified field is optional, it is set to None. Otherwise, if the field is * known, it is reverted to its default value; if the field is unknown, it is removed * from the passthroughFields map, if present. */ def unsetField(_fieldId: Short): PageComponentPositionSetPb = { var id: _root_.scala.Option[ByteBuffer] = this.id var x: _root_.scala.Option[Int] = this.x var y: _root_.scala.Option[Int] = this.y _fieldId match { case 1 => id = _root_.scala.None case 2 => x = _root_.scala.None case 3 => y = _root_.scala.None case _ => } new Immutable( id, x, y, _passthroughFields - _fieldId ) } /** * If the specified field is optional, it is set to None. Otherwise, if the field is * known, it is reverted to its default value; if the field is unknown, it is removed * from the passthroughFields map, if present. 
*/ def unsetId: PageComponentPositionSetPb = unsetField(1) def unsetX: PageComponentPositionSetPb = unsetField(2) def unsetY: PageComponentPositionSetPb = unsetField(3) override def write(_oprot: TProtocol): Unit = { PageComponentPositionSetPb.validate(this) _oprot.writeStructBegin(Struct) if (id.isDefined) writeIdField(id.get, _oprot) if (x.isDefined) writeXField(x.get, _oprot) if (y.isDefined) writeYField(y.get, _oprot) if (_passthroughFields.nonEmpty) { _passthroughFields.values.foreach { _.write(_oprot) } } _oprot.writeFieldStop() _oprot.writeStructEnd() } def copy( id: _root_.scala.Option[ByteBuffer] = this.id, x: _root_.scala.Option[Int] = this.x, y: _root_.scala.Option[Int] = this.y, _passthroughFields: immutable$Map[Short, TFieldBlob] = this._passthroughFields ): PageComponentPositionSetPb = new Immutable( id, x, y, _passthroughFields ) override def canEqual(other: Any): Boolean = other.isInstanceOf[PageComponentPositionSetPb] override def equals(other: Any): Boolean = canEqual(other) && _root_.scala.runtime.ScalaRunTime._equals(this, other) && _passthroughFields == other.asInstanceOf[PageComponentPositionSetPb]._passthroughFields override def hashCode: Int = _root_.scala.runtime.ScalaRunTime._hashCode(this) override def toString: String = _root_.scala.runtime.ScalaRunTime._toString(this) override def productArity: Int = 3 override def productElement(n: Int): Any = n match { case 0 => this.id case 1 => this.x case 2 => this.y case _ => throw new IndexOutOfBoundsException(n.toString) } override def productPrefix: String = "PageComponentPositionSetPb" }
dkomanov/scala-serialization
scala-serialization/src/main/scala/com/komanov/serialization/domain/thriftscala/PageComponentPositionSetPb.scala
Scala
mit
19,631
package service

import service.EventManager.Event

/**
 * An [[Event]] paired with a string identifier whose natural (lexicographic)
 * ordering defines the temporal ordering of events.
 *
 * @param id    event identifier; IDs compare in the same order the events occurred
 * @param event the wrapped event payload
 */
case class OrderedEvent(id: String, event: Event) extends Ordered[OrderedEvent] {

  /**
   * Temporal order of events is the same as natural ID order.
   */
  override def compare(that: OrderedEvent): Int = id.compareTo(that.id)

}
uq-eresearch/aorra
app/service/OrderedEvent.scala
Scala
mit
290
package se.lu.nateko.cp.meta.test.utils.rdf4j

import org.eclipse.rdf4j.sail.memory.model.MemValueFactory
import org.scalatest.funsuite.AnyFunSuite
import java.net.URI
import se.lu.nateko.cp.meta.utils.rdf4j._

/**
 * Tests for the enriched-URI conversions between `java.net.URI` and RDF4J IRIs.
 */
class EnrichedUriTests extends AnyFunSuite {

  // Explicit type on the implicit: implicits without a declared type emit a
  // warning on 2.13 and are an error under Scala 3.
  implicit val factory: MemValueFactory = new MemValueFactory

  test("Java -> RDF4J -> Java round trip with non-ASCII characters"){
    // Percent-encoded UTF-8 (Lenka Foltýnová) must survive the round trip unchanged.
    val javaUri = new URI("http://meta.icos-cp.eu/resources/people/Lenka_Folt%C3%BDnov%C3%A1")
    assert(javaUri.toRdf.toJava === javaUri)
  }
}
ICOS-Carbon-Portal/meta
src/test/scala/se/lu/nateko/cp/meta/test/utils/rdf4j/EnrichedUriTests.scala
Scala
gpl-3.0
513
/******************************************************************************
* Copyright (c) 2014, Equal Experts Ltd
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the Midas Project.
******************************************************************************/

package com.ee.midas.dsl.generator

import com.ee.midas.dsl.grammar.Verb
import org.bson.BSONObject
import com.mongodb.util.JSON
import com.ee.midas.transform.DocumentOperations._
import java.util.regex.Pattern
import com.ee.midas.dsl.expressions.{Parser, Expression}
import com.ee.midas.utils.Loggable
import scala.util.Try
import com.ee.midas.transform.ExceptionInjector

/**
 * Translates a DSL verb plus its string arguments into a document-transforming
 * snippet (a BSONObject => BSONObject closure). The heavy lifting is done by
 * the symbolic operators imported from DocumentOperations (++, --, >~<, <~>, +).
 */
trait SnippetProvider extends Parser with Loggable with ExceptionInjector {

  /**
   * Dispatches on the verb; each verb consumes a fixed number of positional
   * arguments. NOTE(review): args arity is assumed valid here — an
   * ArrayIndexOutOfBoundsException would escape if the grammar ever passed
  * fewer arguments than the verb requires; confirm the parser guarantees arity.
   */
  def toSnippet(verb: Verb, args: Array[String]): BSONObject => BSONObject = verb match {
    case Verb.add => add(args(0))
    case Verb.remove => remove(args(0))
    case Verb.copy => copy(args(0), args(1))
    case Verb.split => split(args(0), args(1), args(2))
    case Verb.merge => merge(args(0), args(1), args(2))
    case Verb.transform => transform(args(0), args(1))
  }

  /** Adds the fields described by the JSON document (false presumably means "do not overwrite existing" — confirm in DocumentOperations). */
  private def add(json: String): BSONObject => BSONObject = {
    ((document: BSONObject) => {
      val fields = JSON.parse(json).asInstanceOf[BSONObject]
      document ++ (fields, false)
    })
  }

  /** Removes the fields named in the JSON document via the -- operator. */
  private def remove(json: String) : BSONObject => BSONObject = {
    ((document: BSONObject) => {
      val fields = JSON.parse(json).asInstanceOf[BSONObject]
      document -- fields
    })
  }

  /**
   * Copies fromField's value into toField when present; a document without
   * fromField is returned untouched.
   */
  private def copy(fromField: String, toField: String): BSONObject => BSONObject = {
    ((document: BSONObject) => {
      document(fromField) match {
        case Some(fromFieldValue) => document(toField) = fromFieldValue
        case None => document
      }
    })
  }

  /**
   * Merges the listed fields into mergeField, joined by separator.
   * fieldsArray arrives as a bracketed, comma-separated list (e.g. "[a, b]"),
   * hence the substring to strip the brackets and the per-element quote strip.
   */
  private def merge(fieldsArray: String, separator: String, mergeField: String) : BSONObject => BSONObject = {
    val fields = fieldsArray.substring(1, fieldsArray.length() - 1)
    val fieldList = fields.split(",").map(field => {field.trim.replaceAll("\\"", "")}).toList
    ((document: BSONObject) => {
      document >~< (mergeField, separator, fieldList)
    })
  }

  /**
   * Splits splitField by regex into the target fields described by json.
   * The JSON and regex are parsed/compiled once, outside the returned closure.
   * On any failure the error message is injected into every target field so
   * the problem is visible downstream instead of aborting the transform.
   */
  private def split(splitField: String, regex: String, json: String) : BSONObject => BSONObject = {
    val documentWithSplitFields = JSON.parse(json).asInstanceOf[BSONObject]
    val compiledRegex = Pattern.compile(regex)
    ((document: BSONObject) => {
      try {
        document <~> (splitField, compiledRegex, json)
      } catch {
        case t: Throwable =>
          // Regex/parse errors may carry a null message; substitute a readable one.
          val errMsg = if(t.getMessage == null) s"Cannot parse $regex" else t.getMessage
          documentWithSplitFields.keySet.toArray.foreach {
            case field: String => injectException(document, field, errMsg)
          }
          document
      }
    })
  }

  /**
   * Evaluates the parsed DSL expression against each document and stores the
   * resulting literal in outputField. A bad expression fails fast at snippet
   * construction time (throw), while per-document evaluation errors are
   * injected into the output field instead.
   */
  private def transform(outputField: String, expressionJson: String) : BSONObject => BSONObject = {
    val expression: Expression = Try { parse(expressionJson) } match {
      case scala.util.Success(expr) => expr
      case scala.util.Failure(failure) => throw failure
    }
    ((document: BSONObject) => {
      try {
        logDebug(s"Evaluating Expression = $expression")
        val literal = expression.evaluate(document)
        document + (outputField, literal.value)
      } catch {
        case t: Throwable => injectException(document, outputField, t)
      }
    })
  }
}
EqualExperts/Midas
src/main/scala/com/ee/midas/dsl/generator/SnippetProvider.scala
Scala
bsd-2-clause
4,880
package drainprocessor.drain

import java.util.UUID
import java.util.concurrent.TimeUnit

import akka.actor.{Actor, Cancellable}
import com.amazonaws.services.kinesis.model.Record
import com.github.vonnagy.service.container.log.ActorLoggingAdapter
import com.github.vonnagy.service.container.metrics.Counter
import drainprocessor._
import spray.client.pipelining.sendReceive
import spray.http.HttpHeaders.RawHeader
import spray.http.{HttpEntity, HttpHeaders}
import spray.httpx.RequestBuilding

import scala.concurrent.duration.Duration

/**
 * Created by ivannagy on 4/14/15.
 *
 * Actor that bundles Kinesis log records per application and pumps each bundle
 * to the app's configured drains over HTTP (Logplex framing). A bundle is
 * flushed either when it reaches max-bundle-size or when the bundle timeout
 * fires, whichever comes first.
 */
class Drainer extends Actor with ActorLoggingAdapter with RequestBuilding {

  import context.dispatcher

  // Flush thresholds, read once from configuration at actor construction.
  val maxBundleSize = context.system.settings.config.getInt("log.drains.max-bundle-size")
  val bundleTimeout = context.system.settings.config.getDuration("log.drains.bundle-timeout", TimeUnit.MILLISECONDS)
  val drainProvider = new DrainProvider()(context.system)
  // Counts records dropped because their app has no configured drains.
  val droppedRecords = Counter(s"drainers.no-drain")(context.system)
  // Per-application bundling state, keyed by partition key (app id).
  // Safe as a plain var because it is only touched from the actor's receive.
  var appMap = Map[String, AppData]()

  /** Mutable per-app bundle state plus metrics. */
  class AppData(appId: String) {
    import context.system
    val id = appId
    val receivedCount = Counter(s"drainers.${appId.replace(".", "-")}.receive")
    val pumpCount = Counter(s"drainers.${appId.replace(".", "-")}.pump")
    // Pending flush timer, if one is scheduled for this app's open bundle.
    var scheduledTimeout: Option[Cancellable] = None
    var bundle = Seq[String]()
  }

  /** Self-message that triggers a timeout-driven flush for one app. */
  case class Timeout(appId: String)

  val clientPipeline = sendReceive

  override def preStart {
    log.info("Drainer starting: {}", context.self.path)
    context.system.eventStream.subscribe(self, classOf[Record])
    super.preStart
  }

  override def postStop() {
    log.info("Drainer stopping: {}", context.self.path)
    context.system.eventStream.unsubscribe(self, classOf[Record])
    super.postStop
  }

  def receive = {
    case r: Record =>
      val appId = r.getPartitionKey
      // Drop immediately when no drain is configured for this app.
      drainProvider.getDrains(appId).size match {
        case 0 => droppedRecords.incr
        case _ => processRecord(r)
      }
    case Timeout(appId) =>
      // NOTE(review): .get assumes the app is still present in appMap when the
      // timer fires; pumpDrains never removes entries so this holds today, but
      // it would NPE-equivalent (NoSuchElementException) if eviction is added.
      pumpDrains(appMap.get(appId).get)
  }

  /**
   * Appends the record's payload to its app's bundle; flushes when the bundle
   * is full, otherwise schedules a one-shot timeout flush if none is pending.
   */
  def processRecord(rec: Record): Unit = {
    val app = appMap.get(rec.getPartitionKey) match {
      case None => new AppData(rec.getPartitionKey)
      case a => a.get
    }
    app.receivedCount.incr
    // Record payload is treated as text; assumes platform default charset is
    // acceptable — TODO confirm an explicit charset isn't required.
    val line = new String(rec.getData.array());
    app.bundle = app.bundle :+ line
    appMap = appMap.updated(app.id, app)

    if (app.bundle.size == maxBundleSize) {
      pumpDrains(app)
    } else if (app.scheduledTimeout.isEmpty) {
      app.scheduledTimeout = Some(context.system.scheduler.scheduleOnce(Duration(bundleTimeout, TimeUnit.MILLISECONDS), self, Timeout(rec.getPartitionKey)))
      appMap = appMap.updated(app.id, app)
    }
  }

  /**
   * Sends the app's current bundle to every configured drain, then cancels any
   * pending timeout and resets the bundle. Responses are fire-and-forget; the
   * onComplete callback only updates metrics/logging (failures are not retried).
   */
  def pumpDrains(app: AppData): Unit = {
    drainProvider.getDrains(app.id) foreach { drain =>
      val url = drain.endpoint
      // Send to drain
      val startTimestamp = System.currentTimeMillis()
      val response = clientPipeline {
        Post(url.toString).withHeaders(RawHeader(`Logplex-Msg-Count`, app.bundle.size.toString),
          RawHeader(`Logplex-Drain-Token`, drain.drainId),
          RawHeader(`Logplex-Frame-Id`, UUID.randomUUID().toString),
          HttpHeaders.`Content-Type`(`application/logplex-1`)).withEntity(HttpEntity(app.bundle.mkString("")))
      }
      response.onComplete { x =>
        app.pumpCount.incr
        log.info(s"Request to ${app.id} completed in ${System.currentTimeMillis() - startTimestamp} millis.")
      }
    }

    // NOTE(review): scheduledTimeout is cancelled but never reset to None, so
    // the next record after a size-triggered flush will not schedule a fresh
    // timeout (isEmpty stays false) — confirm whether that is intended.
    if (app.scheduledTimeout.isDefined) app.scheduledTimeout.get.cancel()
    app.bundle = Seq[String]()
    appMap = appMap.updated(app.id, app)
  }
}
vonnagy/drain-processor
src/main/scala/drainprocessor/drain/Drainer.scala
Scala
apache-2.0
3,676
/*
 * Copyright 2016 Coursera Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.coursera.naptime.access.authenticator.combiner

import org.coursera.common.concurrent.Futures
import org.coursera.naptime.NaptimeActionException
import org.coursera.naptime.access.authenticator.Authenticator
import play.api.mvc.RequestHeader

import scala.concurrent.ExecutionContext
import scala.concurrent.Future

private[authenticator] trait AnyOf {

  /**
   * Combines an UNORDERED collection of [[Authenticator]]s into an authenticator that:
   *   1. If any authentication succeeds, return an arbitrary successful authenticator's response.
   *   2. If any returns an error, return an arbitrary authenticator's error.
   *   3. If all skip, return a skip response.
   *
   * Note: the weakness of ordering guarantees here allows optimization like not waiting for all
   * authenticators to respond if an early one is successful.
   *
   * TODO(josh): Should existence of even a single error trigger failure?
   * TODO(josh): Write unit tests.
   */
  def anyOf[A](authenticators: Set[Authenticator[A]]): Authenticator[A] = new Authenticator[A] {

    override def maybeAuthenticate(
        requestHeader: RequestHeader)
        (implicit ec: ExecutionContext): Future[Option[Either[NaptimeActionException, A]]] = {

      // Kick off all authenticators immediately; recover wraps their failures.
      val authenticationResponses = authenticators
        .map(Authenticator.authenticateAndRecover(_, requestHeader))

      // First authenticator (in completion order) that produced a success.
      val successOptionFuture = Futures.findMatch(authenticationResponses) {
        case Some(Right(authentication)) => Right(authentication)
      }
      // lazy: the error scan is only materialized if Common decides it needs
      // it (i.e. no success was found), avoiding unnecessary work.
      lazy val errorOptionFuture = Futures.findMatch(authenticationResponses) {
        case Some(Left(error)) => Left(error)
      }

      Common.combineAuthenticationResponses(successOptionFuture, errorOptionFuture)
    }

  }

  /** Arity-2 convenience overload: lifts both authenticators into a common type A and delegates to the Set version. */
  def anyOf[A, A1, A2](
      authenticator1: Authenticator[A1],
      authenticator2: Authenticator[A2])
      (implicit transformer1: AuthenticationTransformer[A1, A],
      transformer2: AuthenticationTransformer[A2, A]): Authenticator[A] = {
    anyOf(Set(
      authenticator1.collect(transformer1.partial),
      authenticator2.collect(transformer2.partial)))
  }

  /** Arity-3 convenience overload; see the arity-2 version. */
  def anyOf[A, A1, A2, A3](
      authenticator1: Authenticator[A1],
      authenticator2: Authenticator[A2],
      authenticator3: Authenticator[A3])
      (implicit transformer1: AuthenticationTransformer[A1, A],
      transformer2: AuthenticationTransformer[A2, A],
      transformer3: AuthenticationTransformer[A3, A]): Authenticator[A] = {
    anyOf(Set(
      authenticator1.collect(transformer1.partial),
      authenticator2.collect(transformer2.partial),
      authenticator3.collect(transformer3.partial)))
  }

  // TODO(josh): Generate for remaining arities.

}
vkuo-coursera/naptime
naptime/src/main/scala/org/coursera/naptime/access/authenticator/combiner/AnyOf.scala
Scala
apache-2.0
3,290
package pl.newicom.dddd.cluster

import akka.cluster.sharding.ShardRegion._
import pl.newicom.dddd.cluster.ShardResolution._
import pl.newicom.dddd.aggregate.Command
import pl.newicom.dddd.messaging.EntityMessage
import pl.newicom.dddd.messaging.command.CommandMessage
import pl.newicom.dddd.messaging.correlation.EntityIdResolution
import pl.newicom.dddd.messaging.correlation.EntityIdResolution.EntityIdResolver

object ShardResolution {

  /** Strategy that builds a cluster-sharding [[ShardResolver]] from an entity-id resolver. */
  type ShardResolutionStrategy = EntityIdResolver => ShardResolver

}

/**
 * Wires entity-id resolution into Akka cluster sharding: a single
 * [[EntityIdResolver]] drives both the shard resolver and the id extractor.
 */
trait ShardResolution[A] extends EntityIdResolution[A] {

  /** Pluggable strategy used to derive [[shardResolver]]. */
  def shardResolutionStrategy: ShardResolutionStrategy

  def shardResolver: ShardResolver = shardResolutionStrategy(entityIdResolver)

  /**
   * Pairs each incoming message with its resolved entity id.
   * Bare [[Command]]s are wrapped into a [[CommandMessage]] on the way in;
   * case order matters, so entity messages are tried first.
   */
  val idExtractor: IdExtractor = {
    case entityMessage: EntityMessage => (entityIdResolver(entityMessage), entityMessage)
    case command: Command             => (entityIdResolver(command), CommandMessage(command))
  }

}
ahjohannessen/akka-ddd
akka-ddd-core/src/main/scala/pl/newicom/dddd/cluster/ShardResolution.scala
Scala
mit
868
/*
 * Distributed as part of Scalala, a linear algebra library.
 *
 * Copyright (C) 2008- Daniel Ramage
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110 USA
 */
package scalala.tensor;

import org.scalacheck._
import org.scalatest._;
import org.scalatest.junit._;
import org.scalatest.prop._;
import org.junit.runner.RunWith

/**
 * Unit tests for Counter arithmetic: element-wise ops align entries by key,
 * and missing keys are treated as zero.
 */
@RunWith(classOf[JUnitRunner])
class CounterTest extends FunSuite with Checkers {

  // Absolute tolerance for floating-point comparisons.
  val TOLERANCE = 1e-4;
  def assertClose(a : Double, b : Double) =
    assert(math.abs(a - b) < TOLERANCE);

  test("Addition") {
    // Addition is key-wise and commutative; absent keys behave as zero.
    assert(mutable.Counter("a"->1,"b"->2) + mutable.Counter("a"->3) === Counter("a"->4,"b"->2));
    assert(mutable.Counter("a"->3) + mutable.Counter("a"->1,"b"->2) === Counter("a"->4,"b"->2));
  }

  test("Subtraction") {
    assert(mutable.Counter("a"->1,"b"->2) - mutable.Counter("a"->3) === Counter("a" -> -2, "b" -> 2));
    assert(mutable.Counter("a"->3) - mutable.Counter("a"->1,"b"->2) === Counter("a" -> 2, "b" -> -2));
  }

  test("Multiplication") {
    // Element-wise product: keys present in only one operand drop out (x * 0).
    assert(mutable.Counter("a"->1,"b"->2) :* mutable.Counter("a"->3) === Counter("a"->3));
    assert(mutable.Counter("a"->3) :* mutable.Counter("a"->1,"b"->2) === Counter("a"->3));
  }

  test("MulInner") {
    // Dot product of two dense 5-element counters; expected value precomputed.
    val a = mutable.Counter(1->0.56390,2->0.36231,3->0.14601,4->0.60294,5->0.14535);
    val b = mutable.Counter(1->0.15951,2->0.83671,3->0.56002,4->0.57797,5->0.54450);
    assertClose(a dot b, .90249);
  }

  test("Zero + non zero is nonzero") {
    // Regression: an explicitly stored zero must not poison addition.
    val a = mutable.Counter[Int,Double](1->0.0)
    val b = mutable.Counter(1->0.15951);
    assert(a + b === b, (a + b).toString + " not equal " + b)
  }

  test("Mean") {
    // Mean promotes Int/Long counters to Double.
    assert(Counter(0->0,1->1,2->2).mean === 1.0);
    assert(Counter(0->0.0,1->3.0).mean === 1.5);
    assert(Counter(0->3l).mean === 3.0);
  }

  test("assignment checks both domains") {
    // := must widen the target's domain to cover the source's keys.
    val a = Counter[Int,Int]()
    val b = Counter[Int,Int](3->4)
    a := b
    assert(a === b)
  }
}
scalala/Scalala
src/test/scala/scalala/tensor/CounterTest.scala
Scala
lgpl-2.1
2,605
package org.jetbrains.plugins.scala
package annotator

import org.intellij.lang.annotations.Language
import org.jetbrains.plugins.scala.base.SimpleTestCase
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScTypedStmt

/**
 * Tests the annotator for typed statements (`expr: Type`): conforming
 * ascriptions produce no messages, mismatches produce a type-mismatch error.
 */
class TypedStatementAnnotatorTest extends SimpleTestCase {
  // Prepended to every code fragment so A/B types and values are in scope.
  final val Header = "class A; class B; object A extends A; object B extends B\\n"

  def testFine() {
    assertMatches(messages("A: A")) {
      case Nil =>
    }
  }

  def testTypeMismatch() {
    // Error is reported on the expression being ascribed ("B").
    assertMatches(messages("B: A")) {
      case Error("B", TypeMismatch()) :: Nil =>
    }
  }

  def testTypeMismatchMessage() {
    assertMatches(messages("B: A")) {
      case Error(_, "Type mismatch, found: B.type, required: A") :: Nil =>
    }
  }

  //todo: requires Function1 trait in scope
  /*def testImplicitConversion {
    assertMatches(messages("implicit def toA(b: B) = A; B: A")) {
      case Nil =>
    }
  }*/

  /**
   * Parses Header + code, annotates the first typed statement found, and
   * returns the collected annotator messages.
   */
  def messages(@Language(value = "Scala", prefix = Header) code: String): List[Message] = {
    // .get assumes the fragment contains a typed statement — true for all tests here.
    val definition = (Header + code).parse.depthFirst.findByType(classOf[ScTypedStmt]).get
    val annotator = new TypedStatementAnnotator() {}
    val mock = new AnnotatorHolderMock

    annotator.annotateTypedStatement(definition, mock, highlightErrors = true)
    mock.annotations
  }

  // Extractor matching any message that contains the type-mismatch prefix.
  val TypeMismatch = ContainsPattern("Type mismatch")
}
LPTK/intellij-scala
test/org/jetbrains/plugins/scala/annotator/TypedStatementAnnotatorTest.scala
Scala
apache-2.0
1,403
/* Copyright 2009-2016 EPFL, Lausanne */

/** Verification regression: tuple construction and pattern deconstruction. */
object MyTuple2 {

  abstract class A
  case class B(i: Int) extends A
  case class C(a: A) extends A

  /** Builds a pair and projects the integer out of its first component. */
  def foo(): Int = {
    val pair = (B(2), C(B(3)))
    pair match {
      case (B(first), C(_)) => first
    }
  } ensuring(_ == 2)

}
epfl-lara/leon
src/test/resources/regression/verification/purescala/valid/MyTuple2.scala
Scala
gpl-3.0
267
/*
 * Copyright (c) 2013 Aviat Networks.
 * This file is part of DocReg+Web. Please refer to the NOTICE.txt file for license details.
 */
package vvv.docreg.snippet

import org.specs2.mutable._
import java.util.Calendar
import vvv.docreg.model.Revision
import java.sql.Timestamp

/**
 * Tests MonthHistory.analyse: buckets revision timestamps into a 30-day
 * window ending at "now", one Sample per day.
 */
class HistoryTest extends Specification {
  "MonthHistory" should {
    "have date range back 30 days" >> {
      // Build five revisions: two on 2012-01-05 / 2012-01-02 (r2 and r3 share
      // the same instant), one on 2011-12-27 inside the window, and one on
      // 2011-12-01 that falls outside the 30-day range.
      val d = Calendar.getInstance
      d.set(2012, Calendar.JANUARY, 5, 11, 49, 58)
      val r1 = new Revision
      r1.date = new Timestamp(d.getTimeInMillis)
      d.set(2012, Calendar.JANUARY, 2, 11, 49, 58)
      val r2 = new Revision
      r2.date = new Timestamp(d.getTimeInMillis)
      val r3 = new Revision
      r3.date = new Timestamp(d.getTimeInMillis)
      d.set(2011, Calendar.DECEMBER, 27, 3, 4, 5)
      val r4 = new Revision
      r4.date = new Timestamp(d.getTimeInMillis)
      d.set(2011, Calendar.DECEMBER, 1, 3, 4, 5)
      val r5 = new Revision
      r5.date = new Timestamp(d.getTimeInMillis)
      val revisions = List(r1, r2, r3, r4, r5)

      // "now" is one second after r1, so r1 lands in today's bucket.
      val now = Calendar.getInstance
      now.set(2012, Calendar.JANUARY, 5, 11, 49, 59)

      // load() stubbed to Nil so only the supplied revisions are analysed.
      val x = new MonthHistory(){ override def load() = Nil }.analyse(now, revisions)
      x must haveSize(30)
      // Samples are (dayOffset, count, dayOfMonthLabel); index 29 is today.
      x(29) must be_==(Sample(0, 1, "5"))
      x(28) must be_==(Sample(-1, 0, "4"))
      x(26) must be_==(Sample(-3, 2, "2"))
      x(20) must be_==(Sample(-9, 1, "27"))
      x(19) must be_==(Sample(-10, 0, "26"))
      x(0) must be_==(Sample(-29, 0, "7"))
    }
  }
}
scott-abernethy/docreg-web
src/test/scala/vvv/docreg/snippet/HistoryTest.scala
Scala
gpl-3.0
1,556
/*
 * This file is part of the \\BlueLaTeX project.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package bluelatex

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives._
import akka.stream.ActorMaterializer
import com.typesafe.config.ConfigFactory

import scala.concurrent.Future

import config._

import org.slf4j.LoggerFactory

/**
 * HTTP server front-end: instantiates the configured Service classes by
 * reflection, concatenates their routes (optionally under a path prefix),
 * and manages the Akka HTTP binding lifecycle.
 */
class Server(implicit val system: ActorSystem) extends StdReaders {

  val conf = ConfigFactory.load

  val logger = LoggerFactory.getLogger(getClass)

  implicit val materializer = ActorMaterializer()
  implicit val ec = system.dispatcher

  val prefix = conf.as[String]("bluelatex.api.prefix")
  val services = conf.as[List[String]]("bluelatex.api.services")

  // Each configured class name must denote a Service with an
  // (ActorSystem) constructor; routes are folded together with ~.
  // An empty service list yields a route that rejects everything.
  val route = services match {
    case s :: sl =>
      def serv(s: String): Service = {
        val const = Class.forName(s).getConstructor(classOf[ActorSystem])
        const.newInstance(system).asInstanceOf[Service]
      }
      sl.foldLeft(serv(s).route)((acc, s) => acc ~ serv(s).route)
    case Nil =>
      reject
  }

  // Wrap all routes under the configured prefix, unless it is empty.
  val prefixed =
    if (prefix.isEmpty) route
    else pathPrefix(separateOnSlashes(prefix)) { route }

  // null means "not started"; doubles as the idempotency guard for start/stop.
  // NOTE(review): not thread-safe — concurrent start()/stop() calls race on
  // this var; acceptable only if lifecycle calls come from a single thread.
  private var bindingFuture: Future[Http.ServerBinding] = null

  /** Binds to the configured host/port; no-op if already started. */
  def start(): Unit =
    if (bindingFuture == null) {
      val host = conf.as[String]("bluelatex.http.host")
      val port = conf.as[Int]("bluelatex.http.port")
      bindingFuture = Http().bindAndHandle(prefixed, host, port)
      // Bind failures are logged only; the server object stays in "started"
      // state even though no port is held — confirm that is intended.
      bindingFuture.onFailure {
        case e: Exception =>
          logger.error(f"Failed to bind to $host, $port", e)
      }
    }

  /** Unbinds and shuts the actor system down; no-op if not started. */
  def stop(): Unit =
    if (bindingFuture != null) {
      bindingFuture
        .flatMap(_.unbind()) // trigger unbinding from the port
        .onComplete(_ => system.shutdown()) // and shutdown when done
      // Cleared immediately, before unbind completes, so restart while
      // stopping could double-bind — NOTE(review): verify lifecycle usage.
      bindingFuture = null
    }

}
bluelatex/bluelatex-server
core/src/main/scala/bluelatex/Server.scala
Scala
apache-2.0
2,424
/*******************************************************************************
 * Copyright 2017 Capital One Services, LLC and Bitwise, Inc.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package hydrograph.engine.spark.components.adapter

import hydrograph.engine.core.component.generator.OutputFileMixedSchemeEntityGenerator
import hydrograph.engine.jaxb.commontypes.TypeBaseComponent
import hydrograph.engine.spark.components.OutputFileMixedSchemeComponent
import hydrograph.engine.spark.components.adapter.base.OutputAdatperBase
import hydrograph.engine.spark.components.base.SparkFlow
import hydrograph.engine.spark.components.platform.BaseComponentParams

/**
 * The Class OutputFileMixedSchemeAdapter.
 *
 * Adapts a JAXB component definition into the Spark output-file
 * (mixed-scheme) component. Lifecycle: createGenerator, then
 * createComponent, then getComponent.
 *
 * @author Bitwise
 */
class OutputFileMixedSchemeAdapter(typeBaseComponent: TypeBaseComponent) extends OutputAdatperBase {

  // Populated by createGenerator / createComponent respectively; both are
  // uninitialized (null) until the corresponding lifecycle call happens.
  private var entityGenerator: OutputFileMixedSchemeEntityGenerator = _
  private var outputComponent: OutputFileMixedSchemeComponent = _

  override def createGenerator(): Unit = {
    entityGenerator = new OutputFileMixedSchemeEntityGenerator(typeBaseComponent)
  }

  override def createComponent(baseComponentParams: BaseComponentParams): Unit = {
    outputComponent =
      new OutputFileMixedSchemeComponent(entityGenerator.getEntity, baseComponentParams)
  }

  override def getComponent(): SparkFlow = outputComponent

}
capitalone/Hydrograph
hydrograph.engine/hydrograph.engine.spark/src/main/scala/hydrograph/engine/spark/components/adapter/OutputFileMixedSchemeAdapter.scala
Scala
apache-2.0
2,014
package zzb.srvbox

import akka.actor._
import spray.routing._
import scala.collection.JavaConverters._
import java.util

/**
 * Created with IntelliJ IDEA.
 * User: Simon Xiao
 * Date: 13-7-29
 * Time: 3:32 PM
 * Copyright goodsl.org 2012~2020
 */
class RestInterface extends HttpServiceActor
  with RestApi {

  def receive = runRoute(routes)
}

/**
 * Routes every incoming request to the service manager: the target service is
 * chosen either by a configured regex path mapping or, failing that, by the
 * first path segment of the URI.
 */
trait RestApi extends HttpService with ActorLogging { actor: Actor ⇒

  import zzb.srvbox.SrvManageProtocol.RestRequest

  val serviceManagerActor = context.actorOf(Props[ServiceManagerActor], "serviceManagerActor")

  // URL path mappings: service name -> list of URL regex patterns.
  // Mutated only in preStart, read-only afterwards.
  private val pathMatchs: collection.mutable.Map[String, List[String]] = collection.mutable.Map.empty

  override def preStart {
    // Load optional "services.path" config entries into the mapping table.
    if (context.system.settings.config.hasPath("services.path")) {
      context.system.settings.config.getConfig("services.path").entrySet().asScala.foreach { entry =>
        pathMatchs.getOrElseUpdate(entry.getKey,
          entry.getValue.unwrapped.asInstanceOf[util.ArrayList[String]].asScala.toList)
      }
    }
  }

  def routes: Route =
    requestUri { uri =>
      // Look for a configured path mapping that matches the request URI.
      // The yielded prefix drops the leading "/" and the trailing two chars
      // (presumably a ".*"-style regex suffix — TODO confirm pattern format).
      val founds = for {
        pathMatch <- pathMatchs
        key = pathMatch._1
        urlMap <- pathMatch._2
        if uri.path.toString().matches(urlMap)
      } yield (key, urlMap.dropRight(2).drop(1))

      // No mapping: treat the first path segment as the service name.
      val (serviceName, path) =
        if (founds.isEmpty)
          (uri.path.tail.head.toString, segmentStringToPathMatcher(uri.path.tail.head.toString))
        else
          (founds.head._1, separateOnSlashes(founds.head._2))

      // Strip the configured prefix (or the service name) and hand the raw
      // request context to the service manager for dispatch.
      pathPrefix(path) {
        requestContext => {
          serviceManagerActor ! RestRequest(serviceName, requestContext)
        }
      }
    }

}
xiefeifeihu/zzb
zzb-box/src/main/scala/zzb/srvbox/RestInterface.scala
Scala
mit
1,791
/*
 * Copyright 2014–2018 SlamData Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package quasar.sql

import slamdata.Predef._
import quasar._, RenderTree.ops._
import quasar.common.CIName

import pathy.Path
import pathy.Path._
import matryoshka._
import matryoshka.data.Fix
import monocle.macros.Lenses
import monocle.Prism
import scalaz._, Scalaz._
import scalaz.Liskov._

/**
 * A top-level SQL² statement: either a function declaration or an import.
 * BODY is the representation of the function body (e.g. raw String or
 * Fix[Sql]); the Liskov evidence (<~<) on pprint restricts printing to the
 * appropriate representation.
 */
sealed abstract class Statement[BODY] extends Product with Serializable {
  def pprint(implicit ev: BODY <~< String): String
  // Pretty-print a Fix[Sql]-bodied statement by first rendering the body to a String.
  def pprintF(implicit ev: BODY <~< Fix[Sql]): String =
    this.map(b => quasar.sql.pprint(ev(b))).pprint
}

object Statement {
  // Traverse over the BODY: only FunctionDecl actually carries one; Import is a no-op.
  implicit val traverse: Traverse[Statement] = new Traverse[Statement] {
    def traverseImpl[G[_]:Applicative,A,B](fa: Statement[A])(f: A => G[B]): G[Statement[B]] =
      fa match {
        case funcDef: FunctionDecl[_] => funcDef.transformBodyM(f).map(x => (x:Statement[B]))
        case Import(path) => (Import(path):Statement[B]).point[G]
      }
  }

  implicit def renderTreeStatement[BODY:RenderTree]: RenderTree[Statement[BODY]] =
    new RenderTree[Statement[BODY]] {
      def render(statement: Statement[BODY]) =
        statement match {
          case func: FunctionDecl[_] => func.render
          case Import(path) => NonTerminal("Import" :: Nil, Some(posixCodec.unsafePrintPath(path)), Nil)
        }
    }

  // Equality via the two prism projections: equal iff same variant with equal contents.
  implicit def equal[BODY:Equal]: Equal[Statement[BODY]] =
    Equal.equalBy(s => (functionDecl.getOption(s), import_.getOption(s)))

  /** Prism focusing on the FunctionDecl variant as a (name, args, body) triple. */
  def functionDecl[BODY] = Prism.partial[Statement[BODY], (CIName, List[CIName], BODY)] {
    case FunctionDecl(name, args, body) => (name, args, body)
  } ((FunctionDecl[BODY](_,_,_)).tupled)

  /** Prism focusing on the Import variant's directory path. */
  def import_[BODY] = Prism.partial[Statement[BODY], Path[Any, Dir, Unsandboxed]] {
    case Import(path) => path
  } (Import(_))
}

/**
 * A `CREATE FUNCTION` declaration with case-insensitive name and argument names.
 */
@Lenses final case class FunctionDecl[BODY](name: CIName, args: List[CIName], body: BODY) extends Statement[BODY] {
  def transformBody[B](f: BODY => B): FunctionDecl[B] =
    FunctionDecl(name, args, f(body))
  def transformBodyM[M[_]: Functor, B](f: BODY => M[B]) =
    f(body).map(FunctionDecl(name, args, _))
  override def pprint(implicit ev: BODY <~< String) = {
    // Arguments print as :name, backtick-escaped where necessary.
    val argList = args.map(name => ":" + escape("`", name.shows)).mkString(", ")
    s"CREATE FUNCTION ${name.shows}($argList)\\n BEGIN\\n ${ev(body)}\\n END"
  }
}

object FunctionDecl {
  implicit def renderTreeFunctionDecl[BODY:RenderTree]: RenderTree[FunctionDecl[BODY]] =
    new RenderTree[FunctionDecl[BODY]] {
      def render(funcDec: FunctionDecl[BODY]) =
        NonTerminal("Function Declaration" :: Nil, Some(funcDec.name.value), List(funcDec.body.render))
    }

  implicit val traverse: Traverse[FunctionDecl] = new Traverse[FunctionDecl] {
    def traverseImpl[G[_]:Applicative,A,B](funcDec: FunctionDecl[A])(f: A => G[B]): G[FunctionDecl[B]] =
      funcDec.transformBodyM(f)
  }
}

/** An `import` statement referencing a directory of declarations. */
@Lenses final case class Import[BODY](path: Path[Any, Dir, Unsandboxed]) extends Statement[BODY] {
  override def pprint(implicit ev: BODY <~< String) =
    // We need to escape any backticks in the resulting String as pathy is
    // indiferent but since this is a SQL string they yield invalid SQL
    // if not escaped
    s"import `${posixCodec.unsafePrintPath(path).replace("`", "\\\\`")}`"
}
slamdata/slamengine
sql/src/main/scala/quasar/sql/Statement.scala
Scala
apache-2.0
3,789
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * Copyright 2015-2021 Andre White.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.truthencode.ddo.model.feats

import io.truthencode.ddo.providers.SimpleValueProvider

/**
 * Provides the per-level hit-point value associated with the Toughness feat.
 */
class ToughnessHitPointsPerLevelProvider extends SimpleValueProvider[Int] with HitPointsPerLevelProvider {
  // BUG(review): this override is defined in terms of itself, so any call to
  // createValue recurses infinitely and throws StackOverflowError. It
  // presumably should delegate to an inherited implementation (e.g.
  // super.createValue) or return a concrete Int => Int function — the intended
  // value cannot be determined from this file; confirm before fixing.
  override def createValue: Int => Int = createValue
}
adarro/ddo-calc
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/ToughnessHitPointsPerLevelProvider.scala
Scala
apache-2.0
905
package fds.scala.cache

/**
 * Stub for the Movie Cache — currently empty, pending implementation.
 */
object MovieCache {

  // TODO :
  // Merge the CacheClient and the MovieDB client to make a Movie Cache

}
Vigil365/fds-project
src/main/scala/fds/scala/cache/MovieCache.scala
Scala
cc0-1.0
167
import sbinary._

/** Supplies an sbinary [[Format]] (serializer/deserializer) for type A. */
trait B
{
	// Returns the Format for A; the value parameter is not used by the
	// signature itself — presumably present for call-site convenience
	// (sbt dependency-management test fixture).
	def format(a: A): Format[A]
}
jamesward/xsbt
sbt/src/sbt-test/dependency-management/provided-multi/changes/B.scala
Scala
bsd-3-clause
58
package controllers.codegen

import javax.inject.Inject

import models.slick.Tables
import play.api.mvc.{ Controller, Action }
import slick.driver.MySQLDriver.api._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{ Failure, Success }

/**
 * Created by elarib on 10/21/16.
 *
 * Developer endpoints: regenerate the Slick table model from the database
 * schema, and create the schema in the database from the current model.
 */
class CodeGenController @Inject() (environment: play.api.Environment, configuration: play.api.Configuration) extends Controller {

  val db = Database.forConfig("db")

  /** Runs Slick's source-code generator against the configured database. */
  def generateTableScala() = Action {
    // Positional CLI arguments expected by slick.codegen.SourceCodeGenerator.
    val slickDriver = "slick.driver.MySQLDriver"
    val jdbcDriver = configuration.underlying.getString("db.driver")
    val url = configuration.underlying.getString("db.url")
    val outputFolder = environment.rootPath.getPath + "/app"
    val pkg = "models.slick"
    val user = configuration.underlying.getString("db.properties.user")
    val password = configuration.underlying.getString("db.properties.password")

    val generatorArgs = Array(slickDriver, jdbcDriver, url, outputFolder, pkg, user, password)
    slick.codegen.SourceCodeGenerator.main(generatorArgs)

    Ok("Setup Done")
  }

  /** Creates the schema for all known tables, reporting success or failure. */
  def generateTableDB() = Action.async {
    val createSchema = DBIO.seq(Tables.schema.create)

    db.run(createSchema.asTry).map {
      case Success(s) =>
        Ok("Good! Database was intilitiazed succefuly")
      case Failure(e) =>
        print(e.fillInStackTrace())
        NotAcceptable("Error ! : Database was NOT intilitiazed because " + e.getMessage)
    }
  }

}
elarib/cvManager
app/controllers/codegen/CodeGenController.scala
Scala
mit
1,449
package model.facebook import spray.json._ case class FacebookOauth2Response(id: String, email: String, name: String) object FacebookOauth2Response extends DefaultJsonProtocol { implicit object FacebookOauth2ResponseFormat extends RootJsonFormat[FacebookOauth2Response] { def write(response: FacebookOauth2Response) = JsArray(JsString(response.id), JsString(response.email), JsString(response.name)) def read(value: JsValue): FacebookOauth2Response = value.asJsObject.getFields("id", "email", "name") match { case Seq(JsString(id), JsString(email), JsString(name)) => FacebookOauth2Response(id, email, name) case _ => throw DeserializationException("FacebookOauth2Response expected") } } }
sysgears/apollo-universal-starter-kit
modules/authentication/server-scala/src/main/scala/model/facebook/FacebookOauth2Response.scala
Scala
mit
730
package com.temportalist.morphadditions.common.abilities import net.minecraft.entity.Entity import net.minecraft.entity.player.EntityPlayer import net.minecraft.entity.projectile.EntitySnowball import net.minecraft.util.MathHelper /** * * * @author TheTemportalist */ class AbilityEjectSnowball() extends AbilityEject("Snowball") { override def getEntityClass(): Class[_ <: Entity] = { null } override def trigger(player: EntityPlayer): Unit = { val targetCoords: Array[Double] = this.getTargetCoords(player) if (targetCoords == null) return val snowball: EntitySnowball = new EntitySnowball(player.worldObj, player) val x: Double = targetCoords(0) - player.posX val y: Double = targetCoords(1) + targetCoords(3) - 1.100000023841858D - snowball.posY val z: Double = targetCoords(2) - player.posZ val distance: Float = MathHelper.sqrt_double(x * x + z * z) * 0.2F snowball.setThrowableHeading(x, y + distance, z, 1.6F, 12.0F) player.playSound("random.bow", 1.0F, 1.0F / (player.getRNG().nextFloat() * 0.4F + 0.8F)) player.worldObj.spawnEntityInWorld(snowball) } }
TheTemportalist/MorphAdditions
src/main/scala/com/temportalist/morphadditions/common/abilities/AbilityEjectSnowball.scala
Scala
apache-2.0
1,104
package rml.args.conversions.basic import rml.args.arg._ import rml.args.arg.input._ import rml.args.exceptions.IllegalArgException import rml.args.arg.restriction.NotRestricted import rml.args.arg.input.SingleArg import rml.args.arg.input.PositionalArg import rml.args.arg.input.ListArg0 import rml.args.arg.input.ListArg import rml.args.arg.input.JoinArg trait ToDouble extends NotRestricted { val baseType: String = "Double" def mapToType(value: String): Double = try { value.toDouble } catch { case nfe: NumberFormatException => throw new IllegalArgException("Value '" + value + "' is not a valid Double") } } object ADouble { def apply(key: String) = InputArg(key, new SingleArg[Double] with ToDouble) } object JDouble { def apply(key: String) = InputArg(key, new JoinArg[Double] with ToDouble { override val sep = ""} ) } object Doubles { def apply(key: String) = InputArg(key, new ListArg[Double] with ToDouble) } object Doubles0{ def apply(key: String) = InputArg(key, new ListArg0[Double] with ToDouble) } object PDouble { def apply(pos: Int) = InputArg("-", new ToDouble with PositionalArg[Double]{ val position = pos }) }
rml/scala_args
src/main/scala/rml/args/conversions/basic/ToDouble.scala
Scala
gpl-3.0
1,176
println(/* resolved: false */ Long.getClass) println(classOf[/* */ Long]) 1L.asInstanceOf[Long]./* */ toFloat
katejim/intellij-scala
testdata/resolve2/predef/literal/Long.scala
Scala
apache-2.0
109
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs // Licence: http://www.gnu.org/licenses/gpl-3.0.en.html /** * To keep interaction with Lucene really simple, we make the * following assumptions about the entities that we index: * * 1. all entities are flat (no nested objects). * * 2. all values have a `String` representation. * * 3. field names are universal: e.g. a "name" field in one type of * entity should be analyzed the same way as in another. * * 4. entities have a unique id that is derived from their content. * * which allows us to use case classes to define entities, getting us * serialisation and deserialisation with minimal boilerplate. * Field-based `Analyzer`s and `Query`s, on the other hand, can be * arbitrarily complex. * * In addition, Option[T]s are indexed but not stored (not * fully persistent). */ package org.ensime.indexer import org.apache.lucene.document._ package object lucene { implicit class RichEntity[T <: Entity](e: T) { def toDocument(implicit p: DocumentProvider[T]) = p.toDocument(e) } implicit class RichDocument(d: Document) { def toEntity[T](implicit p: DocumentRecovery[T]) = p.toEntity(d) } }
j-mckitrick/ensime-sbt
src/sbt-test/ensime-sbt/ensime-server/core/src/main/scala/org/ensime/indexer/lucene/package.scala
Scala
apache-2.0
1,208
package viscel.tests import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.{Arbitrary, Gen} import viscel.netzi.VRequest import viscel.shared.{Blob, DataRow, Vurl} //import viscel.store.v3.{BlobData, ImageRef, Link, Normal, PageData, ScribeDataRow, Volatile} object DataGenerators { implicit val genBlob: Arbitrary[Blob] = Arbitrary(for { sha <- Gen.alphaNumStr mime <- Gen.alphaNumStr } yield Blob(sha, mime)) implicit val genVurl: Arbitrary[Vurl] = Arbitrary(for { str <- Gen.alphaNumStr } yield Vurl.fromString(s"viscel://$str")) //implicit val genLink: Arbitrary[Link] = Arbitrary(for { // policy <- Gen.oneOf(Normal, Volatile) // url <- genVurl.arbitrary // data <- Gen.listOf(arbitrary[String]) //} yield Link(url, policy, data)) //implicit val genInstant: Arbitrary[Instant] = Arbitrary(arbitrary[Long].map((i: Long) => Instant.ofEpochMilli(i))) //implicit val genImageRef: Arbitrary[ImageRef] = Arbitrary(for { // ref <- genVurl.arbitrary // origin <- genVurl.arbitrary // data <- arbitrary[Map[String, String]] //} yield ImageRef(ref, origin, data)) //implicit val genPageData: Arbitrary[PageData] = Arbitrary(for { // ref <- arbitrary[Vurl] // loc <- arbitrary[Vurl] // date <- arbitrary[Instant] // contents <- Gen.listOf(Gen.oneOf(arbitrary[ImageRef], arbitrary[Link])) //} yield PageData(ref, loc, date, contents)) //implicit val genBlobData: Arbitrary[BlobData] = Arbitrary(for { // ref <- arbitrary[Vurl] // loc <- arbitrary[Vurl] // date <- arbitrary[Instant] // blob <- arbitrary[Blob] //} yield BlobData(ref, loc, date, blob)) //implicit val genScribeDataRow: Arbitrary[ScribeDataRow] = Arbitrary(Gen.oneOf(arbitrary[BlobData], arbitrary[PageData])) implicit val genVRequest: Arbitrary[VRequest] = Arbitrary(for { href <- arbitrary[Vurl] origin <- arbitrary[Option[Vurl]] } yield VRequest(href, Nil, origin)) implicit val genDataRow: Arbitrary[DataRow] = Arbitrary(for { vurl <- arbitrary[Vurl] } yield DataRow(vurl, contents = Nil)) }
rmgk/viscel
code/jvm/src/test/scala/viscel/tests/DataGenerators.scala
Scala
agpl-3.0
2,070
/* * Copyright 2019 ACINQ SAS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package fr.acinq.eclair.wire.protocol import fr.acinq.bitcoin.ByteVector32 import fr.acinq.bitcoin.Crypto.PublicKey import fr.acinq.eclair.payment.{Bolt11Invoice, Invoice} import fr.acinq.eclair.wire.protocol.CommonCodecs._ import fr.acinq.eclair.wire.protocol.OnionRoutingCodecs.MissingRequiredTlv import fr.acinq.eclair.wire.protocol.TlvCodecs._ import fr.acinq.eclair.{CltvExpiry, MilliSatoshi, ShortChannelId, UInt64} import scodec.bits.{BitVector, ByteVector} /** * Created by t-bast on 05/07/2019. */ /* We support multiple payment flows, each having different requirements for what the onions contain. The following is an overview of the onion contents we support. 
STANDARD PAYMENT (fully source-routed, single-part): a -------------> b --------------------------> c --------------------------> d ---------------------------> e +-----------------------+ +-----------------------+ +-----------------------+ +-----------------------+ | amount_fwd: 1025 msat | | amount_fwd: 1010 msat | | amount_fwd: 1000 msat | | amount_fwd: 1000 msat | | expiry: 600030 | | expiry: 600012 | | expiry: 600000 | | expiry: 600000 | | channel_id: 1105 | | channel_id: 561 | | channel_id: 42 | | secret: xyz (opt) | |-----------------------| |-----------------------| |-----------------------| +-----------------------+ | (encrypted) | | (encrypted) | | (encrypted) | | EOF | +-----------------------+ +-----------------------+ +-----------------------+ +-----------------------+ STANDARD MULTI-PART PAYMENT (fully source-routed, multi-part): a -------------> b --------------------------> c --------------------------> d ---------------------------> e +-----------------------+ +-----------------------+ +-----------------------+ +-------------------------+ | amount_fwd: 1025 msat | | amount_fwd: 1010 msat | | amount_fwd: 1000 msat | | amount_fwd: 1000 msat | | expiry: 600030 | | expiry: 600012 | | expiry: 600000 | | expiry: 600000 | | channel_id: 1105 | | channel_id: 561 | | channel_id: 42 | | secret: xyz | |-----------------------| |-----------------------| |-----------------------| | total_amount: 1500 msat | | (encrypted) | | (encrypted) | | (encrypted) | +-------------------------+ +-----------------------+ +-----------------------+ +-----------------------+ | EOF | +-------------------------+ TRAMPOLINE PAYMENT (partially source-routed, multi-part): a -------------> b ---------------------------> t1 -----------------------------> t2 -------------------------------> e +----------------------+ +---------------------------+ +---------------------------+ +-----------------------------+ | amount_fwd: 900 msat | | amount_fwd: 900 msat | | amount_fwd: 750 msat | | 
amount_fwd: 1000 msat | | expiry: 600112 | | expiry: 600112 | | expiry: 600042 | | expiry: 600000 | | channel_id: 42 | | secret: aaaaa | | secret: zzzzz | | secret: xxxxx | <- randomly generated by t2 (NOT the invoice secret) |----------------------| | total_amount: 1650 msat | | total_amount: 1600 msat | | total_amount: 1500 msat | <- t2 is using multi-part to pay e, still 500 msat more to receive | (encrypted) | | trampoline_onion: | | trampoline_onion: | | trampoline_onion: | +----------------------+ | +-----------------------+ | | +-----------------------+ | | +-------------------------+ | | | amount_fwd: 1600 msat | | | | amount_fwd: 1500 msat | | | | amount_fwd: 1500 msat | | | | expiry: 600042 | | | | expiry: 600000 | | | | expiry: 600000 | | | | node_id: t2 | | | | node_id: e | | | | total_amount: 2500 msat | | <- may be bigger than amount_fwd in case the payment is split among multiple trampoline routes | +-----------------------+ | | +-----------------------+ | | | secret: yyyyy | | <- invoice secret | | (encrypted) | | | | (encrypted) | | | +-------------------------+ | | +-----------------------+ | | +-----------------------+ | | | EOF | | +---------------------------+ +---------------------------+ | +-------------------------+ | | EOF | | EOF | +-----------------------------+ +---------------------------+ +---------------------------+ | EOF | +-----------------------------+ Notes: - there may be two layers of multi-part: a may split the payment between multiple trampoline routes, and inside each trampoline route payments may be split into multiple parts. - when multi-part is used to reach trampoline nodes, the payment secret in the outer onion is NOT the invoice secret. We want only the recipient to receive the invoice payment secret. The payment secrets in outer onions are generated randomly by the sender to simply prevent next-to-last non-trampoline nodes from probing their position in the route or steal some fees. 
TRAMPOLINE PAYMENT TO LEGACY RECIPIENT (the last trampoline node converts to a standard payment to the final recipient): a -------------> b ----------------------------> t1 -----------------------------> t2 ---------------------------------- -> e ---------------------------> f +-----------------------+ +---------------------------+ +---------------------------------+ +-----------------------+ +-------------------------+ | amount_fwd: 1750 msat | | amount_fwd: 1750 msat | | amount_fwd: 1600 msat | | amount_fwd: 1000 msat | | amount_fwd: 1000 msat | | expiry: 600112 | | expiry: 600112 | | expiry: 600042 | | expiry: 600000 | | expiry: 600000 | | channel_id: 42 | | secret: yyyyy | | secret: zzzzz | +---->| channel_id: 42 |---->| secret: xyz | <- invoice secret (omitted if not supported by invoice) +-----------------------+ | total_amount: 1750 msat | | total_amount: 1600 msat | | +-----------------------+ | total_amount: 2500 msat | <- t2 is using multi-part to pay 1500 msat to f, for a total payment | (encrypted) | | trampoline_onion: | | trampoline_onion: | | | (encrypted) | +-------------------------+ of 2500 msat split between multiple trampoline routes (omitted if +-----------------------+ | +-----------------------+ | | +-----------------------------+ | | +-----------------------+ | EOF | MPP not supported by invoice). | | amount_fwd: 1600 msat | | | | amount_fwd: 1500 msat | | | +-------------------------+ The remaining 1000 msat needed to reach the total 2500 msat have | | expiry: 600042 | | | | expiry: 600000 | |--+ been sent via a completely separate trampoline route (not included | | node_id: t2 | | | | total_amount: 2500 msat | | | +-----------------------+ +-------------------------+ in this diagram). 
| +-----------------------+ | | | secret: xyz | | | | amount_fwd: 500 msat | | amount_fwd: 500 msat | | | (encrypted) | | | | node_id: f | | | | expiry: 600000 | | expiry: 600000 | | +-----------------------+ | | | invoice_features: 0x0a | | +---->| channel_id: 43 |---->| secret: xyz | +---------------------------+ | | invoice_routing_info: ..... | | +-----------------------+ | total_amount: 2500 msat | | EOF | | +-----------------------------+ | | (encrypted) | +-------------------------+ +---------------------------+ | | (encrypted) | | +-----------------------+ | EOF | | +-----------------------------+ | +-------------------------+ +---------------------------------+ | EOF | +---------------------------------+ Notes: - the last trampoline node learns the payment details (who the recipient is, the payment amount and secret) - but it doesn't learn the sender's identity - if the invoice doesn't specify an amount, the last trampoline node can pay a lower amount than what the sender intended, thus stealing a lot of fees for himself (the wallet should disable paying to a 0-value invoice via trampoline to prevent this attack) - if f doesn't support MPP, t2 will send a single-part payment or fail if there isn't enough capacity - as in normal trampoline scenario, payment secrets in the outer onion are NOT the invoice secret */ /** Tlv types used inside a payment onion. */ sealed trait OnionPaymentPayloadTlv extends Tlv object OnionPaymentPayloadTlv { /** Amount to forward to the next node. */ case class AmountToForward(amount: MilliSatoshi) extends OnionPaymentPayloadTlv /** CLTV value to use for the HTLC offered to the next node. */ case class OutgoingCltv(cltv: CltvExpiry) extends OnionPaymentPayloadTlv /** Id of the channel to use to forward a payment to the next node. */ case class OutgoingChannelId(shortChannelId: ShortChannelId) extends OnionPaymentPayloadTlv /** * Bolt 11 payment details (only included for the last node). 
* * @param secret payment secret specified in the Bolt 11 invoice. * @param totalAmount total amount in multi-part payments. When missing, assumed to be equal to AmountToForward. */ case class PaymentData(secret: ByteVector32, totalAmount: MilliSatoshi) extends OnionPaymentPayloadTlv /** * Route blinding lets the recipient provide some encrypted data for each intermediate node in the blinded part of the * route. This data cannot be decrypted or modified by the sender and usually contains information to locate the next * node without revealing it to the sender. */ case class EncryptedRecipientData(data: ByteVector) extends OnionPaymentPayloadTlv /** Blinding ephemeral public key that should be used to derive shared secrets when using route blinding. */ case class BlindingPoint(publicKey: PublicKey) extends OnionPaymentPayloadTlv /** Id of the next node. */ case class OutgoingNodeId(nodeId: PublicKey) extends OnionPaymentPayloadTlv /** * When payment metadata is included in a Bolt 11 invoice, we should send it as-is to the recipient. * This lets recipients generate invoices without having to store anything on their side until the invoice is paid. */ case class PaymentMetadata(data: ByteVector) extends OnionPaymentPayloadTlv /** * Invoice feature bits. Only included for intermediate trampoline nodes when they should convert to a legacy payment * because the final recipient doesn't support trampoline. */ case class InvoiceFeatures(features: ByteVector) extends OnionPaymentPayloadTlv /** * Invoice routing hints. Only included for intermediate trampoline nodes when they should convert to a legacy payment * because the final recipient doesn't support trampoline. */ case class InvoiceRoutingInfo(extraHops: List[List[Bolt11Invoice.ExtraHop]]) extends OnionPaymentPayloadTlv /** An encrypted trampoline onion packet. 
*/ case class TrampolineOnion(packet: OnionRoutingPacket) extends OnionPaymentPayloadTlv /** Pre-image included by the sender of a payment in case of a donation */ case class KeySend(paymentPreimage: ByteVector32) extends OnionPaymentPayloadTlv } object PaymentOnion { import OnionPaymentPayloadTlv._ /* * We use the following architecture for payment onion payloads: * * PerHopPayload * _______________________/\\_______________ * / \\ * RelayPayload FinalPayload * _______________/\\_________________ \\______ * / \\ \\ * ChannelRelayPayload \\ \\ * ________/\\______________ \\ \\ * / \\ \\ \\ * RelayLegacyPayload ChannelRelayTlvPayload NodeRelayPayload FinalTlvPayload * * We also introduce additional traits to separate payloads based on their encoding (PerHopPayloadFormat) and on the * type of onion packet they can be used with (PacketType). */ sealed trait PerHopPayloadFormat /** Legacy fixed-size 65-bytes onion payload. */ sealed trait LegacyFormat extends PerHopPayloadFormat /** Variable-length onion payload with optional additional tlv records. */ sealed trait TlvFormat extends PerHopPayloadFormat { def records: TlvStream[OnionPaymentPayloadTlv] } /** Payment onion packet type. */ sealed trait PacketType /** A payment onion packet is used when offering an HTLC to a remote node. */ sealed trait PaymentPacket extends PacketType /** * A trampoline onion packet is used to defer route construction to trampoline nodes. * It is usually embedded inside a [[PaymentPacket]] in the final node's payload. */ sealed trait TrampolinePacket extends PacketType /** Per-hop payload from an HTLC's payment onion (after decryption and decoding). */ sealed trait PerHopPayload /** Per-hop payload for an intermediate node. */ sealed trait RelayPayload extends PerHopPayload with PerHopPayloadFormat { /** Amount to forward to the next node. */ val amountToForward: MilliSatoshi /** CLTV value to use for the HTLC offered to the next node. 
*/ val outgoingCltv: CltvExpiry } sealed trait ChannelRelayPayload extends RelayPayload with PaymentPacket { /** Id of the channel to use to forward a payment to the next node. */ val outgoingChannelId: ShortChannelId } /** Per-hop payload for a final node. */ sealed trait FinalPayload extends PerHopPayload with PerHopPayloadFormat with TrampolinePacket with PaymentPacket { val amount: MilliSatoshi val expiry: CltvExpiry val paymentSecret: ByteVector32 val totalAmount: MilliSatoshi val paymentPreimage: Option[ByteVector32] val paymentMetadata: Option[ByteVector] } case class RelayLegacyPayload(outgoingChannelId: ShortChannelId, amountToForward: MilliSatoshi, outgoingCltv: CltvExpiry) extends ChannelRelayPayload with LegacyFormat case class ChannelRelayTlvPayload(records: TlvStream[OnionPaymentPayloadTlv]) extends ChannelRelayPayload with TlvFormat { override val amountToForward = records.get[AmountToForward].get.amount override val outgoingCltv = records.get[OutgoingCltv].get.cltv override val outgoingChannelId = records.get[OutgoingChannelId].get.shortChannelId } object ChannelRelayTlvPayload { def apply(outgoingChannelId: ShortChannelId, amountToForward: MilliSatoshi, outgoingCltv: CltvExpiry): ChannelRelayTlvPayload = ChannelRelayTlvPayload(TlvStream(OnionPaymentPayloadTlv.AmountToForward(amountToForward), OnionPaymentPayloadTlv.OutgoingCltv(outgoingCltv), OnionPaymentPayloadTlv.OutgoingChannelId(outgoingChannelId))) } case class NodeRelayPayload(records: TlvStream[OnionPaymentPayloadTlv]) extends RelayPayload with TlvFormat with TrampolinePacket { val amountToForward = records.get[AmountToForward].get.amount val outgoingCltv = records.get[OutgoingCltv].get.cltv val outgoingNodeId = records.get[OutgoingNodeId].get.nodeId // The following fields are only included in the trampoline-to-legacy case. 
val totalAmount = records.get[PaymentData].map(_.totalAmount match { case MilliSatoshi(0) => amountToForward case totalAmount => totalAmount }).getOrElse(amountToForward) val paymentSecret = records.get[PaymentData].map(_.secret) val paymentMetadata = records.get[PaymentMetadata].map(_.data) val invoiceFeatures = records.get[InvoiceFeatures].map(_.features) val invoiceRoutingInfo = records.get[InvoiceRoutingInfo].map(_.extraHops) } case class FinalTlvPayload(records: TlvStream[OnionPaymentPayloadTlv]) extends FinalPayload with TlvFormat { override val amount = records.get[AmountToForward].get.amount override val expiry = records.get[OutgoingCltv].get.cltv override val paymentSecret = records.get[PaymentData].get.secret override val totalAmount = records.get[PaymentData].map(_.totalAmount match { case MilliSatoshi(0) => amount case totalAmount => totalAmount }).getOrElse(amount) override val paymentPreimage = records.get[KeySend].map(_.paymentPreimage) override val paymentMetadata = records.get[PaymentMetadata].map(_.data) } def createNodeRelayPayload(amount: MilliSatoshi, expiry: CltvExpiry, nextNodeId: PublicKey): NodeRelayPayload = NodeRelayPayload(TlvStream(AmountToForward(amount), OutgoingCltv(expiry), OutgoingNodeId(nextNodeId))) /** Create a trampoline inner payload instructing the trampoline node to relay via a non-trampoline payment. 
*/ def createNodeRelayToNonTrampolinePayload(amount: MilliSatoshi, totalAmount: MilliSatoshi, expiry: CltvExpiry, targetNodeId: PublicKey, invoice: Invoice): NodeRelayPayload = { val tlvs = Seq( Some(AmountToForward(amount)), Some(OutgoingCltv(expiry)), invoice.paymentSecret.map(s => PaymentData(s, totalAmount)), invoice.paymentMetadata.map(m => PaymentMetadata(m)), Some(OutgoingNodeId(targetNodeId)), Some(InvoiceFeatures(invoice.features.toByteVector)), Some(InvoiceRoutingInfo(invoice.routingInfo.toList.map(_.toList))) ).flatten NodeRelayPayload(TlvStream(tlvs)) } def createSinglePartPayload(amount: MilliSatoshi, expiry: CltvExpiry, paymentSecret: ByteVector32, paymentMetadata: Option[ByteVector], userCustomTlvs: Seq[GenericTlv] = Nil): FinalPayload = { val tlvs = Seq( Some(AmountToForward(amount)), Some(OutgoingCltv(expiry)), Some(PaymentData(paymentSecret, amount)), paymentMetadata.map(m => PaymentMetadata(m)) ).flatten FinalTlvPayload(TlvStream(tlvs, userCustomTlvs)) } def createMultiPartPayload(amount: MilliSatoshi, totalAmount: MilliSatoshi, expiry: CltvExpiry, paymentSecret: ByteVector32, paymentMetadata: Option[ByteVector], additionalTlvs: Seq[OnionPaymentPayloadTlv] = Nil, userCustomTlvs: Seq[GenericTlv] = Nil): FinalPayload = { val tlvs = Seq( Some(AmountToForward(amount)), Some(OutgoingCltv(expiry)), Some(PaymentData(paymentSecret, totalAmount)), paymentMetadata.map(m => PaymentMetadata(m)) ).flatten FinalTlvPayload(TlvStream(tlvs ++ additionalTlvs, userCustomTlvs)) } /** Create a trampoline outer payload. 
*/ def createTrampolinePayload(amount: MilliSatoshi, totalAmount: MilliSatoshi, expiry: CltvExpiry, paymentSecret: ByteVector32, trampolinePacket: OnionRoutingPacket): FinalPayload = { FinalTlvPayload(TlvStream(AmountToForward(amount), OutgoingCltv(expiry), PaymentData(paymentSecret, totalAmount), TrampolineOnion(trampolinePacket))) } } object PaymentOnionCodecs { import OnionPaymentPayloadTlv._ import PaymentOnion._ import scodec.bits.HexStringSyntax import scodec.codecs._ import scodec.{Attempt, Codec, DecodeResult, Decoder} val paymentOnionPayloadLength = 1300 val trampolineOnionPayloadLength = 400 val paymentOnionPacketCodec: Codec[OnionRoutingPacket] = OnionRoutingCodecs.onionRoutingPacketCodec(paymentOnionPayloadLength) val trampolineOnionPacketCodec: Codec[OnionRoutingPacket] = OnionRoutingCodecs.onionRoutingPacketCodec(trampolineOnionPayloadLength) /** * The 1.1 BOLT spec changed the payment onion frame format to use variable-length per-hop payloads. * The first bytes contain a varint encoding the length of the payload data (not including the trailing mac). * That varint is considered to be part of the payload, so the payload length includes the number of bytes used by * the varint prefix. 
*/ val payloadLengthDecoder = Decoder[Long]((bits: BitVector) => varintoverflow.decode(bits).map(d => DecodeResult(d.value + (bits.length - d.remainder.length) / 8, d.remainder))) private val amountToForward: Codec[AmountToForward] = ("amount_msat" | ltmillisatoshi).as[AmountToForward] private val outgoingCltv: Codec[OutgoingCltv] = ("cltv" | ltu32).xmap(cltv => OutgoingCltv(CltvExpiry(cltv)), (c: OutgoingCltv) => c.cltv.toLong) private val outgoingChannelId: Codec[OutgoingChannelId] = variableSizeBytesLong(varintoverflow, "short_channel_id" | shortchannelid).as[OutgoingChannelId] private val paymentData: Codec[PaymentData] = variableSizeBytesLong(varintoverflow, ("payment_secret" | bytes32) :: ("total_msat" | tmillisatoshi)).as[PaymentData] private val encryptedRecipientData: Codec[EncryptedRecipientData] = variableSizeBytesLong(varintoverflow, "encrypted_data" | bytes).as[EncryptedRecipientData] private val blindingPoint: Codec[BlindingPoint] = (("length" | constant(hex"21")) :: ("blinding" | publicKey)).as[BlindingPoint] private val outgoingNodeId: Codec[OutgoingNodeId] = (("length" | constant(hex"21")) :: ("node_id" | publicKey)).as[OutgoingNodeId] private val paymentMetadata: Codec[PaymentMetadata] = variableSizeBytesLong(varintoverflow, "payment_metadata" | bytes).as[PaymentMetadata] private val invoiceFeatures: Codec[InvoiceFeatures] = variableSizeBytesLong(varintoverflow, bytes).as[InvoiceFeatures] private val invoiceRoutingInfo: Codec[InvoiceRoutingInfo] = variableSizeBytesLong(varintoverflow, list(listOfN(uint8, Bolt11Invoice.Codecs.extraHopCodec))).as[InvoiceRoutingInfo] private val trampolineOnion: Codec[TrampolineOnion] = variableSizeBytesLong(varintoverflow, trampolineOnionPacketCodec).as[TrampolineOnion] private val keySend: Codec[KeySend] = variableSizeBytesLong(varintoverflow, bytes32).as[KeySend] private val onionTlvCodec = discriminated[OnionPaymentPayloadTlv].by(varint) .typecase(UInt64(2), amountToForward) .typecase(UInt64(4), outgoingCltv) 
.typecase(UInt64(6), outgoingChannelId) .typecase(UInt64(8), paymentData) .typecase(UInt64(10), encryptedRecipientData) .typecase(UInt64(12), blindingPoint) .typecase(UInt64(16), paymentMetadata) // Types below aren't specified - use cautiously when deploying (be careful with backwards-compatibility). .typecase(UInt64(66097), invoiceFeatures) .typecase(UInt64(66098), outgoingNodeId) .typecase(UInt64(66099), invoiceRoutingInfo) .typecase(UInt64(66100), trampolineOnion) .typecase(UInt64(5482373484L), keySend) val tlvPerHopPayloadCodec: Codec[TlvStream[OnionPaymentPayloadTlv]] = TlvCodecs.lengthPrefixedTlvStream[OnionPaymentPayloadTlv](onionTlvCodec).complete private val legacyRelayPerHopPayloadCodec: Codec[RelayLegacyPayload] = ( ("realm" | constant(ByteVector.fromByte(0))) :: ("short_channel_id" | shortchannelid) :: ("amt_to_forward" | millisatoshi) :: ("outgoing_cltv_value" | cltvExpiry) :: ("unused_with_v0_version_on_header" | ignore(8 * 12))).as[RelayLegacyPayload] val channelRelayPerHopPayloadCodec: Codec[ChannelRelayPayload] = fallback(tlvPerHopPayloadCodec, legacyRelayPerHopPayloadCodec).narrow({ case Left(tlvs) if tlvs.get[AmountToForward].isEmpty => Attempt.failure(MissingRequiredTlv(UInt64(2))) case Left(tlvs) if tlvs.get[OutgoingCltv].isEmpty => Attempt.failure(MissingRequiredTlv(UInt64(4))) case Left(tlvs) if tlvs.get[OutgoingChannelId].isEmpty => Attempt.failure(MissingRequiredTlv(UInt64(6))) case Left(tlvs) => Attempt.successful(ChannelRelayTlvPayload(tlvs)) case Right(legacy) => Attempt.successful(legacy) }, { case legacy: RelayLegacyPayload => Right(legacy) case ChannelRelayTlvPayload(tlvs) => Left(tlvs) }) val nodeRelayPerHopPayloadCodec: Codec[NodeRelayPayload] = tlvPerHopPayloadCodec.narrow({ case tlvs if tlvs.get[AmountToForward].isEmpty => Attempt.failure(MissingRequiredTlv(UInt64(2))) case tlvs if tlvs.get[OutgoingCltv].isEmpty => Attempt.failure(MissingRequiredTlv(UInt64(4))) case tlvs if tlvs.get[OutgoingNodeId].isEmpty => 
Attempt.failure(MissingRequiredTlv(UInt64(66098))) case tlvs => Attempt.successful(NodeRelayPayload(tlvs)) }, { case NodeRelayPayload(tlvs) => tlvs }) val finalPerHopPayloadCodec: Codec[FinalPayload] = tlvPerHopPayloadCodec.narrow({ case tlvs if tlvs.get[AmountToForward].isEmpty => Attempt.failure(MissingRequiredTlv(UInt64(2))) case tlvs if tlvs.get[OutgoingCltv].isEmpty => Attempt.failure(MissingRequiredTlv(UInt64(4))) case tlvs if tlvs.get[PaymentData].isEmpty => Attempt.failure(MissingRequiredTlv(UInt64(8))) case tlvs => Attempt.successful(FinalTlvPayload(tlvs)) }, { case FinalTlvPayload(tlvs) => tlvs }) def paymentOnionPerHopPayloadCodec(isLastPacket: Boolean): Codec[PaymentPacket] = if (isLastPacket) finalPerHopPayloadCodec.upcast[PaymentPacket] else channelRelayPerHopPayloadCodec.upcast[PaymentPacket] def trampolineOnionPerHopPayloadCodec(isLastPacket: Boolean): Codec[TrampolinePacket] = if (isLastPacket) finalPerHopPayloadCodec.upcast[TrampolinePacket] else nodeRelayPerHopPayloadCodec.upcast[TrampolinePacket] }
ACINQ/eclair
eclair-core/src/main/scala/fr/acinq/eclair/wire/protocol/PaymentOnion.scala
Scala
apache-2.0
28,302