| code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses: 1 value) | license (stringclasses: 15 values) | size (int64, 5–1M) |
|---|---|---|---|---|---|
/*
* Copyright (c) 2014, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.oryx.example
import com.cloudera.oryx.lambda.{TopicProducer, ScalaBatchLayerUpdate}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
class ExampleScalaBatchLayerUpdate extends ScalaBatchLayerUpdate[String,String,String] {
def configureUpdate(sparkContext: SparkContext,
timestamp: Long,
newData: RDD[(String,String)],
pastData: RDD[(String,String)],
modelDirString: String,
modelUpdateTopic: TopicProducer[String,String]): Unit = {
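    // Left intentionally empty in this example stub. A real update might, for
    // instance, union newData with pastData, fit a model, persist it under
    // modelDirString, and publish it on the model update topic. The helpers
    // below are hypothetical, and the send(key, message) signature is assumed
    // from the Oryx TopicProducer API:
    //   val model = train(newData.union(pastData))
    //   modelUpdateTopic.send("MODEL", serialize(model))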
}
}
| jhlch/anomaly-detector | src/main/scala/com/cloudera/oryx/example/ExampleScalaBatchLayerUpdate.scala | Scala | apache-2.0 | 1,166 |
package net.liftmodules.cluster.kryo
import com.twitter.chill._
import net.liftmodules.cluster.SessionMaster
import net.liftweb.http.provider.HTTPSession
import _root_.java.util.{ ResourceBundle, Locale }
class LiftInstantiator extends ScalaKryoInstantiator {
override def newKryo(): KryoBase = {
val k = super.newKryo()
(new LiftRegistrar).apply(k)
k
}
}
class LiftRegistrar extends IKryoRegistrar {
override def apply(k: Kryo): Unit = {
k.forSubclass[HTTPSession](new HTTPSessionSerializer)
k.forSubclass[ResourceBundle](new ResourceBundleSerializer)
}
}
class HTTPSessionSerializer extends KSerializer[HTTPSession] {
override def read(kryo: Kryo, input: Input, t: Class[HTTPSession]): HTTPSession = {
val id = kryo.readObject(input, classOf[String])
SessionMaster.getHttpSession(id).openOrThrowException(s"Unable to find underlying HTTPSession with ID $id")
}
override def write(kryo: Kryo, output: Output, session: HTTPSession): Unit = {
kryo.writeObject(output, session.sessionId)
}
}
class ResourceBundleSerializer extends KSerializer[ResourceBundle] {
override def read(kryo: Kryo, input: Input, t: Class[ResourceBundle]): ResourceBundle = {
val name = kryo.readObject(input, classOf[String])
val locale = kryo.readObject(input, classOf[Locale])
ResourceBundle.getBundle(name, locale)
}
override def write(kryo: Kryo, output: Output, bundle: ResourceBundle): Unit = {
kryo.writeObject(output, bundle.getBaseBundleName)
kryo.writeObject(output, bundle.getLocale)
}
}
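// Illustrative round-trip sketch (not part of the original module): a chill
// KryoPool driven by the LiftInstantiator above. ResourceBundleSerializer
// writes only the base bundle name and Locale, and read() re-resolves the
// bundle via ResourceBundle.getBundle. The bundle name "messages" is a
// hypothetical classpath resource.
object LiftKryoRoundTrip {
  def demo(): Unit = {
    val pool = KryoPool.withByteArrayOutputStream(1, new LiftInstantiator)
    val bundle = ResourceBundle.getBundle("messages", Locale.ENGLISH)
    val restored = pool.fromBytes(pool.toBytesWithClass(bundle)).asInstanceOf[ResourceBundle]
    println(s"${restored.getBaseBundleName} / ${restored.getLocale}")
  }
}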
| joescii/lift-cluster | kryo/src/main/scala/net/liftmodules/cluster/kryo/LiftInstantiator.scala | Scala | apache-2.0 | 1,555 |
package org.mccandless.minotaur.utils
/**
* Memoizes functions for more efficient evaluation.
*
* Created by tdm on 12/3/17.
*/
class Memoizer[A, B] {
import scala.collection.mutable
val cache: mutable.Map[A, B] = mutable.Map.empty
def memoize(f: A => B): A => B = {
(a: A) => this.cache.getOrElseUpdate(a, f(a))
}
}
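// Illustrative usage sketch (not in the original file): the first call pays
// the full cost of the wrapped function; repeated calls with the same argument
// hit the cache. The backing mutable.Map is unsynchronized, so this assumes
// single-threaded use.
object MemoizerExample extends App {
  val slowSquare: Int => Int = { n => Thread.sleep(100); n * n }
  val memo = new Memoizer[Int, Int]
  val fastSquare: Int => Int = memo.memoize(slowSquare)
  println(fastSquare(12)) // computed, ~100 ms
  println(fastSquare(12)) // served from the cache, effectively instant
}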
| tomnis/minotaur | src/main/scala/org/mccandless/minotaur/utils/Memoizer.scala | Scala | mit | 339 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.atomic
/** Atomic references wrapping `Boolean` values.
*
* Note that the equality test in `compareAndSet` is value based,
* since `Boolean` is a primitive.
*/
final class AtomicBoolean private[atomic] (initialValue: Boolean) extends Atomic[Boolean] {
private[this] var ref = initialValue
def getAndSet(update: Boolean): Boolean = {
val current = ref
ref = update
current
}
def compareAndSet(expect: Boolean, update: Boolean): Boolean = {
if (ref == expect) {
ref = update
true
} else
false
}
def set(update: Boolean): Unit = ref = update
def get(): Boolean = ref
}
/** @define createDesc Constructs an [[AtomicBoolean]] reference, allowing
* for fine-tuning of the created instance.
*
* A [[PaddingStrategy]] can be provided in order to counter
* the "false sharing" problem.
*
* Note that for ''Scala.js'' we aren't applying any padding,
* as it doesn't make much sense, since Javascript execution
* is single threaded, but this builder is provided for
* syntax compatibility anyway across the JVM and Javascript
* and we never know how Javascript engines will evolve.
*/
object AtomicBoolean {
/** Builds an [[AtomicBoolean]] reference.
*
* @param initialValue is the initial value with which to initialize the atomic
*/
def apply(initialValue: Boolean): AtomicBoolean =
new AtomicBoolean(initialValue)
/** $createDesc
*
* @param initialValue is the initial value with which to initialize the atomic
* @param padding is the [[PaddingStrategy]] to apply
*/
def withPadding(initialValue: Boolean, padding: PaddingStrategy): AtomicBoolean =
new AtomicBoolean(initialValue)
/** $createDesc
*
* On top of Java 8, this builder also allows turning off the
* Java 8 intrinsics, thus forcing usage of CAS-loops for
* `getAndSet` and for `getAndAdd`.
*
* @param initialValue is the initial value with which to initialize the atomic
* @param padding is the [[PaddingStrategy]] to apply
* @param allowPlatformIntrinsics is a boolean parameter that specifies whether
* the instance is allowed to use the Java 8 optimized operations
* for `getAndSet` and for `getAndAdd`
*/
def create(initialValue: Boolean, padding: PaddingStrategy, allowPlatformIntrinsics: Boolean): AtomicBoolean =
new AtomicBoolean(initialValue)
/** $createDesc
*
* This builder guarantees to construct a safe atomic reference that
* does not make use of `sun.misc.Unsafe`. On top of platforms that
* don't support it, notably some versions of Android or on top of
* the upcoming Java 9, this might be desirable.
*
* NOTE that explicit usage of this builder is not usually necessary
* because [[create]] can auto-detect whether the underlying platform
* supports `sun.misc.Unsafe` and if it does, then its usage is
* recommended, because the "safe" atomic instances have overhead.
*
* @param initialValue is the initial value with which to initialize the atomic
* @param padding is the [[PaddingStrategy]] to apply
*/
def safe(initialValue: Boolean, padding: PaddingStrategy): AtomicBoolean =
new AtomicBoolean(initialValue)
}
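// Minimal usage sketch (illustrative, not from the Monix sources): a one-shot
// guard built on compareAndSet. On this Scala.js backend the padding and
// intrinsics parameters of the builders above are deliberately ignored, so
// plain apply() is sufficient.
object AtomicBooleanExample {
  private val started = AtomicBoolean(false)
  /** Runs `init` exactly once across repeated calls; true for the caller that ran it. */
  def startOnce(init: () => Unit): Boolean =
    if (started.compareAndSet(expect = false, update = true)) { init(); true }
    else false
}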
| monixio/monix | monix-execution/js/src/main/scala/monix/execution/atomic/AtomicBoolean.scala | Scala | apache-2.0 | 4,020 |
package org.bitcoins.spvnode.bloom
import org.bitcoins.core.crypto.{DoubleSha256Digest, HashDigest, Sha256Hash160Digest}
import org.bitcoins.core.number.{UInt32, UInt64}
import org.bitcoins.core.protocol.script.{MultiSignatureScriptPubKey, P2PKHScriptPubKey, P2PKScriptPubKey, ScriptPubKey}
import org.bitcoins.core.protocol.transaction.{Transaction, TransactionOutPoint}
import org.bitcoins.core.protocol.{CompactSizeUInt, NetworkElement}
import org.bitcoins.core.script.constant.{ScriptConstant, ScriptToken}
import org.bitcoins.core.util.{BitcoinSLogger, BitcoinSUtil, Factory, NumberUtil}
import org.bitcoins.spvnode.serializers.messages.control.RawBloomFilterSerializer
import scala.annotation.tailrec
import scala.util.hashing.MurmurHash3
/**
* Created by chris on 8/2/16.
* Implements a bloom filter that abides by the semantics of BIP37
* [[https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki]]
* [[https://github.com/bitcoin/bitcoin/blob/master/src/bloom.h]]
*/
sealed trait BloomFilter extends NetworkElement with BitcoinSLogger {
/** How large the bloom filter is, in bytes */
def filterSize: CompactSizeUInt
/** The bits that are set inside of the bloom filter */
def data: Seq[Byte]
/** The number of hash functions used in the bloom filter */
def hashFuncs: UInt32
/** An arbitrary value to add to the seed value in the hash function used by the bloom filter. */
def tweak: UInt32
/** A set of flags that control how outpoints corresponding to a matched pubkey script are added to the filter.
* See the 'Comparing Transaction Elements to a Bloom Filter' section in this link
* [[https://bitcoin.org/en/developer-reference#filterload]]
*/
def flags: BloomFlag
/** Inserts a sequence of bytes into the [[BloomFilter]] */
def insert(bytes: Seq[Byte]): BloomFilter = {
//these are the bit indexes that need to be set inside of data
val bitIndexes = (0 until hashFuncs.toInt).map(i => murmurHash(i,bytes))
logger.debug("Bitindexes that need to be set: " + bitIndexes)
@tailrec
def loop(remainingBitIndexes: Seq[Int], accum: Seq[Byte]): Seq[Byte] = {
if (remainingBitIndexes.isEmpty) accum
else {
val currentIndex = remainingBitIndexes.head
//since we are dealing with a bit vector, this gets the byteIndex we need to set
//the bit inside of.
val byteIndex = currentIndex >>> 3
//we need to calculate the bitIndex we need to set inside of our byte
val bitIndex = (1 << (7 & currentIndex)).toByte
val byte = accum(byteIndex)
val setBitByte: Byte = (byte | bitIndex ).toByte
//replace old byte with new byte with bit set
val newAccum: Seq[Byte] = accum.updated(byteIndex,setBitByte)
loop(remainingBitIndexes.tail,newAccum)
}
}
val newData = loop(bitIndexes,data)
BloomFilter(filterSize,newData,hashFuncs,tweak,flags)
}
/** Inserts a [[HashDigest]] into [[data]] */
def insert(hash: HashDigest): BloomFilter = insert(hash.bytes)
/** Inserts a sequence of [[HashDigest]]s into our BloomFilter */
def insertHashes(hashes: Seq[HashDigest]): BloomFilter = {
val byteVectors = hashes.map(_.bytes)
insertByteVectors(byteVectors)
}
/** Inserts a [[TransactionOutPoint]] into [[data]] */
def insert(outPoint: TransactionOutPoint): BloomFilter = insert(outPoint.bytes)
/** Checks if [[data]] contains the given sequence of bytes */
def contains(bytes: Seq[Byte]): Boolean = {
val bitIndexes = (0 until hashFuncs.toInt).map(i => murmurHash(i,bytes))
@tailrec
def loop(remainingBitIndexes: Seq[Int], accum: Seq[Boolean]): Boolean = {
if (remainingBitIndexes.isEmpty) !accum.exists(_ == false)
else {
val currentIndex = remainingBitIndexes.head
val byteIndex = currentIndex >>> 3
val bitIndex = (1 << (7 & currentIndex)).toByte
val byte = data(byteIndex)
val isBitSet = (byte & bitIndex) != 0
loop(remainingBitIndexes.tail, isBitSet +: accum)
}
}
loop(bitIndexes,Nil)
}
/** Checks if [[data]] contains a [[DoubleSha256Digest]] */
def contains(hash: DoubleSha256Digest): Boolean = contains(hash.bytes)
/** Checks if [[data]] contains a [[TransactionOutPoint]] */
def contains(outPoint: TransactionOutPoint): Boolean = contains(outPoint.bytes)
/** Checks if [[data]] contains a [[Sha256Hash160Digest]] */
def contains(hash: Sha256Hash160Digest): Boolean = contains(hash.bytes)
/** Checks if the transaction's txid, or any of the constants in its scriptPubKeys/scriptSigs, match our BloomFilter
* See BIP37 for exact details on what is relevant to a bloom filter and what is not relevant
* [[https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki#filter-matching-algorithm]]
* */
def isRelevant(transaction: Transaction): Boolean = {
val scriptPubKeys = transaction.outputs.map(_.scriptPubKey)
//pull out all of the constants in the scriptPubKey's
val constantsWithOutputIndex = scriptPubKeys.zipWithIndex.flatMap { case (scriptPubKey, index) =>
val constants = scriptPubKey.asm.filter(_.isInstanceOf[ScriptConstant])
constants.map(c => (c,index))
}
//check if the bloom filter contains any of the script constants in our outputs
val constantsOutput = constantsWithOutputIndex.filter {
case (c,index) => contains(c.bytes)
}
val scriptSigs = transaction.inputs.map(_.scriptSignature)
val constantsWithInputIndex = scriptSigs.zipWithIndex.flatMap { case (scriptSig, index) =>
val constants = scriptSig.asm.filter(_.isInstanceOf[ScriptConstant])
constants.map(c => (c,index))
}
//check if the filter contains any of the prevouts in this tx
val containsOutPoint = transaction.inputs.filter(i => contains(i.previousOutput))
//check if the bloom filter contains any of the script constants in our inputs
val constantsInput = constantsWithInputIndex.filter {
case (c, index) =>
logger.debug("Checking input constant: " + c)
contains(c.bytes)
}
constantsOutput.nonEmpty || constantsInput.nonEmpty ||
containsOutPoint.nonEmpty || contains(transaction.txId)
}
/** Updates this bloom filter to contain the relevant information for the given Transaction
* See BIP37 for the exact details on what parts of a transaction is added to the bloom filter
* [[https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki#filter-matching-algorithm]]
* */
def update(transaction: Transaction): BloomFilter = flags match {
case BloomUpdateAll =>
val scriptPubKeys = transaction.outputs.map(_.scriptPubKey)
//a sequence of outPoints that need to be inserted into the filter
val outPoints: Seq[TransactionOutPoint] = scriptPubKeys.zipWithIndex.flatMap {
case (scriptPubKey,index) =>
//constants that matched inside of our current filter
val constants = scriptPubKey.asm.filter(c => c.isInstanceOf[ScriptConstant] && contains(c.bytes))
//we need to create a new outpoint in the filter if a constant in the scriptPubKey matched
constants.map(c => TransactionOutPoint(transaction.txId,UInt32(index)))
}
logger.debug("Inserting outPoints: " + outPoints)
val outPointsBytes = outPoints.map(_.bytes)
val filterWithOutPoints = insertByteVectors(outPointsBytes)
//add txid
val filterWithTxIdAndOutPoints = filterWithOutPoints.insert(transaction.txId)
filterWithTxIdAndOutPoints
case BloomUpdateNone =>
logger.warn("You are attempting to update a bloom filter when the flag is set to BloomUpdateNone, " +
"no information will be added to the bloom filter, specifically this transaction: " + transaction)
this
case BloomUpdateP2PKOnly =>
//update the filter with the outpoint if the filter matches any of the constants in a p2pkh or multisig script pubkey
val scriptPubKeysWithIndex = transaction.outputs.map(_.scriptPubKey).zipWithIndex
updateP2PKOnly(scriptPubKeysWithIndex,transaction.txId)
}
/** Updates a bloom filter according to the rules specified by the [[BloomUpdateP2PKOnly]] flag
* See BIP37 for the exact rules on updating a bloom filter with this flag set
* [[https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki#filter-matching-algorithm]]
* */
def updateP2PKOnly(scriptPubKeysWithIndex: Seq[(ScriptPubKey,Int)],txId: DoubleSha256Digest): BloomFilter = {
logger.debug("Updating bloom filter with " + BloomUpdateP2PKOnly)
logger.debug("ScriptPubKeys: " + scriptPubKeysWithIndex)
@tailrec
def loop(constantsWithIndex: Seq[(ScriptToken,Int)], accumFilter: BloomFilter): BloomFilter = constantsWithIndex match {
case h :: t if (accumFilter.contains(h._1.bytes)) =>
logger.debug("Found constant in bloom filter: " + h._1.hex)
val filter = accumFilter.insert(TransactionOutPoint(txId,UInt32(h._2)))
loop(t, filter)
case h :: t => loop(t,accumFilter)
case Nil => accumFilter
}
val p2pkOrMultiSigScriptPubKeys: Seq[(ScriptPubKey,Int)] = scriptPubKeysWithIndex.filter {
case (s,index) => s.isInstanceOf[P2PKScriptPubKey] ||
s.isInstanceOf[MultiSignatureScriptPubKey]
}
//gets rid of all asm operations in the scriptPubKey except for the constants
val scriptConstantsWithOutputIndex: Seq[(ScriptToken,Int)] = p2pkOrMultiSigScriptPubKeys.flatMap { case (scriptPubKey,index) =>
(scriptPubKey.asm.map(token => (token,index))).filter {
case (token,index) => token.isInstanceOf[ScriptConstant]
}
}
loop(scriptConstantsWithOutputIndex,this)
}
/**
* Performs the [[MurmurHash3]] on the given hash
*
* @param hashNum the nth hash function we are using
* @param bytes the bytes of the data that needs to be inserted into the [[BloomFilter]]
* @return the index of the bit inside of [[data]] that needs to be set to 1
*/
private def murmurHash(hashNum: Int, bytes: Seq[Byte]): Int = {
//TODO: The call of .toInt is probably the source of a bug here, need to come back and look at this
//since this isn't consensus critical though I'm leaving this for now
val seed = (hashNum * murmurConstant.underlying + tweak.underlying).toInt
val murmurHash = MurmurHash3.bytesHash(bytes.toArray, seed)
val uint32 = UInt32(BitcoinSUtil.encodeHex(murmurHash))
val modded = uint32.underlying % (filterSize.num.toInt * 8)
modded.toInt
}
/** See BIP37 to see where this number comes from
* [[https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki#bloom-filter-format]] */
private def murmurConstant = UInt32("fba4c795")
/** Adds a sequence of byte vectors to our bloom filter, then returns the new filter */
def insertByteVectors(bytes: Seq[Seq[Byte]]): BloomFilter = {
@tailrec
def loop(remainingByteVectors: Seq[Seq[Byte]], accumBloomFilter: BloomFilter): BloomFilter = {
if (remainingByteVectors.isEmpty) accumBloomFilter
else loop(remainingByteVectors.tail,accumBloomFilter.insert(remainingByteVectors.head))
}
loop(bytes,this)
}
override def hex = RawBloomFilterSerializer.write(this)
}
object BloomFilter extends Factory[BloomFilter] {
private case class BloomFilterImpl(filterSize: CompactSizeUInt, data: Seq[Byte], hashFuncs : UInt32,
tweak: UInt32, flags: BloomFlag) extends BloomFilter
/** Max bloom filter size as per [[https://bitcoin.org/en/developer-reference#filterload]] */
val maxSize = UInt32(36000)
/** Max hashFunc size as per [[https://bitcoin.org/en/developer-reference#filterload]] */
val maxHashFuncs = UInt32(50)
/**
* Creates a bloom filter based on the number of elements to be inserted into the filter
* and the desired false positive rate
* [[https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki#bloom-filter-format]]
* @param numElements
* @param falsePositiveRate
* @param tweak
* @param flags
* @return
*/
def apply(numElements: Int, falsePositiveRate: Double, tweak: UInt32, flags: BloomFlag): BloomFilter = {
import scala.math._
//m = number of bits in the array
//n = number of elements in the array
//from https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki#bloom-filter-format
val optimalFilterSize : Double = (-1 / pow(log(2),2) * numElements * log(falsePositiveRate)) / 8
logger.debug("optimalFilterSize " + optimalFilterSize)
//BIP37 places limitations on the filter size, namely it cannot be > 36,000 bytes
val actualFilterSize: Int = max(1,min(optimalFilterSize, maxSize.underlying * 8)).toInt
logger.debug("actualFilterSize: " + actualFilterSize)
val optimalHashFuncs: Double = (actualFilterSize * 8 / numElements * log(2))
//BIP37 places a limit on the amount of hashFuncs we can use, which is 50
val actualHashFuncs: Int = max(1,min(optimalHashFuncs, maxHashFuncs.underlying)).toInt
val emptyByteArray = Seq.fill(actualFilterSize)(0.toByte)
BloomFilter(CompactSizeUInt(UInt64(actualFilterSize)), emptyByteArray, UInt32(actualHashFuncs), tweak, flags)
}
def apply(filterSize: CompactSizeUInt, data: Seq[Byte], hashFuncs: UInt32, tweak: UInt32, flags: BloomFlag): BloomFilter = {
BloomFilterImpl(filterSize, data, hashFuncs, tweak, flags)
}
override def fromBytes(bytes: Seq[Byte]): BloomFilter = RawBloomFilterSerializer.read(bytes)
}
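// Usage sketch (illustrative, not part of the original file). Sizing follows
// the BIP37 formulas in apply() above: for numElements = 100 at a 1% false
// positive rate, -100 * ln(0.01) / (ln 2)^2 ≈ 958 bits ≈ 120 bytes, and about
// 6 hash functions after the clamping and integer truncation above.
object BloomFilterExample {
  def demo(txId: DoubleSha256Digest): Unit = {
    val filter = BloomFilter(100, 0.01, UInt32(0), BloomUpdateAll)
    val updated = filter.insert(txId)
    require(updated.contains(txId)) // an inserted element always matches
    // an element that was never inserted matches only with ~1% probability
  }
}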
| bitcoin-s/bitcoin-s-spv-node | src/main/scala/org/bitcoins/spvnode/bloom/BloomFilter.scala | Scala | mit | 13,556 |
package deebee
package sql
package ast
/**
* Trait for a node in a SQL abstract syntax tree.
*
* @author Hawk Weisman
*
* Created by hawk on 11/21/14.
*/
trait Node {
/**
* Re-emit the SQL statement(s) corresponding to this node
* @return
*/
def emitSQL: String
override def toString = emitSQL
}
| hawkw/deebee | src/main/scala/deebee/sql/ast/Node.scala | Scala | mit | 317 |
package org.jetbrains.plugins.scala
package codeInsight
package intention
package types
import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction
import com.intellij.openapi.command.undo.UndoUtil
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScParameterizedTypeElement, ScParenthesisedTypeElement, ScTypeArgs}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
/** Converts type element `@@[A, B]` to `(A @@ B)` */
class ConvertToInfixIntention extends PsiElementBaseIntentionAction {
def getFamilyName = "Use Infix Type Syntax"
override def getText = getFamilyName
def isAvailable(project: Project, editor: Editor, element: PsiElement) = {
element match {
case Parent(Both(ref: ScStableCodeReferenceElement, Parent(Parent(param: ScParameterizedTypeElement))))
if param.typeArgList.typeArgs.size == 2 && !ref.refName.forall(_.isLetterOrDigit) => true
case _ => false
}
}
override def invoke(project: Project, editor: Editor, element: PsiElement) {
if (element == null || !element.isValid) return
val paramTypeElement: ScParameterizedTypeElement = PsiTreeUtil.getParentOfType(element, classOf[ScParameterizedTypeElement], false)
val Seq(targ1, targ2) = paramTypeElement.typeArgList.typeArgs
val needParens = paramTypeElement.getParent match {
case _: ScTypeArgs | _: ScParenthesisedTypeElement => false
case _ => true
}
val newTypeText = Seq(targ1, paramTypeElement.typeElement, targ2).map(_.getText).mkString(" ").parenthesisedIf(needParens)
val newTypeElement = ScalaPsiElementFactory.createTypeElementFromText(newTypeText, element.getManager)
if (paramTypeElement.isValid) {
val replaced = try {
paramTypeElement.replace(newTypeElement)
} catch {
case npe: NullPointerException =>
throw new RuntimeException("Unable to replace: %s with %s".format(paramTypeElement, newTypeText), npe)
}
UndoUtil.markPsiFileForUndo(replaced.getContainingFile)
}
}
}
| katejim/intellij-scala | src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertToInfixIntention.scala | Scala | apache-2.0 | 2,374 |
package io.github.loustler.abstractInternalControl
/**
* @author loustler
* @since 07/21/2017 21:03
*/
object ClientSimpleCode {
def containsNeg(nums: List[Int]): Boolean = {
for (num <- nums)
if (num < 0)
return true
false
}
def containsNeg2(nums: List[Int]) = nums.exists(_ < 0)
def containsOdd(nums: List[Int]): Boolean = {
for (num <- nums)
if (num % 2 == 1)
return true
false
}
def containsOdd2(nums: List[Int]) = nums.exists(_ % 2 == 1)
}
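// Quick demonstration (illustrative): the imperative and exists-based variants
// agree. One caveat worth noting: in Scala, -3 % 2 == -1, so the `% 2 == 1`
// odd-check in both variants above misses negative odd numbers.
object ClientSimpleCodeDemo extends App {
  println(ClientSimpleCode.containsNeg(List(1, 2, -3)))  // true
  println(ClientSimpleCode.containsNeg2(List(1, 2, 3)))  // false
  println(ClientSimpleCode.containsOdd(List(2, 4, 5)))   // true
  println(ClientSimpleCode.containsOdd2(List(2, 4, 6)))  // false
}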
| loustler/sKaLa | src/main/scala/io/github/loustler/abstractInternalControl/ClientSimpleCode.scala | Scala | apache-2.0 | 513 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.jdbc.connection
import java.sql.Driver
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
private[jdbc] class MariaDBConnectionProvider(driver: Driver, options: JDBCOptions)
extends SecureConnectionProvider(driver, options) {
override val appEntry: String = {
"Krb5ConnectorContext"
}
override def setAuthenticationConfigIfNeeded(): Unit = SecurityConfigurationLock.synchronized {
val (parent, configEntry) = getConfigWithAppEntry()
/**
* Couple of things to mention here (v2.5.4 client):
* 1. MariaDB doesn't support JAAS application name configuration
* 2. MariaDB sets a default JAAS config if "java.security.auth.login.config" is not set
*/
val entryUsesKeytab = configEntry != null &&
configEntry.exists(_.getOptions().get("useKeyTab") == "true")
if (configEntry == null || configEntry.isEmpty || !entryUsesKeytab) {
setAuthenticationConfig(parent)
}
}
}
private[sql] object MariaDBConnectionProvider {
val driverClass = "org.mariadb.jdbc.Driver"
}
| dbtsai/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/MariaDBConnectionProvider.scala | Scala | apache-2.0 | 1,896 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.h2o.ui
import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent}
import org.apache.spark.status.{ElementTrackingStore, LiveEntity}
/**
* Listener processing Sparkling Water events
*/
class AppStatusListener(conf: SparkConf, store: ElementTrackingStore, live: Boolean)
extends SparkListener
with Logging {
private def onSparklingWaterStart(event: H2OContextStartedEvent): Unit = {
val H2OContextStartedEvent(h2oClusterInfo, h2oBuildInfo, swProperties) = event
val now = System.nanoTime()
new SparklingWaterInfo(h2oClusterInfo, h2oBuildInfo, swProperties).write(store, now)
}
private def onSparklingWaterUpdate(event: SparklingWaterHeartbeatEvent): Unit = {
val SparklingWaterHeartbeatEvent(cloudHealthy, timeInMillis, memoryInfo) = event
val now = System.nanoTime()
new SparklingWaterUpdate(cloudHealthy, timeInMillis, memoryInfo).write(store, now)
}
override def onOtherEvent(event: SparkListenerEvent): Unit = event match {
case e: H2OContextStartedEvent => onSparklingWaterStart(e)
case e: SparklingWaterHeartbeatEvent => onSparklingWaterUpdate(e)
case _ => // ignore
}
private class SparklingWaterInfo(
h2oClusterInfo: H2OClusterInfo,
h2oBuildInfo: H2OBuildInfo,
swProperties: Array[(String, String, String)])
extends LiveEntity {
override protected def doUpdate(): Any = {
new SparklingWaterStartedInfo(h2oClusterInfo, h2oBuildInfo, swProperties)
}
}
private class SparklingWaterUpdate(cloudHealthy: Boolean, timeInMillis: Long, val memoryInfo: Array[(String, String)])
extends LiveEntity {
override protected def doUpdate(): Any = {
new SparklingWaterUpdateInfo(cloudHealthy, timeInMillis, memoryInfo)
}
}
}
| h2oai/sparkling-water | core/src/main/scala_spark_others/org/apache/spark/h2o/ui/AppStatusListener.scala | Scala | apache-2.0 | 2,653 |
package w.tool
import scala.io.Source
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import java.io.BufferedWriter
import java.io.FileWriter
import java.nio.file.Files
import java.nio.file.Paths
import w.util.str.RandomStr
case class Obfuscation(
symbolFile: String,
macroFile: String,
mapFile: String) {
private def getSymbolList(): Seq[String] = {
println("get symbol list")
Source.fromFile(symbolFile, "utf-8").getLines.toList.filter { line =>
println(s"read line $line")
line != "" && !line.startsWith("#")
}
}
private def hasDuplicateElement[T](list: Seq[T]): Boolean = {
list.headOption.map { elem =>
for (otherElem <- list.tail) {
if (elem == otherElem) {
println("duplicated element! : ${elem)")
return true
}
}
hasDuplicateElement(list.tail)
}.getOrElse {
println("has no duplicated element")
false
}
}
def makeMacroFile: Seq[(String, String)] = {
val symbolMap = getSymbolList()
.map { symbol =>
val randomName = RandomStr.nextStr(5)
println(s"randomName = ${randomName}")
(symbol, randomName)
}
val randomNameList = symbolMap.map { case (symbol, randomName) => randomName }
if (!hasDuplicateElement[String](randomNameList)) {
val symbolMapText = symbolMap.foldLeft("") {
case (acc, (symbol, randomName)) =>
acc + s"${symbol} \t ${randomName} \n"
}
Future {
var buf: BufferedWriter = null
try {
buf = new BufferedWriter(new FileWriter(mapFile))
buf.write(symbolMapText)
} finally {
buf.close()
}
}
val symbolMacroText = symbolMap
.foldLeft("") {
case (acc, (symbol, randomName)) =>
val macroStr = s"""
#ifndef ${symbol}
#define ${symbol} ${randomName}
#endif
"""
acc + macroStr
}
Files.deleteIfExists(Paths.get(macroFile))
Future {
var buf: BufferedWriter = null
try {
buf = new BufferedWriter(new FileWriter(macroFile))
buf.write(symbolMacroText)
} finally {
buf.close()
}
}
}
symbolMap.flatMap {
case (symbol, randomName) =>
Seq((symbol + ".h", randomName + ".h"), (symbol + ".m", randomName + ".m"))
}
}
}
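// Hypothetical invocation sketch (the file names are placeholders, not from
// the original repo): reads symbols from symbols.txt, writes the symbol ->
// random-name map to map.txt and the #define block to macro.h, and returns
// the renamed .h/.m file pairs. Note the writes above run on Futures, so a
// real caller should await them (or keep the JVM alive) before exiting.
object ObfuscationExample extends App {
  val renames = Obfuscation("symbols.txt", "macro.h", "map.txt").makeMacroFile
  renames.foreach { case (from, to) => println(s"$from -> $to") }
}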
| wuv1982/ObjGuard | src/main/scala/w/tool/Obfuscation.scala | Scala | mit | 2,354 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.common
import java.util.{ArrayDeque, ArrayList, Collection, Collections, HashMap, Iterator}
import java.util.Map.Entry
import kafka.utils.ShutdownableThread
import org.apache.kafka.clients.{ClientRequest, ClientResponse, NetworkClient, RequestCompletionHandler}
import org.apache.kafka.common.Node
import org.apache.kafka.common.internals.FatalExitError
import org.apache.kafka.common.requests.AbstractRequest
import org.apache.kafka.common.utils.Time
import scala.collection.JavaConverters._
/**
* Class for an inter-broker send thread that utilizes a non-blocking network client.
*/
abstract class InterBrokerSendThread(name: String,
networkClient: NetworkClient,
time: Time,
isInterruptible: Boolean = true)
extends ShutdownableThread(name, isInterruptible) {
def generateRequests(): Iterable[RequestAndCompletionHandler]
def unsentExpiryMs: Int
private val unsentRequests = new UnsentRequests
def hasUnsentRequests = unsentRequests.iterator().hasNext
override def shutdown(): Unit = {
initiateShutdown()
// wake up the thread in case it is blocked inside poll
networkClient.wakeup()
awaitShutdown()
}
override def doWork() {
var now = time.milliseconds()
generateRequests().foreach { request =>
val completionHandler = request.handler
unsentRequests.put(request.destination,
networkClient.newClientRequest(request.destination.idString, request.request, now, true, completionHandler))
}
try {
val timeout = sendRequests(now)
networkClient.poll(timeout, now)
now = time.milliseconds()
checkDisconnects(now)
failExpiredRequests(now)
unsentRequests.clean()
} catch {
case e: FatalExitError => throw e
case t: Throwable =>
error(s"unhandled exception caught in InterBrokerSendThread", t)
// rethrow any unhandled exceptions as FatalExitError so the JVM will be terminated
// as we will be in an unknown state with potentially some requests dropped and not
// being able to make progress. Known and expected Errors should have been appropriately
// dealt with already.
throw new FatalExitError()
}
}
private def sendRequests(now: Long): Long = {
var pollTimeout = Long.MaxValue
for (node <- unsentRequests.nodes.asScala) {
val requestIterator = unsentRequests.requestIterator(node)
while (requestIterator.hasNext) {
val request = requestIterator.next
if (networkClient.ready(node, now)) {
networkClient.send(request, now)
requestIterator.remove()
} else
pollTimeout = Math.min(pollTimeout, networkClient.connectionDelay(node, now))
}
}
pollTimeout
}
private def checkDisconnects(now: Long): Unit = {
// any disconnects affecting requests that have already been transmitted will be handled
// by NetworkClient, so we just need to check whether connections for any of the unsent
// requests have been disconnected; if they have, then we complete the corresponding future
// and set the disconnect flag in the ClientResponse
val iterator = unsentRequests.iterator()
while (iterator.hasNext) {
val entry = iterator.next
val (node, requests) = (entry.getKey, entry.getValue)
if (!requests.isEmpty && networkClient.connectionFailed(node)) {
iterator.remove()
for (request <- requests.asScala) {
if (networkClient.authenticationException(node) != null)
error(s"Failed to send the following request due to authentication error: $request")
completeWithDisconnect(request, now)
}
}
}
}
private def failExpiredRequests(now: Long): Unit = {
// clear all expired unsent requests
val expiredRequests = unsentRequests.removeExpiredRequests(now, unsentExpiryMs)
for (request <- expiredRequests.asScala) {
debug(s"Failed to send the following request after $unsentExpiryMs ms: $request")
completeWithDisconnect(request, now)
}
}
def completeWithDisconnect(request: ClientRequest, now: Long): Unit = {
val handler = request.callback
handler.onComplete(new ClientResponse(request.makeHeader(request.requestBuilder().latestAllowedVersion()),
handler, request.destination, now /* createdTimeMs */ , now /* receivedTimeMs */ , true /* disconnected */ ,
null /* versionMismatch */ , null /* responseBody */))
}
def wakeup(): Unit = networkClient.wakeup()
}
case class RequestAndCompletionHandler(destination: Node, request: AbstractRequest.Builder[_ <: AbstractRequest],
handler: RequestCompletionHandler)
private class UnsentRequests {
private val unsent = new HashMap[Node, ArrayDeque[ClientRequest]]
def put(node: Node, request: ClientRequest): Unit = {
var requests = unsent.get(node)
if (requests == null) {
requests = new ArrayDeque[ClientRequest]
unsent.put(node, requests)
}
requests.add(request)
}
def removeExpiredRequests(now: Long, unsentExpiryMs: Long): Collection[ClientRequest] = {
val expiredRequests = new ArrayList[ClientRequest]
for (requests <- unsent.values.asScala) {
val requestIterator = requests.iterator
var foundExpiredRequest = false
while (requestIterator.hasNext && !foundExpiredRequest) {
val request = requestIterator.next
if (request.createdTimeMs < now - unsentExpiryMs) {
expiredRequests.add(request)
requestIterator.remove()
foundExpiredRequest = true
}
}
}
expiredRequests
}
def clean(): Unit = {
val iterator = unsent.values.iterator
while (iterator.hasNext) {
val requests = iterator.next
if (requests.isEmpty)
iterator.remove()
}
}
def iterator(): Iterator[Entry[Node, ArrayDeque[ClientRequest]]] = {
unsent.entrySet().iterator()
}
def requestIterator(node: Node): Iterator[ClientRequest] = {
val requests = unsent.get(node)
if (requests == null)
Collections.emptyIterator[ClientRequest]
else
requests.iterator
}
def nodes = unsent.keySet
}
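// Sketch of a concrete subclass (illustrative, not from the Kafka sources):
// generateRequests() is polled by doWork() above, and each
// RequestAndCompletionHandler pairs a destination broker with a request
// builder and a callback fired on completion or disconnect. The queue and
// broker here are placeholder wiring.
private class ExampleSendThread(client: NetworkClient,
                                time: Time,
                                broker: Node,
                                pending: java.util.Queue[AbstractRequest.Builder[_ <: AbstractRequest]])
  extends InterBrokerSendThread("example-send-thread", client, time) {
  override def unsentExpiryMs: Int = 30000
  override def generateRequests(): Iterable[RequestAndCompletionHandler] =
    Option(pending.poll()).map { builder =>
      RequestAndCompletionHandler(broker, builder, new RequestCompletionHandler {
        override def onComplete(response: ClientResponse): Unit =
          if (response.wasDisconnected())
            error(s"request to $broker completed with disconnect")
      })
    }.toList
}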
| sebadiaz/kafka | core/src/main/scala/kafka/common/InterBrokerSendThread.scala | Scala | apache-2.0 | 7,083 |
package io.iohk.ethereum.network.handshaker
import io.iohk.ethereum.network.EtcPeerManagerActor.{PeerInfo, RemoteStatus}
import io.iohk.ethereum.network.ForkResolver
import io.iohk.ethereum.network.handshaker.Handshaker.NextMessage
import io.iohk.ethereum.network.p2p.messages.PV62.{BlockHeaders, GetBlockHeaders}
import io.iohk.ethereum.network.p2p.messages.WireProtocol.Disconnect
import io.iohk.ethereum.network.p2p.{Message, MessageSerializable}
import io.iohk.ethereum.utils.Logger
case class EtcForkBlockExchangeState(
handshakerConfiguration: EtcHandshakerConfiguration,
forkResolver: ForkResolver,
remoteStatus: RemoteStatus
) extends InProgressState[PeerInfo]
with Logger {
import handshakerConfiguration._
def nextMessage: NextMessage =
NextMessage(
messageToSend = GetBlockHeaders(Left(forkResolver.forkBlockNumber), maxHeaders = 1, skip = 0, reverse = false),
timeout = peerConfiguration.waitForChainCheckTimeout
)
def applyResponseMessage: PartialFunction[Message, HandshakerState[PeerInfo]] = { case BlockHeaders(blockHeaders) =>
val forkBlockHeaderOpt = blockHeaders.find(_.number == forkResolver.forkBlockNumber)
forkBlockHeaderOpt match {
case Some(forkBlockHeader) =>
val fork = forkResolver.recognizeFork(forkBlockHeader)
log.debug("Peer is running the {} fork", fork)
if (forkResolver.isAccepted(fork)) {
log.debug("Fork is accepted")
//setting maxBlockNumber to 0, as we do not know best block number yet
ConnectedState(PeerInfo.withForkAccepted(remoteStatus))
} else {
log.debug("Fork is not accepted")
DisconnectedState[PeerInfo](Disconnect.Reasons.UselessPeer)
}
case None =>
log.debug("Peer did not respond with fork block header")
ConnectedState(PeerInfo.withNotForkAccepted(remoteStatus))
}
}
override def respondToRequest(receivedMessage: Message): Option[MessageSerializable] = receivedMessage match {
case GetBlockHeaders(Left(number), numHeaders, _, _) if number == forkResolver.forkBlockNumber && numHeaders == 1 =>
log.debug("Received request for fork block")
blockchain.getBlockHeaderByNumber(number) match {
case Some(header) => Some(BlockHeaders(Seq(header)))
case None => Some(BlockHeaders(Nil))
}
case _ => None
}
def processTimeout: HandshakerState[PeerInfo] =
DisconnectedState(Disconnect.Reasons.TimeoutOnReceivingAMessage)
}
| input-output-hk/etc-client | src/main/scala/io/iohk/ethereum/network/handshaker/EtcForkBlockExchangeState.scala | Scala | mit | 2,509 |
package org.rebeam.boxes.graph
import java.awt.Image
import org.rebeam.boxes.swing.ImageThirds
//Draw a horizontal component made up of a left, middle and right portion. Portions are
//taken from the thirds of an image, and middle is stretched horizontally to fit.
class GraphThreePartPainter(image: Image) {
val thirds = ImageThirds.horizontalImageThirds(image)
def paint(canvas: GraphCanvas, p: Vec2, s: Vec2) {
val middle = s.x - thirds.pieceWidth * 2
canvas.image(thirds.parts._1, p)
canvas.image(thirds.parts._3, p + Vec2(s.x - thirds.pieceWidth))
if (middle > 0) {
canvas.image(thirds.parts._2, p + Vec2(thirds.pieceWidth, 0), Vec2(middle, thirds.pieceHeight))
}
}
}
class GraphThreePartPainterVertical(image:Image) {
val thirds = ImageThirds.verticalImageThirds(image)
def paint(canvas: GraphCanvas, p: Vec2, s: Vec2) {
val middle = s.y - thirds.pieceHeight * 2
canvas.image(thirds.parts._1, p)
canvas.image(thirds.parts._3, p + Vec2(0, s.y - thirds.pieceHeight))
if (middle > 0) {
canvas.image(thirds.parts._2, p + Vec2(0, thirds.pieceHeight), Vec2(thirds.pieceWidth, middle))
}
}
}
| trepidacious/boxes-graph | src/main/scala/org/rebeam/boxes/graph/GraphThreePartPainter.scala | Scala | gpl-2.0 | 1,159 |
package be.objectify.deadbolt.scala.test.controllers.composed
import be.objectify.deadbolt.scala.test.controllers.{AbstractControllerSpec, CompositionBased}
import play.api.test.WithServer
/**
* @author Steve Chaloner (steve@objectify.be)
*/
class CompositeSpec extends AbstractControllerSpec with CompositionBased {
"The application" should {
"deny access if" >> {
"a subject is present but does not meet the pattern constraint" in new WithServer(app = testApp, port = 3333) {
await(ws(implicitApp).url(s"http://localhost:3333/$pathSegment/composite/subjectDoesNotHavePermission")
.addHttpHeaders(("x-deadbolt-test-user", "steve"))
.get()).status must equalTo(UNAUTHORIZED)
}
"the required role is held but the dynamic check fails" in new WithServer(app = testApp, port = 3333) {
await(ws(implicitApp).url(s"http://localhost:3333/$pathSegment/composite/roleButNotDynamic")
.addHttpHeaders(("x-deadbolt-test-user", "trippel"))
.get()).status must equalTo(UNAUTHORIZED)
}
"the dynamic check passes but the required role is not help" in new WithServer(app = testApp, port = 3333) {
await(ws(implicitApp).url(s"http://localhost:3333/$pathSegment/composite/noRoleButPassesDynamic")
.addHttpHeaders(("x-deadbolt-test-user", "lotte"))
.get()).status must equalTo(UNAUTHORIZED)
}
}
"allow access if" >> {
"a subject is present and meets the pattern constraint" in new WithServer(app = testApp, port = 3333) {
await(ws(implicitApp).url(s"http://localhost:3333/$pathSegment/composite/subjectDoesNotHavePermission")
.addHttpHeaders(("x-deadbolt-test-user", "greet"))
.get()).status must equalTo(OK)
}
"a subject is not present" in new WithServer(app = testApp, port = 3333) {
await(ws(implicitApp).url(s"http://localhost:3333/$pathSegment/composite/subjectDoesNotHavePermission")
.get()).status must equalTo(OK)
}
"the required role is held and the dynamic check passes" in new WithServer(app = testApp, port = 3333) {
await(ws(implicitApp).url(s"http://localhost:3333/$pathSegment/composite/hasRoleAndPassesDynamic")
.addHttpHeaders(("x-deadbolt-test-user", "trippel"))
.get()).status must equalTo(OK)
}
}
}
}
| schaloner/deadbolt-2-scala | test-app/test/be/objectify/deadbolt/scala/test/controllers/composed/CompositeSpec.scala | Scala | apache-2.0 | 2,407 |
package net.chwthewke.passman
package cli
import cats.instances.string._
import cats.syntax.either._
import cats.syntax.eq._
import java.nio.file.Path
import mouse.boolean._
import scala.xml.XML
import data.legacy._
import engine._
import model._
object PasswordEngine {
val engine = PasswordDerivationEngine.Default
def interactive(key: String, variant: Int, id: PasswordGeneratorId)(masterPassword: String): Either[String, String] =
for {
e <- engine.leftMap(_.message)
p <- e.derive1(PasswordRequest(key, masterPassword, variant), id)
.leftMap(_.message)
} yield p
def withFile(passwordFile: Path, key: String)(masterPassword: String): Either[String, String] = {
for {
e <- engine.leftMap(_.message)
passwords <- readPasswords(passwordFile)
password <- passwords.passwords.find(_.key === key).toRight(s"Key not found $key.")
request = PasswordRequest(key, masterPassword, password.variant)
id = password.generatorId
derived <- e.derive1(request, id).leftMap(_.message)
_ <- e.check(derived, id, password.hash).either("Incorrect master password.", ())
} yield derived
}
def readPasswords(f: Path): Either[String, Passwords] = {
Either
.catchNonFatal(XML.loadFile(f.toFile))
.leftMap(_.getMessage)
.flatMap(elem => LegacyXml(elem).describe.toEither)
}
}
| chwthewke/passman | passman-cli/src/main/scala/net/chwthewke/passman/cli/PasswordEngine.scala | Scala | bsd-3-clause | 1,400 |
/*
* Copyright 2017 WeightWatchers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.weightwatchers.reactive.kinesis.consumer
import java.util.concurrent.atomic.AtomicBoolean
import akka.actor.{ActorContext, ActorRef, ActorRefFactory, ActorSystem, Props}
import akka.util.Timeout
import com.amazonaws.services.kinesis.clientlibrary.config.KinesisClientLibConfigurator
import com.amazonaws.services.kinesis.clientlibrary.interfaces.v2.{
IRecordProcessor,
IRecordProcessorFactory
}
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{
KinesisClientLibConfiguration,
Worker
}
import com.typesafe.config.{Config, ConfigValueFactory}
import com.typesafe.scalalogging.LazyLogging
import com.weightwatchers.reactive.kinesis.consumer.CheckpointWorker.CheckpointerConf
import com.weightwatchers.reactive.kinesis.consumer.ConsumerWorker.{ConsumerWorkerConf, _}
import com.weightwatchers.reactive.kinesis.consumer.KinesisConsumer.ConsumerConf
import com.weightwatchers.reactive.kinesis.utils.TypesafeConfigExtensions
import scala.concurrent.Future
import scala.concurrent.duration.DurationDouble
import scala.util.{Failure, Success, Try}
object KinesisConsumer {
val KCL_CONFIGURATOR = new KinesisClientLibConfigurator()
/**
* Companion object for the [[ConsumerConf]].
*/
object ConsumerConf {
/**
* Given the `kinesisConfig`, builds a combined configuration by taking the `consumerName` specific configuration
* within, and using the `default-consumer` configuration as a fallback for all values.
*
* @param kinesisConfig The top level Kinesis Configuration, containing the specified consumer.
* @param consumerName The name of the consumer, which MUST be contained within the `kinesisConfig`
* @return A [[ConsumerConf]] case class used for constructing the [[KinesisConsumer]]
*/
def apply(kinesisConfig: Config, consumerName: String): ConsumerConf = {
import com.weightwatchers.reactive.kinesis.utils.TypesafeConfigExtensions._
val consumerConfig = kinesisConfig
.getConfig(consumerName)
.withFallback(kinesisConfig.getConfig("default-consumer"))
val streamName = consumerConfig.getString("stream-name")
require(!streamName.isEmpty,
"A valid stream name must be provided to start the Kinesis Producer")
//This represents the table name in dynamo - which MUST be unique per application AND stream
val applicationName = s"${kinesisConfig.getString("application-name")}-$streamName"
require(!applicationName.isEmpty,
"A valid application name must be provided to start the Kinesis Producer")
val dispatcher: Option[String] =
if (consumerConfig.getIsNull("akka.dispatcher"))
None
else {
val dispatcherProp = consumerConfig.getString("akka.dispatcher")
if (dispatcherProp.isEmpty)
None
else
Some(dispatcherProp)
}
// Load and modify the KCL config, adding the required properties.
// Whilst a little hacky, it keeps things consistent with the producer and makes the config structure more manageable.
val kclConfig = consumerConfig
.getConfig("kcl")
.withValue("streamName", ConfigValueFactory.fromAnyRef(streamName))
.withValue("applicationName", ConfigValueFactory.fromAnyRef(applicationName))
ConsumerConf(
KCL_CONFIGURATOR
.getConfiguration(kclConfig.toProperties), //Convert to java properties to make use of the AWS library
ConsumerWorkerConf(consumerConfig),
CheckpointerConf(consumerConfig),
dispatcher
)
}
}
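  // Minimal configuration sketch (illustrative; the key names mirror the
  // lookups in ConsumerConf.apply above, the values are placeholders). The
  // `some-consumer` block falls back to `default-consumer`, and the merged
  // `kcl` section is converted to Properties for the KCL configurator. The
  // worker/checkpointer settings read by ConsumerWorkerConf/CheckpointerConf
  // are elided here, so the final call is left commented out.
  object ConsumerConfSketch {
    import com.typesafe.config.ConfigFactory
    val kinesisConfig: Config = ConfigFactory.parseString(
      """application-name = my-app
        |default-consumer {
        |  stream-name = ""
        |  akka.dispatcher = null
        |  kcl { AWSCredentialsProvider = DefaultAWSCredentialsProviderChain }
        |}
        |some-consumer {
        |  stream-name = my-stream
        |}""".stripMargin)
    // val conf = ConsumerConf(kinesisConfig, "some-consumer")
  }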
/**
* The collection of configuration values required for constructing a consumer.
*
* @param kclConfiguration the AWS KCL Configuration.
* @param workerConf the configuration for the worker
* @param checkpointerConf the configuration for the checkpointer
* @param dispatcher an optional dispatcher to be used by this consumer
*/
final case class ConsumerConf(kclConfiguration: KinesisClientLibConfiguration,
workerConf: ConsumerWorkerConf,
checkpointerConf: CheckpointerConf,
dispatcher: Option[String] = None)
/**
* Creates an instance of the [[KinesisConsumer]] along with a ConsumerWorker
* which will be shared among shards.
*
* @param consumerConf The consumer specific configuration, containing all configuration required for this consumer instance.
* @param eventProcessor see ConsumerWorker.
*/
def apply(consumerConf: ConsumerConf,
eventProcessor: ActorRef,
context: ActorContext): KinesisConsumer = {
val workerProps = ConsumerWorker.props(eventProcessor,
consumerConf.workerConf,
consumerConf.checkpointerConf,
consumerConf.dispatcher)
//Specify the dispatcher according to the config
new KinesisConsumer(consumerConf,
consumerConf.dispatcher.fold(workerProps)(workerProps.withDispatcher),
context.system,
context)
}
/**
* Creates an instance of the [[KinesisConsumer]] along with a ConsumerWorker
* which will be shared among shards.
* - The eventProcessor MUST handle
* [[com.weightwatchers.reactive.kinesis.consumer.ConsumerWorker.ProcessEvent]] messages (for each message)
* - The eventProcessor MUST respond with [[com.weightwatchers.reactive.kinesis.consumer.ConsumerWorker.EventProcessed]] after
* processing of the [[com.weightwatchers.reactive.kinesis.consumer.ConsumerWorker.ProcessEvent]]
* - The eventProcessor may set `successful` to false to indicate the message can be skipped
* - The eventProcessor SHOULD handle [[com.weightwatchers.reactive.kinesis.consumer.ConsumerWorker.ConsumerWorkerFailure]]
* messages which signal a critical failure in the Consumer.
* - The eventProcessor SHOULD handle [[com.weightwatchers.reactive.kinesis.consumer.ConsumerWorker.ConsumerShutdown]]
* messages which signal a graceful shutdown of the Consumer.
*
* @param consumerConf The consumer specific configuration, containing all configuration required for this consumer instance.
* @param eventProcessor see ConsumerWorker for more information.
*/
def apply(consumerConf: ConsumerConf,
eventProcessor: ActorRef,
system: ActorSystem): KinesisConsumer = {
val workerProps = ConsumerWorker.props(eventProcessor,
consumerConf.workerConf,
consumerConf.checkpointerConf,
consumerConf.dispatcher)
//Specify the dispatcher according to the config
new KinesisConsumer(consumerConf,
consumerConf.dispatcher.fold(workerProps)(workerProps.withDispatcher),
system,
system)
}
}
/**
* A Kinesis consumer which wraps Amazon's KCL and performs reliable asynchronous checkpointing.
*
* NOTE: This should be created via the companion object which will create the worker.
*
* @param consumerConf The consumer specific configuration, containing all configuration required
* for this consumer instance.
* @param consumerWorkerProps the worker props for processing requests and handling all checkpointing.
* @param system This is required to lookup the dispatchers and create the actors.
* We specifically need a system for this purpose.
* @param context This will be used to create the actor hierarchy.
* So all Actors created will be children/grandchildren of this context.
* This can be the same value as the `system` but we don't want to force the user
* into using an ActorSystem vs ActorContext.
*
*/
class KinesisConsumer(consumerConf: ConsumerConf,
consumerWorkerProps: Props,
system: ActorSystem,
context: ActorRefFactory)
extends ConsumerService
with LazyLogging {
//The manager timeout needs to be just longer than the batch timeout * retries
val managerBatchTimeout = Timeout(
(consumerConf.workerConf.batchTimeout.toMillis
* (consumerConf.workerConf.failedMessageRetries + 1.25)).millis
)
val isShuttingDown = new AtomicBoolean(false)
implicit val dispatcher =
consumerConf.dispatcher.fold(system.dispatcher)(system.dispatchers.lookup)
private[consumer] val recordProcessorFactory: IRecordProcessorFactory =
new IRecordProcessorFactory {
/**
* Creates an instance of [[ConsumerProcessingManager]].
* Passing a newly created ConsumerWorker Actor specific to this shard (and manager).
*/
override def createProcessor(): IRecordProcessor = {
logger.debug(s"Creating ConsumerWorker: ${consumerWorkerProps.args}")
//TODO define supervisor
//multiply timeout to ensure worker always times out first
new ConsumerProcessingManager(
context.actorOf(consumerWorkerProps, s"consumer-worker-${java.util.UUID.randomUUID()}"),
kclWorker,
managerBatchTimeout,
consumerConf.workerConf.shutdownTimeout.duration
)
}
}
lazy val kclWorker: Worker = new Worker.Builder()
.recordProcessorFactory(recordProcessorFactory)
.config(consumerConf.kclConfiguration)
.build()
// Ensure the JVM will refresh the cached IP values of AWS resources (e.g. service endpoints).
java.security.Security.setProperty("networkaddress.cache.ttl", "60")
/**
* The Future returned is long running, completion of the future indicates we're no
* longer processing messages and should be handled accordingly by the callee.
*/
def start(): Future[Unit] = {
logger.info(s"""
|-----------------------------------------------------------------------
|-------------------- Initialising Kinesis Consumer --------------------
|-----------------------------------------------------------------------
|***** Running ${consumerConf.kclConfiguration.getApplicationName} *****
|***** Processing Stream: ${consumerConf.kclConfiguration.getStreamName} *****
|***** WorkerId: ${consumerConf.kclConfiguration.getWorkerIdentifier} *****
|-----------------------------------------------------------------------
""".stripMargin)
if (consumerConf.workerConf.shutdownHook) {
// Adding the shutdown hook for shutting down consumer
sys.addShutdownHook({
stop()
})
}
Future {
kclWorker.run()
logger.info(
s"*** Kinesis consumer for Stream ${consumerConf.kclConfiguration.getStreamName} completed ***"
)
} recoverWith {
case t: Throwable =>
logger.error(
s"*** Caught throwable while processing data from Kinesis Stream: ${consumerConf.kclConfiguration.getStreamName} ***",
t
)
Future.failed(t)
}
}
/**
* Gracefully Shutdown this Consumer.
*/
def stop(): Unit = {
val canShutdown = isShuttingDown.compareAndSet(false, true)
if (canShutdown) {
logger.info(
s"*** Shutting down Kinesis Consumer for Stream ${consumerConf.kclConfiguration.getStreamName} ***"
)
val shutdownTimeoutLength = consumerConf.workerConf.shutdownTimeout.duration.length
val shutdownTimeoutUnit = consumerConf.workerConf.shutdownTimeout.duration.unit
Try {
kclWorker
.startGracefulShutdown()
.get(shutdownTimeoutLength, shutdownTimeoutUnit)
} match {
case Success(_) =>
logger.info(
s"*** Shutdown for Kinesis Consumer for Stream ${consumerConf.kclConfiguration.getStreamName} completed ***"
)
case Failure(ex) =>
logger.error(
s"*** Shutdown failed for Kinesis Consumer for Stream ${consumerConf.kclConfiguration.getStreamName} ***",
ex
)
}
} else {
logger.warn(
s"*** Shutdown attempted twice for Kinesis Consumer for Stream ${consumerConf.kclConfiguration.getStreamName} ***"
)
}
}
}
| WW-Digital/reactive-kinesis | src/main/scala/com/weightwatchers/reactive/kinesis/consumer/KinesisConsumer.scala | Scala | apache-2.0 | 13,133 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.collection
package object generic {
@deprecated("Clearable was moved from collection.generic to collection.mutable", "2.13.0")
type Clearable = scala.collection.mutable.Clearable
@deprecated("Use scala.collection.BuildFrom instead", "2.13.0")
type CanBuildFrom[-From, -A, +C] = scala.collection.BuildFrom[From, A, C]
@deprecated("Growable was moved from collection.generic to collection.mutable", "2.13.0")
type Growable[-A] = scala.collection.mutable.Growable[A]
@deprecated("Shrinkable was moved from collection.generic to collection.mutable", "2.13.0")
type Shrinkable[-A] = scala.collection.mutable.Shrinkable[A]
@deprecated("Use IsIterable instead", "2.13.0")
type IsTraversableLike[Repr] = IsIterable[Repr]
@deprecated("Use IsIterableOnce instead", "2.13.0")
type IsTraversableOnce[Repr] = IsIterableOnce[Repr]
}
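// Migration sketch (illustrative): code written against the deprecated
// CanBuildFrom alias can usually be expressed with scala.collection.BuildFrom
// directly, using fromSpecific to rebuild a collection shaped like `from`.
object BuildFromMigrationExample {
  def mapAs[From, A, B, C](from: From, xs: Iterable[A])(f: A => B)(
      implicit bf: BuildFrom[From, B, C]): C =
    bf.fromSpecific(from)(xs.map(f))
  // Rebuilds a List because `from` is a List.
  val doubled: List[Int] = mapAs(List(1, 2, 3), List(1, 2, 3))(_ * 2)
}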
| scala/scala | src/library/scala/collection/generic/package.scala | Scala | apache-2.0 | 1,154 |
///////////////////////////////////////////////////////////////////////////////
// Hadoop.scala
//
// Copyright (C) 2011, 2012 Ben Wing, The University of Texas at Austin
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
///////////////////////////////////////////////////////////////////////////////
package opennlp.fieldspring.geolocate
import collection.JavaConversions._
import org.apache.hadoop.io._
import org.apache.hadoop.mapreduce._
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.fs.Path
import opennlp.fieldspring.util.argparser._
import opennlp.fieldspring.util.distances._
import opennlp.fieldspring.util.experiment.ExperimentMeteredTask
import opennlp.fieldspring.util.hadoop._
import opennlp.fieldspring.util.ioutil.FileHandler
import opennlp.fieldspring.util.mathutil.{mean, median}
import opennlp.fieldspring.util.printutil.{errprint, warning}
import opennlp.fieldspring.gridlocate.{CellGridEvaluator,FieldspringInfo,DistDocumentFileProcessor}
/* Basic idea for hooking up Geolocate with Hadoop. Hadoop works in terms
of key-value pairs, as follows:
(1) A preprocessor generates key-value pairs, which are passed to Hadoop.
Note that typically at this stage what's passed to Hadoop is not
in the form of a key-value pair but just some sort of item, e.g. a
line of text. This typically becomes the value of the key-value pair,
while something that most programs ignore becomes the key (e.g. the
     item count of the item that was seen). Note that these keys and values,
     as for all data passed around by Hadoop, are typed, and the types are
     under the programmer's control. Hence, although the value is commonly
     text, it may not be.
(2) Hadoop creates a number of mappers and partitions the input items in
some way, passing some fraction of the input items to each mapper.
(3) Each mapper iterates over its input items, and for each in turn,
generates a possibly-empty set of key-value output items. Note that
the types of the output keys and values may be totally different from
those of the input keys and values. The output key has much more
significance than the input key.
(5) A "shuffle" step happens internally, where all output items are
grouped according to their keys, and the keys are further sorted.
(Or equivalently, the entire set of output items is sorted on their
keys, which will naturally place identical keys next to each other
as well as ensuring that non-identical keys are sorted, and then
sets of items with identical keys are transformed into single items
with the same key and a value consisting of a list of all of items
grouped together.) What actually happens is that the items are
sorted and grouped at the end of the map stage on the map node,
*before* being sent over the network; the overall "shuffle" then
simply involves merging.
(4.5) To reduce the amount of data sent over the network, a combiner can
be defined, which runs on the map node after sorting but before
sending the data over. This is optional, and if it exists, does
a preliminary reduce. Depending on the task in question, this may
be exactly the same as the reducer. For example, if the reducer
simply adds up all of the items passed to it, the same function
     can be used as a combiner, since a set of numbers can be added up
all at once or in parts. (An example of where this can't be done
is when the reducer needs to find the median of a set of items.
Computing the median involves selecting one of the items of a
set rather than mashing them all together, and which item is to
be selected cannot be known until the entire set is seen. Given
a subset, the median could be any value in the subset; hence the
entire subset must be sent along, and cannot in general be
"combined" in any way.)
(5) A set of reducers is created, and the resulting grouped items are
     partitioned based on their keys, with each reducer receiving one
     sorted partition, i.e. a list of all the items whose keys were
     assigned to that reducer, in sorted order, where the value of each
     item (remember, items are key-value pairs) is the list of all values
     associated with that key in the mapper output. Each key is seen only
     once (assuming no crashes/restarts), and only on a single reducer.
     The reducer then outputs its own output pairs, typically by
     "reducing" the value list of each key into a single item.
(6) A post-processor might take these final output items and do something
with them.
Note about types:
In general:
MAP STAGE:
Mapper input is of type A -> B
Mapper output is of type C -> D
Hence map() is of type (A -> B) -> Iterable[(C -> D)]
Often types B and C are identical or related.
COMBINE STAGE:
The combiner is strictly an optimization, and the program must work
correctly regardless of whether the combiner is run or not -- or, for
that matter, if run multiple times. This means that the input and
output types of the combiner must be the same, and in most cases
the combiner must be idempotent (i.e. if its input is a previous
   output, it should output its input unchanged; in other words, it
does nothing if run multiple times on the same input).
Combiner input is of type C -> Iterable[D]
Combiner output is of type C -> Iterable[D]
Hence combine() is of type (C -> Iterable[D]) -> (C -> Iterable[D])
   (The output of the combiner is grouped, just like its input from the
map output.)
REDUCE STAGE:
Reducer input is of type C -> Iterable[D]
Reducer output is of type E -> F
Hence reduce() is of type (C -> Iterable[D]) -> (E -> F)
   In our case, we assume that the mappers do the real work and the
   reducers just collect the stats and combine them. We can break a
   big job in two ways: either by partitioning the set of test documents
   and having each mapper do a full evaluation on a limited number of
   test documents, or by partitioning the grid and having each mapper
compare all test documents against a portion of the grid. A third
possibility is to combine both, where a mapper does a portion of
the test documents against a portion of the grid.
OUR IMPLEMENTATION:
Input values to map() are tuples (strategy, document). Output items
   have key = (cellgrid-details, strategy), value = result for
particular document (includes various items, including document,
predicted cell, true rank, various distances). No combiner, since
we have to compute a median, meaning we need all values. Reducer
computes mean/median for all values for a given cellgrid/strategy.
NOTE: For identifying a particular cell, we use indices, since we
can't pass pointers. For KD trees and such, we conceivably might have
to pass in to the reducer some complex details identifying the
cell grid parameters. If so, this probably would get passed first
to all reducers using the trick of creating a custom partitioner
that ensures the reducer gets this info first.
*/
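/* Editor's illustrative sketch (not part of the original pipeline): the
 * stage signatures described above, spelled out as plain Scala. All names
 * below are hypothetical and exist only to make the types concrete. */
private object MapReduceStageTypes {
  // MAP: each input pair (A, B) yields zero or more output pairs (C, D).
  type MapFn[A, B, C, D] = ((A, B)) => Iterable[(C, D)]
  // COMBINE: grouped map output in, grouped map output out (same types), so
  // Hadoop may run it zero, one, or many times without changing the result.
  type CombineFn[C, D] = ((C, Iterable[D])) => (C, Iterable[D])
  // REDUCE: each key's grouped values collapse into a final output pair (E, F).
  type ReduceFn[C, D, E, F] = ((C, Iterable[D])) => (E, F)

  // Word count as a concrete instance: the map emits (word, 1) pairs and the
  // reduce sums each word's counts; the same summing logic could also serve
  // as a combiner, since sums can be computed in parts.
  val wcMap: MapFn[Long, String, String, Int] =
    { case (_, line) => line.split("\\s+").filter(_.nonEmpty).map(_ -> 1).toSeq }
  val wcReduce: ReduceFn[String, Int, String, Int] =
    { case (word, counts) => word -> counts.sum }
}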
/************************************************************************/
/* General Hadoop code for Geolocate app */
/************************************************************************/
abstract class HadoopGeolocateApp(
progname: String
) extends GeolocateApp(progname) with HadoopTextDBApp {
override type TDriver <: HadoopGeolocateDriver
def corpus_suffix =
driver.params.eval_set + "-" + driver.document_file_suffix
def corpus_dirs = params.input_corpus
override def initialize_hadoop_input(job: Job) {
super.initialize_hadoop_input(job)
FileOutputFormat.setOutputPath(job, new Path(params.outfile))
}
}
trait HadoopGeolocateParameters extends GeolocateParameters {
var fieldspring_dir =
ap.option[String]("fieldspring-dir",
help = """Directory to use in place of FIELDSPRING_DIR environment
variable (e.g. in Hadoop).""")
var outfile =
ap.positional[String]("outfile",
help = """File to store evaluation results in.""")
}
/**
* Base mix-in for a Geolocate application using Hadoop.
*
* @see HadoopGeolocateDriver
*/
trait HadoopGeolocateDriver extends
GeolocateDriver with HadoopExperimentDriver {
override type TParam <: HadoopGeolocateParameters
override def handle_parameters() {
super.handle_parameters()
need(params.fieldspring_dir, "fieldspring-dir")
FieldspringInfo.set_fieldspring_dir(params.fieldspring_dir)
}
}
/************************************************************************/
/* Hadoop implementation of geolocate-document */
/************************************************************************/
class DocumentEvaluationMapper extends
Mapper[Object, Text, Text, DoubleWritable] with
HadoopExperimentMapReducer {
def progname = HadoopGeolocateDocumentApp.progname
type TContext = Mapper[Object, Text, Text, DoubleWritable]#Context
type TDriver = HadoopGeolocateDocumentDriver
  // another workaround for type erasure
def create_param_object(ap: ArgParser) = new TParam(ap)
def create_driver() = new TDriver
var evaluators: Iterable[CellGridEvaluator[SphereCoord,SphereDocument,_,_,_]] = _
val task = new ExperimentMeteredTask(driver, "document", "evaluating")
class HadoopDocumentFileProcessor(
context: TContext
) extends DistDocumentFileProcessor(
driver.params.eval_set + "-" + driver.document_file_suffix, driver
) {
override def get_shortfile =
filename_to_counter_name(driver.get_file_handler,
driver.get_configuration.get("mapred.input.dir"))
/* #### FIXME!!! Need to redo things so that different splits are
separated into different files. */
def handle_document(fieldvals: Seq[String]) = {
val table = driver.document_table
val doc = table.create_and_init_document(schema, fieldvals, false)
val retval = if (doc != null) {
doc.dist.finish_after_global()
var skipped = 0
var not_skipped = 0
for (e <- evaluators) {
val num_processed = task.num_processed
val doctag = "#%d" format (1 + num_processed)
if (e.would_skip_document(doc, doctag)) {
skipped += 1
errprint("Skipped document %s because evaluator would skip it",
doc)
} else {
not_skipped += 1
// Don't put side-effecting code inside of an assert!
val result =
e.evaluate_document(doc, doctag)
assert(result != null)
context.write(new Text(e.stratname),
new DoubleWritable(result.asInstanceOf[SphereDocumentEvaluationResult].pred_truedist))
task.item_processed()
}
context.progress
}
if (skipped > 0 && not_skipped > 0)
warning("""Something strange: %s evaluator(s) skipped document, but %s evaluator(s)
didn't skip. Usually all or none should skip.""", skipped, not_skipped)
(not_skipped > 0)
} else false
context.progress
(retval, true)
}
def process_lines(lines: Iterator[String],
filehand: FileHandler, file: String,
compression: String, realname: String) =
throw new IllegalStateException(
"process_lines should never be called here")
}
var processor: HadoopDocumentFileProcessor = _
override def init(context: TContext) {
super.init(context)
if (driver.params.eval_format != "internal")
driver.params.parser.error(
"For Hadoop, '--eval-format' must be 'internal'")
else {
evaluators =
for ((stratname, strategy) <- driver.strategies)
yield driver.create_document_evaluator(strategy, stratname).
asInstanceOf[CellGridEvaluator[
SphereCoord,SphereDocument,_,_,_]]
if (driver.params.input_corpus.length != 1) {
driver.params.parser.error(
"FIXME: For Hadoop, currently need exactly one corpus")
} else {
processor = new HadoopDocumentFileProcessor(context)
processor.read_schema_from_textdb(driver.get_file_handler,
driver.params.input_corpus(0))
context.progress
}
}
}
override def setup(context: TContext) { init(context) }
override def map(key: Object, value: Text, context: TContext) {
processor.parse_row(value.toString)
context.progress
}
}
class DocumentResultReducer extends
Reducer[Text, DoubleWritable, Text, DoubleWritable] {
type TContext = Reducer[Text, DoubleWritable, Text, DoubleWritable]#Context
var driver: HadoopGeolocateDocumentDriver = _
override def setup(context: TContext) {
driver = new HadoopGeolocateDocumentDriver
driver.set_task_context(context)
}
override def reduce(key: Text, values: java.lang.Iterable[DoubleWritable],
context: TContext) {
val errordists = (for (v <- values) yield v.get).toSeq
val mean_dist = mean(errordists)
val median_dist = median(errordists)
context.write(new Text(key.toString + " mean"), new DoubleWritable(mean_dist))
context.write(new Text(key.toString + " median"), new DoubleWritable(median_dist))
}
}
class HadoopGeolocateDocumentParameters(
parser: ArgParser = null
) extends GeolocateDocumentParameters(parser) with HadoopGeolocateParameters {
}
/**
* Class for running the geolocate-document app using Hadoop.
*/
class HadoopGeolocateDocumentDriver extends
GeolocateDocumentTypeDriver with HadoopGeolocateDriver {
override type TParam = HadoopGeolocateDocumentParameters
}
object HadoopGeolocateDocumentApp extends
HadoopGeolocateApp("Fieldspring geolocate-document") {
type TDriver = HadoopGeolocateDocumentDriver
  // needed to work around type erasure
def create_param_object(ap: ArgParser) = new TParam(ap)
def create_driver() = new TDriver()
def initialize_hadoop_classes(job: Job) {
job.setJarByClass(classOf[DocumentEvaluationMapper])
job.setMapperClass(classOf[DocumentEvaluationMapper])
job.setReducerClass(classOf[DocumentResultReducer])
job.setOutputKeyClass(classOf[Text])
job.setOutputValueClass(classOf[DoubleWritable])
}
}
// Old code. Probably won't ever be needed. If we feel the need to move
// to more complex types when serializing, we should switch to Avro rather
// than reinventing the wheel.
// /**
// * Hadoop has a standard Writable class but it isn't so good for us, since
// * it assumes its read method
// */
// trait HadoopGeolocateWritable[T] {
// def write(out: DataOutput): Unit
// def read(in: DataInput): T
// }
//
// /**
// * Class for writing out in a format suitable for Hadoop. Implements
// Hadoop's Writable interface. Because the
// */
//
// abstract class RecordWritable() extends WritableComparable[RecordWritable] {
// }
/*
abstract class ObjectConverter {
type Type
type TWritable <: Writable
def makeWritable(): TWritable
def toWritable(obj: Type, w: TWritable)
def fromWritable(w: TWritable): obj
}
object IntConverter {
type Type = Int
type TWritable = IntWritable
def makeWritable() = new IntWritable
def toWritable(obj: Int, w: IntWritable) { w.set(obj) }
def fromWritable(w: TWritable) = w.get
}
abstract class RecordWritable(
fieldtypes: Seq[Class]
) extends WritableComparable[RecordWritable] {
type Type
var obj: Type = _
var obj_set: Boolean = false
def set(xobj: Type) {
obj = xobj
obj_set = true
}
def get() = {
assert(obj_set)
obj
}
def write(out: DataOutput) {}
def readFields(in: DataInput) {}
val writables = new Array[Writable](fieldtypes.length)
}
object SphereDocumentConverter extends RecordWriterConverter {
type Type = SphereDocument
def serialize(doc: SphereDocument) = doc.title
def deserialize(title: String) = FIXME
def init() {
RecordWriterConverter.register_converter(SphereDocument, this)
}
}
class DocumentEvaluationResultWritable extends RecordWritable {
type Type = DocumentEvaluationResult
def to_properties(obj: Type) =
Seq(obj.document, obj.pred_cell, obj.true_rank,
obj.true_cell, obj.num_docs_in_true_cell,
obj.true_center, obj.true_truedist, obj.true_degdist,
obj.pred_center, obj.pred_truedist, obj.pred_degdist)
def from_properties(props: Seq[Any]) = {
val Seq(document, pred_cell, true_rank,
true_cell, num_docs_in_true_cell,
true_center, true_truedist, true_degdist,
pred_center, pred_truedist, pred_degdist) = props
new HadoopDocumentEvaluationResult(
document.asInstanceOf[SphereDocument],
pred_cell.asInstanceOf[GeoCell],
true_rank.asInstanceOf[Int],
true_cell.asInstanceOf[GeoCell],
num_docs_in_true_cell.asInstanceOf[Int],
true_center.asInstanceOf[SphereCoord],
true_truedist.asInstanceOf[Double],
true_degdist.asInstanceOf[Double],
pred_center.asInstanceOf[SphereCoord],
pred_truedist.asInstanceOf[Double],
pred_degdist.asInstanceOf[Double]
)
}
}
*/
|
utcompling/fieldspring
|
src/main/scala/opennlp/fieldspring/geolocate/Hadoop.scala
|
Scala
|
apache-2.0
| 17,869
|
/*
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.summingbird.store
import com.twitter.storehaus.algebra.MergeableStore
import com.twitter.util.Future
/**
* MergeableStore that triggers a side effect on every call to put or
* merge.
*
* @author Sam Ritchie
* @author Oscar Boykin
*/
class SideEffectStore[K, V](store: MergeableStore[K, V])(sideEffectFn: K => Future[Unit])
extends MergeableStore[K, V] {
override def semigroup = store.semigroup
override def get(k: K) = store.get(k)
override def multiGet[K1 <: K](ks: Set[K1]) = store.multiGet(ks)
def after[T](t: Future[T])(fn: T => Unit): Future[T] = { t.foreach(fn); t }
override def put(pair: (K, Option[V])) =
after(store.put(pair)) { _ => sideEffectFn(pair._1) }
override def merge(pair: (K, V)) =
after(store.merge(pair)) { _ => sideEffectFn(pair._1) }
}
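/* Editor's usage sketch (hypothetical names): wrapping an existing store so
 * that every key touched by put or merge is also reported somewhere. The
 * `underlying` store and the println side effect are placeholders. */
object SideEffectStoreExample {
  def withKeyLog[K, V](underlying: MergeableStore[K, V]): MergeableStore[K, V] =
    new SideEffectStore(underlying)({ k =>
      println(s"wrote key: $k") // a real deployment might enqueue the key instead
      Future.Unit
    })
}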
|
sengt/summingbird-batch
|
summingbird-client/src/main/scala/com/twitter/summingbird/store/SideEffectStore.scala
|
Scala
|
apache-2.0
| 1,382
|
/*
* CirceBridgeProtocol.scala
*
* Updated: Feb 19, 2016
*
* Copyright (c) 2016, CodeMettle
*/
package jsactor.bridge.protocol
import java.util.UUID
import io.circe._
import io.circe.parser._
import jsactor.bridge.protocol.CirceBridgeProtocol.{MessageRegistry, failureEntry, successEntry}
import scala.annotation.implicitNotFound
import scala.collection.immutable.ListMap
import scala.reflect.ClassTag
/**
* @author steven
*
*/
object CirceBridgeProtocol {
class MessageRegistry {
private[CirceBridgeProtocol] var msgMap = Map.empty[String, (Decoder[_], Encoder[_])]
def add[A : Decoder : Encoder : ClassTag] = {
msgMap += (implicitly[ClassTag[A]].runtimeClass.getName → (implicitly[Decoder[A]] → implicitly[Encoder[A]]))
}
def addObj[A <: Singleton : Decoder : Encoder : ClassTag](obj: A) = {
msgMap += (implicitly[ClassTag[A]].runtimeClass.getName → (implicitly[Decoder[A]] → implicitly[Encoder[A]]))
}
}
object Implicits {
trait MapKeyEncodeDecode[T] {
def encode(t: T): String
def decode(s: String): T
}
object MapKeyEncodeDecode {
implicit object UuidMKED extends MapKeyEncodeDecode[UUID] {
override def encode(t: UUID): String = t.toString
override def decode(s: String): UUID = UUID fromString s
}
}
implicit def nonStrKeyMapLikeEncode[K : MapKeyEncodeDecode, V : Encoder]: Encoder[Map[K, V]] = {
val mked = implicitly[MapKeyEncodeDecode[K]]
Encoder[Map[String, V]].contramap[Map[K, V]](_.map(e ⇒ mked.encode(e._1) → e._2))
}
implicit def nonStrKeyMapLikeDecode[K : MapKeyEncodeDecode, V : Decoder]: Decoder[Map[K, V]] = {
val mked = implicitly[MapKeyEncodeDecode[K]]
Decoder[Map[String, V]].map[Map[K, V]](_.map(e ⇒ mked.decode(e._1) → e._2))
}
implicit def listMapEncode[K : Encoder, V : Encoder]: Encoder[ListMap[K, V]] =
Encoder[Seq[(K, V)]].contramap[ListMap[K, V]](_.map(e ⇒ e._1 → e._2).toSeq)
implicit def listMapDecode[K : Decoder, V : Decoder]: Decoder[ListMap[K, V]] =
Decoder[Seq[(K, V)]].map(s ⇒ ListMap(s.map(e ⇒ e._1 → e._2): _*))
}
private val failureEntry = "__failure__"
private val successEntry = "__success__"
}
@implicitNotFound("Need an implicit CirceBridgeProtocol in scope, consider creating an implicit object extending CirceBridgeProtocol")
trait CirceBridgeProtocol extends BridgeProtocol[String] {
private val registry = new MessageRegistry
registerMessages(registry)
private val msgMap = registry.msgMap
/**
* Register messages that go across bridge
*
* @param registry messages are registered with registry.add / registry.addObj
*/
def registerMessages(registry: MessageRegistry): Unit
def pickleJs(obj: Any): Json = {
val encoder = Encoder[(String, Json)]
obj match {
case StatusFailure(cause) ⇒ encoder(failureEntry → pickleJs(cause))
case _ ⇒
val className = obj.getClass.getName
val (_, objEncoder) = msgMap.getOrElse(className, sys.error(s"$className is not registered"))
encoder(successEntry → encoder(className → objEncoder.asInstanceOf[Encoder[Any]](obj)))
}
}
def pickle(obj: Any): String = pickleJs(obj).noSpaces
private def unpickleCursor(c: ACursor): Decoder.Result[Any] = {
def error(err: String) = DecodingFailure(err, Nil)
c.as[(String, Json)].right flatMap {
case (`failureEntry`, jsVal) ⇒ unpickleCursor(ACursor ok jsVal.hcursor).right flatMap {
case t: Throwable ⇒ Right(StatusFailure(t))
case other ⇒ Left(error(s"Expected Throwable for failure, got $other"))
}
case (`successEntry`, jsVal) ⇒ jsVal.as[(String, Json)].right flatMap {
case (className, js) ⇒ msgMap get className match {
case None ⇒ Left(error(s"$className is not registered"))
case Some((decoder, _)) ⇒ decoder.apply(js.hcursor)
}
}
case (other, _) ⇒ Left(error(s"Expected failure or success, got $other"))
}
}
def unpickleJs(js: Json): Decoder.Result[Any] = unpickleCursor(ACursor ok js.hcursor)
def unpickle(json: String): Any = parse(json).right.flatMap(unpickleJs).valueOrThrow
}
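/* Editor's sketch (hypothetical `Ping` message; assumes circe-generic is on
 * the classpath for semiauto derivation): a concrete protocol registering a
 * single case class. The codecs live outside the protocol object because
 * registerMessages runs during trait construction, before the implementing
 * object's own vals are initialized. */
object ExamplePingMessages {
  import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
  case class Ping(seq: Int)
  implicit val pingDecoder: Decoder[Ping] = deriveDecoder[Ping]
  implicit val pingEncoder: Encoder[Ping] = deriveEncoder[Ping]
}

object ExamplePingProtocol extends CirceBridgeProtocol {
  import ExamplePingMessages._
  override def registerMessages(registry: MessageRegistry): Unit =
    registry.add[Ping]
}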
|
CodeMettle/jsactor
|
jsactor-bridge-shared-circe/src/main/scala/jsactor/bridge/protocol/CirceBridgeProtocol.scala
|
Scala
|
apache-2.0
| 4,240
|
package io.github.mandar2812.dynaml.utils.sumac.types
import collection.mutable.LinkedHashSet
class SelectInput[T](var value: Option[T], val options: LinkedHashSet[T])
object SelectInput{
def apply[T](options: T*) = new SelectInput[T](value = None, options = (LinkedHashSet.empty ++ options))
def apply[T](value: Option[T], options: Traversable[T]) = new SelectInput[T](value = value, options = (LinkedHashSet.empty ++ options))
}
|
transcendent-ai-labs/DynaML
|
dynaml-core/src/main/scala/io/github/mandar2812/dynaml/utils/sumac/types/SelectInput.scala
|
Scala
|
apache-2.0
| 438
|
package com.lot.marketEvent.tests
import com.lot.test.BaseTest
import com.lot.generators.OrderFactory
import com.lot.marketEvent.model.MarketEvent
import com.lot.marketEvent.dao.MarketEventDao
import scala.collection.mutable.ListBuffer
import com.lot.generators.MarketEventFactory
class MarketEventDaoTest extends BaseTest {
"MarketEventDao" should "save MarketEvent correctly" in {
/*
* Create an entity
*/
val marketEvent = MarketEventFactory.generate(name="Name1", summary="This is a test event")
/*
* Save it
*/
val fSaved = MarketEventDao.save(marketEvent)
val saved = wait(fSaved)
/*
* Get it back from the DB
*/
val dbMarketEvent = wait(MarketEventDao.get(saved.id.get)).get
/*
* They should be the same
*/
assert(saved == dbMarketEvent)
}
"MarketEventDao" should "list MarketEvents correctly" in {
/*
* Create some entities and save
*/
val marketEventList = new ListBuffer[MarketEvent]
for (i <- 1 to 10) {
val b = MarketEventFactory.generate(name="Name1", summary="This is a test event")
marketEventList += wait(MarketEventDao.save(b))
}
//println(marketEventList)
/*
* Get it back from the DB
*/
val dbList = wait(MarketEventDao.list)
// println(dbList)
/*
* They should be the same
*/
assert(dbList.length == marketEventList.length)
val mixed = marketEventList zip dbList
for {
(marketEvent, dbMarketEvent) <- mixed
x = println(s"comparing marketEvent = $marketEvent with dbMarketEvent = $dbMarketEvent")
} yield (assert(marketEvent == dbMarketEvent))
}
"MarketEventDao" should "update MarketEvent correctly" in {
/*
* Create an entity
*/
val marketEvent = MarketEventFactory.generate(name="Name1", summary="This is a test event")
/*
* Save it
*/
val fSaved = MarketEventDao.save(marketEvent)
val saved = wait(fSaved)
val modified = MarketEventFactory.generate(name="Name1", summary="This is a test event").copy(id=saved.id, created_at=saved.created_at, updated_at=saved.updated_at)
wait(MarketEventDao.update(modified))
/*
* Get it back from the DB
*/
val dbMarketEvent = wait(MarketEventDao.get(saved.id.get)).get
/*
* They should be the same. We need to copy the updated_at
*/
assert(modified.copy(updated_at = dbMarketEvent.updated_at) == dbMarketEvent)
}
"MarketEventDao" should "updateWithOptimisticLocking MarketEvent correctly" in {
/*
* Create an entity
*/
val marketEvent = MarketEventFactory.generate(name="Name1", summary="This is a test event")
/*
* Save it
*/
val fSaved = MarketEventDao.save(marketEvent)
val saved = wait(fSaved)
val modified1 = MarketEventFactory.generate(name="Name1", summary="This is a test event").copy(id=saved.id, created_at=saved.created_at, updated_at=saved.updated_at)
val modified2 = MarketEventFactory.generate(name="Name1", summary="This is a test event").copy(id=saved.id, created_at=saved.created_at, updated_at=saved.updated_at)
    val rowCount1 = wait(MarketEventDao.updateWithOptimisticLocking(modified1))
    // The second update carries the now-stale updated_at, so optimistic locking rejects it
    val rowCount2 = wait(MarketEventDao.updateWithOptimisticLocking(modified2))
assert(rowCount1 == 1)
assert(rowCount2 == 0)
}
"MarketEventDao" should "delete MarketEvent correctly" in {
/*
* Create an entity
*/
val marketEvent = MarketEventFactory.generate(name="Name1", summary="This is a test event")
/*
* Save it
*/
val fSaved = MarketEventDao.save(marketEvent)
val saved = wait(fSaved)
/*
* Delete it
*/
wait(MarketEventDao.delete(saved.id.get))
/*
* Get it back from the DB
*/
val dbMarketEvent = wait(MarketEventDao.get(saved.id.get))
/*
* They should be None
*/
assert(dbMarketEvent == None)
}
}
|
thimmaiah/life_of_a_trade_scala
|
src/test/scala/com/lot/market_event/test/MarketEventDaoTest.scala
|
Scala
|
apache-2.0
| 3,952
|
package controllers
import javax.inject.Inject
import play.api.mvc.{Action, Controller}
import scala.concurrent.{ExecutionContext, Future}
/**
* @author ponkotuy
* Date: 15/03/10.
*/
class MyAssets @Inject()(implicit val ec: ExecutionContext) extends Controller {
def at(path: String, file: String, aggressiveCaching: Boolean = false) = Action.async { implicit req =>
    // trim whitespace around each encoding token (e.g. "gzip, pack200-gzip")
    val accepts = req.headers.get(ACCEPT_ENCODING).map(_.split(",").map(_.trim)).getOrElse(Array())
if(accepts.contains("pack200-gzip")) {
Assets.at(path, file + ".pack.gz").apply(req).flatMap { result =>
if(result.header.status >= 400) Assets.at(path, file, aggressiveCaching).apply(req)
else Future(result.withHeaders(CONTENT_ENCODING -> "pack200-gzip"))
}
} else {
controllers.Assets.at(path, file, aggressiveCaching).apply(req)
}
}
}
|
kxbmap/MyFleetGirls
|
server/app/controllers/MyAssets.scala
|
Scala
|
mit
| 870
|
import org.junit.Test
import org.junit.Assert._
class Test1 {
@Test def t1(): Unit = {
assertEquals("I was compiled by Scala 3. :)", msg)
}
}
|
diegopacheco/scala-playground
|
scala-3-playground/scala-3-playground/src/test/scala/Test1.scala
|
Scala
|
unlicense
| 151
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import java.util.concurrent.Semaphore
import scala.collection.mutable
import scala.collection.JavaConverters._
import org.mockito.Mockito
import org.scalatest.Matchers
import org.apache.spark._
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.internal.config.LISTENER_BUS_EVENT_QUEUE_CAPACITY
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.util.{ResetSystemProperties, RpcUtils}
class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Matchers
with ResetSystemProperties {
/** Length of time to wait while draining listener events. */
val WAIT_TIMEOUT_MILLIS = 10000
val jobCompletionTime = 1421191296660L
private val mockSparkContext: SparkContext = Mockito.mock(classOf[SparkContext])
private val mockMetricsSystem: MetricsSystem = Mockito.mock(classOf[MetricsSystem])
test("don't call sc.stop in listener") {
sc = new SparkContext("local", "SparkListenerSuite", new SparkConf())
val listener = new SparkContextStoppingListener(sc)
val bus = new LiveListenerBus(sc.conf)
bus.addListener(listener)
// Starting listener bus should flush all buffered events
bus.start(sc, sc.env.metricsSystem)
bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded))
bus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
bus.stop()
assert(listener.sparkExSeen)
}
test("basic creation and shutdown of LiveListenerBus") {
val conf = new SparkConf()
val counter = new BasicJobCounter
val bus = new LiveListenerBus(conf)
bus.addListener(counter)
// Metrics are initially empty.
assert(bus.metrics.numEventsPosted.getCount === 0)
assert(bus.metrics.numDroppedEvents.getCount === 0)
assert(bus.metrics.queueSize.getValue === 0)
assert(bus.metrics.eventProcessingTime.getCount === 0)
// Post five events:
(1 to 5).foreach { _ => bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded)) }
// Five messages should be marked as received and queued, but no messages should be posted to
    // listeners yet because the listener bus hasn't been started.
assert(bus.metrics.numEventsPosted.getCount === 5)
assert(bus.metrics.queueSize.getValue === 5)
assert(counter.count === 0)
// Starting listener bus should flush all buffered events
bus.start(mockSparkContext, mockMetricsSystem)
Mockito.verify(mockMetricsSystem).registerSource(bus.metrics)
bus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(counter.count === 5)
assert(bus.metrics.queueSize.getValue === 0)
assert(bus.metrics.eventProcessingTime.getCount === 5)
// After listener bus has stopped, posting events should not increment counter
bus.stop()
(1 to 5).foreach { _ => bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded)) }
assert(counter.count === 5)
assert(bus.metrics.numEventsPosted.getCount === 5)
// Make sure per-listener-class timers were created:
assert(bus.metrics.getTimerForListenerClass(
classOf[BasicJobCounter].asSubclass(classOf[SparkListenerInterface])).get.getCount == 5)
// Listener bus must not be started twice
intercept[IllegalStateException] {
val bus = new LiveListenerBus(conf)
bus.start(mockSparkContext, mockMetricsSystem)
bus.start(mockSparkContext, mockMetricsSystem)
}
// ... or stopped before starting
intercept[IllegalStateException] {
val bus = new LiveListenerBus(conf)
bus.stop()
}
}
test("bus.stop() waits for the event queue to completely drain") {
@volatile var drained = false
// When Listener has started
val listenerStarted = new Semaphore(0)
// Tells the listener to stop blocking
val listenerWait = new Semaphore(0)
// When stopper has started
val stopperStarted = new Semaphore(0)
// When stopper has returned
val stopperReturned = new Semaphore(0)
class BlockingListener extends SparkListener {
override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = {
listenerStarted.release()
listenerWait.acquire()
drained = true
}
}
val bus = new LiveListenerBus(new SparkConf())
val blockingListener = new BlockingListener
bus.addListener(blockingListener)
bus.start(mockSparkContext, mockMetricsSystem)
bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded))
listenerStarted.acquire()
// Listener should be blocked after start
assert(!drained)
new Thread("ListenerBusStopper") {
override def run() {
stopperStarted.release()
        // stop() will block until the listener is unblocked below and the queue drains
bus.stop()
stopperReturned.release()
}
}.start()
stopperStarted.acquire()
// Listener should remain blocked after stopper started
assert(!drained)
// unblock Listener to let queue drain
listenerWait.release()
stopperReturned.acquire()
assert(drained)
}
test("metrics for dropped listener events") {
val bus = new LiveListenerBus(new SparkConf().set(LISTENER_BUS_EVENT_QUEUE_CAPACITY, 1))
val listenerStarted = new Semaphore(0)
val listenerWait = new Semaphore(0)
bus.addListener(new SparkListener {
override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = {
listenerStarted.release()
listenerWait.acquire()
}
})
bus.start(mockSparkContext, mockMetricsSystem)
// Post a message to the listener bus and wait for processing to begin:
bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded))
listenerStarted.acquire()
assert(bus.metrics.queueSize.getValue === 0)
assert(bus.metrics.numDroppedEvents.getCount === 0)
// If we post an additional message then it should remain in the queue because the listener is
// busy processing the first event:
bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded))
assert(bus.metrics.queueSize.getValue === 1)
assert(bus.metrics.numDroppedEvents.getCount === 0)
// The queue is now full, so any additional events posted to the listener will be dropped:
bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded))
assert(bus.metrics.queueSize.getValue === 1)
assert(bus.metrics.numDroppedEvents.getCount === 1)
    // Allow the remaining events to be processed so we can stop the listener bus:
listenerWait.release(2)
bus.stop()
}
test("basic creation of StageInfo") {
sc = new SparkContext("local", "SparkListenerSuite")
val listener = new SaveStageAndTaskInfo
sc.addSparkListener(listener)
val rdd1 = sc.parallelize(1 to 100, 4)
val rdd2 = rdd1.map(_.toString)
rdd2.setName("Target RDD")
rdd2.count()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be {1}
val (stageInfo, taskInfoMetrics) = listener.stageInfos.head
stageInfo.rddInfos.size should be {2}
stageInfo.rddInfos.forall(_.numPartitions == 4) should be {true}
stageInfo.rddInfos.exists(_.name == "Target RDD") should be {true}
stageInfo.numTasks should be {4}
stageInfo.submissionTime should be ('defined)
stageInfo.completionTime should be ('defined)
taskInfoMetrics.length should be {4}
}
test("basic creation of StageInfo with shuffle") {
sc = new SparkContext("local", "SparkListenerSuite")
val listener = new SaveStageAndTaskInfo
sc.addSparkListener(listener)
val rdd1 = sc.parallelize(1 to 100, 4)
val rdd2 = rdd1.filter(_ % 2 == 0).map(i => (i, i))
val rdd3 = rdd2.reduceByKey(_ + _)
rdd1.setName("Un")
rdd2.setName("Deux")
rdd3.setName("Trois")
rdd1.count()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be {1}
val stageInfo1 = listener.stageInfos.keys.find(_.stageId == 0).get
stageInfo1.rddInfos.size should be {1} // ParallelCollectionRDD
stageInfo1.rddInfos.forall(_.numPartitions == 4) should be {true}
stageInfo1.rddInfos.exists(_.name == "Un") should be {true}
listener.stageInfos.clear()
rdd2.count()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be {1}
val stageInfo2 = listener.stageInfos.keys.find(_.stageId == 1).get
stageInfo2.rddInfos.size should be {3}
stageInfo2.rddInfos.forall(_.numPartitions == 4) should be {true}
stageInfo2.rddInfos.exists(_.name == "Deux") should be {true}
listener.stageInfos.clear()
rdd3.count()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be {2} // Shuffle map stage + result stage
val stageInfo3 = listener.stageInfos.keys.find(_.stageId == 3).get
stageInfo3.rddInfos.size should be {1} // ShuffledRDD
stageInfo3.rddInfos.forall(_.numPartitions == 4) should be {true}
stageInfo3.rddInfos.exists(_.name == "Trois") should be {true}
}
test("StageInfo with fewer tasks than partitions") {
sc = new SparkContext("local", "SparkListenerSuite")
val listener = new SaveStageAndTaskInfo
sc.addSparkListener(listener)
val rdd1 = sc.parallelize(1 to 100, 4)
val rdd2 = rdd1.map(_.toString)
sc.runJob(rdd2, (items: Iterator[String]) => items.size, Seq(0, 1))
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be {1}
val (stageInfo, _) = listener.stageInfos.head
stageInfo.numTasks should be {2}
stageInfo.rddInfos.size should be {2}
stageInfo.rddInfos.forall(_.numPartitions == 4) should be {true}
}
test("local metrics") {
sc = new SparkContext("local", "SparkListenerSuite")
val listener = new SaveStageAndTaskInfo
sc.addSparkListener(listener)
sc.addSparkListener(new StatsReportListener)
// just to make sure some of the tasks take a noticeable amount of time
val w = { i: Int =>
if (i == 0) {
Thread.sleep(100)
}
i
}
val numSlices = 16
val d = sc.parallelize(0 to 10000, numSlices).map(w)
d.count()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be (1)
val d2 = d.map { i => w(i) -> i * 2 }.setName("shuffle input 1")
val d3 = d.map { i => w(i) -> (0 to (i % 5)) }.setName("shuffle input 2")
val d4 = d2.cogroup(d3, numSlices).map { case (k, (v1, v2)) =>
w(k) -> (v1.size, v2.size)
}
d4.setName("A Cogroup")
d4.collectAsMap()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
listener.stageInfos.size should be (4)
listener.stageInfos.foreach { case (stageInfo, taskInfoMetrics) =>
/**
* Small test, so some tasks might take less than 1 millisecond, but average should be greater
* than 0 ms.
*/
checkNonZeroAvg(
taskInfoMetrics.map(_._2.executorRunTime),
stageInfo + " executorRunTime")
checkNonZeroAvg(
taskInfoMetrics.map(_._2.executorDeserializeTime),
stageInfo + " executorDeserializeTime")
/* Test is disabled (SEE SPARK-2208)
if (stageInfo.rddInfos.exists(_.name == d4.name)) {
checkNonZeroAvg(
taskInfoMetrics.map(_._2.shuffleReadMetrics.get.fetchWaitTime),
stageInfo + " fetchWaitTime")
}
*/
taskInfoMetrics.foreach { case (taskInfo, taskMetrics) =>
taskMetrics.resultSize should be > (0L)
if (stageInfo.rddInfos.exists(info => info.name == d2.name || info.name == d3.name)) {
assert(taskMetrics.shuffleWriteMetrics.bytesWritten > 0L)
}
if (stageInfo.rddInfos.exists(_.name == d4.name)) {
assert(taskMetrics.shuffleReadMetrics.totalBlocksFetched == 2 * numSlices)
assert(taskMetrics.shuffleReadMetrics.localBlocksFetched == 2 * numSlices)
assert(taskMetrics.shuffleReadMetrics.remoteBlocksFetched == 0)
assert(taskMetrics.shuffleReadMetrics.remoteBytesRead == 0L)
}
}
}
}
test("onTaskGettingResult() called when result fetched remotely") {
val conf = new SparkConf().set("spark.rpc.message.maxSize", "1")
sc = new SparkContext("local", "SparkListenerSuite", conf)
val listener = new SaveTaskEvents
sc.addSparkListener(listener)
// Make a task whose result is larger than the RPC message size
val maxRpcMessageSize = RpcUtils.maxMessageSizeBytes(conf)
assert(maxRpcMessageSize === 1024 * 1024)
val result = sc.parallelize(Seq(1), 1)
.map { x => 1.to(maxRpcMessageSize).toArray }
.reduce { case (x, y) => x }
assert(result === 1.to(maxRpcMessageSize).toArray)
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
val TASK_INDEX = 0
assert(listener.startedTasks.contains(TASK_INDEX))
assert(listener.startedGettingResultTasks.contains(TASK_INDEX))
assert(listener.endedTasks.contains(TASK_INDEX))
}
test("onTaskGettingResult() not called when result sent directly") {
sc = new SparkContext("local", "SparkListenerSuite")
val listener = new SaveTaskEvents
sc.addSparkListener(listener)
// Make a task whose result is larger than the RPC message size
val result = sc.parallelize(Seq(1), 1).map(2 * _).reduce { case (x, y) => x }
assert(result === 2)
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
val TASK_INDEX = 0
assert(listener.startedTasks.contains(TASK_INDEX))
assert(listener.startedGettingResultTasks.isEmpty)
assert(listener.endedTasks.contains(TASK_INDEX))
}
test("onTaskEnd() should be called for all started tasks, even after job has been killed") {
sc = new SparkContext("local", "SparkListenerSuite")
val WAIT_TIMEOUT_MILLIS = 10000
val listener = new SaveTaskEvents
sc.addSparkListener(listener)
val numTasks = 10
val f = sc.parallelize(1 to 10000, numTasks).map { i => Thread.sleep(10); i }.countAsync()
// Wait until one task has started (because we want to make sure that any tasks that are started
// have corresponding end events sent to the listener).
var finishTime = System.currentTimeMillis + WAIT_TIMEOUT_MILLIS
listener.synchronized {
var remainingWait = finishTime - System.currentTimeMillis
while (listener.startedTasks.isEmpty && remainingWait > 0) {
listener.wait(remainingWait)
remainingWait = finishTime - System.currentTimeMillis
}
assert(!listener.startedTasks.isEmpty)
}
f.cancel()
// Ensure that onTaskEnd is called for all started tasks.
finishTime = System.currentTimeMillis + WAIT_TIMEOUT_MILLIS
listener.synchronized {
var remainingWait = finishTime - System.currentTimeMillis
while (listener.endedTasks.size < listener.startedTasks.size && remainingWait > 0) {
listener.wait(finishTime - System.currentTimeMillis)
remainingWait = finishTime - System.currentTimeMillis
}
assert(listener.endedTasks.size === listener.startedTasks.size)
}
}
test("SparkListener moves on if a listener throws an exception") {
val badListener = new BadListener
val jobCounter1 = new BasicJobCounter
val jobCounter2 = new BasicJobCounter
val bus = new LiveListenerBus(new SparkConf())
// Propagate events to bad listener first
bus.addListener(badListener)
bus.addListener(jobCounter1)
bus.addListener(jobCounter2)
bus.start(mockSparkContext, mockMetricsSystem)
// Post events to all listeners, and wait until the queue is drained
(1 to 5).foreach { _ => bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded)) }
bus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
// The exception should be caught, and the event should be propagated to other listeners
assert(bus.listenerThreadIsAlive)
assert(jobCounter1.count === 5)
assert(jobCounter2.count === 5)
}
test("registering listeners via spark.extraListeners") {
val listeners = Seq(
classOf[ListenerThatAcceptsSparkConf],
classOf[FirehoseListenerThatAcceptsSparkConf],
classOf[BasicJobCounter])
val conf = new SparkConf().setMaster("local").setAppName("test")
.set("spark.extraListeners", listeners.map(_.getName).mkString(","))
sc = new SparkContext(conf)
sc.listenerBus.listeners.asScala.count(_.isInstanceOf[BasicJobCounter]) should be (1)
sc.listenerBus.listeners.asScala
.count(_.isInstanceOf[ListenerThatAcceptsSparkConf]) should be (1)
sc.listenerBus.listeners.asScala
.count(_.isInstanceOf[FirehoseListenerThatAcceptsSparkConf]) should be (1)
}
/**
* Assert that the given list of numbers has an average that is greater than zero.
*/
private def checkNonZeroAvg(m: Traversable[Long], msg: String) {
assert(m.sum / m.size.toDouble > 0.0, msg)
}
/**
* A simple listener that saves all task infos and task metrics.
*/
private class SaveStageAndTaskInfo extends SparkListener {
val stageInfos = mutable.Map[StageInfo, Seq[(TaskInfo, TaskMetrics)]]()
var taskInfoMetrics = mutable.Buffer[(TaskInfo, TaskMetrics)]()
override def onTaskEnd(task: SparkListenerTaskEnd) {
val info = task.taskInfo
val metrics = task.taskMetrics
if (info != null && metrics != null) {
taskInfoMetrics += ((info, metrics))
}
}
override def onStageCompleted(stage: SparkListenerStageCompleted) {
stageInfos(stage.stageInfo) = taskInfoMetrics
taskInfoMetrics = mutable.Buffer.empty
}
}
/**
* A simple listener that saves the task indices for all task events.
*/
private class SaveTaskEvents extends SparkListener {
val startedTasks = new mutable.HashSet[Int]()
val startedGettingResultTasks = new mutable.HashSet[Int]()
val endedTasks = new mutable.HashSet[Int]()
override def onTaskStart(taskStart: SparkListenerTaskStart): Unit = synchronized {
startedTasks += taskStart.taskInfo.index
notify()
}
override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = synchronized {
endedTasks += taskEnd.taskInfo.index
notify()
}
override def onTaskGettingResult(taskGettingResult: SparkListenerTaskGettingResult) {
startedGettingResultTasks += taskGettingResult.taskInfo.index
}
}
/**
* A simple listener that throws an exception on job end.
*/
private class BadListener extends SparkListener {
override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = { throw new Exception }
}
}
// These classes can't be declared inside of the SparkListenerSuite class because we don't want
// their constructors to contain references to SparkListenerSuite:
/**
* A simple listener that counts the number of jobs observed.
*/
private class BasicJobCounter extends SparkListener {
var count = 0
override def onJobEnd(job: SparkListenerJobEnd): Unit = count += 1
}
/**
* A simple listener that tries to stop SparkContext.
*/
private class SparkContextStoppingListener(val sc: SparkContext) extends SparkListener {
@volatile var sparkExSeen = false
override def onJobEnd(job: SparkListenerJobEnd): Unit = {
try {
sc.stop()
} catch {
case se: SparkException =>
sparkExSeen = true
}
}
}
private class ListenerThatAcceptsSparkConf(conf: SparkConf) extends SparkListener {
var count = 0
override def onJobEnd(job: SparkListenerJobEnd): Unit = count += 1
}
private class FirehoseListenerThatAcceptsSparkConf(conf: SparkConf) extends SparkFirehoseListener {
var count = 0
override def onEvent(event: SparkListenerEvent): Unit = event match {
case job: SparkListenerJobEnd => count += 1
case _ =>
}
}
|
aokolnychyi/spark
|
core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
|
Scala
|
apache-2.0
| 20,381
|
package com.ctask.storage
import com.ctask.data.Task.Recurrence
import com.ctask.data.{Task, TaskList}
import org.scalatest.{FlatSpec, Matchers}
import scala.util.{Failure, Success}
/**
* Common abstract class for the storage specs implementations.
*/
abstract class StorageSpec extends FlatSpec with Matchers {
def getBackingStorage: BackingStorage
behavior of "Storage"
it should "add a new task list to the storage" in {
val backingStorage = getBackingStorage
val newTaskListName = "new test tasklist"
val addedTaskListTry = backingStorage.createTaskList(new TaskList(newTaskListName, Array.empty, None.orNull))
addedTaskListTry match {
case Success(addedTaskList) => addedTaskList.name shouldBe newTaskListName
case Failure(ex) => fail(s"Unexpected exception: $ex")
}
}
it should "trim task list names when creating a new task list" in {
val backingStorage = getBackingStorage
val newTaskListName = "new test tasklist "
val newTaskListTry = backingStorage.createTaskList(new TaskList(newTaskListName, Array.empty, None.orNull))
val newTaskList = newTaskListTry.get
newTaskList.name shouldBe newTaskListName.trim
}
it should "generate an error when adding a task list with an already existing name" in {
val backingStorage = getBackingStorage
val newTaskListName = "new test tasklist"
backingStorage.createTaskList(new TaskList(newTaskListName, Array.empty, None.orNull))
val alreadyExistingTaskTry = backingStorage.createTaskList(new TaskList(newTaskListName, Array.empty, None.orNull))
alreadyExistingTaskTry match {
case Failure(ex) => ex shouldBe a[StorageException]
case Success(_) => fail(s"Should have failed since the $newTaskListName list " +
"already exists")
}
}
it should "generate an error when adding a task list with an empty name" in {
val backingStorage = getBackingStorage
val newTaskListName = ""
val addedTaskListTry = backingStorage.createTaskList(new TaskList(newTaskListName, Array.empty, None.orNull))
addedTaskListTry match {
case Failure(ex) => ex shouldBe a[StorageException]
case Success(_) => fail(s"Should have failed since the task list name is empty")
}
}
it should "generate an error when adding a task list with a name that contains only spaces" in {
val backingStorage = getBackingStorage
val newTaskListName = " "
val addedTaskListTry = backingStorage.createTaskList(new TaskList(newTaskListName, Array.empty, None.orNull))
addedTaskListTry match {
case Failure(ex) => ex shouldBe a[StorageException]
case Success(_) => fail(s"Should have failed since the task list name is empty")
}
}
it should "remove an existing task list" in {
val backingStorage = getBackingStorage
val newTaskListName = "new test tasklist"
val addedTaskListTry = backingStorage.createTaskList(new TaskList(newTaskListName, Array.empty, None.orNull))
val removedTaskListTry = backingStorage.deleteTaskList(newTaskListName)
addedTaskListTry match {
case Success(addedTaskList) =>
removedTaskListTry match {
case Success(removedTaskList) =>
addedTaskList shouldEqual removedTaskList
case Failure(ex) => fail(s"Unexpected exception $ex")
}
case Failure(ex) => fail(s"Unexpected exception $ex")
}
backingStorage.loadTaskList(newTaskListName) match {
case Success(_) => fail(s"Task list $newTaskListName was not removed")
case Failure(ex) => ex shouldBe a[StorageException]
}
}
it should "trim task list names when removing a task list" in {
val backingStorage = getBackingStorage
val trimmedTaskListName = "new test tasklist"
backingStorage.createTaskList(new TaskList(trimmedTaskListName, Array.empty, None.orNull))
val nonTrimmedTaskListName = trimmedTaskListName + " "
val removedTaskListTry = backingStorage.deleteTaskList(nonTrimmedTaskListName)
removedTaskListTry match {
case Success(removedTaskList) => removedTaskList.name shouldBe trimmedTaskListName
case Failure(ex) => fail(s"Unexpected exception $ex")
}
}
it should "generate an error when trying to remove a task list that does not exist" in {
val backingStorage = getBackingStorage
val nonExistentTaskListName = "i do not exist"
val removedTaskListTry = backingStorage.deleteTaskList(nonExistentTaskListName)
removedTaskListTry match {
case Failure(ex) => ex shouldBe a[StorageException]
case Success(_) => fail("Should have failed since a task list with the provided" +
"name does not exist.")
}
}
it should "load a task list from storage" in {
val backingStorage = getBackingStorage
val newTaskListName = "new test tasklist"
val newTaskListTry = backingStorage.createTaskList(new TaskList(newTaskListName, Array.empty, None.orNull))
val newTaskList = newTaskListTry.get
val loadedTaskListTry = backingStorage.loadTaskList(newTaskListName)
loadedTaskListTry match {
case Success(loadedTaskList) => loadedTaskList shouldEqual newTaskList
case Failure(ex) => fail(s"Unexpected exception: $ex")
}
}
it should "generate an error when trying to load a non-existent task list" in {
val backingStorage = getBackingStorage
val nonExistentTaskListName = "i do not exist"
val loadedTaskListTry = backingStorage.loadTaskList(nonExistentTaskListName)
loadedTaskListTry match {
case Failure(ex) => ex shouldBe a[StorageException]
case Success(_) => fail("Should have failed since the task list does not exist.")
}
}
it should "trim task list names when loading a task list" in {
val backingStorage = getBackingStorage
val trimmedTaskListName = "new test tasklist"
backingStorage.createTaskList(new TaskList(trimmedTaskListName, Array.empty, None.orNull))
val nonTrimmedTaskListName = trimmedTaskListName + " "
val loadedTaskListTry = backingStorage.loadTaskList(nonTrimmedTaskListName)
loadedTaskListTry match {
case Success(loadedTaskList) => loadedTaskList.name shouldBe trimmedTaskListName
case Failure(ex) => fail(s"Unexpected exception: $ex")
}
}
it should "load all tasklists names" in {
val backingStorage = getBackingStorage
val newTaskListNames = "new test tasklist1" :: "new test tasklist2" :: Nil
newTaskListNames.foreach { newListName =>
backingStorage.createTaskList(new TaskList(newListName, Array.empty, None.orNull))
}
val taskListsNames = backingStorage.loadTaskListNames
taskListsNames.size shouldBe newTaskListNames.size
taskListsNames.foreach { loadedListName =>
newTaskListNames contains loadedListName shouldBe true
}
}
it should "replace an existing task list" in {
val backingStorage = getBackingStorage
val newTaskListName = "new test tasklist"
val addedTaskListTry = backingStorage.createTaskList(new TaskList(newTaskListName, Array.empty, None.orNull))
addedTaskListTry match {
case Success(addedTaskList) => addedTaskList.name shouldBe newTaskListName
case Failure(ex) => fail(s"Unexpected exception: $ex")
}
val replacementTaskList = new TaskList(newTaskListName, Array(
new Task("taskName", "", 1L, None.orNull, false, Recurrence.NEVER, false)),
None.orNull)
backingStorage.replaceTaskList(newTaskListName, replacementTaskList) match {
case Success(replacedTaskList) => replacedTaskList shouldBe replacementTaskList
case Failure(ex) => fail(s"Unexpected exception: $ex")
}
}
it should "fail to replace a non-existing task list" in {
val backingStorage = getBackingStorage
val replacementTaskList = new TaskList("replacement task list", Array.empty, None.orNull)
backingStorage.replaceTaskList(replacementTaskList.name, replacementTaskList) match {
case Success(_) => fail("Should have failed to replace a non-existing task list.")
case Failure(ex) => ex shouldBe a[StorageException]
}
}
it should "remove a task from a tasklist" in {
// Add tasklist and add task to it
val backingStorage = getBackingStorage
val newTaskListName = "zerg"
backingStorage.createTaskList(new TaskList(newTaskListName, Array.empty, None.orNull))
val task = new Task("kill protoss", "", 1L, None.orNull, false, Recurrence.NEVER, false)
backingStorage.replaceTaskList(newTaskListName, new TaskList(newTaskListName, Array(task), None.orNull)) should not be a[Failure[_]]
    // remove the task from the tasklist
backingStorage.removeTaskFromTaskList(task.id, newTaskListName).get shouldBe task
}
it should "fail to remove a task from a non existing list" in {
val backingStorage = getBackingStorage
val nonExistingTaskListName = "i do not exist"
val taskId = 1
try {
backingStorage.removeTaskFromTaskList(taskId, nonExistingTaskListName).get
fail("Should have thrown a StorageException but did not throw.")
}
catch {
case se: StorageException =>
se.message shouldBe s"The task with id $taskId cannot be removed because list $nonExistingTaskListName does not exist"
case ex: Throwable =>
fail(s"Should have throws a StorageException but $ex was thrown instead")
}
}
it should "fail to remove a non existing task" in {
val backingStorage = getBackingStorage
val newTaskListName = "zerg"
backingStorage.createTaskList(new TaskList(newTaskListName, Array.empty, None.orNull))
val nonExistingTaskId = 1
try {
backingStorage.removeTaskFromTaskList(nonExistingTaskId, newTaskListName).get
fail("Should have thrown a StorageException but did not throw.")
}
catch {
case se: StorageException =>
se.message shouldBe s"Could not find task $nonExistingTaskId in list $newTaskListName"
case ex: Throwable =>
fail(s"Should have throws a StorageException but $ex was thrown instead")
}
}
it should "load all task lists" in {
val backingStorage = getBackingStorage
val taskList1 = "taskList1"
val taskList2 = "taskList2"
val taskList3 = "taskList3"
backingStorage.createTaskList(new TaskList(taskList1, Array.empty, None.orNull))
backingStorage.createTaskList(new TaskList(taskList2, Array.empty, None.orNull))
backingStorage.createTaskList(new TaskList(taskList3, Array.empty, None.orNull))
backingStorage.loadAllTaskLists.size shouldBe 3
}
it should "return an empty task lists result when there are no task lists to load" in {
val backingStorage = getBackingStorage
backingStorage.loadAllTaskLists.size shouldBe 0
}
}
|
modsrm/ctask
|
server/src/test/scala/com/ctask/storage/StorageSpec.scala
|
Scala
|
gpl-3.0
| 10,713
|
package uk.co.morleydev.zander.client.validator
trait ValidatorFactory {
def createValidateArtefactDetailsDoNotExist() : ValidateArtefactDetailsExistence
def createValidateArtefactDetailsExist() : ValidateArtefactDetailsExistence
}
|
MorleyDev/zander.client
|
src/main/scala/uk/co/morleydev/zander/client/validator/ValidatorFactory.scala
|
Scala
|
mit
| 239
|
package java.lang
import scala.scalajs.js
import java.util.Arrays
@inline
class Character(private val value: scala.Char) extends Comparable[Character] {
def charValue(): scala.Char = value
override def equals(that: Any): scala.Boolean =
that.isInstanceOf[Character] && (value == that.asInstanceOf[Character].charValue)
override def compareTo(that: Character): Int =
Character.compare(charValue, that.charValue)
override def toString(): String =
Character.toString(value)
override def hashCode(): Int = value.##
/*
* Methods on scala.Char
   * The following methods are only here to properly support reflective calls
   * on boxed primitive values. You will not be able to call these methods
   * directly, since symbol lookup goes through the true javalib; this file
   * contains only the implementations.
*/
protected def toByte: scala.Byte = value.toByte
protected def toShort: scala.Short = value.toShort
protected def toChar: scala.Char = value.toChar
protected def toInt: scala.Int = value
protected def toLong: scala.Long = value.toLong
protected def toFloat: scala.Float = value.toFloat
protected def toDouble: scala.Double = value.toDouble
// scalastyle:off disallow.space.before.token
protected def unary_~ : scala.Int = ~value
protected def unary_+ : scala.Int = value
protected def unary_- : scala.Int = -value
// scalastyle:on disallow.space.before.token
protected def +(x: String): String = value + x
protected def <<(x: scala.Int): scala.Int = value << x
protected def <<(x: scala.Long): scala.Int = value << x
protected def >>>(x: scala.Int): scala.Int = value >>> x
protected def >>>(x: scala.Long): scala.Int = value >>> x
protected def >>(x: scala.Int): scala.Int = value >> x
protected def >>(x: scala.Long): scala.Int = value >> x
protected def ==(x: scala.Byte): scala.Boolean = value == x
protected def ==(x: scala.Short): scala.Boolean = value == x
protected def ==(x: scala.Char): scala.Boolean = value == x
protected def ==(x: scala.Int): scala.Boolean = value == x
protected def ==(x: scala.Long): scala.Boolean = value == x
protected def ==(x: scala.Float): scala.Boolean = value == x
protected def ==(x: scala.Double): scala.Boolean = value == x
protected def !=(x: scala.Byte): scala.Boolean = value != x
protected def !=(x: scala.Short): scala.Boolean = value != x
protected def !=(x: scala.Char): scala.Boolean = value != x
protected def !=(x: scala.Int): scala.Boolean = value != x
protected def !=(x: scala.Long): scala.Boolean = value != x
protected def !=(x: scala.Float): scala.Boolean = value != x
protected def !=(x: scala.Double): scala.Boolean = value != x
protected def <(x: scala.Byte): scala.Boolean = value < x
protected def <(x: scala.Short): scala.Boolean = value < x
protected def <(x: scala.Char): scala.Boolean = value < x
protected def <(x: scala.Int): scala.Boolean = value < x
protected def <(x: scala.Long): scala.Boolean = value < x
protected def <(x: scala.Float): scala.Boolean = value < x
protected def <(x: scala.Double): scala.Boolean = value < x
protected def <=(x: scala.Byte): scala.Boolean = value <= x
protected def <=(x: scala.Short): scala.Boolean = value <= x
protected def <=(x: scala.Char): scala.Boolean = value <= x
protected def <=(x: scala.Int): scala.Boolean = value <= x
protected def <=(x: scala.Long): scala.Boolean = value <= x
protected def <=(x: scala.Float): scala.Boolean = value <= x
protected def <=(x: scala.Double): scala.Boolean = value <= x
protected def >(x: scala.Byte): scala.Boolean = value > x
protected def >(x: scala.Short): scala.Boolean = value > x
protected def >(x: scala.Char): scala.Boolean = value > x
protected def >(x: scala.Int): scala.Boolean = value > x
protected def >(x: scala.Long): scala.Boolean = value > x
protected def >(x: scala.Float): scala.Boolean = value > x
protected def >(x: scala.Double): scala.Boolean = value > x
protected def >=(x: scala.Byte): scala.Boolean = value >= x
protected def >=(x: scala.Short): scala.Boolean = value >= x
protected def >=(x: scala.Char): scala.Boolean = value >= x
protected def >=(x: scala.Int): scala.Boolean = value >= x
protected def >=(x: scala.Long): scala.Boolean = value >= x
protected def >=(x: scala.Float): scala.Boolean = value >= x
protected def >=(x: scala.Double): scala.Boolean = value >= x
protected def |(x: scala.Byte): scala.Int = value | x
protected def |(x: scala.Short): scala.Int = value | x
protected def |(x: scala.Char): scala.Int = value | x
protected def |(x: scala.Int): scala.Int = value | x
protected def |(x: scala.Long): scala.Long = value | x
protected def &(x: scala.Byte): scala.Int = value & x
protected def &(x: scala.Short): scala.Int = value & x
protected def &(x: scala.Char): scala.Int = value & x
protected def &(x: scala.Int): scala.Int = value & x
protected def &(x: scala.Long): scala.Long = value & x
protected def ^(x: scala.Byte): scala.Int = value ^ x
protected def ^(x: scala.Short): scala.Int = value ^ x
protected def ^(x: scala.Char): scala.Int = value ^ x
protected def ^(x: scala.Int): scala.Int = value ^ x
protected def ^(x: scala.Long): scala.Long = value ^ x
protected def +(x: scala.Byte): scala.Int = value + x
protected def +(x: scala.Short): scala.Int = value + x
protected def +(x: scala.Char): scala.Int = value + x
protected def +(x: scala.Int): scala.Int = value + x
protected def +(x: scala.Long): scala.Long = value + x
protected def +(x: scala.Float): scala.Float = value + x
protected def +(x: scala.Double): scala.Double = value + x
protected def -(x: scala.Byte): scala.Int = value - x
protected def -(x: scala.Short): scala.Int = value - x
protected def -(x: scala.Char): scala.Int = value - x
protected def -(x: scala.Int): scala.Int = value - x
protected def -(x: scala.Long): scala.Long = value - x
protected def -(x: scala.Float): scala.Float = value - x
protected def -(x: scala.Double): scala.Double = value - x
protected def *(x: scala.Byte): scala.Int = value * x
protected def *(x: scala.Short): scala.Int = value * x
protected def *(x: scala.Char): scala.Int = value * x
protected def *(x: scala.Int): scala.Int = value * x
protected def *(x: scala.Long): scala.Long = value * x
protected def *(x: scala.Float): scala.Float = value * x
protected def *(x: scala.Double): scala.Double = value * x
protected def /(x: scala.Byte): scala.Int = value / x
protected def /(x: scala.Short): scala.Int = value / x
protected def /(x: scala.Char): scala.Int = value / x
protected def /(x: scala.Int): scala.Int = value / x
protected def /(x: scala.Long): scala.Long = value / x
protected def /(x: scala.Float): scala.Float = value / x
protected def /(x: scala.Double): scala.Double = value / x
protected def %(x: scala.Byte): scala.Int = value % x
protected def %(x: scala.Short): scala.Int = value % x
protected def %(x: scala.Char): scala.Int = value % x
protected def %(x: scala.Int): scala.Int = value % x
protected def %(x: scala.Long): scala.Long = value % x
protected def %(x: scala.Float): scala.Float = value % x
protected def %(x: scala.Double): scala.Double = value % x
}
object Character {
final val TYPE = classOf[scala.Char]
final val MIN_VALUE = '\\u0000'
final val MAX_VALUE = '\\uffff'
final val SIZE = 16
def valueOf(charValue: scala.Char): Character = new Character(charValue)
/* These are supposed to be final vals of type Byte, but that's not possible.
* So we implement them as def's, which are binary compatible with final vals.
*/
def UNASSIGNED: scala.Byte = 0
def UPPERCASE_LETTER: scala.Byte = 1
def LOWERCASE_LETTER: scala.Byte = 2
def TITLECASE_LETTER: scala.Byte = 3
def MODIFIER_LETTER: scala.Byte = 4
def OTHER_LETTER: scala.Byte = 5
def NON_SPACING_MARK: scala.Byte = 6
def ENCLOSING_MARK: scala.Byte = 7
def COMBINING_SPACING_MARK: scala.Byte = 8
def DECIMAL_DIGIT_NUMBER: scala.Byte = 9
def LETTER_NUMBER: scala.Byte = 10
def OTHER_NUMBER: scala.Byte = 11
def SPACE_SEPARATOR: scala.Byte = 12
def LINE_SEPARATOR: scala.Byte = 13
def PARAGRAPH_SEPARATOR: scala.Byte = 14
def CONTROL: scala.Byte = 15
def FORMAT: scala.Byte = 16
def PRIVATE_USE: scala.Byte = 18
def SURROGATE: scala.Byte = 19
def DASH_PUNCTUATION: scala.Byte = 20
def START_PUNCTUATION: scala.Byte = 21
def END_PUNCTUATION: scala.Byte = 22
def CONNECTOR_PUNCTUATION: scala.Byte = 23
def OTHER_PUNCTUATION: scala.Byte = 24
def MATH_SYMBOL: scala.Byte = 25
def CURRENCY_SYMBOL: scala.Byte = 26
def MODIFIER_SYMBOL: scala.Byte = 27
def OTHER_SYMBOL: scala.Byte = 28
def INITIAL_QUOTE_PUNCTUATION: scala.Byte = 29
def FINAL_QUOTE_PUNCTUATION: scala.Byte = 30
final val MIN_RADIX = 2
final val MAX_RADIX = 36
final val MIN_HIGH_SURROGATE = '\\uD800'
final val MAX_HIGH_SURROGATE = '\\uDBFF'
final val MIN_LOW_SURROGATE = '\\uDC00'
final val MAX_LOW_SURROGATE = '\\uDFFF'
final val MIN_SURROGATE = MIN_HIGH_SURROGATE
final val MAX_SURROGATE = MAX_LOW_SURROGATE
final val MIN_CODE_POINT = 0
final val MAX_CODE_POINT = 0x10ffff
final val MIN_SUPPLEMENTARY_CODE_POINT = 0x10000
def getType(ch: scala.Char): Int = getType(ch.toInt)
def getType(codePoint: Int): Int = {
if (codePoint < 0) UNASSIGNED.toInt
else if (codePoint < 256) getTypeLT256(codePoint)
else getTypeGE256(codePoint)
}
@inline
private[this] def getTypeLT256(codePoint: Int): scala.Byte =
charTypesFirst256(codePoint)
private[this] def getTypeGE256(codePoint: Int): scala.Byte = {
// the idx is increased by 1 due to the differences in indexing
// between charTypeIndices and charType
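    // (Arrays.binarySearch returns the index of the key when found, and
    // (-(insertionPoint) - 1) otherwise; the +1 and Math.abs below map both
    // cases onto the charTypes entry for the range containing codePoint.)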
val idx = Arrays.binarySearch(charTypeIndices, codePoint) + 1
// in the case where idx is negative (-insertionPoint - 1)
charTypes(Math.abs(idx))
}
def digit(c: scala.Char, radix: Int): Int = {
if (radix > MAX_RADIX || radix < MIN_RADIX)
-1
else if (c >= '0' && c <= '9' && c - '0' < radix)
c - '0'
else if (c >= 'A' && c <= 'Z' && c - 'A' < radix - 10)
c - 'A' + 10
else if (c >= 'a' && c <= 'z' && c - 'a' < radix - 10)
c - 'a' + 10
else if (c >= '\\uFF21' && c <= '\\uFF3A' &&
c - '\\uFF21' < radix - 10)
c - '\\uFF21' + 10
else if (c >= '\\uFF41' && c <= '\\uFF5A' &&
c - '\\uFF41' < radix - 10)
c - '\\uFF41' + 10
else -1
}
// ported from https://github.com/gwtproject/gwt/blob/master/user/super/com/google/gwt/emul/java/lang/Character.java
def forDigit(digit: Int, radix: Int): Char = {
if (radix < MIN_RADIX || radix > MAX_RADIX || digit < 0 || digit >= radix) {
0
} else {
val overBaseTen = digit - 10
val result = if (overBaseTen < 0) '0' + digit else 'a' + overBaseTen
result.toChar
}
}
def isISOControl(c: scala.Char): scala.Boolean = isISOControl(c.toInt)
def isISOControl(codePoint: Int): scala.Boolean = {
(0x00 <= codePoint && codePoint <= 0x1F) || (0x7F <= codePoint && codePoint <= 0x9F)
}
@deprecated("Replaced by isWhitespace(char)", "")
def isSpace(c: scala.Char): scala.Boolean =
c == '\\t' || c == '\\n' || c == '\\f' || c == '\\r' || c == ' '
def isWhitespace(c: scala.Char): scala.Boolean =
isWhitespace(c.toInt)
def isWhitespace(codePoint: scala.Int): scala.Boolean = {
def isSeparator(tpe: Int): scala.Boolean =
tpe == SPACE_SEPARATOR || tpe == LINE_SEPARATOR || tpe == PARAGRAPH_SEPARATOR
if (codePoint < 256) {
codePoint == '\\t' || codePoint == '\\n' || codePoint == '\\u000B' ||
codePoint == '\\f' || codePoint == '\\r' ||
('\\u001C' <= codePoint && codePoint <= '\\u001F') ||
(codePoint != '\\u00A0' && isSeparator(getTypeLT256(codePoint)))
} else {
(codePoint != '\\u2007' && codePoint != '\\u202F') &&
isSeparator(getTypeGE256(codePoint))
}
}
def isSpaceChar(ch: scala.Char): scala.Boolean =
isSpaceChar(ch.toInt)
def isSpaceChar(codePoint: Int): scala.Boolean =
isSpaceCharImpl(getType(codePoint))
@inline private[this] def isSpaceCharImpl(tpe: Int): scala.Boolean =
tpe == SPACE_SEPARATOR || tpe == LINE_SEPARATOR || tpe == PARAGRAPH_SEPARATOR
// --- UTF-16 surrogate pairs handling ---
// See http://en.wikipedia.org/wiki/UTF-16
private final val HighSurrogateMask = 0xfc00 // 111111 00 00000000
private final val HighSurrogateID = 0xd800 // 110110 00 00000000
private final val LowSurrogateMask = 0xfc00 // 111111 00 00000000
private final val LowSurrogateID = 0xdc00 // 110111 00 00000000
private final val SurrogateUsefulPartMask = 0x03ff // 000000 11 11111111
@inline def isHighSurrogate(c: scala.Char): scala.Boolean =
(c & HighSurrogateMask) == HighSurrogateID
@inline def isLowSurrogate(c: scala.Char): scala.Boolean =
(c & LowSurrogateMask) == LowSurrogateID
@inline def isSurrogatePair(high: scala.Char, low: scala.Char): scala.Boolean =
isHighSurrogate(high) && isLowSurrogate(low)
@inline def toCodePoint(high: scala.Char, low: scala.Char): Int =
((high & SurrogateUsefulPartMask) << 10) + (low & SurrogateUsefulPartMask) + 0x10000
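  // For example, the surrogate pair (0xD835, 0xDD0A) decodes to the
  // supplementary code point 0x1D50A: (0x35 << 10) + 0x10A + 0x10000.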
// --- End of UTF-16 surrogate pairs handling ---
def isLowerCase(c: scala.Char): scala.Boolean =
isLowerCase(c.toInt)
def isLowerCase(c: Int): scala.Boolean = {
if (c < 256)
c == '\\u00AA' || c == '\\u00BA' || getTypeLT256(c) == LOWERCASE_LETTER
else
isLowerCaseGE256(c)
}
private[this] def isLowerCaseGE256(c: Int): scala.Boolean = {
('\\u02B0' <= c && c <= '\\u02B8') || ('\\u02C0' <= c && c <= '\\u02C1') ||
('\\u02E0' <= c && c <= '\\u02E4') || c == '\\u0345' || c == '\\u037A' ||
('\\u1D2C' <= c && c <= '\\u1D6A') || c == '\\u1D78' ||
('\\u1D9B' <= c && c <= '\\u1DBF') || c == '\\u2071' || c == '\\u207F' ||
('\\u2090' <= c && c <= '\\u209C') || ('\\u2170' <= c && c <= '\\u217F') ||
('\\u24D0' <= c && c <= '\\u24E9') || ('\\u2C7C' <= c && c <= '\\u2C7D') ||
c == '\\uA770' || ('\\uA7F8' <= c && c <= '\\uA7F9') ||
getTypeGE256(c) == LOWERCASE_LETTER
}
def isUpperCase(c: scala.Char): scala.Boolean =
isUpperCase(c.toInt)
def isUpperCase(c: Int): scala.Boolean = {
('\\u2160' <= c && c <= '\\u216F') || ('\\u24B6' <= c && c <= '\\u24CF') ||
getType(c) == UPPERCASE_LETTER
}
@inline def isValidCodePoint(codePoint: Int): scala.Boolean =
codePoint >= MIN_CODE_POINT && codePoint <= MAX_CODE_POINT
@inline def isBmpCodePoint(codePoint: Int): scala.Boolean =
codePoint >= MIN_VALUE && codePoint <= MAX_VALUE
@inline def isSupplementaryCodePoint(codePoint: Int): scala.Boolean =
codePoint >= MIN_SUPPLEMENTARY_CODE_POINT && codePoint <= MAX_CODE_POINT
def isTitleCase(c: scala.Char): scala.Boolean =
isTitleCase(c.toInt)
def isTitleCase(cp: Int): scala.Boolean =
if (cp < 256) false
else isTitleCaseImpl(getTypeGE256(cp))
@inline private[this] def isTitleCaseImpl(tpe: Int): scala.Boolean =
tpe == TITLECASE_LETTER
def isDigit(c: scala.Char): scala.Boolean =
isDigit(c.toInt)
def isDigit(cp: Int): scala.Boolean =
if (cp < 256) '0' <= cp && cp <= '9'
else isDigitImpl(getTypeGE256(cp))
@inline private[this] def isDigitImpl(tpe: Int): scala.Boolean =
tpe == DECIMAL_DIGIT_NUMBER
def isDefined(c: scala.Char): scala.Boolean =
isDefined(c.toInt)
def isDefined(c: scala.Int): scala.Boolean = {
if (c < 0) false
else if (c < 888) true
else getTypeGE256(c) != UNASSIGNED
}
def isLetter(c: scala.Char): scala.Boolean = isLetter(c.toInt)
def isLetter(cp: Int): scala.Boolean = isLetterImpl(getType(cp))
@inline private[this] def isLetterImpl(tpe: Int): scala.Boolean = {
tpe == UPPERCASE_LETTER || tpe == LOWERCASE_LETTER ||
tpe == TITLECASE_LETTER || tpe == MODIFIER_LETTER || tpe == OTHER_LETTER
}
def isLetterOrDigit(c: scala.Char): scala.Boolean =
isLetterOrDigit(c.toInt)
def isLetterOrDigit(cp: Int): scala.Boolean =
isLetterOrDigitImpl(getType(cp))
@inline private[this] def isLetterOrDigitImpl(tpe: Int): scala.Boolean =
isDigitImpl(tpe) || isLetterImpl(tpe)
def isJavaLetter(ch: scala.Char): scala.Boolean = isJavaLetterImpl(getType(ch))
@inline private[this] def isJavaLetterImpl(tpe: Int): scala.Boolean = {
isLetterImpl(tpe) || tpe == LETTER_NUMBER || tpe == CURRENCY_SYMBOL ||
tpe == CONNECTOR_PUNCTUATION
}
def isJavaLetterOrDigit(ch: scala.Char): scala.Boolean =
isJavaLetterOrDigitImpl(ch, getType(ch))
@inline private[this] def isJavaLetterOrDigitImpl(codePoint: Int,
tpe: Int): scala.Boolean = {
isJavaLetterImpl(tpe) || tpe == COMBINING_SPACING_MARK ||
tpe == NON_SPACING_MARK || isIdentifierIgnorableImpl(codePoint, tpe)
}
def isAlphabetic(codePoint: Int): scala.Boolean = {
val tpe = getType(codePoint)
tpe == UPPERCASE_LETTER || tpe == LOWERCASE_LETTER ||
tpe == TITLECASE_LETTER || tpe == MODIFIER_LETTER ||
tpe == OTHER_LETTER || tpe == LETTER_NUMBER
}
def isIdeographic(c: Int): scala.Boolean = {
(12294 <= c && c <= 12295) || (12321 <= c && c <= 12329) ||
(12344 <= c && c <= 12346) || (13312 <= c && c <= 19893) ||
(19968 <= c && c <= 40908) || (63744 <= c && c <= 64109) ||
(64112 <= c && c <= 64217) || (131072 <= c && c <= 173782) ||
(173824 <= c && c <= 177972) || (177984 <= c && c <= 178205) ||
(194560 <= c && c <= 195101)
}
def isJavaIdentifierStart(ch: scala.Char): scala.Boolean =
isJavaIdentifierStart(ch.toInt)
def isJavaIdentifierStart(codePoint: Int): scala.Boolean =
isJavaIdentifierStartImpl(getType(codePoint))
@inline
private[this] def isJavaIdentifierStartImpl(tpe: Int): scala.Boolean = {
isLetterImpl(tpe) || tpe == LETTER_NUMBER || tpe == CURRENCY_SYMBOL ||
tpe == CONNECTOR_PUNCTUATION
}
def isJavaIdentifierPart(ch: scala.Char): scala.Boolean =
isJavaIdentifierPart(ch.toInt)
def isJavaIdentifierPart(codePoint: Int): scala.Boolean =
isJavaIdentifierPartImpl(codePoint, getType(codePoint))
@inline private[this] def isJavaIdentifierPartImpl(codePoint: Int,
tpe: Int): scala.Boolean = {
isLetterImpl(tpe) || tpe == CURRENCY_SYMBOL ||
tpe == CONNECTOR_PUNCTUATION || tpe == DECIMAL_DIGIT_NUMBER ||
tpe == LETTER_NUMBER || tpe == COMBINING_SPACING_MARK ||
tpe == NON_SPACING_MARK || isIdentifierIgnorableImpl(codePoint, tpe)
}
def isUnicodeIdentifierStart(ch: scala.Char): scala.Boolean =
isUnicodeIdentifierStart(ch.toInt)
def isUnicodeIdentifierStart(codePoint: Int): scala.Boolean =
isUnicodeIdentifierStartImpl(getType(codePoint))
@inline
private[this] def isUnicodeIdentifierStartImpl(tpe: Int): scala.Boolean =
isLetterImpl(tpe) || tpe == LETTER_NUMBER
def isUnicodeIdentifierPart(ch: scala.Char): scala.Boolean =
isUnicodeIdentifierPart(ch.toInt)
def isUnicodeIdentifierPart(codePoint: Int): scala.Boolean =
isUnicodeIdentifierPartImpl(codePoint, getType(codePoint))
def isUnicodeIdentifierPartImpl(codePoint: Int,
tpe: Int): scala.Boolean = {
tpe == CONNECTOR_PUNCTUATION || tpe == DECIMAL_DIGIT_NUMBER ||
tpe == COMBINING_SPACING_MARK || tpe == NON_SPACING_MARK ||
isUnicodeIdentifierStartImpl(tpe) ||
isIdentifierIgnorableImpl(codePoint, tpe)
}
def isIdentifierIgnorable(c: scala.Char): scala.Boolean =
isIdentifierIgnorable(c.toInt)
def isIdentifierIgnorable(codePoint: Int): scala.Boolean =
isIdentifierIgnorableImpl(codePoint, getType(codePoint))
@inline private[this] def isIdentifierIgnorableImpl(codePoint: Int,
tpe: Int): scala.Boolean = {
('\\u0000' <= codePoint && codePoint <= '\\u0008') ||
('\\u000E' <= codePoint && codePoint <= '\\u001B') ||
('\\u007F' <= codePoint && codePoint <= '\\u009F') ||
tpe == FORMAT
}
def isMirrored(c: scala.Char): scala.Boolean =
isMirrored(c.toInt)
def isMirrored(codePoint: Int): scala.Boolean = {
val idx = Arrays.binarySearch(isMirroredIndices, codePoint) + 1
(Math.abs(idx) & 1) != 0
}
//def getDirectionality(c: scala.Char): scala.Byte
/* Conversions */
def toUpperCase(c: scala.Char): scala.Char = c.toString.toUpperCase()(0)
def toLowerCase(c: scala.Char): scala.Char = c.toString.toLowerCase()(0)
//def toTitleCase(c: scala.Char): scala.Char
//def getNumericValue(c: scala.Char): Int
/* Misc */
//def reverseBytes(ch: scala.Char): scala.Char
def toChars(codePoint: Int): Array[Char] = {
if (!isValidCodePoint(codePoint))
throw new IllegalArgumentException()
if (isSupplementaryCodePoint(codePoint)) {
val cpPrime = codePoint - 0x10000
val high = 0xD800 | ((cpPrime >> 10) & 0x3FF)
val low = 0xDC00 | (cpPrime & 0x3FF)
Array(high.toChar, low.toChar)
} else {
Array(codePoint.toChar)
}
}
@inline def toString(c: scala.Char): String =
js.Dynamic.global.String.fromCharCode(c.toInt).asInstanceOf[String]
@inline def compare(x: scala.Char, y: scala.Char): Int =
x - y
// Based on Unicode 7.0.0
// Types of characters from 0 to 255
private[this] lazy val charTypesFirst256 = Array[scala.Byte](15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 12, 24, 24, 24, 26, 24, 24, 24,
21, 22, 24, 25, 24, 20, 24, 24, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 24, 24, 25,
25, 25, 24, 24, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 21, 24, 22, 27, 23, 27, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 21, 25, 22, 25, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 12, 24, 26, 26, 26,
26, 28, 24, 27, 28, 5, 29, 25, 16, 28, 27, 28, 25, 11, 11, 27, 2, 24, 24,
27, 11, 5, 30, 11, 11, 11, 24, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 25, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 25, 2, 2, 2, 2, 2, 2,
2, 2)
  // Character type data by ranges of types
  // charTypeIndices: contains the code point at which each range starts
  //   (equivalently, one past the end of the previous range)
  // charTypes: contains the character type of each of those ranges
  // note that charTypeIndices.length + 1 == charTypes.length and that the
  // range 0 to 255 is not included because it is covered by charTypesFirst256
  // They were generated with the following script:
//
// val indicesAndTypes = (256 to Character.MAX_CODE_POINT)
// .map(i => (i, Character.getType(i)))
// .foldLeft[List[(Int, Int)]](Nil) {
// case (x :: xs, elem) if x._2 == elem._2 => x :: xs
// case (prevs, elem) => elem :: prevs
// }.reverse
// val charTypeIndices = indicesAndTypes.map(_._1).tail
// val charTypeIndicesDeltas = charTypeIndices.zip(0 :: charTypeIndices.init)
// .map(tup => tup._1 - tup._2)
// val charTypes = indicesAndTypes.map(_._2)
// println(charTypeIndicesDeltas.mkString(
// "charTypeIndices: val deltas = Array[Int](", ", ", ")"))
// println(charTypes.mkString("val charTypes = Array[scala.Byte](", ", ", ")"))
//
private[this] lazy val charTypeIndices = {
val deltas = Array[Int](257, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 3, 2, 1, 1, 1, 2, 1, 3,
2, 4, 1, 2, 1, 3, 3, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1, 1, 2, 1, 1, 2, 1,
3, 1, 1, 1, 2, 2, 1, 1, 3, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 7, 2, 1, 2, 2, 1, 1, 4, 1, 1, 1, 1, 1, 1, 1, 1, 69, 1, 27, 18,
4, 12, 14, 5, 7, 1, 1, 1, 17, 112, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 3, 1,
5, 2, 1, 1, 3, 1, 1, 1, 2, 1, 17, 1, 9, 35, 1, 2, 3, 3, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 1, 1, 1,
1, 2, 2, 51, 48, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 2, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 38, 2, 1, 6, 1, 39, 1, 1,
1, 4, 1, 1, 45, 1, 1, 1, 2, 1, 2, 1, 1, 8, 27, 5, 3, 2, 11, 5, 1, 3,
2, 1, 2, 2, 11, 1, 2, 2, 32, 1, 10, 21, 10, 4, 2, 1, 99, 1, 1, 7, 1,
1, 6, 2, 2, 1, 4, 2, 10, 3, 2, 1, 14, 1, 1, 1, 1, 30, 27, 2, 89, 11,
1, 14, 10, 33, 9, 2, 1, 3, 1, 5, 22, 4, 1, 9, 1, 3, 1, 5, 2, 15, 1,
25, 3, 2, 1, 65, 1, 1, 11, 55, 27, 1, 3, 1, 54, 1, 1, 1, 1, 3, 8, 4,
1, 2, 1, 7, 10, 2, 2, 10, 1, 1, 6, 1, 7, 1, 1, 2, 1, 8, 2, 2, 2, 22,
1, 7, 1, 1, 3, 4, 2, 1, 1, 3, 4, 2, 2, 2, 2, 1, 1, 8, 1, 4, 2, 1, 3,
2, 2, 10, 2, 2, 6, 1, 1, 5, 2, 1, 1, 6, 4, 2, 2, 22, 1, 7, 1, 2, 1, 2,
1, 2, 2, 1, 1, 3, 2, 4, 2, 2, 3, 3, 1, 7, 4, 1, 1, 7, 10, 2, 3, 1, 11,
2, 1, 1, 9, 1, 3, 1, 22, 1, 7, 1, 2, 1, 5, 2, 1, 1, 3, 5, 1, 2, 1, 1,
2, 1, 2, 1, 15, 2, 2, 2, 10, 1, 1, 15, 1, 2, 1, 8, 2, 2, 2, 22, 1, 7,
1, 2, 1, 5, 2, 1, 1, 1, 1, 1, 4, 2, 2, 2, 2, 1, 8, 1, 1, 4, 2, 1, 3,
2, 2, 10, 1, 1, 6, 10, 1, 1, 1, 6, 3, 3, 1, 4, 3, 2, 1, 1, 1, 2, 3, 2,
3, 3, 3, 12, 4, 2, 1, 2, 3, 3, 1, 3, 1, 2, 1, 6, 1, 14, 10, 3, 6, 1,
1, 6, 3, 1, 8, 1, 3, 1, 23, 1, 10, 1, 5, 3, 1, 3, 4, 1, 3, 1, 4, 7, 2,
1, 2, 6, 2, 2, 2, 10, 8, 7, 1, 2, 2, 1, 8, 1, 3, 1, 23, 1, 10, 1, 5,
2, 1, 1, 1, 1, 5, 1, 1, 2, 1, 2, 2, 7, 2, 7, 1, 1, 2, 2, 2, 10, 1, 2,
15, 2, 1, 8, 1, 3, 1, 41, 2, 1, 3, 4, 1, 3, 1, 3, 1, 1, 8, 1, 8, 2, 2,
2, 10, 6, 3, 1, 6, 2, 2, 1, 18, 3, 24, 1, 9, 1, 1, 2, 7, 3, 1, 4, 3,
3, 1, 1, 1, 8, 18, 2, 1, 12, 48, 1, 2, 7, 4, 1, 6, 1, 8, 1, 10, 2, 37,
2, 1, 1, 2, 2, 1, 1, 2, 1, 6, 4, 1, 7, 1, 3, 1, 1, 1, 1, 2, 2, 1, 4,
1, 2, 6, 1, 2, 1, 2, 5, 1, 1, 1, 6, 2, 10, 2, 4, 32, 1, 3, 15, 1, 1,
3, 2, 6, 10, 10, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 8, 1, 36, 4, 14, 1,
5, 1, 2, 5, 11, 1, 36, 1, 8, 1, 6, 1, 2, 5, 4, 2, 37, 43, 2, 4, 1, 6,
1, 2, 2, 2, 1, 10, 6, 6, 2, 2, 4, 3, 1, 3, 2, 7, 3, 4, 13, 1, 2, 2, 6,
1, 1, 1, 10, 3, 1, 2, 38, 1, 1, 5, 1, 2, 43, 1, 1, 332, 1, 4, 2, 7, 1,
1, 1, 4, 2, 41, 1, 4, 2, 33, 1, 4, 2, 7, 1, 1, 1, 4, 2, 15, 1, 57, 1,
4, 2, 67, 2, 3, 9, 20, 3, 16, 10, 6, 85, 11, 1, 620, 2, 17, 1, 26, 1,
1, 3, 75, 3, 3, 15, 13, 1, 4, 3, 11, 18, 3, 2, 9, 18, 2, 12, 13, 1, 3,
1, 2, 12, 52, 2, 1, 7, 8, 1, 2, 11, 3, 1, 3, 1, 1, 1, 2, 10, 6, 10, 6,
6, 1, 4, 3, 1, 1, 10, 6, 35, 1, 52, 8, 41, 1, 1, 5, 70, 10, 29, 3, 3,
4, 2, 3, 4, 2, 1, 6, 3, 4, 1, 3, 2, 10, 30, 2, 5, 11, 44, 4, 17, 7, 2,
6, 10, 1, 3, 34, 23, 2, 3, 2, 2, 53, 1, 1, 1, 7, 1, 1, 1, 1, 2, 8, 6,
10, 2, 1, 10, 6, 10, 6, 7, 1, 6, 82, 4, 1, 47, 1, 1, 5, 1, 1, 5, 1, 2,
7, 4, 10, 7, 10, 9, 9, 3, 2, 1, 30, 1, 4, 2, 2, 1, 1, 2, 2, 10, 44, 1,
1, 2, 3, 1, 1, 3, 2, 8, 4, 36, 8, 8, 2, 2, 3, 5, 10, 3, 3, 10, 30, 6,
2, 64, 8, 8, 3, 1, 13, 1, 7, 4, 1, 4, 2, 1, 2, 9, 44, 63, 13, 1, 34,
37, 39, 21, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9,
8, 6, 2, 6, 2, 8, 8, 8, 8, 6, 2, 6, 2, 8, 1, 1, 1, 1, 1, 1, 1, 1, 8,
8, 14, 2, 8, 8, 8, 8, 8, 8, 5, 1, 2, 4, 1, 1, 1, 3, 3, 1, 2, 4, 1, 3,
4, 2, 2, 4, 1, 3, 8, 5, 3, 2, 3, 1, 2, 4, 1, 2, 1, 11, 5, 6, 2, 1, 1,
1, 2, 1, 1, 1, 8, 1, 1, 5, 1, 9, 1, 1, 4, 2, 3, 1, 1, 1, 11, 1, 1, 1,
10, 1, 5, 5, 6, 1, 1, 2, 6, 3, 1, 1, 1, 10, 3, 1, 1, 1, 13, 3, 27, 21,
13, 4, 1, 3, 12, 15, 2, 1, 4, 1, 2, 1, 3, 2, 3, 1, 1, 1, 2, 1, 5, 6,
1, 1, 1, 1, 1, 1, 4, 1, 1, 4, 1, 4, 1, 2, 2, 2, 5, 1, 4, 1, 1, 2, 1,
1, 16, 35, 1, 1, 4, 1, 6, 5, 5, 2, 4, 1, 2, 1, 2, 1, 7, 1, 31, 2, 2,
1, 1, 1, 31, 268, 8, 4, 20, 2, 7, 1, 1, 81, 1, 30, 25, 40, 6, 18, 12,
39, 25, 11, 21, 60, 78, 22, 183, 1, 9, 1, 54, 8, 111, 1, 144, 1, 103,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 30, 44, 5, 1, 1, 31, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 16, 256, 131, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 63, 1, 1, 1, 1, 32, 1, 1, 258, 48,
21, 2, 6, 3, 10, 166, 47, 1, 47, 1, 1, 1, 3, 2, 1, 1, 1, 1, 1, 1, 4,
1, 1, 2, 1, 6, 2, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 6, 1, 1, 1, 1, 3, 1, 1, 5,
4, 1, 2, 38, 1, 1, 5, 1, 2, 56, 7, 1, 1, 14, 1, 23, 9, 7, 1, 7, 1, 7,
1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 32, 2, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1,
9, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 10, 2, 68,
26, 1, 89, 12, 214, 26, 12, 4, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 9, 4, 2, 1, 5, 2, 3,
1, 1, 1, 2, 1, 86, 2, 2, 2, 2, 1, 1, 90, 1, 3, 1, 5, 41, 3, 94, 1, 2,
4, 10, 27, 5, 36, 12, 16, 31, 1, 10, 30, 8, 1, 15, 32, 10, 39, 15, 63,
1, 256, 6582, 10, 64, 20941, 51, 21, 1, 1143, 3, 55, 9, 40, 6, 2, 268,
1, 3, 16, 10, 2, 20, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 10, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 7, 1, 70, 10, 2, 6, 8,
23, 9, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 8, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 77, 2, 1, 7, 1, 3, 1, 4, 1, 23, 2, 2, 1, 4, 4, 6, 2, 1, 1, 6,
52, 4, 8, 2, 50, 16, 1, 9, 2, 10, 6, 18, 6, 3, 1, 4, 10, 28, 8, 2, 23,
11, 2, 11, 1, 29, 3, 3, 1, 47, 1, 2, 4, 2, 1, 4, 13, 1, 1, 10, 4, 2,
32, 41, 6, 2, 2, 2, 2, 9, 3, 1, 8, 1, 1, 2, 10, 2, 4, 16, 1, 6, 3, 1,
1, 4, 48, 1, 1, 3, 2, 2, 5, 2, 1, 1, 1, 24, 2, 1, 2, 11, 1, 2, 2, 2,
1, 2, 1, 1, 10, 6, 2, 6, 2, 6, 9, 7, 1, 7, 145, 35, 2, 1, 2, 1, 2, 1,
1, 1, 2, 10, 6, 11172, 12, 23, 4, 49, 4, 2048, 6400, 366, 2, 106, 38,
7, 12, 5, 5, 1, 1, 10, 1, 13, 1, 5, 1, 1, 1, 2, 1, 2, 1, 108, 16, 17,
363, 1, 1, 16, 64, 2, 54, 40, 12, 1, 1, 2, 16, 7, 1, 1, 1, 6, 7, 9, 1,
2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 4, 3,
3, 1, 4, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 3, 1, 1, 1, 2, 4, 5, 1, 135, 2,
1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 2, 10, 2, 3, 2, 26, 1, 1, 1, 1, 1, 1,
26, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 10, 1, 45, 2, 31, 3, 6, 2, 6, 2, 6,
2, 3, 3, 2, 1, 1, 1, 2, 1, 1, 4, 2, 10, 3, 2, 2, 12, 1, 26, 1, 19, 1,
2, 1, 15, 2, 14, 34, 123, 5, 3, 4, 45, 3, 9, 53, 4, 17, 1, 5, 12, 52,
45, 1, 130, 29, 3, 49, 47, 31, 1, 4, 12, 17, 1, 8, 1, 53, 30, 1, 1,
36, 4, 8, 1, 5, 42, 40, 40, 78, 2, 10, 854, 6, 2, 1, 1, 44, 1, 2, 3,
1, 2, 23, 1, 1, 8, 160, 22, 6, 3, 1, 26, 5, 1, 64, 56, 6, 2, 64, 1, 3,
1, 2, 5, 4, 4, 1, 3, 1, 27, 4, 3, 4, 1, 8, 8, 9, 7, 29, 2, 1, 128, 54,
3, 7, 22, 2, 8, 19, 5, 8, 128, 73, 535, 31, 385, 1, 1, 1, 53, 15, 7,
4, 20, 10, 16, 2, 1, 45, 3, 4, 2, 2, 2, 1, 4, 14, 25, 7, 10, 6, 3, 36,
5, 1, 8, 1, 10, 4, 60, 2, 1, 48, 3, 9, 2, 4, 4, 7, 10, 1190, 43, 1, 1,
1, 2, 6, 1, 1, 8, 10, 2358, 879, 145, 99, 13, 4, 2956, 1071, 13265,
569, 1223, 69, 11, 1, 46, 16, 4, 13, 16480, 2, 8190, 246, 10, 39, 2,
60, 2, 3, 3, 6, 8, 8, 2, 7, 30, 4, 48, 34, 66, 3, 1, 186, 87, 9, 18,
142, 26, 26, 26, 7, 1, 18, 26, 26, 1, 1, 2, 2, 1, 2, 2, 2, 4, 1, 8, 4,
1, 1, 1, 7, 1, 11, 26, 26, 2, 1, 4, 2, 8, 1, 7, 1, 26, 2, 1, 4, 1, 5,
1, 1, 3, 7, 1, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 28, 2,
25, 1, 25, 1, 6, 25, 1, 25, 1, 6, 25, 1, 25, 1, 6, 25, 1, 25, 1, 6,
25, 1, 25, 1, 6, 1, 1, 2, 50, 5632, 4, 1, 27, 1, 2, 1, 1, 2, 1, 1, 10,
1, 4, 1, 1, 1, 1, 6, 1, 4, 1, 1, 1, 1, 1, 1, 3, 1, 2, 1, 1, 2, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 2, 4, 1, 7, 1, 4, 1, 4, 1, 1, 1, 10,
1, 17, 5, 3, 1, 5, 1, 17, 52, 2, 270, 44, 4, 100, 12, 15, 2, 14, 2,
15, 1, 15, 32, 11, 5, 31, 1, 60, 4, 43, 75, 29, 13, 43, 5, 9, 7, 2,
174, 33, 15, 6, 1, 70, 3, 20, 12, 37, 1, 5, 21, 17, 15, 63, 1, 1, 1,
182, 1, 4, 3, 62, 2, 4, 12, 24, 147, 70, 4, 11, 48, 70, 58, 116, 2188,
42711, 41, 4149, 11, 222, 16354, 542, 722403, 1, 30, 96, 128, 240,
65040, 65534, 2, 65534)
uncompressDeltas(deltas)
}
private[this] lazy val charTypes = Array[scala.Byte](1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 5, 1, 2, 5, 1, 3, 2,
1, 3, 2, 1, 3, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 5, 2, 4, 27, 4, 27, 4, 27, 4, 27, 4, 27, 6, 1, 2, 1,
2, 4, 27, 1, 2, 0, 4, 2, 24, 0, 27, 1, 24, 1, 0, 1, 0, 1, 2, 1, 0, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 25, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 28, 6, 7, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 0, 1, 0, 4, 24, 0, 2, 0, 24, 20, 0, 26, 0, 6, 20, 6, 24, 6, 24, 6,
24, 6, 0, 5, 0, 5, 24, 0, 16, 0, 25, 24, 26, 24, 28, 6, 24, 0, 24, 5,
4, 5, 6, 9, 24, 5, 6, 5, 24, 5, 6, 16, 28, 6, 4, 6, 28, 6, 5, 9, 5,
28, 5, 24, 0, 16, 5, 6, 5, 6, 0, 5, 6, 5, 0, 9, 5, 6, 4, 28, 24, 4, 0,
5, 6, 4, 6, 4, 6, 4, 6, 0, 24, 0, 5, 6, 0, 24, 0, 5, 0, 5, 0, 6, 0, 6,
8, 5, 6, 8, 6, 5, 8, 6, 8, 6, 8, 5, 6, 5, 6, 24, 9, 24, 4, 5, 0, 5, 0,
6, 8, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 6, 5, 8, 6, 0, 8, 0, 8,
6, 5, 0, 8, 0, 5, 0, 5, 6, 0, 9, 5, 26, 11, 28, 26, 0, 6, 8, 0, 5, 0,
5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 6, 0, 8, 6, 0, 6, 0, 6, 0, 6, 0,
5, 0, 5, 0, 9, 6, 5, 6, 0, 6, 8, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
0, 6, 5, 8, 6, 0, 6, 8, 0, 8, 6, 0, 5, 0, 5, 6, 0, 9, 24, 26, 0, 6, 8,
0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 6, 5, 8, 6, 8, 6, 0, 8, 0, 8,
6, 0, 6, 8, 0, 5, 0, 5, 6, 0, 9, 28, 5, 11, 0, 6, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 8, 6, 8, 0, 8, 0, 8, 6, 0, 5,
0, 8, 0, 9, 11, 28, 26, 28, 0, 8, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
6, 8, 0, 6, 0, 6, 0, 6, 0, 5, 0, 5, 6, 0, 9, 0, 11, 28, 0, 8, 0, 5, 0,
5, 0, 5, 0, 5, 0, 5, 0, 6, 5, 8, 6, 8, 0, 6, 8, 0, 8, 6, 0, 8, 0, 5,
0, 5, 6, 0, 9, 0, 5, 0, 8, 0, 5, 0, 5, 0, 5, 0, 5, 8, 6, 0, 8, 0, 8,
6, 5, 0, 8, 0, 5, 6, 0, 9, 11, 0, 28, 5, 0, 8, 0, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 6, 0, 8, 6, 0, 6, 0, 8, 0, 8, 24, 0, 5, 6, 5, 6, 0, 26, 5, 4,
6, 24, 9, 24, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0,
5, 0, 5, 0, 5, 6, 5, 6, 0, 6, 5, 0, 5, 0, 4, 0, 6, 0, 9, 0, 5, 0, 5,
28, 24, 28, 24, 28, 6, 28, 9, 11, 28, 6, 28, 6, 28, 6, 21, 22, 21, 22,
8, 5, 0, 5, 0, 6, 8, 6, 24, 6, 5, 6, 0, 6, 0, 28, 6, 28, 0, 28, 24,
28, 24, 0, 5, 8, 6, 8, 6, 8, 6, 8, 6, 5, 9, 24, 5, 8, 6, 5, 6, 5, 8,
5, 8, 5, 6, 5, 6, 8, 6, 8, 6, 5, 8, 9, 8, 6, 28, 1, 0, 1, 0, 1, 0, 5,
24, 4, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 6, 24, 11, 0, 5, 28, 0, 5, 0, 20, 5,
24, 5, 12, 5, 21, 22, 0, 5, 24, 10, 0, 5, 0, 5, 6, 0, 5, 6, 24, 0, 5,
6, 0, 5, 0, 5, 0, 6, 0, 5, 6, 8, 6, 8, 6, 8, 6, 24, 4, 24, 26, 5, 6,
0, 9, 0, 11, 0, 24, 20, 24, 6, 12, 0, 9, 0, 5, 4, 5, 0, 5, 6, 5, 0, 5,
0, 5, 0, 6, 8, 6, 8, 0, 8, 6, 8, 6, 0, 28, 0, 24, 9, 5, 0, 5, 0, 5, 0,
8, 5, 8, 0, 9, 11, 0, 28, 5, 6, 8, 0, 24, 5, 8, 6, 8, 6, 0, 6, 8, 6,
8, 6, 8, 6, 0, 6, 9, 0, 9, 0, 24, 4, 24, 0, 6, 8, 5, 6, 8, 6, 8, 6, 8,
6, 8, 5, 0, 9, 24, 28, 6, 28, 0, 6, 8, 5, 8, 6, 8, 6, 8, 6, 8, 5, 9,
5, 6, 8, 6, 8, 6, 8, 6, 8, 0, 24, 5, 8, 6, 8, 6, 0, 24, 9, 0, 5, 9, 5,
4, 24, 0, 24, 0, 6, 24, 6, 8, 6, 5, 6, 5, 8, 6, 5, 0, 2, 4, 2, 4, 2,
4, 6, 0, 6, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 0, 1, 0, 2, 1, 2, 1, 2, 0, 1, 0, 2, 0, 1, 0, 1, 0, 1, 0, 1, 2, 1,
2, 0, 2, 3, 2, 3, 2, 3, 2, 0, 2, 1, 3, 27, 2, 27, 2, 0, 2, 1, 3, 27,
2, 0, 2, 1, 0, 27, 2, 1, 27, 0, 2, 0, 2, 1, 3, 27, 0, 12, 16, 20, 24,
29, 30, 21, 29, 30, 21, 29, 24, 13, 14, 16, 12, 24, 29, 30, 24, 23,
24, 25, 21, 22, 24, 25, 24, 23, 24, 12, 16, 0, 16, 11, 4, 0, 11, 25,
21, 22, 4, 11, 25, 21, 22, 0, 4, 0, 26, 0, 6, 7, 6, 7, 6, 0, 28, 1,
28, 1, 28, 2, 1, 2, 1, 2, 28, 1, 28, 25, 1, 28, 1, 28, 1, 28, 1, 28,
1, 28, 2, 1, 2, 5, 2, 28, 2, 1, 25, 1, 2, 28, 25, 28, 2, 28, 11, 10,
1, 2, 10, 11, 0, 25, 28, 25, 28, 25, 28, 25, 28, 25, 28, 25, 28, 25,
28, 25, 28, 25, 28, 25, 28, 25, 28, 25, 28, 21, 22, 28, 25, 28, 25,
28, 25, 28, 0, 28, 0, 28, 0, 11, 28, 11, 28, 25, 28, 25, 28, 25, 28,
25, 28, 0, 28, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22,
11, 28, 25, 21, 22, 25, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 25,
28, 25, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21,
22, 21, 22, 21, 22, 21, 22, 25, 21, 22, 21, 22, 25, 21, 22, 25, 28,
25, 28, 25, 0, 28, 0, 1, 0, 2, 0, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 4, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 28, 1, 2, 1, 2, 6, 1, 2, 0, 24,
11, 24, 2, 0, 2, 0, 2, 0, 5, 0, 4, 24, 0, 6, 5, 0, 5, 0, 5, 0, 5, 0,
5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 6, 24, 29, 30, 29, 30, 24, 29, 30, 24,
29, 30, 24, 20, 24, 20, 24, 29, 30, 24, 29, 30, 21, 22, 21, 22, 21,
22, 21, 22, 24, 4, 24, 20, 0, 28, 0, 28, 0, 28, 0, 28, 0, 12, 24, 28,
4, 5, 10, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 28, 21, 22, 21, 22,
21, 22, 21, 22, 20, 21, 22, 28, 10, 6, 8, 20, 4, 28, 10, 4, 5, 24, 28,
0, 5, 0, 6, 27, 4, 5, 20, 5, 24, 4, 5, 0, 5, 0, 5, 0, 28, 11, 28, 5,
0, 28, 0, 5, 28, 0, 11, 28, 11, 28, 11, 28, 11, 28, 11, 28, 0, 28, 5,
0, 28, 5, 0, 5, 4, 5, 0, 28, 0, 5, 4, 24, 5, 4, 24, 5, 9, 5, 0, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 5, 6,
7, 24, 6, 24, 4, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 0, 6, 5, 10, 6, 24, 0, 27, 4, 27, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 4, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 4, 27, 1, 2, 1, 2,
0, 1, 2, 1, 2, 0, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 0, 4, 2, 5, 6, 5,
6, 5, 6, 5, 8, 6, 8, 28, 0, 11, 28, 26, 28, 0, 5, 24, 0, 8, 5, 8, 6,
0, 24, 9, 0, 6, 5, 24, 5, 0, 9, 5, 6, 24, 5, 6, 8, 0, 24, 5, 0, 6, 8,
5, 6, 8, 6, 8, 6, 8, 24, 0, 4, 9, 0, 24, 0, 5, 6, 8, 6, 8, 6, 0, 5, 6,
5, 6, 8, 0, 9, 0, 24, 5, 4, 5, 28, 5, 8, 0, 5, 6, 5, 6, 5, 6, 5, 6, 5,
6, 5, 0, 5, 4, 24, 5, 8, 6, 8, 24, 5, 4, 8, 6, 0, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 5, 8, 6, 8, 6, 8, 24, 8, 6, 0, 9, 0, 5, 0, 5, 0, 5, 0, 19,
18, 5, 0, 5, 0, 2, 0, 2, 0, 5, 6, 5, 25, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0,
5, 27, 0, 5, 21, 22, 0, 5, 0, 5, 0, 5, 26, 28, 0, 6, 24, 21, 22, 24,
0, 6, 0, 24, 20, 23, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22,
21, 22, 21, 22, 24, 21, 22, 24, 23, 24, 0, 24, 20, 21, 22, 21, 22, 21,
22, 24, 25, 20, 25, 0, 24, 26, 24, 0, 5, 0, 5, 0, 16, 0, 24, 26, 24,
21, 22, 24, 25, 24, 20, 24, 9, 24, 25, 24, 1, 21, 24, 22, 27, 23, 27,
2, 21, 25, 22, 25, 21, 22, 24, 21, 22, 24, 5, 4, 5, 4, 5, 0, 5, 0, 5,
0, 5, 0, 5, 0, 26, 25, 27, 28, 26, 0, 28, 25, 28, 0, 16, 28, 0, 5, 0,
5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 24, 0, 11, 0, 28, 10, 11, 28, 11,
0, 28, 0, 28, 6, 0, 5, 0, 5, 0, 5, 0, 11, 0, 5, 10, 5, 10, 0, 5, 0,
24, 5, 0, 5, 24, 10, 0, 1, 2, 5, 0, 9, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 24, 11, 0, 5, 11, 0, 24, 5, 0, 24, 0, 5, 0, 5, 0, 5, 6, 0, 6,
0, 6, 5, 0, 5, 0, 5, 0, 6, 0, 6, 11, 0, 24, 0, 5, 11, 24, 0, 5, 0, 24,
5, 0, 11, 5, 0, 11, 0, 5, 0, 11, 0, 8, 6, 8, 5, 6, 24, 0, 11, 9, 0, 6,
8, 5, 8, 6, 8, 6, 24, 16, 24, 0, 5, 0, 9, 0, 6, 5, 6, 8, 6, 0, 9, 24,
0, 6, 8, 5, 8, 6, 8, 5, 24, 0, 9, 0, 5, 6, 8, 6, 8, 6, 8, 6, 0, 9, 0,
5, 0, 10, 0, 24, 0, 5, 0, 5, 0, 5, 0, 5, 8, 0, 6, 4, 0, 5, 0, 28, 0,
28, 0, 28, 8, 6, 28, 8, 16, 6, 28, 6, 28, 6, 28, 0, 28, 6, 28, 0, 28,
0, 11, 0, 1, 2, 1, 2, 0, 2, 1, 2, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 2,
0, 2, 0, 2, 0, 2, 1, 2, 1, 0, 1, 0, 1, 0, 1, 0, 2, 1, 0, 1, 0, 1, 0,
1, 0, 1, 0, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 0, 1, 25, 2, 25, 2,
1, 25, 2, 25, 2, 1, 25, 2, 25, 2, 1, 25, 2, 25, 2, 1, 25, 2, 25, 2, 1,
2, 0, 9, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0,
5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
0, 25, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 11, 0, 28, 0, 28,
0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28,
0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28,
0, 28, 0, 28, 0, 28, 0, 5, 0, 5, 0, 5, 0, 5, 0, 16, 0, 16, 0, 6, 0,
18, 0, 18, 0)
  // Indices representing the start of ranges of code points that have the
  // same `isMirrored` result. The result is true for the first listed range
  // (i.e. isMirrored(40) == true, isMirrored(41) == true, isMirrored(42) == false).
  // They were generated with the following script:
//
// val indicesAndRes = (0 to Character.MAX_CODE_POINT)
// .map(i => (i, Character.isMirrored(i))).foldLeft[List[(Int, Boolean)]](Nil) {
// case (x :: xs, elem) if x._2 == elem._2 => x :: xs
// case (prevs, elem) => elem :: prevs
// }.reverse
// val isMirroredIndices = indicesAndRes.map(_._1).tail
// val isMirroredIndicesDeltas = isMirroredIndices.zip(
// 0 :: isMirroredIndices.init).map(tup => tup._1 - tup._2)
// println(isMirroredIndicesDeltas.mkString(
// "isMirroredIndices: val deltas = Array[Int](", ", ", ")"))
private[this] lazy val isMirroredIndices = {
val deltas = Array[Int](40, 2, 18, 1, 1, 1, 28, 1, 1, 1, 29, 1, 1, 1,
45, 1, 15, 1, 3710, 4, 1885, 2, 2460, 2, 10, 2, 54, 2, 14, 2, 177, 1,
192, 4, 3, 6, 3, 1, 3, 2, 3, 4, 1, 4, 1, 1, 1, 1, 4, 9, 5, 1, 1, 18,
5, 4, 9, 2, 1, 1, 1, 8, 2, 31, 2, 4, 5, 1, 9, 2, 2, 19, 5, 2, 9, 5, 2,
2, 4, 24, 2, 16, 8, 4, 20, 2, 7, 2, 1085, 14, 74, 1, 2, 4, 1, 2, 1, 3,
5, 4, 5, 3, 3, 14, 403, 22, 2, 21, 8, 1, 7, 6, 3, 1, 4, 5, 1, 2, 2, 5,
4, 1, 1, 3, 2, 2, 10, 6, 2, 2, 12, 19, 1, 4, 2, 1, 1, 1, 2, 1, 1, 4,
5, 2, 6, 3, 24, 2, 11, 2, 4, 4, 1, 2, 2, 2, 4, 43, 2, 8, 1, 40, 5, 1,
1, 1, 3, 5, 5, 3, 4, 1, 3, 5, 1, 1, 772, 4, 3, 2, 1, 2, 14, 2, 2, 10,
478, 10, 2, 8, 52797, 6, 5, 2, 162, 2, 18, 1, 1, 1, 28, 1, 1, 1, 29,
1, 1, 1, 1, 2, 1, 2, 55159, 1, 57, 1, 57, 1, 57, 1, 57, 1)
uncompressDeltas(deltas)
}
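  // Turns an array of consecutive deltas into absolute values by computing
  // the running prefix sum in place (the argument array is mutated and returned).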
private[this] def uncompressDeltas(deltas: Array[Int]): Array[Int] = {
for (i <- 1 until deltas.length)
deltas(i) += deltas(i - 1)
deltas
}
}
|
jmnarloch/scala-js
|
javalanglib/src/main/scala/java/lang/Character.scala
|
Scala
|
bsd-3-clause
| 47,167
|
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.jsinterop
import scala.scalajs.js
import org.scalajs.jasminetest.JasmineTest
object DictionaryTest extends JasmineTest {
describe("scala.scalajs.js.Dictionary") {
it("should provide an equivalent of the JS delete keyword - #255") {
val obj = js.Dictionary.empty[js.Any]
obj("foo") = 42
obj("bar") = "foobar"
expect(obj("foo")).toEqual(42)
expect(obj("bar")).toEqual("foobar")
obj.delete("foo")
expect(obj.contains("foo")).toBeFalsy
expect(obj.asInstanceOf[js.Object].hasOwnProperty("foo")).toBeFalsy
expect(obj("bar")).toEqual("foobar")
}
// This doesn't work on Rhino due to lack of full strict mode support - #679
unless("rhino").
it("should behave as specified when deleting a non-configurable property - #461 - #679") {
val obj = js.Dictionary.empty[js.Any]
js.Object.defineProperty(obj.asInstanceOf[js.Object], "nonconfig",
js.Dynamic.literal(value = 4, writable = false).asInstanceOf[js.PropertyDescriptor])
expect(obj("nonconfig")).toEqual(4)
expect(() => obj.delete("nonconfig")).toThrow
expect(obj("nonconfig")).toEqual(4)
}
it("apply should throw when not found") {
val obj = js.Dictionary("foo" -> "bar")
expect(() => obj("bar")).toThrow
}
it("should provide `get`") {
val obj = js.Dictionary.empty[Int]
obj("hello") = 1
expect(obj.get("hello") == Some(1)).toBeTruthy
expect(obj.get("world").isDefined).toBeFalsy
}
it("-= should ignore deleting a non-existent key") {
val obj = js.Dictionary("a" -> "A")
obj -= "b"
}
it("should treat delete as a statement - #907") {
val obj = js.Dictionary("a" -> "A")
obj.delete("a")
}
it("should provide keys") {
val obj = js.Dictionary("a" -> "A", "b" -> "B")
val keys = obj.keys.toList
expect(keys.size).toEqual(2)
expect(keys.contains("a")).toBeTruthy
expect(keys.contains("b")).toBeTruthy
}
it("should survive the key 'hasOwnProperty' - #1414") {
val obj = js.Dictionary.empty[Int]
expect(obj.contains("hasOwnProperty")).toBeFalsy
obj("hasOwnProperty") = 5
expect(obj.contains("hasOwnProperty")).toBeTruthy
obj.delete("hasOwnProperty")
expect(obj.contains("hasOwnProperty")).toBeFalsy
}
it("should provide an iterator") {
val obj = js.Dictionary("foo" -> 5, "bar" -> 42, "babar" -> 0)
var elems: List[(String, Int)] = Nil
for ((prop, value) <- obj) {
elems ::= (prop, value)
}
expect(elems.size).toEqual(3)
expect(elems.contains(("foo", 5))).toBeTruthy
expect(elems.contains(("bar", 42))).toBeTruthy
expect(elems.contains(("babar", 0))).toBeTruthy
}
it("should desugar arguments to delete statements - #908") {
val kh = js.Dynamic.literal( key = "a" ).asInstanceOf[KeyHolder]
val dict = js.Dictionary[String]("a" -> "A")
def a[T](foo: String) = dict.asInstanceOf[T]
a[js.Dictionary[String]]("foo").delete(kh.key)
}
}
trait KeyHolder extends js.Object {
def key: String = js.native
}
describe("scala.scalajs.js.JSConverters.JSRichGenMap") {
import js.JSConverters._
it("should provide toJSDictionary") {
expect(Map("a" -> 1, "b" -> 2).toJSDictionary).toEqual(
js.Dynamic.literal(a = 1, b = 2))
expect(Map("a" -> "foo", "b" -> "bar").toJSDictionary).toEqual(
js.Dynamic.literal(a = "foo", b = "bar"))
}
}
}
|
matthughes/scala-js
|
test-suite/src/test/scala/org/scalajs/testsuite/jsinterop/DictionaryTest.scala
|
Scala
|
bsd-3-clause
| 4,080
|
object G {
trait Wizzle {
type X <: Int with Singleton
type Y <: Int with Singleton
type Bar[A] = A match {
case X => String
case Y => Int
}
def left(fa: String): Bar[X] = fa
def center[F[_]](fa: F[X]): F[Y]
def right(fa: Bar[Y]): Int = fa // error
def run: String => Int = left andThen center[Bar] andThen right
}
class Wozzle extends Wizzle {
type X = 0
type Y = 0
def center[F[_]](fa: F[X]): F[Y] = fa
}
def main(args: Array[String]): Unit = {
val coerce: String => Int = (new Wozzle).run
println(coerce("hello") + 1)
}
}
|
lampepfl/dotty
|
tests/neg/6314-4.scala
|
Scala
|
apache-2.0
| 682
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.intermediate
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.nn.mkldnn.{DnnGraph, MklDnnContainer}
import com.intel.analytics.bigdl.nn.mkldnn.{DnnGraph, MklDnnLayer, MklDnnModule}
import com.intel.analytics.bigdl.utils.{Engine, MklDnn, T}
import org.apache.spark.rdd.RDD
import com.intel.analytics.bigdl.nn.Graph
import com.intel.analytics.bigdl.nn.StaticGraph
import com.intel.analytics.bigdl.nn.quantized.Quantization
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import scala.reflect.ClassTag
private[bigdl] object ConversionUtils {
  /**
   * Converts the model to an IR graph and builds it, when the dnn backend
   * applies; otherwise the model is returned unchanged.
   * @param model the model to convert
   * @return the converted and built model, or the original model
   */
def convert[T: ClassTag](model: Module[T]): Module[T] = {
if (model.isInstanceOf[IRGraph[T]]) {
val g = model.asInstanceOf[IRGraph[T]]
if (g.isBuild) g else g.build()
} else if (!model.isInstanceOf[MklDnnModule] && Engine.getEngineType() == MklDnn) {
val m = if (!model.isInstanceOf[Graph[T]]) model.toGraph() else model
if (!m.isInstanceOf[StaticGraph[T]]) return model
val ir = m.asInstanceOf[StaticGraph[T]].toIRgraph().asInstanceOf[Module[T]]
if (model.isTraining()) ir.training() else ir.evaluate()
ir
} else {
model
}
}
def convert[T: ClassTag](model: Module[T], needQuantize: Boolean)(
implicit ev: TensorNumeric[T]): Module[T] = {
val convertedModel = convert(model)
getInt8ModelIfNeeded(convertedModel, needQuantize)
}
  /**
   * For the dnn backend, it is recommended to run a single model on each node.
   * So when the partition number of the dataset is not equal to the node
   * number, a coalesce operation is applied.
   * @param dataset the dataset to repartition if needed
   * @tparam T the element type of the dataset
   * @return the dataset, coalesced to the node number when required
   */
def coalesce[T: ClassTag](dataset: RDD[T]): RDD[T] = {
if (dataset.partitions.length != Engine.nodeNumber()
&& !Engine.isMultiModels) {
dataset.coalesce(Engine.nodeNumber(), false)
} else dataset
}
private def getInt8ModelIfNeeded[T: ClassTag](model: Module[T],
needQuantize: Boolean)(implicit ev: TensorNumeric[T]): Module[T] = {
    // We will not set the model's quantize flag directly from `needQuantize`,
    // because the Evaluator always passes `false` for it.
    // TODO we should handle different types of model. We need to refactor this later.
model match {
case ir: IRGraph[T] => if (needQuantize) ir.setQuantize(true) else ir
case dnnGraph: DnnGraph => if (needQuantize) {
dnnGraph.cloneModule().setQuantize(true)
} else {
dnnGraph
}
case dnnContainer: MklDnnContainer =>
if (needQuantize) {
dnnContainer.cloneModule().setQuantize(true)
} else {
dnnContainer
}
case _ => if (needQuantize) Quantization.quantize[T](model) else model
}
}
}
|
wzhongyuan/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/utils/intermediate/ConversionUtils.scala
|
Scala
|
apache-2.0
| 3,447
|
package org.precompiler.scala101.ch13
import java.io.File
import akka.actor.{Actor, ActorSystem, Props}
import akka.routing.RoundRobinPool
/**
 * Counts the files under a directory tree concurrently using a pool of Akka actors.
 *
 * @author Richard Li
 */
object AkkaConcurrencyDemo extends App {
val system = ActorSystem("FileCounter")
val counter = system.actorOf(Props[FileCounter])
counter ! "D:/localrepo"
}
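/**
 * Visits a single directory: replies to the sender with the number of plain
 * files found, and sends each sub-directory path back for further dispatch.
 */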
class FileVisitor extends Actor {
override def receive: Receive = {
case dirPath: String => {
var fileCnt = 0L
val file = new File(dirPath)
val children = file.listFiles()
if (children != null) {
children.filter(_.isDirectory).foreach(sender ! _.getAbsolutePath)
fileCnt = children.count(!_.isDirectory)
}
sender ! fileCnt
}
}
}
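/**
 * Aggregates the counts: `pending` tracks how many directories are still being
 * visited; once it drops to zero the total is printed and the system shuts down.
 */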
class FileCounter extends Actor {
var pending = 0
var fileCnt = 0L
val fileVisitors = context.actorOf(RoundRobinPool(100).props(Props[FileVisitor]))
override def receive: Receive = {
case dirPath: String => {
pending += 1
fileVisitors ! dirPath
}
case cnt: Long => {
fileCnt += cnt
pending -= 1
if (pending == 0) {
println(s"Total file count: ${fileCnt}")
context.system.terminate()
}
}
}
}
|
precompiler/scala-101
|
learning-scala/src/main/scala/org/precompiler/scala101/ch13/AkkaConcurrencyDemo.scala
|
Scala
|
apache-2.0
| 1,204
|
package endpoints.documented.openapi
import endpoints.algebra.MuxRequest
import endpoints.documented.algebra
trait MuxEndpoints extends algebra.MuxEndpoints with Endpoints {
type MuxEndpoint[Req <: MuxRequest, Resp, Transport] = DocumentedEndpoint
def muxEndpoint[Req <: MuxRequest, Resp, Transport](
request: Request[Transport],
response: Response[Transport]
): MuxEndpoint[Req, Resp, Transport] = endpoint(request, response)
}
|
Krever/endpoints
|
openapi/openapi/src/main/scala/endpoints/documented/openapi/MuxEndpoints.scala
|
Scala
|
mit
| 447
|
package es.weso.utils
import com.hp.hpl.jena.rdf.model.LiteralRequiredException
import com.hp.hpl.jena.rdf.model.Model
import com.hp.hpl.jena.rdf.model.ModelFactory
import com.hp.hpl.jena.rdf.model.Resource
import com.hp.hpl.jena.rdf.model.ResourceRequiredException
import com.hp.hpl.jena.rdf.model.Statement
import java.io.InputStreamReader
import java.io.ByteArrayInputStream
import com.hp.hpl.jena.query.Query
import com.hp.hpl.jena.query.QueryExecutionFactory
import com.hp.hpl.jena.query.QueryFactory
import java.io.StringWriter
import com.hp.hpl.jena.rdf.model.RDFNode
import com.hp.hpl.jena.rdf.model.Property
import java.net.URI
import java.net.URL
import java.io.InputStream
import java.io.FileOutputStream
import org.apache.jena.atlas.AtlasException
import org.apache.jena.riot.RiotException
import com.hp.hpl.jena.query.ResultSet
import com.hp.hpl.jena.rdf.model.Literal
import es.weso.computex.PREFIXES
import com.hp.hpl.jena.rdf.model.ResourceFactory
import scala.collection.JavaConverters._
import java.io.FileNotFoundException
import scala.io.Source
import com.hp.hpl.jena.rdf.model.SimpleSelector
import com.hp.hpl.jena.datatypes.xsd.XSDDatatype
import org.slf4j.LoggerFactory
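/**
 * Result of a parsing attempt: either `Parsed` with the parsed value or
 * `NotParsed` with an error description.
 */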
sealed abstract class ParserReport[+A,+B]
final case class Parsed[A](info:A)
extends ParserReport[A,Nothing]
final case class NotParsed[B](error: B)
extends ParserReport[Nothing,B]
object JenaUtils {
var bNodeCount = 0
val time = compat.Platform.currentTime
val logger = LoggerFactory.getLogger("Application")
lazy val RdfXML = "RDF/XML"
lazy val RdfXMLAbbr = "RDF/XML-ABBREV"
lazy val NTriple = "N-TRIPLE"
lazy val Turtle = "TURTLE"
lazy val TTL = "TTL"
lazy val N3 = "N3"
// In Jena selectors, null represents any node
lazy val any : RDFNode = null
def emptyModel = ModelFactory.createDefaultModel
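  /**
   * Extracts the sub-model formed by the statements about `resource`,
   * recursively following blank-node objects.
   */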
def extractModel(resource: Resource, model: Model): Model = {
val nModel = ModelFactory.createDefaultModel()
def inner(resource: Resource): Model = {
val iterator2 = model.listStatements(resource, null, null)
while (iterator2.hasNext()) {
val stmt = iterator2.nextStatement();
val subject = stmt.getSubject();
val predicate = stmt.getPredicate();
val objec = stmt.getObject();
nModel.add(subject, predicate, objec)
if (objec.isAnon) {
inner(objec.asResource())
}
}
nModel
}
inner(resource)
}
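  /**
   * Renders the object of a statement: a (possibly prefixed) URI for
   * resources, or the value (or full lexical form) for literals.
   */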
def statementAsString(statement: Statement, model: Model, preffix: Boolean): String = {
val resource = try {
val uri = statement.getResource.toString
val preffixUri = statement.getResource.getNameSpace
val preffixNS = model.getNsURIPrefix(statement.getResource.getNameSpace)
val suffix = statement.getResource.getLocalName
if (preffix && preffixUri != null)
preffixNS + ":" + suffix
else uri
} catch {
case e: ResourceRequiredException => null
}
if (resource == null) {
try {
if (preffix)
statement.getLiteral().getValue().toString
else statement.getLiteral().toString
} catch {
case e: LiteralRequiredException => resource
}
} else resource
}
def dereferenceURI( uri: String ) : InputStream = {
val url = new URL(uri)
val urlCon = url.openConnection()
urlCon.setConnectTimeout(4000)
urlCon.setReadTimeout(2000)
urlCon.getInputStream()
}
def parseFromURI(uri: String,
base: String = "",
syntax: String = Turtle) : Model = {
uri2Model(uri,base,syntax) match {
case Parsed(model) => model
case NotParsed(err) =>
throw new Exception(err)
}
}
def parseFromString(
content: String,
base: String = "",
syntax: String = Turtle) : Model = {
str2Model(content,base,syntax) match {
case Parsed(model) => model
case NotParsed(err) =>
throw new Exception("Cannot parse from string: " + content + ". Error: " + err + ". Syntax: " + syntax)
}
}
def modelFromPath(path: String,
base: String = "",
syntax: String = Turtle): Model = {
val model = ModelFactory.createDefaultModel()
val inputStream = getClass.getClassLoader().getResourceAsStream(path)
if (inputStream == null)
throw new FileNotFoundException("File especified does not exist")
model.read(inputStream,base,syntax)
}
def uri2Model(
uriName: String,
base: String = "",
syntax: String = Turtle) : ParserReport[Model,String] = {
try {
val model = ModelFactory.createDefaultModel()
Parsed(model.read(dereferenceURI(uriName),base,syntax))
} catch {
case e: AtlasException =>
NotParsed("Error parsing URI " + uriName + " with syntax " + syntax + ".\n AtlasException: " + e.toString())
case e: RiotException =>
NotParsed("Exception parsing URI " + uriName + " with syntax " + syntax + ".\n RIOT Exception: " + e.toString())
case e : Exception =>
NotParsed("Exception parsing URI " + uriName + " with syntax " + syntax + ".\n Exception: " + e.toString())
}
}
/**
* Returns a RDF model after parsing a String
*/
def str2Model(
str: String,
base: String = "",
syntax: String = Turtle) : ParserReport[Model,String] = {
try {
val model = ModelFactory.createDefaultModel()
val stream = new ByteArrayInputStream(str.getBytes("UTF-8"))
Parsed(model.read(stream,base,syntax))
} catch {
case e@(_: AtlasException | _: RiotException) =>
NotParsed("Bad formed with syntax " + syntax + ". " + e.getLocalizedMessage())
case e : Exception =>
NotParsed("Exception parsing from String " + str +
" with syntax " + syntax + ". " + e.getLocalizedMessage())
}
}
/**
* Returns a RDF model after parsing an InputStream
*/
def parseInputStream(
stream: InputStream,
base: String = "",
syntax: String = Turtle) : ParserReport[Model,String] = {
try {
val model = ModelFactory.createDefaultModel()
Parsed(model.read(stream,base,syntax))
} catch {
case e@(_: AtlasException | _: RiotException) =>
NotParsed("Bad formed with syntax " + syntax + ". " + e.getLocalizedMessage())
case e : Exception =>
NotParsed("Exception parsing " +
" with syntax " + syntax + ". " + e.getLocalizedMessage())
}
}
def getLiteral(
r : RDFNode,
property: Property) : String = {
if (r.isResource()) {
val res = r.asResource
val stmt = res.getRequiredProperty(property)
stmt match {
case null =>
throw new Exception("getName: " + res + " doesn't have value for property " + property + ".\n" +
showResource(r.asResource) )
case _ =>
if (stmt.getObject.isLiteral) stmt.getObject.asLiteral.getString
else
throw new Exception("getName: " + stmt.getObject + " is not a literal")
}
}
else
throw new Exception("getName: " + r + "is not a resource")
}
  /**
   * Returns the URI of a resource node
   */
def getURI(r: RDFNode) : URI = {
if (r.isResource) {
new URI(r.asResource.getURI)
}
else
throw new Exception("getURI: Node " + r + " is not a resource")
}
  /*
   * If there is a triple <r,p,u> and u is a URI, returns u
   * @param r RDFNode
   * @param p property
   */
def getObjectURI(
r: RDFNode,
p: Property
) : URI = {
if (r.isResource()) {
val resUri = r.asResource().getPropertyResourceValue(p)
resUri match {
case null =>
throw new Exception("getURI: " + resUri + " doesn't have value for property " + p + ".\n" + showResource(r.asResource) )
case _ =>
getURI(resUri)
}
} else
throw new Exception("getURI: Node " + r + " is not a resource")
}
  /**
   * Shows information about a resource (lists all of its statements)
   */
def showResource(resource: Resource) : String = {
val sb = new StringBuilder
val iter = resource.listProperties()
sb ++= ("Infor about: " + resource + "\n")
while (iter.hasNext) {
val st = iter.next
sb ++= (st.toString + "\n")
}
sb.toString
}
/*
* Parse a string to obtain a query
*/
def parseQuery(
str: String
) : Option[Query] = {
try {
val query = QueryFactory.create(str)
Some(query)
} catch {
case e: Exception => None
}
}
def querySelectModel(query: Query, model:Model) : ResultSet = {
val qexec = QueryExecutionFactory.create(query, model)
qexec.execSelect()
}
def querySelectModel(queryStr: String, model:Model) : ResultSet = {
val query = QueryFactory.create(queryStr)
querySelectModel(query, model)
}
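  // Usage sketch (hypothetical query): run a SELECT over a model and iterate
  // over the variable bindings.
  //   val rs = querySelectModel("SELECT ?s WHERE { ?s ?p ?o }", model)
  //   while (rs.hasNext) println(rs.next.get("s"))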
def queryConstructModel(queryStr: String, model:Model) : Model = {
val query = QueryFactory.create(queryStr)
queryConstructModel(query, model)
}
def queryConstructModel(query: Query, model:Model) : Model = {
val resultModel = ModelFactory.createDefaultModel
val qexec = QueryExecutionFactory.create(query, model)
qexec.execConstruct
}
/*
* Convert a model to a String
*/
def model2Str(
model: Model,
syntax: String = Turtle) : String = {
val strWriter = new StringWriter
model.write(strWriter,syntax)
strWriter.toString
}
/*
* Write a model to a file
*/
def model2File(
model: Model,
fileName : String,
syntax: String = Turtle) : Unit = {
model.write(new FileOutputStream(fileName),syntax)
}
def getValuesOfType(r: Resource, m: Model) : Set[Resource] = {
m.listResourcesWithProperty(PREFIXES.rdf_type,r).toSet.asScala.toSet
}
def findSubject(m: Model, obj:Resource, p: Property) : RDFNode = {
val selector : SimpleSelector = new SimpleSelector(null,p,obj)
val iter = m.listStatements(selector)
if (iter.hasNext) {
val node = iter.next.getSubject
if (!iter.hasNext) node
else throw
new Exception("findSubject: Resource " + obj + " has more than one subject for property " + p)
}
else
throw new Exception("findSubject: Resource " + obj + " does not have subject for property " + p)
}
def findSubject_asResource(m: Model, obj:Resource, p : Property) : Resource = {
val v = findSubject(m,obj,p)
if (v.isResource) v.asResource
else {
throw new Exception("findSubject_asResource: Object " + obj + " has value " + v + " for property " + p + " which is not a resource")
}
}
def hasProperty(m: Model, r:Resource, p: Property) : Boolean = {
val iter = m.listStatements(r,p,any)
iter.hasNext
}
/**
* TODO: rename these methods to "findObject"
*/
def findProperty(m: Model, r:Resource, p: Property) : RDFNode = {
val iter = m.listStatements(r,p,any)
if (iter.hasNext) {
val node = iter.next.getObject
if (!iter.hasNext) node
else {
val msg="findProperty: Resource " + r + " has more than one value for property " + p
throw new Exception(msg)
// logger.error(msg)
// node
}
}
else
throw new Exception("findProperty: Resource " + r + " does not have value for property " + p)
}
def findProperty_asResource(m: Model, r:Resource, p : Property) : Resource = {
val v = findProperty(m,r,p)
if (v.isResource) v.asResource
else {
throw new Exception("findProperty_asResource: Resource " + r + " has value " + v + " for property " + p + " which is not a resource")
}
}
def findProperty_asLiteral(m: Model, r:Resource, p : Property) : Literal = {
val v = findProperty(m,r,p)
if (v.isLiteral) v.asLiteral
else {
throw new Exception("findProperty_asLiteral: Resource " + r + " has value " + v + " for property " + p + " which is not a literal")
}
}
def readFileFromPath(path: String): String = {
val inputStream = getClass.getClassLoader().getResourceAsStream(path)
if (inputStream == null)
      throw new FileNotFoundException("File specified does not exist")
Source.fromInputStream(inputStream).getLines.mkString("\n")
}
val literalTrue =
ResourceFactory.createTypedLiteral("true",XSDDatatype.XSDboolean)
def literalInt(i : Int) =
ResourceFactory.createTypedLiteral(new Integer(i))
def literalInteger(i : Integer) =
ResourceFactory.createTypedLiteral(i.toString,XSDDatatype.XSDinteger)
def literalFloat(n : Float) =
ResourceFactory.createTypedLiteral(n.toString,XSDDatatype.XSDfloat)
def literalDouble(n : Double) =
ResourceFactory.createTypedLiteral(n.toString,XSDDatatype.XSDdouble)
def literal(name: String) =
ResourceFactory.createPlainLiteral(name)
def newResourceNoBlankNode(m: Model, base: String) : Resource = {
bNodeCount = bNodeCount + 1
m.createResource(base + time + "_" + bNodeCount)
}
}
|
weso/wiCompute
|
src/main/scala/es/weso/utils/JenaUtils.scala
|
Scala
|
mit
| 12,893
|
package com.mb.akkaremotechat.actors.client
import com.mb.akkaremotechat.utils.GlobalMessages._
import com.mb.akkaremotechat.utils.ClientMessages
import akka.actor._
class Supervisor extends Actor with ActorLogging {
private val serverSupervisor = context.actorSelection("akka.tcp://serverSystem@127.0.0.1:5150/user/supervisor")
override def receive: Receive = {
case command: ClientMessages.Command =>
serverSupervisor ! SimpleMessage(command.content)
case simpleMessage: SimpleMessage =>
log.info(simpleMessage.message)
}
}
|
maxbundscherer/akka-remote-scala-chat
|
src/main/scala/com/mb/akkaremotechat/actors/client/Supervisor.scala
|
Scala
|
apache-2.0
| 561
|
package com.wavesplatform
import java.lang.reflect.Constructor
import com.wavesplatform.account.{PrivateKey, PublicKey}
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.lang.ValidationError
import com.wavesplatform.transaction.TxValidationError.GenericError
import com.wavesplatform.utils._
import org.whispersystems.curve25519.OpportunisticCurve25519Provider
import scala.util.Try
package object crypto extends ScorexLogging {
// Constants
val SignatureLength: Int = Curve25519.SignatureLength
val KeyLength: Int = Curve25519.KeyLength
val DigestLength: Int = 32
// Additional provider
private val provider: OpportunisticCurve25519Provider = {
val constructor = classOf[OpportunisticCurve25519Provider].getDeclaredConstructors.head
.asInstanceOf[Constructor[OpportunisticCurve25519Provider]]
constructor.setAccessible(true)
val p = constructor.newInstance()
log.info(s"Native provider used: ${p.isNative}")
p
}
// Digests
def fastHash(m: Array[Byte]): Array[Byte] = Blake2b256.hash(m)
def fastHash(s: String): Array[Byte] = fastHash(s.utf8Bytes)
def secureHash(m: Array[Byte]): Array[Byte] = Keccak256.hash(Blake2b256.hash(m))
def secureHash(s: String): Array[Byte] = secureHash(s.utf8Bytes)
// Signatures
def sign(account: PrivateKey, message: Array[Byte]): ByteStr =
ByteStr(Curve25519.sign(account.arr, message))
def signVRF(account: PrivateKey, message: Array[Byte]): ByteStr =
ByteStr(provider.calculateVrfSignature(provider.getRandom(DigestLength), account.arr, message))
def verify(signature: ByteStr, message: Array[Byte], publicKey: PublicKey): Boolean =
Curve25519.verify(signature.arr, message, publicKey.arr)
def verifyVRF(signature: ByteStr, message: Array[Byte], publicKey: PublicKey): Either[ValidationError, ByteStr] =
Try(ByteStr(provider.verifyVrfSignature(publicKey.arr, message, signature.arr))).toEither.left
.map(_ => GenericError("Could not verify VRF proof"))
// see
// https://github.com/jedisct1/libsodium/blob/ab4ab23d5744a8e060864a7cec1a7f9b059f9ddd/src/libsodium/crypto_scalarmult/curve25519/ref10/x25519_ref10.c#L17
// https://boringssl.googlesource.com/boringssl/+/master/third_party/wycheproof_testvectors/x25519_test.json
private[this] val BlacklistedKeys: Array[Array[Byte]] = Array(
// 0 (order 4)
Array(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
// 1 (order 1)
Array(0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
// 325606250916557431795983626356110631294008115727848805560023387167927233504 (order 8)
Array(0xe0, 0xeb, 0x7a, 0x7c, 0x3b, 0x41, 0xb8, 0xae, 0x16, 0x56, 0xe3, 0xfa, 0xf1, 0x9f, 0xc4, 0x6a, 0xda, 0x09, 0x8d, 0xeb, 0x9c, 0x32, 0xb1,
0xfd, 0x86, 0x62, 0x05, 0x16, 0x5f, 0x49, 0xb8, 0x00),
// 39382357235489614581723060781553021112529911719440698176882885853963445705823 (order 8)
Array(0x5f, 0x9c, 0x95, 0xbc, 0xa3, 0x50, 0x8c, 0x24, 0xb1, 0xd0, 0xb1, 0x55, 0x9c, 0x83, 0xef, 0x5b, 0x04, 0x44, 0x5c, 0xc4, 0x58, 0x1c, 0x8e,
0x86, 0xd8, 0x22, 0x4e, 0xdd, 0xd0, 0x9f, 0x11, 0x57),
// p-1 (order 2)
Array(0xec, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f),
// p (=0, order 4)
Array(0xed, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f),
// p+1 (=1, order 1)
Array(0xee, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f)
).map(_.map(_.toByte))
def isWeakPublicKey(publicKey: Array[Byte]): Boolean =
BlacklistedKeys.exists { wk =>
publicKey.view.init.iterator.sameElements(wk.view.init) &&
(publicKey.last == wk.last || (publicKey.last & 0xff) == wk.last + 0x80)
}
}
|
wavesplatform/Waves
|
node/src/main/scala/com/wavesplatform/crypto/package.scala
|
Scala
|
mit
| 4,392
|
package services.sitedata
import javax.inject._
import scala.concurrent.Future
import utils.Awaits
import models.sitedata.LineType
import dao.sitedata.LineTypeDao
import dao.sitedata.ILineTypeDao
trait ILineTypeService extends BaseService[LineType]{
def insert(linetype: LineType): Future[Unit]
def update(id: Long, linetype: LineType): Future[Unit]
def remove(id: Long): Future[Int]
def findById(id: Long): Future[Option[LineType]]
def findAll(): Future[Option[Seq[LineType]]]
def findAllLineTypes(): Seq[(String, String)]
}
@Singleton
class LineTypeService @Inject() (dao:ILineTypeDao) extends ILineTypeService{
import play.api.libs.concurrent.Execution.Implicits.defaultContext
def insert(linetype: LineType):Future[Unit] = {
dao.insert(linetype);
}
def update(id: Long, linetype: LineType):Future[Unit] = {
// linetype.id = Option(id.toInt)
// linetype.id = id
dao.update(linetype)
}
def remove(id: Long): Future[Int] = {
dao.remove(id)
}
def findById(id: Long): Future[Option[LineType]] = {
dao.findById(id)
}
def findAll(): Future[Option[Seq[LineType]]] = {
dao.findAll().map { x => Option(x) }
}
private def validateId(id: Long): Unit = {
val future = findById(id)
val entry = Awaits.get(5, future)
if (entry == null || entry.equals(None)) throw new RuntimeException("Could not find LineType: " + id)
}
def findAllLineTypes():Seq[(String, String)] = {
val future = this.findAll()
val result = Awaits.get(5, future)
val linetypes: Seq[(String, String)] =
result
.getOrElse(Seq(LineType(0, "")))
.toSeq
.map { linetype => (linetype.id.toString, linetype.name) }
return linetypes
}
}
|
tnddn/iv-web
|
portal/rest-portal/app/services/sitedata/LineTypeService.scala
|
Scala
|
apache-2.0
| 1,737
|
/**
 * Practice exercise on functional sets
 */
object ConjuntoFuncional {
  /**
   * A functional set is represented by its characteristic
   * function, a predicate. The set type is thus declared as
   * a predicate that takes an integer (an element) as its
   * argument and returns a boolean value indicating whether
   * that element belongs to the set
   */
type Conjunto = Int => Boolean
/**
* Metodo para determinar si un elemento pertenece al conjunto
* @param conjunto
* @param elemento
* @return
*/
def contiene(conjunto: Conjunto, elemento: Int): Boolean = conjunto(elemento)
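  // Minimal usage sketch (illustrative, not part of the original exercise):
  // a set is just its characteristic predicate, so the even numbers are
  //   val pares: Conjunto = (x: Int) => x % 2 == 0
  //   contiene(pares, 4)  // true
  //   contiene(pares, 3)  // false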
  /**
   * Returns the singleton set associated with the element
   * passed as argument
   * @param elemento
   * @return
   */
def conjuntoUnElemento(elemento: Int): Conjunto = (x:Int) => x == elemento
  /**
   * Union of two sets
   * @param conjunto1
   * @param conjunto2
   * @return
   */
def union(conjunto1: Conjunto, conjunto2: Conjunto): Conjunto = (x:Int) => conjunto1(x) || conjunto2(x)
  /**
   * Intersection of two sets
   * @param conjunto1
   * @param conjunto2
   * @return
   */
def interseccion(conjunto1: Conjunto, conjunto2: Conjunto): Conjunto = (x:Int) => conjunto1(x) && conjunto2(x)
  /**
   * Difference between two sets
   * @param conjunto1
   * @param conjunto2
   * @return
   */
def diferencia(conjunto1: Conjunto, conjunto2: Conjunto): Conjunto = (x:Int) => conjunto1(x) && !conjunto2(x)
  /**
   * Filtering to obtain the set of elements that satisfy
   * the predicate passed as argument
   * @param conjunto
   * @param p
   * @return
   */
def filter(conjunto: Conjunto, p: Int => Boolean): Conjunto = (x:Int) => conjunto(x) && p(x)
  /**
   * Bound for the iteration required by paraTodo and existe,
   * between -1000 and 1000
   */
private val LIMITE = 1000
  /**
   * Determines whether all the elements of the set satisfy
   * the condition given by the predicate
   * @param conjunto
   * @param p
   * @return
   */
def paraTodo(conjunto: Conjunto, p: Int => Boolean): Boolean = {
    // Helper function to iterate over the values from
    // -LIMITE to LIMITE
def iter(elemento: Int): Boolean = {
      // if elemento has gone beyond LIMITE, every element of the set has been visited and all of them satisfy the condition of predicate p
if (elemento > LIMITE) true
      else if (!conjunto(elemento)) iter(elemento+1) // if the element does not belong to the set, check the next one
else iter(elemento + 1) && p(elemento)
}
iter(-LIMITE)
}
  /**
   * Determines whether there exists at least one element in
   * the set that satisfies the given predicate
   * @param conjunto
   * @param p
   * @return
   */
  // example: for the set of natural numbers {1, 2, 3, 4...} consider the predicate x > 0, which does hold for every element
  // !(x>0) = x<=0: paraTodo checks whether that negated condition holds across the whole set
  // since it returns false, negating it yields true, because no element of this set satisfies the negated condition
def existe(conjunto: Conjunto, p: Int => Boolean): Boolean = !paraTodo(conjunto, x => !p(x))
  /**
   * Generates a new set by transforming the elements of the
   * set passed as argument, applying the transformation
   * given by the function passed as second argument
   * @param conjunto
   * @param funcion
   * @return
   */
def map(conjunto: Conjunto, funcion: Int => Int): Conjunto = (x:Int) => existe(conjunto, y => funcion(y) == x )
  /**
   * Builds a string with the complete contents of the set
   * @param conjunto
   * @return
   */
def toString(conjunto: Conjunto): String = {
val elementos = for (
i <- -LIMITE to LIMITE if contiene(conjunto, i)) yield i
elementos.mkString("{", ",", "}")
}
  /**
   * Prints the complete contents of the set to the screen
   * @param conjunto
   */
def printSet(conjunto: Conjunto) {
println(toString(conjunto))
}
}
|
romanarranz/NTP
|
P3/src/ConjuntoFuncional.scala
|
Scala
|
mit
| 4,097
|
package dotty.tools
package dotc.util
/** A common class for lightweight mutable maps.
*/
abstract class MutableMap[Key, Value] extends ReadOnlyMap[Key, Value]:
def update(k: Key, v: Value): Unit
def remove(k: Key): Value | Null
def -=(k: Key): this.type =
remove(k)
this
def clear(): Unit
def getOrElseUpdate(key: Key, value: => Value): Value
|
dotty-staging/dotty
|
compiler/src/dotty/tools/dotc/util/MutableMap.scala
|
Scala
|
apache-2.0
| 369
|
package org.lolhens.minechanics.core.storageaccess.json
import org.lolhens.minechanics.core.storageaccess._
import scala.collection.JavaConversions._
import scala.collection.mutable
class JsonList(override val obj: java.util.List[_]) extends ValidJsonObject[java.util.List[_]](obj) {
override def apply(i: Int) = if (i >= obj.size || i < 0) JsonObject else JsonObject.fromAny(obj.get(i))
override def foreach(f: StorageAccess => Unit) = {
val iterator = obj.iterator;
while (iterator.hasNext) f(JsonObject.fromAny(iterator.next))
}
override def map[B](f: (Any) => B): mutable.Buffer[B] = obj.map(f)
}
|
LolHens/Minechanics
|
src/main/scala/org/lolhens/minechanics/core/storageaccess/json/JsonList.scala
|
Scala
|
gpl-2.0
| 621
|
package de.kaufhof.hajobs
import akka.actor.{ActorNotFound, ActorSystem}
import com.datastax.driver.core.utils.UUIDs
import de.kaufhof.hajobs
import de.kaufhof.hajobs.JobManagerSpec._
import de.kaufhof.hajobs.testutils.MockInitializers
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.quartz.Scheduler
import org.scalatest.Matchers
import org.scalatest.concurrent.PatienceConfiguration.Timeout
import play.api.Application
import de.kaufhof.hajobs.testutils.StandardSpec
import scala.concurrent.{Promise, blocking, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.language.postfixOps
class JobManagerSpec extends StandardSpec {
private val lockRepository = mock[LockRepository]
private val jobStatusRepository = mock[JobStatusRepository]
private val jobUpdater = mock[JobUpdater]
private var actorSystem: ActorSystem = _
private var manager: JobManager = _
val app = mock[Application]
override def beforeEach() {
MockInitializers.initializeLockRepo(lockRepository)
reset(jobStatusRepository)
reset(jobUpdater)
when(jobUpdater.updateJobs()).thenReturn(Future.successful(Nil))
actorSystem = ActorSystem("JobManagerSpec")
}
override def afterEach() {
manager.shutdown()
actorSystem.terminate()
}
"JobManager scheduler" should {
"trigger a job with a cronExpression defined" in {
val job = spy(new TestJob(Some("* * * * * ?")))
manager = new JobManager(Seq(job), lockRepository, jobStatusRepository, actorSystem)
await(manager.allJobsScheduled)
eventually(Timeout(scaled(3 seconds))) {
verify(job, atLeastOnce()).run()(any[JobContext])
}
}
"not trigger a job with no cronExpression defined" in {
val mockedScheduler = mock[Scheduler]
val job = new TestJob(cronExpression = None)
val manager = new JobManager(Seq(job), lockRepository, jobStatusRepository, actorSystem, mockedScheduler, true)
await(manager.allJobsScheduled)
verify(mockedScheduler, times(1)).start()
verifyNoMoreInteractions(mockedScheduler)
}
"not trigger a job with a cronExpression defined, if scheduling is disabled" in {
val mockedScheduler = mock[Scheduler]
val job = new TestJob(Some("* * * * * ?"))
val jobUpdater = new JobUpdater(lockRepository, jobStatusRepository)
val manager = new JobManager(Seq(job), lockRepository, jobStatusRepository, actorSystem, mockedScheduler, false)
await(manager.allJobsScheduled)
verifyNoMoreInteractions(mockedScheduler)
}
}
"JobManager retrigger job" should {
"release lock after a synchronous job finished" in {
val job = new TestJob()
val manager = new JobManager(Seq(job), lockRepository, jobStatusRepository, actorSystem, enableJobScheduling = false)
await(manager.allJobsScheduled)
await(manager.retriggerJob(JobType1, UUIDs.timeBased()))
verify(lockRepository, times(1)).acquireLock(any(), any(), any())(any())
eventually { verify(lockRepository, times(1)).releaseLock(any(), any())(any()) }
eventually {
// KeepJobLockedActor path looks like this: "akka://system/user/JobExecutor/ProductImport_LOCK"
an[ActorNotFound] shouldBe thrownBy(await(actorSystem.actorSelection(".*_LOCK").resolveOne()))
}
}
"release lock after a job failed on start" in {
val mockedScheduler = mock[Scheduler]
val job = mock[Job]
when(job.jobType).thenReturn(JobType1)
when(job.run()(any())).thenThrow(newTestException)
val manager = new JobManager(Seq(job), lockRepository, jobStatusRepository, actorSystem, mockedScheduler, false)
await(manager.allJobsScheduled)
a[RuntimeException] should be thrownBy(await(manager.retriggerJob(JobType1, UUIDs.timeBased())))
verify(lockRepository, times(3)).acquireLock(any(), any(), any())(any())
verify(lockRepository, times(3)).releaseLock(any(), any())(any())
an[ActorNotFound] shouldBe thrownBy(await(actorSystem.actorSelection(".*_LOCK").resolveOne()))
}
"release lock after a job failed result" in {
val mockedScheduler = mock[Scheduler]
val job = new TestJob() {
override def run()(implicit context: JobContext): JobExecution = new JobExecution() {
override def result = Future.failed(newTestException)
override def cancel: Unit = ()
}
}
val manager = new JobManager(Seq(job), lockRepository, jobStatusRepository, actorSystem, mockedScheduler, false)
await(manager.allJobsScheduled)
await(manager.retriggerJob(JobType1, UUIDs.timeBased()))
verify(lockRepository, times(1)).acquireLock(any(), any(), any())(any())
eventually(verify(lockRepository, times(1)).releaseLock(any(), any())(any()))
eventually(an[ActorNotFound] shouldBe thrownBy(await(actorSystem.actorSelection(".*_LOCK").resolveOne())))
}
"set job to failed if job failed on start" in {
val mockedScheduler = mock[Scheduler]
val job = new TestJob() {
override def run()(implicit context: JobContext): JobExecution = throw newTestException
}
var jobStatus: List[JobStatus] = Nil
when(jobStatusRepository.save(any())(any())).thenAnswer(new Answer[Future[JobStatus]] {
override def answer(invocation: InvocationOnMock): Future[JobStatus] = {
jobStatus = List(invocation.getArguments.head.asInstanceOf[JobStatus])
Future.successful(jobStatus.head)
}
})
when(jobStatusRepository.getLatestMetadata(any())(any())).thenAnswer(new Answer[Future[List[JobStatus]]] {
override def answer(invocation: InvocationOnMock): Future[List[JobStatus]] = Future.successful(jobStatus)
})
val manager = new JobManager(Seq(job), lockRepository, jobStatusRepository, actorSystem, mockedScheduler, false)
await(manager.allJobsScheduled)
await(manager.retriggerJob(JobType1, UUIDs.timeBased()))
verify(jobStatusRepository, times(1)).save(any())(any())
await(jobStatusRepository.getLatestMetadata()).head.jobResult should be(JobResult.Failed)
}
}
}
object JobManagerSpec {
class TestJob(cronExpression: Option[String] = None) extends Job(JobType1, 0, cronExpression) {
override def run()(implicit context: JobContext): JobExecution = new JobExecution() {
override def result = Future {
// just wait a bit...
blocking(Thread.sleep(50))
}
override def cancel: Unit = ()
}
}
private[JobManagerSpec] def newTestException = new RuntimeException("test exception") {
// suppress the stacktrace to reduce log spam
override def fillInStackTrace(): Throwable = this
}
}
|
bryanriddle/ha-jobs
|
ha-jobs-core/src/test/scala/de/kaufhof/hajobs/JobManagerSpec.scala
|
Scala
|
apache-2.0
| 6,850
|
package sttp.client3.httpclient
import sttp.client3.testing.{ConvertToFuture, HttpTest}
import sttp.client3.{Identity, SttpBackend}
class HttpClientSyncHttpTest extends HttpTest[Identity] {
override val backend: SttpBackend[Identity, Any] = HttpClientSyncBackend()
override implicit val convertToFuture: ConvertToFuture[Identity] = ConvertToFuture.id
override def supportsHostHeaderOverride = false
override def supportsCancellation: Boolean = false
override def timeoutToNone[T](t: Identity[T], timeoutMillis: Int): Identity[Option[T]] = Some(t)
}
|
softwaremill/sttp
|
httpclient-backend/src/test/scala/sttp/client3/httpclient/HttpClientSyncHttpTest.scala
|
Scala
|
apache-2.0
| 563
|
package scala.generator
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
class TargetSpec extends AnyFunSpec with Matchers {
private lazy val service = models.TestHelper.generatorApiService
it("Has a field named target") {
service.models.find(_.name == "generator").get.fields.find(_.name == "key").getOrElse {
sys.error("Cannot find generator.key field")
}
}
}
|
gheine/apidoc-generator
|
scala-generator/src/test/scala/models/generator/TargetSpec.scala
|
Scala
|
mit
| 424
|
package org.genericConfig.admin.client.views.html
/**
* Copyright (C) 2016 Gennadi Heimann genaheimann@gmail.com
*
* Created by Gennadi Heimann 25.05.2018
*/
object HtmlElementIds {
def mainHtml = "main"
def mainJQuery = "#main"
def htmlConfigGraphNodeEdit = "configGraphNodeEdit"
def jQueryConfigGraphNodeEdit = "#configGraphNodeEdit"
def htmlEditGroupNodePage = "editGroupNodePage"
def jQueryEditGroupNodePage = "#editGroupNodePage"
def statusHtml = "status"
def statusJQuery = "#status"
def usernameHtml = "username"
def usernameJQuery = "#username"
def passwordHtml = "password"
def passwordJQuery = "#password"
def startPageHtml = "startPage"
def startPageJQuery = "#startPage"
def section = "section"
val inputFieldUpdateUsernameHtml : String = "inputFieldUpdateUsername"
val inputFieldUpdateUsernameJQuery : String = "#inputFieldUpdateUsername"
val buttonActionUpdateUsernameHtml : String = "buttonActionUpdateUsername"
val buttonActionUpdateUsernameJQuery : String = "#buttonActionUpdateUsername"
def addConfigHtml = "addConfig"
def addConfigJQuery = "#addConfig"
def updateConfigHtml = "updateConfig"
def updateConfigJQuery = "#updateConfig"
def deleteConfigHtml = "deleteConfig"
def deleteConfigJQuery = "#deleteConfig"
def getConfigsHtml = "getConfigs"
def getConfigsJQuery = "#getConfigs"
val inputConfigUrlHtml : String = "inputConfigUrl"
val inputConfigUrlJQuery : String = "#inputConfigUrl"
val inputConfigurationCourseHtml : String = "configurationCourse"
val inputConfigurationCourseJQuery : String = "#configurationCourse"
def addStepHtml = "addStep"
def addStepJQuery = "#addStep"
def inputStepNameToShowHtml = "inputStepNameToShow"
def inputStepNameToShowJQuery = "#inputStepNameToShow"
def inputSelectionCriteriumMinHtml = "inputSelectionCriteriumMin"
def inputSelectionCriteriumMinJQuery = "#inputSelectionCriteriumMin"
def inputSelectionCriteriumMaxHtml = "inputSelectionCriteriumMax"
def inputSelectionCriteriumMaxJQuery = "#inputSelectionCriteriumMax"
def updateStepHtml = "updateStep"
def updateStepJQuery = "#updateStep"
def deleteStepHtml = "deleteStep"
  def deleteStepJQuery = "#deleteStep"
def addComponentHtml = "addComponent"
def addComponentJQuery = "#addComponent"
def deleteComponentHtml = "deleteComponent"
def deleteComponentJQuery = "#deleteComponent"
def updateComponentHtml = "updateComponent"
def updateComponentJQuery = "#updateComponent"
def connectToStepHtml = "connectToStep"
def connectToStepJQuery = "#connectToStep"
}
|
gennadij/admin
|
client/src/main/scala/org/genericConfig/admin/client/views/html/HtmlElementIds.scala
|
Scala
|
apache-2.0
| 2,751
|
package com.github.vitalsoftware.util
import com.github.vitalsoftware.macros.{ json, jsonDefaults }
import org.specs2.mutable.Specification
import play.api.libs.json._
@json case class Person(name: String, age: Int, gender: Option[String])
@jsonDefaults case class Person2(name: String, age: Int = 7, gender: Option[String] = None)
@jsonDefaults case class Test(f1: Int = 1, f2: String = "2", f3: Boolean = true, f4: Option[Test])
@jsonDefaults case class TestWithNoFallback(f1: String, f2: Int = 99, f3: Option[TestWithNoFallback] = None)
@jsonDefaults case class Container(data: List[TestWithNoFallback] = List(TestWithNoFallback("test", 100)))
@jsonDefaults case class PrimitiveContainer(data: List[Int] = List(1, 2, 3))
class RobustParsingTest extends Specification {
"robustParsing" should {
"make invalid option values None" in {
val json = Json.obj(
"name" -> "Victor Hugo",
"age" -> 46,
"gender" -> true
)
val (errors, result) = RobustParsing.robustParsing(Person.jsonAnnotationFormat.reads, json)
result must beSome(Person("Victor Hugo", 46, None))
errors must beSome[JsError]
val (errors2, result2) = RobustParsing.robustParsing(Person2.jsonAnnotationFormat.reads, json)
result2 must beSome(Person2("Victor Hugo", 46))
errors2 must beSome[JsError]
}
"make invalid values with defaults fallback to the default" in {
val json = Json.obj(
"name" -> "Victor Hugo",
"age" -> "non age"
)
val (errors, result) = RobustParsing.robustParsing(Person2.jsonAnnotationFormat.reads, json)
result must beSome(Person2("Victor Hugo", 7))
errors must beSome[JsError]
}
"throw on invalid values which are not optional or default" in {
val json = Json.obj(
"name" -> "Victor Hugo",
"age" -> "non age"
)
val (errors, result) = RobustParsing.robustParsing(Person.jsonAnnotationFormat.reads, json)
result must beNone
errors must beSome[JsError]
errors.get.errors.head._1.path.head.asInstanceOf[KeyPathNode].key mustEqual ("age")
errors.get.errors.head._2.head.message must contain("error.expected.jsnumber")
}
"multiple defaults must get replaced" in {
val json = Json.obj("f1" -> "str", "f2" -> false, "f3" -> 3, "f4" -> "not test")
val (errors, result) = RobustParsing.robustParsing(Test.jsonAnnotationFormat, json)
result must beSome(Test(f4 = None))
errors must beSome[JsError]
errors.get.errors.map(_._1.toJsonString) must contain(allOf("obj.f1", "obj.f2", "obj.f3", "obj.f4"))
}
"Replace deep nested values" in {
val json = Json.obj("f4" -> Json.obj("f3" -> "not boolean"))
val (errors, result) = RobustParsing.robustParsing(Test.jsonAnnotationFormat, json)
result must beSome(Test(f4 = Some(Test(f4 = None))))
errors must beSome[JsError]
errors.get.errors.map(_._1.toJsonString) must contain(allOf("obj.f4.f3"))
}
"recursively fix errors on non-arrays" in {
val json = Json.obj("f1" -> "str", "f3" -> Json.obj("f2" -> 10))
val (errors, result) = RobustParsing.robustParsing(TestWithNoFallback.jsonAnnotationFormat, json)
result must beSome(TestWithNoFallback("str", 99))
errors must beSome[JsError]
errors.get.errors.map(_._1.toJsonString) must contain(allOf("obj.f3.f1"))
}
"recursively fix errors on arrays" in {
val json = Json.arr(
Json.obj("f1" -> "arr 1", "f2" -> "a"), // fail on f2 as its a string
Json.obj("f1" -> "arr 2", "f2" -> 2),
Json.obj("f1" -> "arr 3", "f2" -> "c"),
Json.obj("f1" -> "arr 4", "f2" -> "d"),
Json.obj("f1" -> "arr 5", "f2" -> 5)
)
val (errors, result) = RobustParsing.robustParsing(implicitly[Reads[List[TestWithNoFallback]]], json)
errors must beSome[JsError]
result must beSome(
List(
TestWithNoFallback("arr 1", 99),
TestWithNoFallback("arr 2", 2),
TestWithNoFallback("arr 3", 99),
TestWithNoFallback("arr 4", 99),
TestWithNoFallback("arr 5", 5)
)
)
errors.get.errors.map(_._1.toJsonString) must contain(allOf("obj[0].f2", "obj[2].f2", "obj[3].f2"))
}
"recursively fix errors on arrays in containers" in {
val json = Json.obj(
"data" -> Json.arr(
Json.obj("f1" -> "arr 1", "f2" -> "a"), // fail on f2 as its a string
Json.obj("f1" -> "arr 2", "f2" -> 2),
Json.obj("f1" -> "arr 3", "f2" -> "c"),
Json.obj("f1" -> "arr 4", "f2" -> "d"),
Json.obj("f1" -> "arr 5", "f2" -> 5)
)
)
val (errors, result) = RobustParsing.robustParsing(implicitly[Reads[Container]], json)
errors must beSome[JsError]
result must beSome(
Container(
List(
TestWithNoFallback("arr 1", 99),
TestWithNoFallback("arr 2", 2),
TestWithNoFallback("arr 3", 99),
TestWithNoFallback("arr 4", 99),
TestWithNoFallback("arr 5", 5)
)
)
)
errors.get.errors.map(_._1.toJsonString) must contain(allOf("obj.data[0].f2", "obj.data[2].f2", "obj.data[3].f2"))
}
"fail on arrays un-recoverable failures in arrays" in {
val json = Json.arr(
Json.obj("f1" -> "arr 1", "f2" -> "a"),
Json.obj("f1" -> 234, "f2" -> 2), // fail on f1 as its a string
Json.obj("f1" -> "arr 3", "f2" -> "c"),
Json.obj("f1" -> "arr 4", "f2" -> "d"),
Json.obj("f1" -> "arr 5", "f2" -> 5)
)
val (errors, result) = RobustParsing.robustParsing(implicitly[Reads[List[TestWithNoFallback]]], json)
errors must beSome[JsError]
result must beNone
}
"recover on arrays with default values" in {
val json = Json.obj(
"data" -> Json.arr(
Json.obj("f1" -> "arr 1", "f2" -> "a"),
Json.obj("f1" -> 234, "f2" -> 2), // fail on f1 as its a string
Json.obj("f1" -> "arr 3", "f2" -> "c")
)
)
val (errors, result) = RobustParsing.robustParsing(implicitly[Reads[Container]], json)
errors must beSome[JsError]
result must beSome(Container(List(TestWithNoFallback("test", 100))))
}
"fail on arrays of primitives" in {
val json = Json.arr(1, "a", 2, "b")
val (errors, result) = RobustParsing.robustParsing(Reads.list[Int], json)
errors must beSome[JsError]
result must beNone
}
"recover from array of primitives with default values" in {
val json = Json.obj("data" -> Json.arr(1, "a", 2, "b"))
val (errors, result) = RobustParsing.robustParsing(Reads.of[PrimitiveContainer], json)
errors must beSome[JsError]
result must beSome(PrimitiveContainer(List(1, 2, 3)))
}
}
}
|
vital-software/scala-redox
|
src/test/scala/com/github/vitalsoftware/util/RobustParsingTest.scala
|
Scala
|
mit
| 6,838
|
object Foo { // error
def bar: Int = "LOL"
}
|
som-snytt/dotty
|
tests/vulpix-tests/unit/negAnnotWrongLine.scala
|
Scala
|
apache-2.0
| 47
|
package com.github.tototoshi.play2.auth.social.providers.slack
import com.github.tototoshi.play2.auth.social.core.OAuth2Controller
import com.github.tototoshi.play2.auth.{ AuthConfig, Login, OptionalAuthElement }
trait SlackController extends OAuth2Controller
with AuthConfig
with OptionalAuthElement
with Login {
val authenticator = new SlackAuthenticator
}
|
tototoshi/play2-auth
|
social/src/main/scala/com/github/tototoshi/play2/auth/social/providers/slack/SlackController.scala
|
Scala
|
apache-2.0
| 376
|
package org.fedoraproject.mobile
import Implicits._
import android.os.Bundle
import android.preference.PreferenceFragment
import scalaz._, Scalaz._
import scalaz.effect.IO
class PreferencesFragment extends PreferenceFragment with TypedFragment {
override def onCreate(savedInstanceState: Bundle): Unit = IO {
super.onCreate(savedInstanceState)
addPreferencesFromResource(R.xml.preferences);
}.unsafePerformIO
}
|
fedora-infra/mobile
|
src/main/scala/fragment/PreferencesFragment.scala
|
Scala
|
mpl-2.0
| 427
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils
import netty.Crc32c
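/**
 * Masked CRC32C matching TensorFlow's TFRecord checksum: the raw checksum x
 * is rotated right by 15 bits and the constant 0xa282ead8 is added, i.e.
 * masked = ((x >> 15) | (x << 17)) + 0xa282ead8, with every intermediate
 * result truncated to 32 bits by u32.
 */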
private[bigdl] object Crc32 {
def maskedCRC32(crc32c: Crc32c, data: Array[Byte], offset: Int, length: Int): Long = {
crc32c.reset()
crc32c.update(data, offset, length)
val x = u32(crc32c.getValue)
u32(((x >> 15) | u32(x << 17)) + 0xa282ead8)
}
def maskedCRC32(crc32c: Crc32c, data: Array[Byte]): Long = {
maskedCRC32(crc32c, data, 0, data.length)
}
def maskedCRC32(data: Array[Byte]): Long = {
val crc32c = new Crc32c()
maskedCRC32(crc32c, data)
}
def maskedCRC32(data: Array[Byte], offset: Int, length: Int): Long = {
val crc32c = new Crc32c()
maskedCRC32(crc32c, data, offset, length)
}
  def u32(x: Long): Long = {
    // the literal needs the L suffix: the Int literal 0xffffffff is -1 and
    // would widen to -1L, making this a no-op instead of a 32-bit truncation
    x & 0xffffffffL
  }
}
|
intel-analytics/BigDL
|
scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/utils/Crc32.scala
|
Scala
|
apache-2.0
| 1,373
|
package org.jetbrains.jps.incremental.scala.remote
import java.io._
import java.net.{InetAddress, Socket}
import com.intellij.util.Base64Converter
import com.martiansoftware.nailgun.NGConstants
import org.jetbrains.jps.incremental.messages.BuildMessage.Kind
import org.jetbrains.jps.incremental.scala._
/**
* @author Pavel Fatin
* @author Dmitry Naydanov
*/
trait RemoteResourceOwner {
protected val address: InetAddress
protected val port: Int
protected val currentDirectory = System.getProperty("user.dir")
protected val serverAlias = "compile-server"
def send(command: String, arguments: Seq[String], client: Client) {
val encodedArgs = arguments.map(s => Base64Converter.encode(s.getBytes("UTF-8")))
using(new Socket(address, port)) { socket =>
using(new DataOutputStream(new BufferedOutputStream(socket.getOutputStream))) { output =>
createChunks(command, encodedArgs).foreach(_.writeTo(output))
output.flush()
if (client != null) {
using(new DataInputStream(new BufferedInputStream(socket.getInputStream))) { input =>
handle(input, client)
}
}
}
}
}
protected def handle(input: DataInputStream, client: Client) {
val processor = new ClientEventProcessor(client)
while (!client.isCanceled) {
Chunk.readFrom(input) match {
case Chunk(NGConstants.CHUNKTYPE_EXIT, code) =>
return
case Chunk(NGConstants.CHUNKTYPE_STDOUT, data) =>
try {
val event = Event.fromBytes(Base64Converter.decode(data))
processor.process(event)
} catch {
case e: Exception =>
val chars = {
val s = new String(data)
if (s.length > 50) s.substring(0, 50) + "..." else s
}
client.message(Kind.ERROR, "Unable to read an event from: " + chars)
client.trace(e)
}
          // Main server class redirects all (unexpected) stdout data to stderr.
          // In theory, there should be no such data at all; however, in practice,
          // sbt "leaks" some messages into the console (e.g. for the "explain type errors" option).
          // Report such output not as errors but as warnings (so the make process can continue).
case Chunk(NGConstants.CHUNKTYPE_STDERR, data) =>
client.message(Kind.WARNING, fromBytes(data))
case Chunk(kind, data) =>
client.message(Kind.ERROR, "Unexpected server output: " + data)
}
}
}
protected def createChunks(command: String, args: Seq[String]): Seq[Chunk] = {
args.map(s => Chunk(NGConstants.CHUNKTYPE_ARGUMENT.toChar, toBytes(s))) :+
Chunk(NGConstants.CHUNKTYPE_WORKINGDIRECTORY.toChar, toBytes(currentDirectory)) :+
Chunk(NGConstants.CHUNKTYPE_COMMAND.toChar, toBytes(command))
}
private def toBytes(s: String) = s.getBytes
private def fromBytes(bytes: Array[Byte]) = new String(bytes)
}
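/**
 * A single Nailgun protocol chunk as framed by writeTo/readFrom below:
 * a 4-byte big-endian payload length, a 1-byte chunk type, and then the
 * payload bytes.
 *
 *   +----------------+-----------+-----------------+
 *   | length (Int32) | kind (1B) | data (length B) |
 *   +----------------+-----------+-----------------+
 */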
case class Chunk(kind: Chunk.Kind, data: Array[Byte]) {
def writeTo(output: DataOutputStream) {
output.writeInt(data.length)
output.writeByte(kind.toByte)
output.write(data)
}
}
object Chunk {
type Kind = Char
def readFrom(input: DataInputStream): Chunk = {
val size = input.readInt()
val kind = input.readByte().toChar
val data = {
val buffer = new Array[Byte](size)
input.readFully(buffer)
buffer
}
Chunk(kind, data)
}
}
|
triplequote/intellij-scala
|
scala/compiler-shared/src/org/jetbrains/jps/incremental/scala/remote/RemoteResourceOwner.scala
|
Scala
|
apache-2.0
| 3,441
|
package scala {
package meta {
package config {
case class Version()
trait Aliases {
type Version = scala.meta.config.Version
val Version = scala.meta.config.Version
}
}
}
package object meta extends scala.meta.config.Aliases
}
|
som-snytt/dotty
|
tests/pos/i2551/library_1.scala
|
Scala
|
apache-2.0
| 278
|
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2014, Gary Keorkunian **
** **
\\* */
package squants.time
import org.scalatest.{ Matchers, FlatSpec }
import squants.motion.UsMilesPerHour
import squants.space.UsMiles
import squants.CustomMatchers
import scala.language.postfixOps
/**
* @author garyKeorkunian
* @since 0.1
*
*/
class TimeDerivativeSpec extends FlatSpec with Matchers with CustomMatchers {
behavior of "Time Derivatives and Integrals as implemented in Distance and Velocity"
it should "satisfy Derivative = Integral / Time" in {
implicit val tolerance = UsMilesPerHour(0.0000000000001)
UsMilesPerHour(55) should beApproximately(UsMiles(55) / Hours(1))
}
it should "satisfy Integral = Derivative * Time" in {
implicit val tolerance = UsMiles(0.0000000000001)
UsMiles(110) should beApproximately(UsMilesPerHour(55) * Hours(2))
UsMiles(110) should beApproximately(Hours(2) * UsMilesPerHour(55))
}
it should "satisfy Time = Integral / Derivative" in {
implicit val tolerance = Hours(0.0000000000001)
Hours(2) should beApproximately(UsMiles(110) / UsMilesPerHour(55))
}
}
|
non/squants
|
src/test/scala/squants/time/TimeDerivativeSpec.scala
|
Scala
|
apache-2.0
| 1,577
|
package org.scalatra
package commands
import mojolly.inflector.InflectorImports._
import org.scalatra.util.RicherString._
import org.scalatra.util.conversion._
import org.scalatra.validation._
import scalaz.Validation.FlatMap._
import scalaz._
import scalaz.syntax.std.option._
import scalaz.syntax.validation._
object DefVal {
def apply[T](prov: => T) = new DefVal(prov)
}
class DefVal[T](valueProvider: => T) {
lazy val value = valueProvider
}
object ValueSource extends Enumeration {
val Header = Value("header")
val Body = Value("body")
val Query = Value("query")
val Path = Value("path")
}
object FieldDescriptor {
def apply[T](name: String)(implicit mf: Manifest[T]): FieldDescriptor[T] =
new BasicFieldDescriptor[T](name, transformations = identity)
}
trait FieldDescriptor[T] {
def name: String
def value: FieldValidation[T]
def validator: Option[Validator[T]]
def notes: String
def notes(note: String): FieldDescriptor[T]
def description: String
def description(desc: String): FieldDescriptor[T]
def valueManifest: Manifest[T]
def valueSource: ValueSource.Value
def sourcedFrom(valueSource: ValueSource.Value): FieldDescriptor[T]
def allowableValues: List[T]
def allowableValues(vals: T*): FieldDescriptor[T]
def displayName: Option[String]
def displayName(name: String): FieldDescriptor[T]
def position: Int
def position(pos: Int): FieldDescriptor[T]
private[commands] def defVal: Option[DefVal[T]]
def defaultValue: Option[T] = defVal.map(_.value)
def withDefaultValue(default: => T): FieldDescriptor[T]
def requiredError: String
def withRequiredError(msgFormat: String): FieldDescriptor[T]
def isValid = value.isSuccess
def isInvalid = value.isFailure
private[commands] def isRequired: Boolean
def required: FieldDescriptor[T]
def optional(default: => T): FieldDescriptor[T]
override def toString() = "FieldDescriptor(name: %s)".format(name)
def validateWith(validators: BindingValidator[T]*): FieldDescriptor[T]
def apply[S](original: Either[String, Option[S]])(implicit ms: Manifest[S], convert: TypeConverter[S, T]): DataboundFieldDescriptor[S, T]
override def hashCode() = 41 + 41 * name.hashCode()
def transform(endo: T => T): FieldDescriptor[T]
private[commands] def transformations: T => T
override def equals(obj: Any) = obj match {
case b: FieldDescriptor[_] => b.name == this.name
case _ => false
}
}
class BasicFieldDescriptor[T](
val name: String,
val validator: Option[Validator[T]] = None,
private[commands] val transformations: T => T = identity _,
private[commands] var isRequired: Boolean = false,
val description: String = "",
val notes: String = "",
private[commands] val defVal: Option[DefVal[T]] = None,
val valueSource: ValueSource.Value = ValueSource.Body,
val allowableValues: List[T] = Nil,
val displayName: Option[String] = None,
val position: Int = 0,
val requiredError: String = "%s is required.")(implicit val valueManifest: Manifest[T]) extends FieldDescriptor[T] {
private[this] def requiredValidationFailure: FieldValidation[T] = ValidationError(requiredError.format(name), FieldName(name)).failure
def value: FieldValidation[T] = defaultValue.fold(requiredValidationFailure)(_.success)
def validateWith(bindingValidators: BindingValidator[T]*): FieldDescriptor[T] = {
val nwValidators: Option[Validator[T]] =
if (bindingValidators.nonEmpty) Some(bindingValidators.map(_ apply name).reduce(_ andThen _)) else None
copy(validator = validator.flatMap(v => nwValidators.map(v andThen)) orElse nwValidators)
}
def copy(
name: String = name,
validator: Option[Validator[T]] = validator,
transformations: T => T = transformations,
isRequired: Boolean = isRequired,
description: String = description,
notes: String = notes,
defVal: Option[DefVal[T]] = defVal,
valueSource: ValueSource.Value = valueSource,
allowableValues: List[T] = allowableValues,
displayName: Option[String] = displayName,
position: Int = position,
requiredError: String = requiredError): FieldDescriptor[T] = {
new BasicFieldDescriptor(name, validator, transformations, isRequired, description, notes, defVal, valueSource, allowableValues, displayName, position, requiredError)(valueManifest)
}
def apply[S](original: Either[String, Option[S]])(implicit ms: Manifest[S], convert: TypeConverter[S, T]): DataboundFieldDescriptor[S, T] = {
val conv = original.fold(
e => ValidationError(e).failure,
o => (o.flatMap(convert(_)) orElse defaultValue).fold(requiredValidationFailure)(_.success)
)
val o = original.fold(_ => None, identity)
BoundFieldDescriptor(o, conv, this)
}
def transform(endo: T => T): FieldDescriptor[T] = copy(transformations = transformations andThen endo)
def required = copy(isRequired = true)
def optional(default: => T): FieldDescriptor[T] = withDefaultValue(default)
def description(desc: String) = copy(description = desc)
def notes(note: String) = copy(notes = note)
def withDefaultValue(default: => T): FieldDescriptor[T] = copy(defVal = Some(DefVal(default)), isRequired = false)
  def withRequiredError(msgFormat: String): FieldDescriptor[T] = copy(requiredError = msgFormat)
def sourcedFrom(valueSource: ValueSource.Value): FieldDescriptor[T] = copy(valueSource = valueSource)
def allowableValues(vals: T*): FieldDescriptor[T] =
copy(allowableValues = vals.toList).validateWith(BindingValidators.oneOf("%%s must be one of %s.", vals))
def displayName(name: String): FieldDescriptor[T] = copy(displayName = name.blankOption)
def position(pos: Int): FieldDescriptor[T] = copy(position = pos)
}
trait DataboundFieldDescriptor[S, T] extends FieldDescriptor[T] {
def field: FieldDescriptor[T]
def original: Option[S]
def transform(endo: T => T): DataboundFieldDescriptor[S, T]
def apply[V](original: Either[String, Option[V]])(implicit mv: Manifest[V], convert: TypeConverter[V, T]): DataboundFieldDescriptor[V, T] =
this.asInstanceOf[DataboundFieldDescriptor[V, T]]
override def toString() = "FieldDescriptor(name: %s, original: %s, value: %s)".format(name, original, value)
def validate: ValidatedFieldDescriptor[S, T]
def validateWith(bindingValidators: BindingValidator[T]*): DataboundFieldDescriptor[S, T]
def required: DataboundFieldDescriptor[S, T]
def optional(default: => T): DataboundFieldDescriptor[S, T]
def isRequired = field.isRequired
def requiredError: String = field.requiredError
def withRequiredError(msgFormat: String): DataboundFieldDescriptor[S, T]
def description = field.description
def description(desc: String): DataboundFieldDescriptor[S, T]
def notes = field.notes
def notes(note: String): DataboundFieldDescriptor[S, T]
def valueManifest = field.valueManifest
private[commands] def defVal: Option[DefVal[T]] = field.defVal
def withDefaultValue(default: => T): DataboundFieldDescriptor[S, T]
def valueSource: ValueSource.Value = field.valueSource
def sourcedFrom(valueSource: ValueSource.Value): DataboundFieldDescriptor[S, T]
def allowableValues = field.allowableValues
def allowableValues(vals: T*): DataboundFieldDescriptor[S, T]
def displayName: Option[String] = field.displayName
def displayName(name: String): DataboundFieldDescriptor[S, T]
def position: Int = field.position
def position(pos: Int): DataboundFieldDescriptor[S, T]
}
trait ValidatedFieldDescriptor[S, T] extends DataboundFieldDescriptor[S, T] {
def validate: ValidatedFieldDescriptor[S, T] = this
}
object BoundFieldDescriptor {
def apply[S, T](original: Option[S], value: FieldValidation[T], binding: FieldDescriptor[T]): DataboundFieldDescriptor[S, T] =
new BoundFieldDescriptor(original, value, binding, binding.validator)
}
class BoundFieldDescriptor[S, T](
val original: Option[S],
val value: FieldValidation[T],
val field: FieldDescriptor[T],
val validator: Option[Validator[T]]) extends DataboundFieldDescriptor[S, T] {
def name: String = field.name
override def hashCode(): Int = field.hashCode()
override def equals(other: Any) = other match {
case o: BasicFieldDescriptor[T] => field.equals(o)
case o: BoundFieldDescriptor[T, S] => field.equals(o.field)
case _ => false
}
override def toString() = "BoundFieldDescriptor(name: %s, original: %s, converted: %s)".format(name, original, value)
def validateWith(bindingValidators: BindingValidator[T]*): DataboundFieldDescriptor[S, T] = {
val nwFld = field.validateWith(bindingValidators: _*)
copy(field = nwFld, validator = nwFld.validator)
}
def withRequiredError(msgFormat: String): DataboundFieldDescriptor[S, T] = copy(field = field.withRequiredError(msgFormat))
def copy(original: Option[S] = original, value: FieldValidation[T] = value, field: FieldDescriptor[T] = field, validator: Option[Validator[T]] = validator): DataboundFieldDescriptor[S, T] =
new BoundFieldDescriptor(original, value, field, validator)
def transform(endo: T => T): DataboundFieldDescriptor[S, T] = copy(value = value map endo)
def required = copy(field = field.required)
def optional(default: => T): DataboundFieldDescriptor[S, T] = withDefaultValue(default)
def description(desc: String) = copy(field = field.description(desc))
def notes(note: String) = copy(field = field.notes(note))
def validate: ValidatedFieldDescriptor[S, T] = {
val defaultValidator: Validator[T] = validator getOrElse identity
if (!isRequired && original.isEmpty) {
new ValidatedBoundFieldDescriptor(value map transformations, this)
} else {
val doValidation: Validator[T] = if (isRequired) {
(x: FieldValidation[T]) =>
x flatMap { v =>
if (original.isDefined) v.success else ValidationError("%s is required." format name.underscore.humanize, FieldName(name), ValidationFail).failure
}
} else identity
new ValidatedBoundFieldDescriptor((doValidation andThen defaultValidator)(value) map transformations, this)
}
}
private[commands] def transformations: (T) => T = field.transformations
def withDefaultValue(default: => T): DataboundFieldDescriptor[S, T] = copy(field = field.withDefaultValue(default))
def sourcedFrom(valueSource: ValueSource.Value): DataboundFieldDescriptor[S, T] = copy(field = field.sourcedFrom(valueSource))
def allowableValues(vals: T*): DataboundFieldDescriptor[S, T] =
copy(field = field.allowableValues(vals: _*))
def displayName(name: String): DataboundFieldDescriptor[S, T] = copy(field = field.displayName(name))
def position(pos: Int): DataboundFieldDescriptor[S, T] = copy(field = field.position(pos))
}
class ValidatedBoundFieldDescriptor[S, T](val value: FieldValidation[T], val field: DataboundFieldDescriptor[S, T]) extends ValidatedFieldDescriptor[S, T] {
def name: String = field.name
override def hashCode(): Int = field.hashCode()
override def equals(other: Any) = other match {
case o: BasicFieldDescriptor[T] => field.equals(o)
case o: BoundFieldDescriptor[T, S] => field.equals(o.field)
case o: ValidatedBoundFieldDescriptor[S, T] => field.equals(o.field)
case _ => false
}
  override def toString() = "ValidatedBoundFieldDescriptor(name: %s, original: %s, converted: %s)".format(name, original, value)
def validateWith(bindingValidators: BindingValidator[T]*): DataboundFieldDescriptor[S, T] = {
copy(field = field.validateWith(bindingValidators: _*))
}
def copy(value: FieldValidation[T] = value, field: DataboundFieldDescriptor[S, T] = field): ValidatedFieldDescriptor[S, T] =
new ValidatedBoundFieldDescriptor(value, field)
def transform(endo: T => T): DataboundFieldDescriptor[S, T] = copy(value = value map endo)
def required = copy(field = field.required)
def optional(default: => T): DataboundFieldDescriptor[S, T] = withDefaultValue(default)
def withRequiredError(msgFormat: String): DataboundFieldDescriptor[S, T] = copy(field = field.withRequiredError(msgFormat))
def description(desc: String) = copy(field = field.description(desc))
def notes(note: String) = copy(field = field.notes(note))
def validator: Option[Validator[T]] = field.validator
def original: Option[S] = field.original
private[commands] def transformations: (T) => T = field.transformations
def withDefaultValue(default: => T): DataboundFieldDescriptor[S, T] = copy(field = field.withDefaultValue(default))
def sourcedFrom(valueSource: ValueSource.Value): DataboundFieldDescriptor[S, T] = copy(field = field.sourcedFrom(valueSource))
def allowableValues(vals: T*): DataboundFieldDescriptor[S, T] = copy(field = field.allowableValues(vals: _*))
def displayName(name: String): DataboundFieldDescriptor[S, T] = copy(field = field.displayName(name))
def position(pos: Int): DataboundFieldDescriptor[S, T] = copy(field = field.position(pos))
}
import scala.util.matching.Regex
trait BindingValidatorImplicits {
import org.scalatra.commands.BindingValidators._
implicit def validatableStringBinding(b: FieldDescriptor[String]) = new ValidatableStringBinding(b)
implicit def validatableSeqBinding[T <: Seq[_]](b: FieldDescriptor[T]) = new ValidatableSeq(b)
implicit def validatableGenericBinding[T](b: FieldDescriptor[T]) = new ValidatableGenericBinding(b)
implicit def validatableOrderedBinding[T <% Ordered[T]](b: FieldDescriptor[T]) = new ValidatableOrdered(b)
}
object BindingValidators {
class ValidatableSeq[T <: Seq[_]](b: FieldDescriptor[T]) {
def notEmpty: FieldDescriptor[T] = notEmpty()
def notEmpty(messageFormat: String = b.requiredError): FieldDescriptor[T] =
b.required.validateWith(BindingValidators.nonEmptyCollection(messageFormat))
}
class ValidatableOrdered[T <% Ordered[T]](b: FieldDescriptor[T]) {
def greaterThan(min: T, messageFormat: String = "%%s must be greater than %s"): FieldDescriptor[T] =
b.validateWith(BindingValidators.greaterThan(min, messageFormat))
def lessThan(max: T, messageFormat: String = "%%s must be less than %s"): FieldDescriptor[T] =
b.validateWith(BindingValidators.lessThan(max, messageFormat))
def greaterThanOrEqualTo(min: T, messageFormat: String = "%%s must be greater than or equal to %s"): FieldDescriptor[T] =
b.validateWith(BindingValidators.greaterThanOrEqualTo(min, messageFormat))
def lessThanOrEqualTo(max: T, messageFormat: String = "%%s must be less than or equal to %s"): FieldDescriptor[T] =
b.validateWith(BindingValidators.lessThanOrEqualTo(max, messageFormat))
}
class ValidatableGenericBinding[T](b: FieldDescriptor[T]) {
def validate(validate: T => Boolean, messageFormat: String = "%s is invalid."): FieldDescriptor[T] =
b.validateWith(BindingValidators.validate(validate, messageFormat))
}
class ValidatableStringBinding(b: FieldDescriptor[String]) {
def notBlank: FieldDescriptor[String] = notBlank()
def notBlank(messageFormat: String = b.requiredError): FieldDescriptor[String] =
b.required.validateWith(BindingValidators.nonEmptyString(messageFormat))
def validEmail: FieldDescriptor[String] = validEmail()
def validEmail(messageFormat: String = "%s must be a valid email address."): FieldDescriptor[String] =
b.validateWith(BindingValidators.validEmail(messageFormat))
def validAbsoluteUrl(allowLocalHost: Boolean, messageFormat: String = "%s must be a valid absolute url.", schemes: Seq[String] = Seq("http", "https")): FieldDescriptor[String] =
b.validateWith(BindingValidators.validAbsoluteUrl(allowLocalHost, messageFormat, schemes))
def validUrl(allowLocalHost: Boolean, messageFormat: String = "%s must be a valid url.", schemes: Seq[String] = Seq("http", "https")): FieldDescriptor[String] =
b.validateWith(BindingValidators.validUrl(allowLocalHost, messageFormat, schemes))
def validForFormat(regex: Regex, messageFormat: String = "%s is invalid."): FieldDescriptor[String] =
b.validateWith(BindingValidators.validFormat(regex, messageFormat))
def validForConfirmation(against: Field[String], messageFormat: String = "%%s must match %s."): FieldDescriptor[String] =
b.validateWith(BindingValidators.validConfirmation(against, messageFormat))
def minLength(min: Int, messageFormat: String = "%%s must be at least %s characters long."): FieldDescriptor[String] =
b.validateWith(BindingValidators.minLength(min, messageFormat))
def enumValue(enum: Enumeration, messageFormat: String = "%%s must be one of %s."): FieldDescriptor[String] =
b.validateWith(BindingValidators.enumValue(enum, messageFormat))
}
import org.scalatra.validation.Validation
import scalaz.Validation.FlatMap._
def validate[TValue](validate: TValue => Boolean, messageFormat: String = "%s is invalid."): BindingValidator[TValue] = (s: String) => {
_ flatMap Validators.validate(s, messageFormat = messageFormat, validate = validate).validate
}
def nonEmptyString: BindingValidator[String] = nonEmptyString()
def nonEmptyString(messageFormat: String = "%s is required."): BindingValidator[String] = (s: String) => {
_ flatMap (Validation.nonEmptyString(s, _, messageFormat))
}
def notNull: BindingValidator[AnyRef] = notNull()
def notNull(messageFormat: String = "%s is required."): BindingValidator[AnyRef] = (s: String) => {
_ flatMap (Validation.notNull(s, _, messageFormat))
}
def nonEmptyCollection[TResult <: Traversable[_]]: BindingValidator[TResult] = nonEmptyCollection[TResult]()
def nonEmptyCollection[TResult <: Traversable[_]](messageFormat: String = "%s must not be empty."): BindingValidator[TResult] = (s: String) => {
_ flatMap (Validation.nonEmptyCollection(s, _, messageFormat))
}
def validEmail: BindingValidator[String] = validEmail()
def validEmail(messageFormat: String = "%s must be a valid email address."): BindingValidator[String] = (s: String) => {
_ flatMap (Validation.validEmail(s, _, messageFormat))
}
  def validAbsoluteUrl(allowLocalHost: Boolean, messageFormat: String = "%s must be a valid absolute url.", schemes: Seq[String] = Seq("http", "https")): BindingValidator[String] = (s: String) => {
_ flatMap Validators.validAbsoluteUrl(s, allowLocalHost, messageFormat, schemes).validate
}
def validUrl(allowLocalHost: Boolean, messageFormat: String = "%s must be a valid url.", schemes: Seq[String] = Seq("http", "https")): BindingValidator[String] = (s: String) => {
_ flatMap Validators.validUrl(s, allowLocalHost, messageFormat, schemes).validate
}
def validFormat(regex: Regex, messageFormat: String = "%s is invalid."): BindingValidator[String] = (s: String) => {
_ flatMap Validators.validFormat(s, regex, messageFormat).validate
}
def validConfirmation(against: Field[String], messageFormat: String = "%%s must match %s."): BindingValidator[String] = (s: String) => {
_ flatMap { Validators.validConfirmation(s, against.name, (against.value orElse against.defaultValue).orNull, messageFormat).validate }
}
def greaterThan[T <% Ordered[T]](min: T, messageFormat: String = "%%s must be greater than %s."): BindingValidator[T] = (s: String) => {
_ flatMap Validators.greaterThan(s, min, messageFormat).validate
}
def lessThan[T <% Ordered[T]](max: T, messageFormat: String = "%%s must be less than %s."): BindingValidator[T] = (s: String) => {
_ flatMap Validators.lessThan(s, max, messageFormat).validate
}
def greaterThanOrEqualTo[T <% Ordered[T]](min: T, messageFormat: String = "%%s must be greater than or equal to %s."): BindingValidator[T] = (s: String) => {
_ flatMap Validators.greaterThanOrEqualTo(s, min, messageFormat).validate
}
def lessThanOrEqualTo[T <% Ordered[T]](max: T, messageFormat: String = "%%s must be less than or equal to %s."): BindingValidator[T] = (s: String) => {
_ flatMap Validators.lessThanOrEqualTo(s, max, messageFormat).validate
}
def minLength(min: Int, messageFormat: String = "%%s must be at least %s characters long."): BindingValidator[String] = (s: String) => {
_ flatMap Validators.minLength(s, min, messageFormat).validate
}
def oneOf[TResult](messageFormat: String = "%%s must be one of %s.", expected: Seq[TResult]): BindingValidator[TResult] = (s: String) => {
_ flatMap Validators.oneOf(s, messageFormat, expected).validate
}
def enumValue(enum: Enumeration, messageFormat: String = "%%s must be one of %s."): BindingValidator[String] =
oneOf(messageFormat, enum.values.map(_.toString).toSeq)
}
class Field[A: Manifest](descr: FieldDescriptor[A], command: Command) {
val name = descr.name
def validation: FieldValidation[A] = binding.field.value.asInstanceOf[FieldValidation[A]]
def value: Option[A] = binding.field.value.toOption.asInstanceOf[Option[A]]
def defaultValue: Option[A] = descr.defaultValue
def error: Option[ValidationError] = binding.field.value.fold(_.some, _ => None)
def original = binding.original
def binding: Binding = command.bindings(name)
def isValid = validation.isSuccess
def isInvalid = validation.isFailure
def notes: String = descr.notes
def description: String = descr.description
def isRequired: Boolean = descr.isRequired
def valueSource: ValueSource.Value = descr.valueSource
def allowableValues = descr.allowableValues
def displayName: Option[String] = descr.displayName
def position: Int = descr.position
}
|
lightvector/scalatra
|
commands/src/main/scala/org/scalatra/commands/field.scala
|
Scala
|
bsd-2-clause
| 21,456
|
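A hedged sketch of how the implicit bindings above are meant to be chained from a concrete command; the `asType` field factory and the descriptor-to-field plumbing are assumed from the wider commands module and may differ in name:

class SignupCommand extends Command {
  // ValidatableStringBinding supplies notBlank and validEmail
  val email: Field[String] = asType[String]("email").notBlank.validEmail
  // ValidatableOrdered supplies the comparison validators
  val age: Field[Int] = asType[Int]("age").greaterThan(0).lessThanOrEqualTo(150)
}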
/*
* Copyright (c) 2016 LIBBLE team supervised by Dr. Wu-Jun LI at Nanjing University.
* All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package libble.examples
import libble.clustering.KMeans
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import scala.collection.mutable
/**
* Created by Aplysia_x on 2016/12/9.
*/
object testKMeans {
def main(args: Array[String]) {
if (args.length < 1) {
System.err.println("Usage: ~ path:String --k=Int --maxIters=Int --stopBound=Double")
System.exit(1)
}
    //    System.setProperty("hadoop.home.dir", "D:\\Program Files\\hadoop-2.6.0")
val optionsList = args.drop(1).map { arg =>
arg.dropWhile(_ == '-').split('=') match {
case Array(opt, v) => (opt -> v)
case _ => throw new IllegalArgumentException("Invalid argument: " + arg)
}
}
val options = mutable.Map(optionsList: _*)
Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)
val conf = new SparkConf()
.setAppName("My Test Kmeans")
val sc = new SparkContext(conf)
val k = options.remove("k").map(_.toInt).getOrElse(10)
val maxIters = options.remove("maxIters").map(_.toInt).getOrElse(10)
val stopBound = options.remove("stopBound").map(_.toDouble).getOrElse(0.0001)
import libble.context.implicits.sc2LibContext
val training = sc.loadLIBBLEFile(args(0))
val m = new KMeans(k, maxIters, stopBound)
val data = training.map(e => (e.label, e.features))
m.train(data)
}
}
|
syh6585/LIBBLE-Spark
|
src/main/scala/examples/testKMeans.scala
|
Scala
|
apache-2.0
| 2,131
|
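The flag-parsing idiom above (dropWhile '-' then split on '=') is plain Scala; a minimal runnable sketch without Spark, with a default mirroring the example:

object ArgParseSketch {
  def main(args: Array[String]): Unit = {
    val demo = Array("--k=3", "--maxIters=50")
    val options = scala.collection.mutable.Map(demo.map { arg =>
      arg.dropWhile(_ == '-').split('=') match {
        case Array(opt, v) => opt -> v
        case _             => throw new IllegalArgumentException("Invalid argument: " + arg)
      }
    }: _*)
    val k = options.remove("k").map(_.toInt).getOrElse(10) // default when the flag is absent
    println(s"k = $k, remaining = $options")
  }
}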
package mr.merc.map.hex
import mr.merc.map.hex.Direction._
private[hex] case class CubeHex(x: Int, y: Int, z: Int) {
def toHex: Hex = {
val q = x
val r = z + (x - (x & 1)) / 2
new Hex(q, r)
}
def toAxialHex: AxialHex = AxialHex(x, z)
private lazy val cubeDirections = Map(SE -> (+1, -1, 0), NE -> (+1, 0, -1), N -> (0, +1, -1),
NW -> (-1, +1, 0), SW -> (-1, 0, +1), S -> (0, -1, +1))
def neighbour(direction: Direction, step:Int = 1):CubeHex = {
val (xx, yy, zz) = cubeDirections(direction)
CubeHex(x + step * xx, y + step * yy, z + step * zz)
}
}
|
RenualdMarch/merc
|
src/main/scala/mr/merc/map/hex/CubeHex.scala
|
Scala
|
gpl-3.0
| 589
|
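toHex above implements the standard cube-to-offset (odd-q) conversion, which relies on the invariant x + y + z == 0; the same arithmetic in a self-contained sketch:

case class Cube(x: Int, y: Int, z: Int) {
  require(x + y + z == 0, "cube coordinates must sum to zero")
  // identical formula to CubeHex.toHex: q = x, r = z + (x - (x & 1)) / 2
  def toOddQOffset: (Int, Int) = (x, z + (x - (x & 1)) / 2)
}
// Cube(2, -3, 1).toOddQOffset == (2, 2)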
class Foo0 extends (() => Double) {
def apply() = 5.0d
}
class Foo1 extends (Double => Double) {
def apply(x: Double) = x
}
object Test {
def main(args: Array[String]): Unit = {
println((new Foo0)())
println((new Foo1)(5.0d))
}
}
|
lampepfl/dotty
|
tests/run/spec-self.scala
|
Scala
|
apache-2.0
| 248
|
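Foo0 and Foo1 work because Scala's function types are ordinary traits, so any class extending Double => Double is directly applicable; a minimal standalone variant:

class Scale(factor: Double) extends (Double => Double) {
  def apply(x: Double): Double = factor * x
}
// (new Scale(2.0))(21.0) == 42.0, and new Scale(2.0) can be passed wherever a Double => Double is expected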
package example
import org.scalatest._
import skinny.jackson.JSONStringOps
case class Samples(samples: Seq[Sample])
case class Sample(id: Long, firstName: String)
case class SamplePerson(name: Option[String] = None, parent: SamplePerson, children: Seq[SamplePerson] = Nil)
class JSONOperation2Spec extends FlatSpec with JSONStringOps with Matchers {
def toJSONString1 = toJSONString(Sample(1, "Alice"))
def toJSONString2 = toJSONString(List(Sample(1, "Alice"), Sample(2, "Bob")))
def toJSONString3 = toPrettyJSONString(List(Sample(1, "Alice"), Sample(2, "Bob")))
def toJSONString4 = toJSONString(Sample(1, "Alice"), false)
def toJSONString5 = toJSONString(List(Sample(1, "Alice"), Sample(2, "Bob")), false)
def toJSONString6 = toPrettyJSONString(List(Sample(1, "Alice"), Sample(2, "Bob")), false)
val alice = SamplePerson(Some("Alice"), null)
val bob = SamplePerson(Some("Bob"), alice, Nil)
val chris = SamplePerson(Some("Chris"), alice, Seq(bob))
val dennis = SamplePerson(Some("Dennis"), alice, Seq(bob, chris))
def toJSONString7 = toJSONString(dennis)
def fromJSON1: Option[Sample] = fromJSONString[Sample]("""{"id":1,"first_name":"Alice"}""").toOption
def fromJSON2: Option[Samples] = fromJSONString[Samples]("""{"samples":[{"id":1,"first_name":"Alice"},{"id":2,"first_name":"Bob"}]}""").toOption
def fromJSON3: Option[Sample] = fromJSONString[Sample]("""{"id":1,"firstName":"Alice"}""", false).toOption
def fromJSON4: Option[Seq[Sample]] = fromJSONString[Seq[Sample]]("""[{"id":1,"firstName":"Alice"},{"id":2,"firstName":"Bob"}]""", false).toOption
def fromJSON5: Option[SamplePerson] = fromJSONString[SamplePerson](
"""{"name":"Dennis","parent":{"name":"Alice","parent":null,"children":[]},"children":[{"name":"Bob","parent":{"name":"Alice","parent":null,"children":[]},"children":[]},{"name":"Chris","parent":{"name":"Alice","parent":null,"children":[]},"children":[{"name":"Bob","parent":{"name":"Alice","parent":null,"children":[]},"children":[]}]}]}"""
).toOption
it should "have toJSONString 1" in {
toJSONString1 should equal("""{"id":1,"first_name":"Alice"}""")
}
it should "have toJSONString 2" in {
toJSONString2 should equal("""[{"id":1,"first_name":"Alice"},{"id":2,"first_name":"Bob"}]""")
}
it should "have toJSONString 3" in {
toJSONString3 should equal(
"""[ {
| "id" : 1,
| "first_name" : "Alice"
|}, {
| "id" : 2,
| "first_name" : "Bob"
|} ]""".stripMargin)
}
it should "have toJSONString 4" in {
toJSONString4 should equal("""{"id":1,"firstName":"Alice"}""")
}
it should "have toJSONString 5" in {
toJSONString5 should equal("""[{"id":1,"firstName":"Alice"},{"id":2,"firstName":"Bob"}]""")
}
it should "have toJSONString 6" in {
toJSONString6 should equal(
"""[ {
| "id" : 1,
| "firstName" : "Alice"
|}, {
| "id" : 2,
| "firstName" : "Bob"
|} ]""".stripMargin)
}
it should "have toJSONString 7" in {
toJSONString7 should equal(
"""{"name":"Dennis","parent":{"name":"Alice","parent":null,"children":[]},"children":[{"name":"Bob","parent":{"name":"Alice","parent":null,"children":[]},"children":[]},{"name":"Chris","parent":{"name":"Alice","parent":null,"children":[]},"children":[{"name":"Bob","parent":{"name":"Alice","parent":null,"children":[]},"children":[]}]}]}""")
}
it should "have fromJSONString 1" in {
fromJSON1.get should equal(Sample(1, "Alice"))
}
it should "have fromJSONString 2" in {
fromJSON2.get should equal(Samples(Seq(Sample(1, "Alice"), Sample(2, "Bob"))))
}
it should "have fromJSONString 3" in {
fromJSON3.get should equal(Sample(1, "Alice"))
}
it should "have fromJSONString 4" in {
fromJSON4.get should equal(Seq(Sample(1, "Alice"), Sample(2, "Bob")))
}
it should "have fromJSONString 5" in {
fromJSON5.get should equal(dennis)
}
}
|
xerial/skinny-micro
|
micro-jackson/src/test/scala/example/JSONOperation2Spec.scala
|
Scala
|
bsd-2-clause
| 3,951
|
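The boolean argument exercised above toggles snake_case key conversion; a sketch of the round trip inside a class mixing in JSONStringOps, using the signatures exactly as the spec does:

val snake = toJSONString(Sample(1, "Alice"))          // {"id":1,"first_name":"Alice"}
val camel = toJSONString(Sample(1, "Alice"), false)   // {"id":1,"firstName":"Alice"}
val back  = fromJSONString[Sample](snake).toOption    // Some(Sample(1, "Alice"))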
package se.gigurra.leavu3.datamodel
import com.github.gigurra.heisenberg.MapData._
import com.github.gigurra.heisenberg.{Schema, Parsed}
case class FlightModel(source: SourceData = Map.empty) extends SafeParsed[FlightModel.type] {
/*val pitch = parse(schema.pitch).toDegrees
val roll = parse(schema.roll).toDegrees
val trueHeading = parse(schema.trueHeading).toDegrees
val magneticHeading = parse(schema.magneticHeading).toDegrees
val angleOfAttack = parse(schema.angleOfAttack).toDegrees
*/
val velocity = parse(schema.velocity)
/* val acceleration = parse(schema.acceleration)
val indicatedAirspeed = parse(schema.indicatedAirspeed)
val trueAirspeed = parse(schema.trueAirspeed)
val verticalVelocity = parse(schema.verticalVelocity)
val machNumber = parse(schema.machNumber)
val altitudeAGL = parse(schema.altitudeAGL)
val altitudeAsl = parse(schema.altitudeAsl)
val windVelocity = parse(schema.windVelocity)
val airPressure = parse(schema.airPressure)
val slipBallDeviation = parse(schema.slipBallDeviation)
val ilsLocalizer = parse(schema.ilsLocalizer)
val ilsGlideslope = parse(schema.ilsGlideslope)*/
}
object FlightModel extends Schema[FlightModel] {
val pitch = required[Float]("pitch", default = 0)
val roll = required[Float]("roll", default = 0)
val trueHeading = required[Float]("heading", default = 0)
val magneticHeading = required[Float]("magneticYaw", default = 0)
val angleOfAttack = required[Float]("AOA", default = 0)
val velocity = required[Vec3]("vectorVelocity", default = Vec3())
val acceleration = required[Vec3]("acc", default = Vec3())
val indicatedAirspeed = required[Float]("IAS", default = 0)
val trueAirspeed = required[Float]("TAS", default = 0)
val verticalVelocity = required[Float]("vv", default = 0)
val machNumber = required[Float]("mach", default = 0)
val altitudeAGL = required[Float]("altitudeAboveGroundLevel", default = 0)
val altitudeAsl = required[Float]("altitudeAboveSeaLevel", default = 0)
val windVelocity = required[Vec3]("windVectorVelocity", default = Vec3())
val airPressure = required[Float]("atmospherePressure", default = 0)
val slipBallDeviation = required[Float]("slipBallPosition", default = 0)
val ilsLocalizer = required[Float]("sideDeviation", default = 0)
val ilsGlideslope = required[Float]("glideDeviation", default = 0)
}
|
GiGurra/leavu3
|
src/main/scala/se/gigurra/leavu3/datamodel/FlightModel.scala
|
Scala
|
mit
| 2,561
|
package com.twitter.streaming
import com.twitter.finatra.http.HttpServer
import com.twitter.finatra.http.filters.CommonFilters
import com.twitter.finatra.http.routing.HttpRouter
object StreamingServerMain extends StreamingServer
class StreamingServer extends HttpServer {
override def streamRequest = true
override def configureHttp(router: HttpRouter) {
router
.filter[CommonFilters]
.add[StreamingController]
}
}
|
syamantm/finatra
|
examples/streaming-example/src/main/scala/com/twitter/streaming/StreamingServer.scala
|
Scala
|
apache-2.0
| 441
|
package com.twitter.finagle.service
import com.twitter.finagle._
import com.twitter.util.Future
import java.util.concurrent.RejectedExecutionException
import java.util.concurrent.atomic.AtomicInteger
/**
* A module which allows clients to limit the number of pending
* requests per connection.
*/
object PendingRequestFilter {
val role = Stack.Role("PendingRequestLimit")
case class Param(limit: Option[Int])
object Param {
implicit val param = Stack.Param(Param(None))
}
private[finagle] def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] =
new Stack.Module1[Param, ServiceFactory[Req, Rep]] {
val role = PendingRequestFilter.role
val description = "Restrict number of pending requests"
// n.b. we can't simply compose the `PendingRequestFilter` onto the `next`
// service factory since we need a distinct filter instance to provide
// distinct state per-session.
def make(_param: Param, next: ServiceFactory[Req, Rep]) = _param match {
case Param(Some(limit)) =>
next.map(new PendingRequestFilter[Req, Rep](limit).andThen(_))
case Param(None) => next
}
}
val PendingRequestsLimitExceeded =
new RejectedExecutionException("Pending request limit exceeded")
}
/**
* A filter which limits the number of pending requests to a service.
*/
private[finagle] class PendingRequestFilter[Req, Rep](limit: Int) extends SimpleFilter[Req, Rep] {
import PendingRequestFilter._
if (limit < 1)
throw new IllegalArgumentException(s"request limit must be greater than zero, saw $limit")
private[this] val pending = new AtomicInteger(0)
private[this] val decFn: Any => Unit = { _: Any => pending.decrementAndGet() }
def apply(req: Req, service: Service[Req, Rep]): Future[Rep] =
// N.B. There's a race on the sad path of this filter when we increment and
// then immediately decrement the atomic int which can cause services
// vacillating around their pending request limit to reject valid requests.
    // We tolerate this because it will only further shed traffic
    // from a session operating at its limits.
if (pending.incrementAndGet() > limit) {
pending.decrementAndGet()
Future.exception(Failure.rejected(PendingRequestsLimitExceeded))
} else {
service(req).respond(decFn)
}
}
|
liamstewart/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/service/PendingRequestFilter.scala
|
Scala
|
apache-2.0
| 2,356
|
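The increment-then-check gate above is reusable outside Finagle; the same pattern in miniature, keeping the deliberate benign race the original comment describes:

import java.util.concurrent.atomic.AtomicInteger

class Gate(limit: Int) {
  private val pending = new AtomicInteger(0)
  // returns None when the limit is exceeded, mirroring Failure.rejected above
  def tryRun[A](body: => A): Option[A] =
    if (pending.incrementAndGet() > limit) { pending.decrementAndGet(); None }
    else try Some(body) finally pending.decrementAndGet()
}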
package javaee6.web.rest
import javax.ws.rs.ApplicationPath
import javax.ws.rs.core.Application
@ApplicationPath("/rest")
class JaxrsApplication extends Application
|
kazuhira-r/javaee6-scala-examples
|
arquillian-remote/src/main/scala/javaee6/web/rest/JaxrsApplication.scala
|
Scala
|
mit
| 167
|
package AccurateML.nonLinearRegression
import breeze.linalg.{DenseMatrix => BDM, DenseVector => BDV, _}
import org.apache.spark.mllib.linalg.{DenseMatrix, Vector, Vectors}
import org.apache.spark.rdd._
import AccurateML.blas.ZFBLAS
import scala.collection.mutable.ArrayBuffer
/**
* @author Nodalpoint
* Implementation of the sum-of-squares objective function using Spark RDDs
*
* Properties
* model: NonlinearModel -> The nonlinear model that defines the function
* data: RDD[(Double, Vector)] -> The training data used to calculate the loss in a form of a Spark RDD
* that contains target, input pairs.
* [(t1, x1), (t2, x2), ..., (tm, xm)]
*/
class SumOfSquaresFunctionRDD(fitmodel: NonlinearModel, xydata: RDD[(Double, Vector)]) extends SumOfSquaresFunction {
var model: NonlinearModel = fitmodel
var dim = fitmodel.getDim()
var data: RDD[(Double, Vector)] = xydata
var m: Int = data.cache().count().toInt
var n: Int = data.first()._2.size
/**
* Return the objective function dimensionality which is essentially the model's dimensionality
*/
def getDim(): Int = {
return this.dim
}
/**
   * This method is inherited from Breeze's DiffFunction. Given an input vector of weights it returns the
* objective function and the first order derivative.
* It operates using treeAggregate action on the training pair data.
* It is essentially the same implementation as the one used for the Stochastic Gradient Descent
* Partial subderivative vectors are calculated in the map step
* val per = fitModel.eval(w, feat)
* val gper = fitModel.grad(w, feat)
* and are aggregated by summation in the reduce part.
*/
def calculate(weights: BDV[Double]): (Double, BDV[Double]) = {
assert(dim == weights.length)
val bcW = data.context.broadcast(weights)
val fitModel: NonlinearModel = model
val n: Int = dim
val bcDim = data.context.broadcast(dim)
val (grad, f) = data.treeAggregate((Vectors.zeros(n), 0.0))(
seqOp = (c, v) => (c, v) match {
case ((grad, loss), (label, features)) =>
//fitModel.setWeights(bcW.value)
val feat: BDV[Double] = new BDV[Double](features.toArray)
val w: BDV[Double] = new BDV[Double](bcW.value.toArray)
val per = fitModel.eval(w, feat)
val gper = fitModel.grad(w, feat)
var f1 = 0.5 * Math.pow(label - per, 2)
var g1 = 2.0 * (per - label) * gper
val gradBDV = new BDV[Double](grad.toArray)
var newgrad = Vectors.dense((g1 + gradBDV).toArray)
(newgrad, loss + f1)
},
combOp = (c1, c2) => (c1, c2) match {
case ((grad1, loss1), (grad2, loss2)) =>
//axpy(1.0, grad2, grad1)
val grad1BDV = new BDV[Double](grad1.toArray)
val grad2BDV = new BDV[Double](grad2.toArray)
var newgrad = Vectors.dense((grad1BDV + grad2BDV).toArray)
(newgrad, loss1 + loss2)
})
val gradBDV = new BDV[Double](grad.toArray)
return (f, gradBDV)
}
def calculate(weights: BDV[Double], itN: Int): (Double, BDV[Double],Array[Double],Int) = {
System.err.println("SumOfSquaresFunctionRDD.calculate(w,i)")
assert(dim == weights.length)
val bcW = data.context.broadcast(weights)
val fitModel: NonlinearModel = model
val n: Int = dim
val bcDim = data.context.broadcast(dim)
val (grad, f,gn) = data.treeAggregate((Vectors.zeros(n), 0.0, new ArrayBuffer[Double]))(
seqOp = (c, v) => (c, v) match {
case ((grad, loss,gn), (label, features)) =>
//fitModel.setWeights(bcW.value)
val feat: BDV[Double] = new BDV[Double](features.toArray)
val w: BDV[Double] = new BDV[Double](bcW.value.toArray)
val per = fitModel.eval(w, feat)
val gper = fitModel.grad(w, feat)
var f1 = 0.5 * Math.pow(label - per, 2)
var g1 = 2.0 * (per - label) * gper
val gradBDV = new BDV[Double](grad.toArray)
var newgrad = Vectors.dense((g1 + gradBDV).toArray)
val adot=ZFBLAS.dot(Vectors.dense(g1.toArray),Vectors.dense(g1.toArray))
gn += math.sqrt(adot)
(newgrad, loss + f1,gn)
},
combOp = (c1, c2) => (c1, c2) match {
case ((grad1, loss1,gn1), (grad2, loss2,gn2)) =>
//axpy(1.0, grad2, grad1)
val grad1BDV = new BDV[Double](grad1.toArray)
val grad2BDV = new BDV[Double](grad2.toArray)
var newgrad = Vectors.dense((grad1BDV + grad2BDV).toArray)
(newgrad, loss1 + loss2,gn1++gn2)
})
val gradBDV = new BDV[Double](grad.toArray)
return (f, gradBDV,gn.toArray,0)
}
// def calculate(weights: BDV[Double], itN: Double): (Double, BDV[Double]) = {
// assert(dim == weights.length)
// val bcW = data.context.broadcast(weights)
//
// val fitModel: NonlinearModel = model
// val n: Int = dim
// val bcDim = data.context.broadcast(dim)
// val (grad, f) = data.treeAggregate((Vectors.zeros(n), 0.0))(
// seqOp = (c, v) => (c, v) match {
// case ((grad, loss), (label, features)) =>
// //fitModel.setWeights(bcW.value)
// val feat: BDV[Double] = new BDV[Double](features.toArray)
// val w: BDV[Double] = new BDV[Double](bcW.value.toArray)
// val per = fitModel.eval(w, feat)
// val gper = fitModel.grad(w, feat)
// var f1 = 0.5 * Math.pow(label - per, 2)
// var g1 = 2.0 * (per - label) * gper
//
// if(itN == -1){
// val nodes = 1 //only print 1 hidden node
// val n = 9
// val x = feat
//
// val ss = new ArrayBuffer[Double]()
// val wx = new ArrayBuffer[Double]()
// for (i <- 0 to nodes - 1) {
// var arg: Double = 0
// for (j <- 0 to n - 1) {
// arg = arg + x(j) * w(i * (n + 2) + j)
// }
// arg = arg + w(i * (n + 2) + n)
// var sig: Double = 1.0 / (1.0 + Math.exp(-arg))
//
// gper(i * (n + 2) + n + 1) = sig
// gper(i * (n + 2) + n) = w(i * (n + 2) + n + 1) * sig * (1 - sig)
// for (j <- 0 to n - 1) {
// gper(i * (n + 2) + j) = x(j) * w(i * (n + 2) + n + 1) * sig * (1 - sig)
// ss += sig * (1 - sig)
// wx += x(j) * w(i * (n + 2) + n + 1)
// }
// println(itN + ",sig*(1-sig)," + ss.mkString(","))
// println(itN + ",wx," + wx.mkString(","))
// println(itN + ",diff," + (per - label))
// println(itN + ",g1," + g1.toArray.mkString(","))
// }
// }
//
// val gradBDV = new BDV[Double](grad.toArray)
// var newgrad = Vectors.dense((g1 + gradBDV).toArray)
//
// (newgrad, loss + f1)
// },
// combOp = (c1, c2) => (c1, c2) match {
// case ((grad1, loss1), (grad2, loss2)) =>
// //axpy(1.0, grad2, grad1)
// val grad1BDV = new BDV[Double](grad1.toArray)
// val grad2BDV = new BDV[Double](grad2.toArray)
// var newgrad = Vectors.dense((grad1BDV + grad2BDV).toArray)
// (newgrad, loss1 + loss2)
// })
//
//
//
//
// val gradBDV = new BDV[Double](grad.toArray)
// return (f, gradBDV)
// }
/**
   * This method calculates the Hessian matrix approximation using the Jacobian matrix and
   * the algorithm of Wilamowski and Yu.
* It operates using treeAggregate action on the training pair data.
* Partial subhessian matrices are calculated in the map step
* val gper = fitModel.grad(w, feat)
* val hper : BDM[Double] = (gper * gper.t)
* and aggregated by summation in the reduce part.
   * Extra care is taken to transform between Breeze and Spark DenseVectors.
*/
def hessian(weights: BDV[Double]): BDM[Double] = {
val bcW = data.context.broadcast(weights)
val fitModel: NonlinearModel = model
val n: Int = dim
val bcDim = data.context.broadcast(dim)
val (hess) = data.treeAggregate((new DenseMatrix(n, n, new Array[Double](n * n))))(
seqOp = (c, v) => (c, v) match {
case ((hess), (label, features)) =>
val w: BDV[Double] = new BDV[Double](bcW.value.toArray)
val feat: BDV[Double] = new BDV[Double](features.toArray)
val gper = fitModel.grad(w, feat)
val hper: BDM[Double] = (gper * gper.t)
val hperDM: DenseMatrix = new DenseMatrix(n, n, hper.toArray)
for (i <- 0 until n * n) {
hess.values(i) = hperDM.values(i) + hess.values(i)
}
(hess)
},
combOp = (c1, c2) => (c1, c2) match {
case ((hess1), (hess2)) =>
for (i <- 0 until n * n) {
hess1.values(i) = hess1.values(i) + hess2.values(i)
}
(hess1)
})
var hessBDM: BDM[Double] = new BDM[Double](n, n, hess.toArray)
var Hpos = posDef(hessBDM)
return Hpos
}
/**
* Helper method that uses eigenvalue decomposition to enforce positive definiteness of the Hessian
*/
def posDef(H: BDM[Double]): BDM[Double] = {
var n = H.rows
var m = H.cols
var dim = model.getDim()
var Hpos: BDM[Double] = BDM.zeros(n, m)
var eigens = eigSym(H)
var oni: BDV[Double] = BDV.ones[Double](dim)
var diag = eigens.eigenvalues
var vectors = eigens.eigenvectors
//diag = diag :+ (1.0e-4)
for (i <- 0 until dim) {
if (diag(i) < 1.0e-4) {
diag(i) = 1.0e-4
}
}
var I = BDM.eye[Double](dim)
for (i <- 0 until dim) {
I(i, i) = diag(i)
}
Hpos = vectors * I * (vectors.t)
return Hpos
}
}
|
harryandlina/ARIM
|
project/nonLinearRegression/SumOfSquaresFunctionRDD.scala
|
Scala
|
apache-2.0
| 9,725
|
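Every treeAggregate call above has the same (zero, seqOp, combOp) shape; reduced to one local partition and a scalar weight, it is a fold plus a pointwise merge:

// toy model: per = w * x; loss = 0.5 * (label - per)^2; grad = 2 * (per - label) * x
val w = 0.5
val partition = Seq((1.0, 2.0), (3.0, 4.0)) // (label, feature) pairs
val (grad, loss) = partition.foldLeft((0.0, 0.0)) { // plays the role of seqOp
  case ((g, l), (label, x)) =>
    val per = w * x
    (g + 2.0 * (per - label) * x, l + 0.5 * math.pow(label - per, 2))
}
// combOp then merges per-partition pairs pointwise: (g1 + g2, l1 + l2)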
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.nio.ByteBuffer
import kafka.utils.nonthreadsafe
import kafka.api.ApiUtils._
import scala.collection.immutable.Map
import kafka.common.{ErrorMapping, TopicAndPartition}
import kafka.consumer.ConsumerConfig
import java.util.concurrent.atomic.AtomicInteger
import kafka.network.RequestChannel
case class PartitionFetchInfo(offset: Long, fetchSize: Int)
object FetchRequest {
val CurrentVersion = 0.shortValue
val DefaultMaxWait = 0
val DefaultMinBytes = 0
val DefaultCorrelationId = 0
def readFrom(buffer: ByteBuffer): FetchRequest = {
val versionId = buffer.getShort
val correlationId = buffer.getInt
val clientId = readShortString(buffer)
val replicaId = buffer.getInt
val maxWait = buffer.getInt
val minBytes = buffer.getInt
val topicCount = buffer.getInt
val pairs = (1 to topicCount).flatMap(_ => {
val topic = readShortString(buffer)
val partitionCount = buffer.getInt
(1 to partitionCount).map(_ => {
val partitionId = buffer.getInt
val offset = buffer.getLong
val fetchSize = buffer.getInt
(TopicAndPartition(topic, partitionId), PartitionFetchInfo(offset, fetchSize))
})
})
FetchRequest(versionId, correlationId, clientId, replicaId, maxWait, minBytes, Map(pairs:_*))
}
}
case class FetchRequest private[kafka] (versionId: Short = FetchRequest.CurrentVersion,
override val correlationId: Int = FetchRequest.DefaultCorrelationId,
clientId: String = ConsumerConfig.DefaultClientId,
replicaId: Int = Request.OrdinaryConsumerId,
maxWait: Int = FetchRequest.DefaultMaxWait,
minBytes: Int = FetchRequest.DefaultMinBytes,
requestInfo: Map[TopicAndPartition, PartitionFetchInfo])
extends RequestOrResponse(Some(RequestKeys.FetchKey), correlationId) {
/**
* Partitions the request info into a map of maps (one for each topic).
*/
lazy val requestInfoGroupedByTopic = requestInfo.groupBy(_._1.topic)
/**
* Public constructor for the clients
*/
def this(correlationId: Int,
clientId: String,
maxWait: Int,
minBytes: Int,
requestInfo: Map[TopicAndPartition, PartitionFetchInfo]) {
this(versionId = FetchRequest.CurrentVersion,
correlationId = correlationId,
clientId = clientId,
replicaId = Request.OrdinaryConsumerId,
maxWait = maxWait,
minBytes= minBytes,
requestInfo = requestInfo)
}
def writeTo(buffer: ByteBuffer) {
buffer.putShort(versionId)
buffer.putInt(correlationId)
writeShortString(buffer, clientId)
buffer.putInt(replicaId)
buffer.putInt(maxWait)
buffer.putInt(minBytes)
buffer.putInt(requestInfoGroupedByTopic.size) // topic count
requestInfoGroupedByTopic.foreach {
case (topic, partitionFetchInfos) =>
writeShortString(buffer, topic)
buffer.putInt(partitionFetchInfos.size) // partition count
partitionFetchInfos.foreach {
case (TopicAndPartition(_, partition), PartitionFetchInfo(offset, fetchSize)) =>
buffer.putInt(partition)
buffer.putLong(offset)
buffer.putInt(fetchSize)
}
}
}
def sizeInBytes: Int = {
2 + /* versionId */
4 + /* correlationId */
shortStringLength(clientId) +
4 + /* replicaId */
4 + /* maxWait */
4 + /* minBytes */
4 + /* topic count */
requestInfoGroupedByTopic.foldLeft(0)((foldedTopics, currTopic) => {
val (topic, partitionFetchInfos) = currTopic
foldedTopics +
shortStringLength(topic) +
4 + /* partition count */
partitionFetchInfos.size * (
4 + /* partition id */
8 + /* offset */
4 /* fetch size */
)
})
}
def isFromFollower = replicaId != Request.OrdinaryConsumerId && replicaId != Request.DebuggingConsumerId
def isFromOrdinaryConsumer = replicaId == Request.OrdinaryConsumerId
def isFromLowLevelConsumer = replicaId == Request.DebuggingConsumerId
def numPartitions = requestInfo.size
override def toString(): String = {
val fetchRequest = new StringBuilder
fetchRequest.append("Name: " + this.getClass.getSimpleName)
fetchRequest.append("; Version: " + versionId)
fetchRequest.append("; CorrelationId: " + correlationId)
fetchRequest.append("; ClientId: " + clientId)
fetchRequest.append("; ReplicaId: " + replicaId)
fetchRequest.append("; MaxWait: " + maxWait + " ms")
fetchRequest.append("; MinBytes: " + minBytes + " bytes")
fetchRequest.append("; RequestInfo: " + requestInfo.mkString(","))
fetchRequest.toString()
}
override def handleError(e: Throwable, requestChannel: RequestChannel, request: RequestChannel.Request): Unit = {
val fetchResponsePartitionData = requestInfo.map {
case (topicAndPartition, data) =>
(topicAndPartition, FetchResponsePartitionData(ErrorMapping.codeFor(e.getClass.asInstanceOf[Class[Throwable]]), -1, null))
}
val errorResponse = FetchResponse(correlationId, fetchResponsePartitionData)
requestChannel.sendResponse(new RequestChannel.Response(request, new FetchResponseSend(errorResponse)))
}
}
@nonthreadsafe
class FetchRequestBuilder() {
private val correlationId = new AtomicInteger(0)
private val versionId = FetchRequest.CurrentVersion
private var clientId = ConsumerConfig.DefaultClientId
private var replicaId = Request.OrdinaryConsumerId
private var maxWait = FetchRequest.DefaultMaxWait
private var minBytes = FetchRequest.DefaultMinBytes
private val requestMap = new collection.mutable.HashMap[TopicAndPartition, PartitionFetchInfo]
def addFetch(topic: String, partition: Int, offset: Long, fetchSize: Int) = {
requestMap.put(TopicAndPartition(topic, partition), PartitionFetchInfo(offset, fetchSize))
this
}
def clientId(clientId: String): FetchRequestBuilder = {
this.clientId = clientId
this
}
/**
* Only for internal use. Clients shouldn't set replicaId.
*/
private[kafka] def replicaId(replicaId: Int): FetchRequestBuilder = {
this.replicaId = replicaId
this
}
def maxWait(maxWait: Int): FetchRequestBuilder = {
this.maxWait = maxWait
this
}
def minBytes(minBytes: Int): FetchRequestBuilder = {
this.minBytes = minBytes
this
}
def build() = {
val fetchRequest = FetchRequest(versionId, correlationId.getAndIncrement, clientId, replicaId, maxWait, minBytes, requestMap.toMap)
requestMap.clear()
fetchRequest
}
}
|
kavink92/kafka-0.8.0-beta1-src
|
core/src/main/scala/kafka/api/FetchRequest.scala
|
Scala
|
apache-2.0
| 7,555
|
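A hedged sketch of driving FetchRequestBuilder as defined above; the topic, partition and sizes are illustrative values only:

val request = new FetchRequestBuilder()
  .clientId("example-client")
  .maxWait(500)   // ms to wait for minBytes to accumulate
  .minBytes(1)
  .addFetch("my-topic", partition = 0, offset = 42L, fetchSize = 1024 * 1024)
  .build()        // also clears the request map and bumps the correlation id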
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.stream.table
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.expressions.utils.{Func18, Func20, RichFunc2}
import org.apache.flink.table.runtime.utils.{StreamITCase, StreamTestData, _}
import org.apache.flink.table.utils._
import org.apache.flink.test.util.AbstractTestBase
import org.apache.flink.types.Row
import org.junit.Assert._
import org.junit.{Before, Rule, Test}
import java.lang.{Boolean => JBoolean}
import scala.collection.mutable
class CorrelateITCase extends AbstractTestBase {
@Rule
def usesLegacyRows: LegacyRowResource = LegacyRowResource.INSTANCE
val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
val settings: EnvironmentSettings = EnvironmentSettings.newInstance().useOldPlanner().build()
val tEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, settings)
@Before
def clear(): Unit = {
StreamITCase.clear
}
@Test
def testCrossJoin(): Unit = {
val t = testData(env).toTable(tEnv).as("a", "b", "c")
val func0 = new TableFunc0
val pojoFunc0 = new PojoTableFunc()
val result = t
.joinLateral(func0('c) as('d, 'e))
.select('c, 'd, 'e)
.joinLateral(pojoFunc0('c))
.where('age > 20)
.select('c, 'name, 'age)
.toAppendStream[Row]
result.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList("Jack#22,Jack,22", "Anna#44,Anna,44")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testLeftOuterJoinWithoutPredicates(): Unit = {
val t = testData(env).toTable(tEnv).as("a", "b", "c")
val func0 = new TableFunc0
val result = t
.leftOuterJoinLateral(func0('c) as('d, 'e))
.select('c, 'd, 'e)
.toAppendStream[Row]
result.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList(
"nosharp,null,null", "Jack#22,Jack,22",
"John#19,John,19", "Anna#44,Anna,44")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
/**
* Common join predicates are temporarily forbidden (see FLINK-7865).
*/
@Test (expected = classOf[ValidationException])
def testLeftOuterJoinWithPredicates(): Unit = {
val t = testData(env).toTable(tEnv).as("a", "b", "c")
val func0 = new TableFunc0
val result = t
.leftOuterJoinLateral(func0('c) as ('s, 'l), 'a === 'l)
.select('c, 's, 'l)
.toAppendStream[Row]
result.addSink(new StreamITCase.StringSink[Row])
env.execute()
    val expected = "John#19,null,null\n" + "John#22,null,null\n" + "Anna44,null,null\n" +
      "nosharp,null,null"
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testUserDefinedTableFunctionWithScalarFunction(): Unit = {
val t = testData(env).toTable(tEnv).as("a", "b", "c")
val func0 = new TableFunc0
val result = t
.joinLateral(func0('c) as('d, 'e))
.where(Func18('d, "J"))
.select('c, 'd, 'e)
.toAppendStream[Row]
result.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList("Jack#22,Jack,22", "John#19,John,19")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testUserDefinedTableFunctionWithParameter(): Unit = {
val tableFunc1 = new RichTableFunc1
tEnv.registerFunction("RichTableFunc1", tableFunc1)
UserDefinedFunctionTestUtils.setJobParameters(env, Map("word_separator" -> " "))
StreamITCase.testResults = mutable.MutableList()
val result = StreamTestData.getSmall3TupleDataStream(env)
.toTable(tEnv, 'a, 'b, 'c)
.joinLateral(tableFunc1('c) as 's)
.select('a, 's)
val results = result.toAppendStream[Row]
results.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList("3,Hello", "3,world")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testUserDefinedTableFunctionWithUserDefinedScalarFunction(): Unit = {
val tableFunc1 = new RichTableFunc1
val richFunc2 = new RichFunc2
tEnv.registerFunction("RichTableFunc1", tableFunc1)
tEnv.registerFunction("RichFunc2", richFunc2)
UserDefinedFunctionTestUtils.setJobParameters(
env,
Map("word_separator" -> "#", "string.value" -> "test"))
StreamITCase.testResults = mutable.MutableList()
val result = StreamTestData.getSmall3TupleDataStream(env)
.toTable(tEnv, 'a, 'b, 'c)
.joinLateral(tableFunc1(richFunc2('c)) as 's)
.select('a, 's)
val results = result.toAppendStream[Row]
results.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList(
"1,Hi",
"1,test",
"2,Hello",
"2,test",
"3,Hello world",
"3,test")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testTableFunctionConstructorWithParams(): Unit = {
val t = testData(env).toTable(tEnv).as("a", "b", "c")
val config = Map("key1" -> "value1", "key2" -> "value2")
val func30 = new TableFunc3(null)
val func31 = new TableFunc3("OneConf_")
val func32 = new TableFunc3("TwoConf_", config)
val result = t
.joinLateral(func30('c) as('d, 'e))
.select('c, 'd, 'e)
.joinLateral(func31('c) as ('f, 'g))
.select('c, 'd, 'e, 'f, 'g)
.joinLateral(func32('c) as ('h, 'i))
.select('c, 'd, 'f, 'h, 'e, 'g, 'i)
.toAppendStream[Row]
result.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList(
"Anna#44,Anna,OneConf_Anna,TwoConf__key=key1_value=value1_Anna,44,44,44",
"Anna#44,Anna,OneConf_Anna,TwoConf__key=key2_value=value2_Anna,44,44,44",
"Jack#22,Jack,OneConf_Jack,TwoConf__key=key1_value=value1_Jack,22,22,22",
"Jack#22,Jack,OneConf_Jack,TwoConf__key=key2_value=value2_Jack,22,22,22",
"John#19,John,OneConf_John,TwoConf__key=key1_value=value1_John,19,19,19",
"John#19,John,OneConf_John,TwoConf__key=key2_value=value2_John,19,19,19"
)
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testTableFunctionWithVariableArguments(): Unit = {
val varArgsFunc0 = new VarArgsFunc0
tEnv.registerFunction("VarArgsFunc0", varArgsFunc0)
val result = testData(env)
.toTable(tEnv, 'a, 'b, 'c)
.select('c)
.joinLateral(varArgsFunc0("1", "2", 'c))
result.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList(
"Anna#44,1",
"Anna#44,2",
"Anna#44,Anna#44",
"Jack#22,1",
"Jack#22,2",
"Jack#22,Jack#22",
"John#19,1",
"John#19,2",
"John#19,John#19",
"nosharp,1",
"nosharp,2",
"nosharp,nosharp")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testRowType(): Unit = {
val row = Row.of(
12.asInstanceOf[Integer],
true.asInstanceOf[JBoolean],
Row.of(1.asInstanceOf[Integer], 2.asInstanceOf[Integer], 3.asInstanceOf[Integer])
)
val rowType = Types.ROW(Types.INT, Types.BOOLEAN, Types.ROW(Types.INT, Types.INT, Types.INT))
val in = env.fromElements(row, row)(rowType).toTable(tEnv).as("a", "b", "c")
val tableFunc5 = new TableFunc5()
val result = in
.joinLateral(tableFunc5('c) as ('f0, 'f1, 'f2))
.select('c, 'f2)
result.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList(
"1,2,3,3",
"1,2,3,3")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
@Test
def testTableFunctionCollectorOpenClose(): Unit = {
val t = testData(env).toTable(tEnv).as("a", "b", "c")
val func0 = new TableFunc0
val func20 = new Func20
val result = t
.joinLateral(func0('c) as('d, 'e))
.where(func20('e))
.select('c, 'd, 'e)
.toAppendStream[Row]
result.addSink(new StreamITCase.StringSink[Row])
env.execute()
    val expected = Seq(
"Jack#22,Jack,22",
"John#19,John,19",
"Anna#44,Anna,44"
)
assertEquals(
expected.sorted,
StreamITCase.testResults.sorted
)
}
@Test
def testTableFunctionCollectorInit(): Unit = {
val t = testData(env).toTable(tEnv).as("a", "b", "c")
val func0 = new TableFunc0
// this case will generate 'timestamp' member field and 'DateFormatter'
val result = t
.joinLateral(func0('c) as('d, 'e))
.where(dateFormat(currentTimestamp(), "yyyyMMdd") === 'd)
.select('c, 'd, 'e)
.toAppendStream[Row]
result.addSink(new StreamITCase.StringSink[Row])
env.execute()
assertEquals(
Seq(),
StreamITCase.testResults.sorted
)
}
@Test
def testFlatMap(): Unit = {
val func2 = new TableFunc2
val ds = testData(env).toTable(tEnv, 'a, 'b, 'c)
// test non alias
.flatMap(func2('c))
.select('f0, 'f1)
// test the output field name of flatMap is the same as the field name of the input table
.flatMap(func2(concat('f0, "#")))
.as ("f0", "f1")
.select('f0, 'f1)
val results = ds.toAppendStream[Row]
results.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = mutable.MutableList(
"Jack,4",
"22,2",
"John,4",
"19,2",
"Anna,4",
"44,2")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
private def testData(
env: StreamExecutionEnvironment)
: DataStream[(Int, Long, String)] = {
val data = new mutable.MutableList[(Int, Long, String)]
data.+=((1, 1L, "Jack#22"))
data.+=((2, 2L, "John#19"))
data.+=((3, 2L, "Anna#44"))
data.+=((4, 3L, "nosharp"))
env.fromCollection(data)
}
}
|
clarkyzl/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/stream/table/CorrelateITCase.scala
|
Scala
|
apache-2.0
| 10,853
|
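TableFunc0 is defined in the shared test utilities and not shown here; judging from the expected rows, a table function with the same behaviour (split "name#age", emit nothing otherwise) would look like this sketch:

import org.apache.flink.table.functions.TableFunction

class SplitHash extends TableFunction[(String, Int)] {
  def eval(s: String): Unit = s.split("#") match {
    case Array(name, age) => collect((name, age.toInt))
    case _                => () // "nosharp" emits no row, hence the nulls in the left outer join
  }
}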
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.impl.schema
import slamdata.Predef._
import quasar.ejson.{EJson, EncodeEJson, Fixed}
import quasar.ejson.implicits._
import matryoshka.{Corecursive, Recursive}
import monocle.macros.Lenses
import scalaz.{Cord, Equal, IMap, Show}
import scalaz.std.tuple._
import scalaz.syntax.show._
@Lenses
final case class Occurred[N, A](occurrence: N, value: A)
object Occurred {
implicit def encodeEJson[N: EncodeEJson, A: EncodeEJson]: EncodeEJson[Occurred[N, A]] =
new EncodeEJson[Occurred[N, A]] {
def encode[J](o: Occurred[N, A])(implicit JC: Corecursive.Aux[J, EJson], JR: Recursive.Aux[J, EJson]): J = {
val J = Fixed[J]
val v = o.value.asEJson[J]
J.imap
.modifyOption(_.insert(J.str(OccurrenceKey), o.occurrence.asEJson[J]))
.apply(v)
.getOrElse(J.imap(IMap(
J.str(OccurrenceKey) -> o.occurrence.asEJson[J],
J.str(ValueKey) -> v)))
}
}
implicit def equal[N: Equal, A: Equal]: Equal[Occurred[N, A]] =
Equal.equalBy {
case Occurred(n, a) => (n, a)
}
implicit def show[N: Show, A: Show]: Show[Occurred[N, A]] =
Show.show {
case Occurred(n, a) =>
Cord("Occurred(") ++ n.show ++ Cord(", ") ++ a.show ++ Cord(")")
}
////
private val OccurrenceKey = "occurrence"
private val ValueKey = "value"
}
|
slamdata/slamengine
|
impl/src/main/scala/quasar/impl/schema/Occurred.scala
|
Scala
|
apache-2.0
| 1,958
|
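The Equal and Show instances above use the standard delegate-to-a-tuple trick; the same pattern for any small case class, as a self-contained sketch:

import scalaz.Equal
import scalaz.std.anyVal._   // Equal[Int]
import scalaz.std.tuple._    // Equal[(Int, Int)]

final case class Pair(n: Int, m: Int)
implicit val pairEqual: Equal[Pair] = Equal.equalBy(p => (p.n, p.m))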
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import scala.collection.mutable
import org.apache.spark.sql.catalyst.expressions.{Alias, And, Attribute, AttributeReference, Cast, Expression, IntegerLiteral, NamedExpression, PredicateHelper, ProjectionOverSchema, SubqueryExpression}
import org.apache.spark.sql.catalyst.expressions.aggregate
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.planning.ScanOperation
import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, Filter, LeafNode, Limit, LocalLimit, LogicalPlan, Project, Sample, Sort}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.connector.expressions.SortOrder
import org.apache.spark.sql.connector.expressions.aggregate.{Aggregation, GeneralAggregateFunc}
import org.apache.spark.sql.connector.read.{Scan, ScanBuilder, SupportsPushDownAggregates, SupportsPushDownFilters, V1Scan}
import org.apache.spark.sql.execution.datasources.DataSourceStrategy
import org.apache.spark.sql.sources
import org.apache.spark.sql.types.{DataType, LongType, StructType}
import org.apache.spark.sql.util.SchemaUtils._
object V2ScanRelationPushDown extends Rule[LogicalPlan] with PredicateHelper {
import DataSourceV2Implicits._
def apply(plan: LogicalPlan): LogicalPlan = {
applyColumnPruning(
applyLimit(pushDownAggregates(pushDownFilters(pushDownSample(createScanBuilder(plan))))))
}
private def createScanBuilder(plan: LogicalPlan) = plan.transform {
case r: DataSourceV2Relation =>
ScanBuilderHolder(r.output, r, r.table.asReadable.newScanBuilder(r.options))
}
private def pushDownFilters(plan: LogicalPlan) = plan.transform {
// update the scan builder with filter push down and return a new plan with filter pushed
case Filter(condition, sHolder: ScanBuilderHolder) =>
val filters = splitConjunctivePredicates(condition)
val normalizedFilters =
DataSourceStrategy.normalizeExprs(filters, sHolder.relation.output)
val (normalizedFiltersWithSubquery, normalizedFiltersWithoutSubquery) =
normalizedFilters.partition(SubqueryExpression.hasSubquery)
// `pushedFilters` will be pushed down and evaluated in the underlying data sources.
// `postScanFilters` need to be evaluated after the scan.
// `postScanFilters` and `pushedFilters` can overlap, e.g. the parquet row group filter.
val (pushedFilters, postScanFiltersWithoutSubquery) = PushDownUtils.pushFilters(
sHolder.builder, normalizedFiltersWithoutSubquery)
val pushedFiltersStr = if (pushedFilters.isLeft) {
pushedFilters.left.get.mkString(", ")
} else {
pushedFilters.right.get.mkString(", ")
}
val postScanFilters = postScanFiltersWithoutSubquery ++ normalizedFiltersWithSubquery
logInfo(
s"""
|Pushing operators to ${sHolder.relation.name}
|Pushed Filters: $pushedFiltersStr
|Post-Scan Filters: ${postScanFilters.mkString(",")}
""".stripMargin)
val filterCondition = postScanFilters.reduceLeftOption(And)
filterCondition.map(Filter(_, sHolder)).getOrElse(sHolder)
}
def pushDownAggregates(plan: LogicalPlan): LogicalPlan = plan.transform {
// update the scan builder with agg pushdown and return a new plan with agg pushed
case aggNode @ Aggregate(groupingExpressions, resultExpressions, child) =>
child match {
case ScanOperation(project, filters, sHolder: ScanBuilderHolder)
if filters.isEmpty && project.forall(_.isInstanceOf[AttributeReference]) =>
sHolder.builder match {
case r: SupportsPushDownAggregates =>
val aggExprToOutputOrdinal = mutable.HashMap.empty[Expression, Int]
var ordinal = 0
val aggregates = resultExpressions.flatMap { expr =>
expr.collect {
// Do not push down duplicated aggregate expressions. For example,
// `SELECT max(a) + 1, max(a) + 2 FROM ...`, we should only push down one
// `max(a)` to the data source.
case agg: AggregateExpression
if !aggExprToOutputOrdinal.contains(agg.canonicalized) =>
aggExprToOutputOrdinal(agg.canonicalized) = ordinal
ordinal += 1
agg
}
}
val normalizedAggregates = DataSourceStrategy.normalizeExprs(
aggregates, sHolder.relation.output).asInstanceOf[Seq[AggregateExpression]]
val normalizedGroupingExpressions = DataSourceStrategy.normalizeExprs(
groupingExpressions, sHolder.relation.output)
val pushedAggregates = PushDownUtils.pushAggregates(
r, normalizedAggregates, normalizedGroupingExpressions)
if (pushedAggregates.isEmpty) {
aggNode // return original plan node
} else if (!supportPartialAggPushDown(pushedAggregates.get) &&
!r.supportCompletePushDown(pushedAggregates.get)) {
aggNode // return original plan node
} else {
// No need to do column pruning because only the aggregate columns are used as
// DataSourceV2ScanRelation output columns. All the other columns are not
// included in the output.
val scan = sHolder.builder.build()
// scalastyle:off
// use the group by columns and aggregate columns as the output columns
// e.g. TABLE t (c1 INT, c2 INT, c3 INT)
// SELECT min(c1), max(c1) FROM t GROUP BY c2;
// Use c2, min(c1), max(c1) as output for DataSourceV2ScanRelation
// We want to have the following logical plan:
// == Optimized Logical Plan ==
// Aggregate [c2#10], [min(min(c1)#21) AS min(c1)#17, max(max(c1)#22) AS max(c1)#18]
// +- RelationV2[c2#10, min(c1)#21, max(c1)#22]
// scalastyle:on
val newOutput = scan.readSchema().toAttributes
assert(newOutput.length == groupingExpressions.length + aggregates.length)
val groupAttrs = normalizedGroupingExpressions.zip(newOutput).map {
case (a: Attribute, b: Attribute) => b.withExprId(a.exprId)
case (_, b) => b
}
val aggOutput = newOutput.drop(groupAttrs.length)
val output = groupAttrs ++ aggOutput
logInfo(
s"""
|Pushing operators to ${sHolder.relation.name}
|Pushed Aggregate Functions:
| ${pushedAggregates.get.aggregateExpressions.mkString(", ")}
|Pushed Group by:
| ${pushedAggregates.get.groupByColumns.mkString(", ")}
|Output: ${output.mkString(", ")}
""".stripMargin)
val wrappedScan = getWrappedScan(scan, sHolder, pushedAggregates)
val scanRelation = DataSourceV2ScanRelation(sHolder.relation, wrappedScan, output)
if (r.supportCompletePushDown(pushedAggregates.get)) {
val projectExpressions = resultExpressions.map { expr =>
// TODO At present, only push down group by attribute is supported.
// In future, more attribute conversion is extended here. e.g. GetStructField
expr.transform {
case agg: AggregateExpression =>
val ordinal = aggExprToOutputOrdinal(agg.canonicalized)
val child =
addCastIfNeeded(aggOutput(ordinal), agg.resultAttribute.dataType)
Alias(child, agg.resultAttribute.name)(agg.resultAttribute.exprId)
}
}.asInstanceOf[Seq[NamedExpression]]
Project(projectExpressions, scanRelation)
} else {
val plan = Aggregate(
output.take(groupingExpressions.length), resultExpressions, scanRelation)
// scalastyle:off
// Change the optimized logical plan to reflect the pushed down aggregate
// e.g. TABLE t (c1 INT, c2 INT, c3 INT)
// SELECT min(c1), max(c1) FROM t GROUP BY c2;
// The original logical plan is
// Aggregate [c2#10],[min(c1#9) AS min(c1)#17, max(c1#9) AS max(c1)#18]
// +- RelationV2[c1#9, c2#10] ...
//
// After change the V2ScanRelation output to [c2#10, min(c1)#21, max(c1)#22]
// we have the following
// !Aggregate [c2#10], [min(c1#9) AS min(c1)#17, max(c1#9) AS max(c1)#18]
// +- RelationV2[c2#10, min(c1)#21, max(c1)#22] ...
//
// We want to change it to
// == Optimized Logical Plan ==
// Aggregate [c2#10], [min(min(c1)#21) AS min(c1)#17, max(max(c1)#22) AS max(c1)#18]
// +- RelationV2[c2#10, min(c1)#21, max(c1)#22] ...
// scalastyle:on
plan.transformExpressions {
case agg: AggregateExpression =>
val ordinal = aggExprToOutputOrdinal(agg.canonicalized)
val aggAttribute = aggOutput(ordinal)
val aggFunction: aggregate.AggregateFunction =
agg.aggregateFunction match {
case max: aggregate.Max =>
max.copy(child = addCastIfNeeded(aggAttribute, max.child.dataType))
case min: aggregate.Min =>
min.copy(child = addCastIfNeeded(aggAttribute, min.child.dataType))
case sum: aggregate.Sum =>
sum.copy(child = addCastIfNeeded(aggAttribute, sum.child.dataType))
case _: aggregate.Count =>
aggregate.Sum(addCastIfNeeded(aggAttribute, LongType))
case other => other
}
agg.copy(aggregateFunction = aggFunction)
}
}
}
case _ => aggNode
}
case _ => aggNode
}
}
private def supportPartialAggPushDown(agg: Aggregation): Boolean = {
// We don't know the agg buffer of `GeneralAggregateFunc`, so can't do partial agg push down.
agg.aggregateExpressions().forall(!_.isInstanceOf[GeneralAggregateFunc])
}
private def addCastIfNeeded(aggAttribute: AttributeReference, aggDataType: DataType) =
if (aggAttribute.dataType == aggDataType) {
aggAttribute
} else {
Cast(aggAttribute, aggDataType)
}
def applyColumnPruning(plan: LogicalPlan): LogicalPlan = plan.transform {
case ScanOperation(project, filters, sHolder: ScanBuilderHolder) =>
// column pruning
val normalizedProjects = DataSourceStrategy
.normalizeExprs(project, sHolder.output)
.asInstanceOf[Seq[NamedExpression]]
val (scan, output) = PushDownUtils.pruneColumns(
sHolder.builder, sHolder.relation, normalizedProjects, filters)
logInfo(
s"""
|Output: ${output.mkString(", ")}
""".stripMargin)
val wrappedScan = getWrappedScan(scan, sHolder, Option.empty[Aggregation])
val scanRelation = DataSourceV2ScanRelation(sHolder.relation, wrappedScan, output)
val projectionOverSchema = ProjectionOverSchema(output.toStructType)
val projectionFunc = (expr: Expression) => expr transformDown {
case projectionOverSchema(newExpr) => newExpr
}
val filterCondition = filters.reduceLeftOption(And)
val newFilterCondition = filterCondition.map(projectionFunc)
val withFilter = newFilterCondition.map(Filter(_, scanRelation)).getOrElse(scanRelation)
val withProjection = if (withFilter.output != project) {
val newProjects = normalizedProjects
.map(projectionFunc)
.asInstanceOf[Seq[NamedExpression]]
Project(restoreOriginalOutputNames(newProjects, project.map(_.name)), withFilter)
} else {
withFilter
}
withProjection
}
def pushDownSample(plan: LogicalPlan): LogicalPlan = plan.transform {
case sample: Sample => sample.child match {
case ScanOperation(_, filter, sHolder: ScanBuilderHolder) if filter.isEmpty =>
val tableSample = TableSampleInfo(
sample.lowerBound,
sample.upperBound,
sample.withReplacement,
sample.seed)
val pushed = PushDownUtils.pushTableSample(sHolder.builder, tableSample)
if (pushed) {
sHolder.pushedSample = Some(tableSample)
sample.child
} else {
sample
}
case _ => sample
}
}
private def pushDownLimit(plan: LogicalPlan, limit: Int): LogicalPlan = plan match {
case operation @ ScanOperation(_, filter, sHolder: ScanBuilderHolder) if filter.isEmpty =>
val limitPushed = PushDownUtils.pushLimit(sHolder.builder, limit)
if (limitPushed) {
sHolder.pushedLimit = Some(limit)
}
operation
case s @ Sort(order, _, operation @ ScanOperation(_, filter, sHolder: ScanBuilderHolder))
if filter.isEmpty =>
val orders = DataSourceStrategy.translateSortOrders(order)
if (orders.length == order.length) {
val topNPushed = PushDownUtils.pushTopN(sHolder.builder, orders.toArray, limit)
if (topNPushed) {
sHolder.pushedLimit = Some(limit)
sHolder.sortOrders = orders
operation
} else {
s
}
} else {
s
}
case p: Project =>
val newChild = pushDownLimit(p.child, limit)
p.withNewChildren(Seq(newChild))
case other => other
}
def applyLimit(plan: LogicalPlan): LogicalPlan = plan.transform {
case globalLimit @ Limit(IntegerLiteral(limitValue), child) =>
val newChild = pushDownLimit(child, limitValue)
val newLocalLimit = globalLimit.child.asInstanceOf[LocalLimit].withNewChildren(Seq(newChild))
globalLimit.withNewChildren(Seq(newLocalLimit))
}
private def getWrappedScan(
scan: Scan,
sHolder: ScanBuilderHolder,
aggregation: Option[Aggregation]): Scan = {
scan match {
case v1: V1Scan =>
val pushedFilters = sHolder.builder match {
case f: SupportsPushDownFilters =>
f.pushedFilters()
case _ => Array.empty[sources.Filter]
}
val pushedDownOperators = PushedDownOperators(aggregation,
sHolder.pushedSample, sHolder.pushedLimit, sHolder.sortOrders)
V1ScanWrapper(v1, pushedFilters, pushedDownOperators)
case _ => scan
}
}
}
case class ScanBuilderHolder(
output: Seq[AttributeReference],
relation: DataSourceV2Relation,
builder: ScanBuilder) extends LeafNode {
var pushedLimit: Option[Int] = None
var sortOrders: Seq[SortOrder] = Seq.empty[SortOrder]
var pushedSample: Option[TableSampleInfo] = None
}
// A wrapper for v1 scan to carry the translated filters and the handled ones, along with
// other pushed down operators. This is required by the physical v1 scan node.
case class V1ScanWrapper(
v1Scan: V1Scan,
handledFilters: Seq[sources.Filter],
pushedDownOperators: PushedDownOperators) extends Scan {
override def readSchema(): StructType = v1Scan.readSchema()
}
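// A minimal standalone sketch (hypothetical types, no Spark dependency) of the
// holder pattern used above: pushdown rules record what was pushed on a mutable
// ScanBuilderHolder-like node left in the plan, while the enclosing operator is
// kept so the planner can still apply it if the source only partially honors it.
object PushDownHolderSketch {
  final class Holder { var pushedLimit: Option[Int] = None }
  sealed trait Plan
  final case class Scan(holder: Holder) extends Plan
  final case class Limit(n: Int, child: Plan) extends Plan

  // Simplified analogue of limit pushdown: record the limit on the holder and
  // keep the Limit node, matching applyLimit's behaviour of retaining Limit.
  def pushLimit(plan: Plan): Plan = plan match {
    case l @ Limit(n, Scan(holder)) =>
      holder.pushedLimit = Some(n)
      l
    case other => other
  }

  def main(args: Array[String]): Unit = {
    val holder = new Holder
    pushLimit(Limit(10, Scan(holder)))
    assert(holder.pushedLimit.contains(10))
  }
}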
|
holdenk/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2ScanRelationPushDown.scala
|
Scala
|
apache-2.0
| 16,582
|
package lila.pool
import scala.concurrent.duration._
object PoolList {
import PoolConfig._
val all: List[PoolConfig] = List(
PoolConfig(1 ++ 0, Wave(13 seconds, 20 players)),
PoolConfig(2 ++ 1, Wave(18 seconds, 20 players)),
PoolConfig(3 ++ 0, Wave(15 seconds, 30 players)),
PoolConfig(3 ++ 2, Wave(22 seconds, 20 players)),
PoolConfig(5 ++ 0, Wave(10 seconds, 30 players)),
PoolConfig(5 ++ 3, Wave(25 seconds, 20 players)),
PoolConfig(10 ++ 0, Wave(20 seconds, 20 players)),
PoolConfig(15 ++ 15, Wave(60 seconds, 16 players))
)
val clockStringSet: Set[String] = all.map(_.clock.show).toSet
private implicit class PimpedInt(self: Int) {
def ++(increment: Int) = chess.Clock.Config(self * 60, increment)
def players = NbPlayers(self)
}
}
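// Usage sketch (standalone; ClockConfig stands in for chess.Clock.Config): the
// PimpedInt syntax above turns "minutes ++ incrementSeconds" into a clock config,
// so an entry like `3 ++ 2` reads as the familiar 3+2 blitz time control.
object ClockSyntaxSketch {
  final case class ClockConfig(limitSeconds: Int, incrementSeconds: Int)
  implicit class MinuteSyntax(private val minutes: Int) extends AnyVal {
    def ++(increment: Int): ClockConfig = ClockConfig(minutes * 60, increment)
  }
  def main(args: Array[String]): Unit =
    assert((3 ++ 2) == ClockConfig(180, 2)) // 3 minutes base, 2 seconds increment
}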
|
clarkerubber/lila
|
modules/pool/src/main/PoolList.scala
|
Scala
|
agpl-3.0
| 795
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spark.io
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import org.scalatest.FunSuite
class CompressionCodecSuite extends FunSuite {
def testCodec(codec: CompressionCodec) {
// Write 999 integers (1 until 1000) to the output stream, compressed.
val outputStream = new ByteArrayOutputStream()
val out = codec.compressedOutputStream(outputStream)
for (i <- 1 until 1000) {
out.write(i % 256)
}
out.close()
// Read the same integers back and verify them.
val inputStream = new ByteArrayInputStream(outputStream.toByteArray)
val in = codec.compressedInputStream(inputStream)
for (i <- 1 until 1000) {
assert(in.read() === i % 256)
}
in.close()
}
test("default compression codec") {
val codec = CompressionCodec.createCodec()
assert(codec.getClass === classOf[SnappyCompressionCodec])
testCodec(codec)
}
test("lzf compression codec") {
val codec = CompressionCodec.createCodec(classOf[LZFCompressionCodec].getName)
assert(codec.getClass === classOf[LZFCompressionCodec])
testCodec(codec)
}
test("snappy compression codec") {
val codec = CompressionCodec.createCodec(classOf[SnappyCompressionCodec].getName)
assert(codec.getClass === classOf[SnappyCompressionCodec])
testCodec(codec)
}
}
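// The same write-compress-read-verify roundtrip as testCodec above, standalone,
// using only the JDK's GZIP streams instead of the Spark codecs under test:
object GzipRoundtripSketch {
  import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
  import java.util.zip.{GZIPInputStream, GZIPOutputStream}
  def main(args: Array[String]): Unit = {
    val buffer = new ByteArrayOutputStream()
    val out = new GZIPOutputStream(buffer)
    for (i <- 1 until 1000) out.write(i % 256)
    out.close()
    val in = new GZIPInputStream(new ByteArrayInputStream(buffer.toByteArray))
    for (i <- 1 until 1000) assert(in.read() == i % 256)
    in.close()
  }
}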
|
bavardage/spark
|
core/src/test/scala/spark/io/CompressionCodecSuite.scala
|
Scala
|
apache-2.0
| 2,097
|
package com.awesomesauce.minecraft.forge.openautomation.common.tconstruct
import com.awesomesauce.minecraft.forge.core.lib.util.ItemUtil
import com.awesomesauce.minecraft.forge.openautomation.common.OAModule
import com.awesomesauce.minecraft.forge.openautomation.common.tconstruct.te.TileEntityMelter
import cpw.mods.fml.common.registry.GameRegistry
import net.minecraft.block.Block
import net.minecraft.block.material.Material
import net.minecraft.item.ItemStack
import net.minecraftforge.oredict.ShapedOreRecipe
object OpenAutomationTConstruct extends OAModule {
val name = "TConstruct"
var melter: Block = null
var melterCost = 0
var melterCostMultiplier = 0
var melterMultiplier = 0
def preInit() = {
melterCost = oa.config.get("tconstruct", "melterCost", 60).getInt
melterCostMultiplier = oa.config.get("tconstruct", "melterDivider", 10).getInt
melterMultiplier = oa.config.get("tconstruct", "melterSpeed", 2).getInt
}
def init() = {
melter = ItemUtil.makeBlock(oa, "tconstruct.melter", Material.rock, () => new TileEntityMelter)
val materials = GameRegistry.findItem("TConstruct", "materials")
val smeltery = GameRegistry.findBlock("TConstruct", "Smeltery")
ItemUtil.addRecipe(oa, new ShapedOreRecipe(new ItemStack(melter), "xyx", "awa", "xzx",
Character.valueOf('x'), new ItemStack(materials, 1, 2), Character.valueOf('y'), "ingotAwesomeite",
Character.valueOf('z'), "awesomeCore", Character.valueOf('w'), new ItemStack(smeltery, 1, 0),
Character.valueOf('a'), new ItemStack(smeltery, 1, 1)))
}
def postInit() = {
}
}
|
AwesomeSauceMods/OpenAutomation
|
main/scala/com/awesomesauce/minecraft/forge/openautomation/common/tconstruct/OpenAutomationTConstruct.scala
|
Scala
|
mit
| 1,599
|
/**
 * this script is entered with sc already defined
*/
import org.apache.spark.hbase.examples.graph.DemoGraphApp
import scala.math._
import org.apache.spark.hbase._
import org.apache.spark.hbase.keyspace._
import org.apache.spark.hbase.examples.graph._
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.storage.StorageLevel._
import org.apache.hadoop.hbase.HConstants._
/**
* Initialise DEMO within the current spark shell context and import all public members into the shell's global scope
*/
val app = new DemoGraphApp(sc)
import app._
help
|
michal-harish/spark-on-hbase
|
scripts/demo-graph-init.scala
|
Scala
|
apache-2.0
| 569
|
package reactivemongo.play.json.compat
import play.api.libs.json.{ JsFalse => F, JsTrue => T }
import reactivemongo.api.bson.BSONBoolean
private[compat] trait ExtendedJsonCompat {
implicit final val toFalse: F.type => BSONBoolean = {
val stable = BSONBoolean(false)
_ => stable
}
implicit final val toTrue: T.type => BSONBoolean = {
val stable = BSONBoolean(true)
_ => stable
}
}
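// Design note, as a standalone sketch (Wrapped is a hypothetical stand-in for
// BSONBoolean): each conversion above closes over one pre-allocated value, so
// converting the JsTrue/JsFalse singletons never allocates a new instance.
object StableConversionSketch {
  final case class Wrapped(value: Boolean)
  val toWrappedTrue: Any => Wrapped = {
    val stable = Wrapped(true) // allocated once, shared by every call
    _ => stable
  }
  def main(args: Array[String]): Unit =
    assert(toWrappedTrue(()) eq toWrappedTrue(())) // same instance each time
}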
|
ReactiveMongo/Reactivemongo-Play-Json
|
compat/src/main/play-2.6+/ExtendedJsonCompat.scala
|
Scala
|
apache-2.0
| 408
|
package pl.newicom.dddd.view.sql
import com.typesafe.config.{ConfigFactory, Config}
import org.scalactic.Equality
import org.scalatest._
import scala.slick.jdbc.JdbcBackend
class ViewMetadataDaoSpec extends WordSpecLike with Matchers with SqlViewStoreTestSupport {
def config: Config = ConfigFactory.load()
implicit val _ = new Equality[ViewMetadataRecord] {
def areEqual(a: ViewMetadataRecord, b: Any): Boolean =
b match {
case b_rec: ViewMetadataRecord => a.copy(id = Some(-1)) == b_rec.copy(id = Some(-1))
case _ => false
}
}
val dao = new ViewMetadataDao()
import dao.profile.simple._
"ViewMetadataDao" should {
"insert new entry if view does not exist" in {
// When
viewStore withSession { implicit s: Session =>
dao.insertOrUpdate("test view", 0)
}
// Then
viewStore withSession { implicit s: Session =>
dao.byViewId("test view") should not be 'empty
}
}
}
"ViewMetadataDao" should {
"insert & update entry" in {
// When
viewStore withSession { implicit s: Session =>
dao.insertOrUpdate("test view", 0)
dao.insertOrUpdate("test view", 1)
}
// Then
viewStore withSession { implicit s: Session =>
dao.byViewId("test view").get should equal (ViewMetadataRecord(Some(1), "test view", 1))
}
}
}
override def dropSchema(implicit s: JdbcBackend.Session): Unit =
dao.dropSchema
override def createSchema(implicit s: JdbcBackend.Session): Unit =
dao.createSchema
}
|
ahjohannessen/akka-ddd
|
view-update-sql/src/test/scala/pl/newicom/dddd/view/sql/ViewMetadataDaoSpec.scala
|
Scala
|
mit
| 1,567
|
package dotty.communitybuild
import java.nio.file._
import java.io.{PrintWriter, File}
import java.nio.charset.StandardCharsets.UTF_8
import org.junit.{Ignore, Test}
import org.junit.Assert.{assertEquals, fail}
import org.junit.experimental.categories.Category
import CommunityBuildRunner.run
class TestCategory
given testRunner: CommunityBuildRunner with
override def failWith(msg: String) = { fail(msg); ??? }
@Category(Array(classOf[TestCategory]))
class CommunityBuildTestA:
@Test def izumiReflect = projects.izumiReflect.run()
@Test def scalaSTM = projects.scalaSTM.run()
@Test def scalatest = projects.scalatest.run()
@Test def scalatestplusTestNG = projects.scalatestplusTestNG.run()
// 'Sciss/Lucre' dependencies:
// @Test def scissEqual = projects.scissEqual .run()
// @Test def scissFingerTree = projects.scissFingerTree.run()
// @Test def scissLog = projects.scissLog .run()
// @Test def scissModel = projects.scissModel .run()
// @Test def scissNumbers = projects.scissNumbers .run()
// @Test def scissSerial = projects.scissSerial .run()
// @Test def scissAsyncFile = projects.scissAsyncFile .run()
// @Test def scissSpan = projects.scissSpan .run()
@Test def scissLucre = projects.scissLucre.run()
@Test def zio = projects.zio.run()
end CommunityBuildTestA
@Category(Array(classOf[TestCategory]))
class CommunityBuildTestB:
@Test def cats = projects.cats.run()
@Test def catsEffect3 = projects.catsEffect3.run()
@Test def catsMtl = projects.catsMtl.run()
@Test def coop = projects.coop.run()
@Test def discipline = projects.discipline.run()
@Test def disciplineMunit = projects.disciplineMunit.run()
@Test def disciplineSpecs2 = projects.disciplineSpecs2.run()
@Test def fs2 = projects.fs2.run()
@Test def monocle = projects.monocle.run()
@Test def munit = projects.munit.run()
@Test def munitCatsEffect = projects.munitCatsEffect.run()
@Test def perspective = projects.perspective.run()
@Test def scalacheckEffect = projects.scalacheckEffect.run()
@Test def scodec = projects.scodec.run()
@Test def scodecBits = projects.scodecBits.run()
@Test def simulacrumScalafixAnnotations = projects.simulacrumScalafixAnnotations.run()
@Test def spire = projects.spire.run()
@Test def http4s = projects.http4s.run()
end CommunityBuildTestB
@Category(Array(classOf[TestCategory]))
class CommunityBuildTestC:
@Test def akka = projects.akka.run()
@Test def betterfiles = projects.betterfiles.run()
@Test def cask = projects.cask.run()
// Temporarily disabled until the problem discussed in the comments to #9449 is fixed
// @Test def dottyCpsAsync = projects.dottyCpsAsync.run()
@Test def effpi = projects.effpi.run()
@Test def endpoints4s = projects.endpoints4s.run()
@Test def fansi = projects.fansi.run()
@Test def fastparse = projects.fastparse.run()
@Test def geny = projects.geny.run()
@Test def intent = projects.intent.run()
@Test def jacksonModuleScala = projects.jacksonModuleScala.run()
@Test def libretto = projects.libretto.run()
@Test def minitest = projects.minitest.run()
@Test def onnxScala = projects.onnxScala.run()
@Test def oslib = projects.oslib.run()
// @Test def oslibWatch = projects.oslibWatch.run()
@Test def playJson = projects.playJson.run()
@Test def pprint = projects.pprint.run()
@Test def protoquill = projects.protoquill.run()
@Test def requests = projects.requests.run()
@Test def scalacheck = projects.scalacheck.run()
@Test def scalaCollectionCompat = projects.scalaCollectionCompat.run()
@Test def scalaJava8Compat = projects.scalaJava8Compat.run()
@Test def scalap = projects.scalap.run()
@Test def scalaParallelCollections = projects.scalaParallelCollections.run()
@Test def scalaParserCombinators = projects.scalaParserCombinators.run()
@Test def scalaPB = projects.scalaPB.run()
@Test def scalatestplusScalacheck = projects.scalatestplusScalacheck.run()
@Test def scalaXml = projects.scalaXml.run()
@Test def scalaz = projects.scalaz.run()
@Test def scas = projects.scas.run()
@Test def sconfig = projects.sconfig.run()
@Test def shapeless = projects.shapeless.run()
@Test def sourcecode = projects.sourcecode.run()
@Test def specs2 = projects.specs2.run()
@Test def stdLib213 = projects.stdLib213.run()
@Test def ujson = projects.ujson.run()
@Test def upickle = projects.upickle.run()
@Test def utest = projects.utest.run()
@Test def verify = projects.verify.run()
@Test def xmlInterpolator = projects.xmlInterpolator.run()
end CommunityBuildTestC
@Category(Array(classOf[TestCategory]))
class CommunityBuildTestForwardCompat:
@Test def catsEffect3ForwardCompat = projects.catsEffect3ForwardCompat.run()
@Test def catsForwardCompat = projects.catsForwardCompat.run()
@Test def catsMtlForwardCompat = projects.catsMtlForwardCompat.run()
@Test def coopForwardCompat = projects.coopForwardCompat.run()
@Test def disciplineForwardCompat = projects.disciplineForwardCompat.run()
@Test def disciplineMunitForwardCompat = projects.disciplineMunitForwardCompat.run()
@Test def disciplineSpecs2ForwardCompat = projects.disciplineSpecs2ForwardCompat.run()
@Test def munitForwardCompat = projects.munitForwardCompat.run()
@Test def scalacheckForwardCompat = projects.scalacheckForwardCompat.run()
@Test def simulacrumScalafixAnnotationsForwardCompat = projects.simulacrumScalafixAnnotationsForwardCompat.run()
end CommunityBuildTestForwardCompat
|
lampepfl/dotty
|
community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala
|
Scala
|
apache-2.0
| 5,507
|
package org.broadinstitute.dsde.vault.services.analysis
import com.wordnik.swagger.annotations._
import org.broadinstitute.dsde.vault.DmClientService
import org.broadinstitute.dsde.vault.common.directives.VersioningDirectives._
import org.broadinstitute.dsde.vault.model.AnalysisJsonProtocol._
import org.broadinstitute.dsde.vault.model._
import org.broadinstitute.dsde.vault.services.VaultDirectives
import spray.httpx.SprayJsonSupport._
import spray.routing._
@Api(value = "/analyses", description = "Analysis Service", produces = "application/json")
trait AnalysisIngestService extends HttpService with VaultDirectives {
private final val ApiPrefix = "analyses"
private final val ApiVersions = "v1"
val aiRoute = analysisIngestRoute
@ApiOperation(
value = "Creates Analysis objects",
nickname = "analysis_ingest",
httpMethod = "POST",
produces = "application/json",
consumes = "application/json",
response = classOf[AnalysisIngestResponse],
notes = """Accepts a json packet as POST. Creates a Vault object with the supplied metadata and creates relationships for each input id.
Returns the Vault ID of the created object. The values of the 'input' array must be valid Vault IDs for the ubams used as input to this analysis.
If an invalid id is specified inside the input array, this API will fail with a 404 response code.""")
@ApiImplicitParams(Array(
new ApiImplicitParam(name = "version", required = true, dataType = "string", paramType = "path", value = "API version", allowableValues = ApiVersions),
new ApiImplicitParam(name = "body", required = true, dataType = "org.broadinstitute.dsde.vault.model.AnalysisIngest", paramType = "body", value = "Analysis to create")
))
@ApiResponses(Array(
new ApiResponse(code = 200, message = "Successful"),
new ApiResponse(code = 400, message = "Malformed Input"),
new ApiResponse(code = 404, message = "Not Found: if the 'input' array includes an invalid id"),
new ApiResponse(code = 500, message = "Vault Internal Error")
))
def analysisIngestRoute =
pathVersion(ApiPrefix, 1) { version =>
post {
respondWithJSON {
entity(as[AnalysisIngest]) { ingest => requestContext =>
val dmService = actorRefFactory.actorOf(DmClientService.props(requestContext))
val ingestActor = actorRefFactory.actorOf(IngestServiceHandler.props(requestContext, version, dmService))
ingestActor ! IngestServiceHandler.IngestMessage(ingest)
}
}
}
}
}
|
broadinstitute/vault-api
|
src/main/scala/org/broadinstitute/dsde/vault/services/analysis/AnalysisIngestService.scala
|
Scala
|
bsd-3-clause
| 2,554
|
package io.eels.component.jdbc
import java.sql.{ResultSet, ResultSetMetaData}
import com.sksamuel.exts.Logging
import io.eels.component.jdbc.dialect.JdbcDialect
import io.eels.schema.{Field, StructType}
/**
* Generates an eel schema from the metadata in a resultset.
*/
object JdbcSchemaFns extends Logging {
def fromJdbcResultset(rs: ResultSet, dialect: JdbcDialect): StructType = {
val md = rs.getMetaData
val columnCount = md.getColumnCount
logger.trace(s"Resultset column count is $columnCount")
val cols = (1 to columnCount).map { k =>
Field(
name = md.getColumnLabel(k),
dataType = dialect.fromJdbcType(k, md),
nullable = md.isNullable(k) == ResultSetMetaData.columnNullable
)
}
StructType(cols.toList)
}
}
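// Standalone sketch (JDK only) of the nullability mapping above: JDBC exposes
// nullability as an Int constant, and only columnNullable yields a nullable
// field; columnNullableUnknown is conservatively treated as not nullable.
object NullabilityMappingSketch {
  import java.sql.ResultSetMetaData
  def isNullable(jdbcNullability: Int): Boolean =
    jdbcNullability == ResultSetMetaData.columnNullable
  def main(args: Array[String]): Unit = {
    assert(isNullable(ResultSetMetaData.columnNullable))
    assert(!isNullable(ResultSetMetaData.columnNoNulls))
    assert(!isNullable(ResultSetMetaData.columnNullableUnknown))
  }
}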
|
sksamuel/hadoop-streams
|
eel-core/src/main/scala/io/eels/component/jdbc/JdbcSchemaFns.scala
|
Scala
|
apache-2.0
| 792
|
package scalax.collection
package mutable
import org.scalatest.Suite
import org.scalatest.matchers.ShouldMatchers
import GraphPredef._, GraphEdge._
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
/** Tests [[ExtHashSet]]. */
@RunWith(classOf[JUnitRunner])
class TExtHashSetTest
extends Suite
with ShouldMatchers
{
import Data._
val set = ExtHashSet(outerElemsOfDi_1: _*)
val outerEdge: DiEdge[Int] = outerElemsOfDi_1.head
val graph = Graph(outerElemsOfDi_1: _*)
val innerEdge = graph get outerEdge
def test_findEntry {
/* `inner.edge == outer` returns the expected result because Graph#InnerEdge.equals
 * is aware of the inner edge structure. The opposite comparison will be false since,
 * as a rule, outer object types are not Graph-aware in their equals implementations.
*/
def eq(outer: DiEdge[Int], inner: graph.EdgeT) = inner.edge == outer
set.findEntry(innerEdge, eq) should be (outerEdge)
}
def test_drawElement {
val randomElems = collection.mutable.Set.empty[DiEdge[Int]]
val r = new util.Random
for (i <- 1 to (set.size * 16))
randomElems += set draw r
randomElems should have size (set.size)
}
def test_hashCodeIterator {
set.hashCodeIterator(-228876066).toList should have size (0)
outerEdge.hashCode should be (innerEdge.hashCode)
val elems = set.hashCodeIterator(outerEdge.hashCode).toList
elems should have size (1)
elems.head should be (outerEdge)
}
def test_multiHashCodeIterator {
case class C(i: Int, j: Int) {
override def hashCode = i.##
}
val multi = ExtHashSet(C(1,0), C(1,1), C(2,2), C(1,3), C(2,0))
for (i <- 0 to 2) {
val elems = multi.hashCodeIterator(i.##).toList
elems should have size (multi count (_.i == i))
}
}
}
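// Standalone sketch of the bucket behaviour test_multiHashCodeIterator exercises:
// C's hashCode collapses to i.##, so grouping a plain Set by hashCode must yield
// exactly the elements sharing that i (no ExtHashSet required for the idea).
object HashBucketSketch {
  final case class C(i: Int, j: Int) { override def hashCode: Int = i.## }
  def main(args: Array[String]): Unit = {
    val multi = Set(C(1, 0), C(1, 1), C(2, 2), C(1, 3), C(2, 0))
    val byHash = multi.groupBy(_.hashCode)
    for (i <- 0 to 2)
      assert(byHash.getOrElse(i.##, Set.empty[C]).size == multi.count(_.i == i))
  }
}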
|
Calavoow/scala-graph
|
core/src/test/scala/scalax/collection/mutable/TExtHashSet.scala
|
Scala
|
bsd-3-clause
| 1,860
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.physical.batch
import org.apache.flink.annotation.Experimental
import org.apache.flink.configuration.ConfigOption
import org.apache.flink.configuration.ConfigOptions.key
import org.apache.flink.table.planner.calcite.FlinkContext
import org.apache.flink.table.planner.plan.`trait`.FlinkRelDistribution
import org.apache.flink.table.planner.plan.nodes.FlinkConventions
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalSort
import org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecSort
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import java.lang.{Boolean => JBoolean}
/**
 * Rule that matches a [[FlinkLogicalSort]] whose sort fields are non-empty and whose `fetch` and
 * `offset` are both null, and converts it to [[BatchExecSort]].
*/
class BatchExecSortRule extends ConverterRule(
classOf[FlinkLogicalSort],
FlinkConventions.LOGICAL,
FlinkConventions.BATCH_PHYSICAL,
"BatchExecSortRule") {
override def matches(call: RelOptRuleCall): Boolean = {
val sort: FlinkLogicalSort = call.rel(0)
// only matches Sort without fetch and offset
!sort.getCollation.getFieldCollations.isEmpty && sort.fetch == null && sort.offset == null
}
override def convert(rel: RelNode): RelNode = {
val sort: FlinkLogicalSort = rel.asInstanceOf[FlinkLogicalSort]
val input = sort.getInput
val config = sort.getCluster.getPlanner.getContext.unwrap(classOf[FlinkContext]).getTableConfig
val enableRangeSort = config.getConfiguration.getBoolean(
BatchExecSortRule.TABLE_EXEC_SORT_RANGE_ENABLED)
val distribution = if (enableRangeSort) {
FlinkRelDistribution.range(sort.getCollation.getFieldCollations)
} else {
FlinkRelDistribution.SINGLETON
}
val requiredTraitSet = input.getTraitSet
.replace(distribution)
.replace(FlinkConventions.BATCH_PHYSICAL)
val providedTraitSet = sort.getTraitSet
.replace(distribution)
.replace(FlinkConventions.BATCH_PHYSICAL)
val newInput = RelOptRule.convert(input, requiredTraitSet)
new BatchExecSort(
sort.getCluster,
providedTraitSet,
newInput,
sort.getCollation)
}
}
object BatchExecSortRule {
val INSTANCE: RelOptRule = new BatchExecSortRule
// This is an experimental config and may be removed later.
@Experimental
val TABLE_EXEC_SORT_RANGE_ENABLED: ConfigOption[JBoolean] =
key("table.exec.range-sort.enabled")
.defaultValue(JBoolean.valueOf(false))
.withDescription("Sets whether to enable range sort, use range sort to sort all data in" +
" several partitions. When it is false, sorting in only one partition")
}
|
hequn8128/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/rules/physical/batch/BatchExecSortRule.scala
|
Scala
|
apache-2.0
| 3,599
|
package ajr.rundeck.telegram
import java.net.InetSocketAddress
import java.util.{Map => JMap}
import com.dtolabs.rundeck.core.plugins.Plugin
import com.dtolabs.rundeck.plugins.descriptions.PluginDescription
import com.dtolabs.rundeck.plugins.descriptions.PluginProperty
import com.dtolabs.rundeck.plugins.descriptions.TextArea
import com.dtolabs.rundeck.plugins.notification.NotificationPlugin
import scalaj.http._
import com.dtolabs.rundeck.core.plugins.configuration.PropertyScope
import freemarker.cache.StringTemplateLoader
import freemarker.template.Configuration
import TelegramNotificationPlugin._
import java.io.File
import java.io.StringWriter
import java.io.FileNotFoundException
import java.text.SimpleDateFormat
import scala.collection.JavaConverters._
import com.vdurmont.emoji.EmojiParser
object TelegramNotificationPlugin {
val fmConfig = new Configuration
fmConfig.setDefaultEncoding("UTF-8")
}
@Plugin(service = "Notification", name = "TelegramNotification")
@PluginDescription(title = "Telegram")
class TelegramNotificationPlugin extends NotificationPlugin {
@PluginProperty(title = "Bot name/token",
description = "Bot name or auth token. Names must be defined in telegram.properties. If blank inherits the project value if it exists",
required = false, scope = PropertyScope.InstanceOnly)
private var botAuthToken: String = _
@PluginProperty(title = "Project default Bot name/token",
description = "Bot name or auth token. Names must be defined in telegram.properties",
required = false, scope = PropertyScope.Project)
private var projectBotAuthToken: String = _
@PluginProperty(title = "Chat name/ID",
description = "Name or ID of chat to send message to. Names must be defined in telegram.properties",
required = false, scope = PropertyScope.InstanceOnly)
private var chatId: String = _
@PluginProperty(title = "Telegram config file",
description = "Location of the telegram.properties file for bot/chat name mapping",
required = false, defaultValue = "/etc/rundeck/telegram.properties", scope = PropertyScope.Project)
private var telegramProperties: String = _
@PluginProperty(title = "Include job log", scope = PropertyScope.InstanceOnly)
private var includeJobLog: Boolean = false
@PluginProperty(title = "Enable emoji parsing (emoji-java)",
description = "See https://github.com/vdurmont/emoji-java#available-emojis for details", scope = PropertyScope.InstanceOnly)
private var emojiParsing: Boolean = false
@PluginProperty(title = "Template text", description = "Message template. Susbtitution possible eg ${job.name}",
required = false, scope = PropertyScope.InstanceOnly)
@TextArea
private var templateMessage: String = _
@PluginProperty(title = "Message Template",
description = "Name of a FreeMarker template used to generate the notification message. " +
"If unspecified a default message template will be used if it exists.",
required = false, scope = PropertyScope.InstanceOnly)
private var templateName: String = _
@PluginProperty(title = "Project Message Template",
description = "Name of a FreeMarker template. This will be the default if none is specified at the project level",
required = false, scope = PropertyScope.Project)
private var templateNameProject: String = _
@PluginProperty(title = "Template directory",
description = "Location to load Freemarker templates from",
required = false, defaultValue = "/var/lib/rundeck/templates",
scope = PropertyScope.Project)
private var templateDir: String = _
@PluginProperty(title = "Telegram API base URL",
description = "Base URL of Telegram API",
required = false, defaultValue = "https://api.telegram.org",
scope = PropertyScope.Project)
private var telegramApiBaseUrl: String = _
@PluginProperty(title = "Rundeck API key",
description = "Rundeck API key so the plugin can get request job information. Required for job logs",
required = false, scope = PropertyScope.Project)
private var rundeckApiKey: String = _
@PluginProperty(title = "Proxy Host",
description = "Proxy host for telegram API",
required = false, defaultValue = "", scope = PropertyScope.Project)
private var proxyHost: String = _
@PluginProperty(title = "Proxy Port",
description = "Proxy port for telegram API",
required = false, defaultValue = "", scope = PropertyScope.Project)
private var proxyPort: String = _
private val isoFormatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")
def get[T](name: String, map: JMap[_,_], default: => T = null): T = {
val value = Option(map.get(name).asInstanceOf[T]).getOrElse(default)
if (value == null)
missing(name)
value
}
class InvalidConfigException(message: String) extends Exception(message)
def missing(message: String) = throw new InvalidConfigException(s"Missing configuration: $message")
def ifEmpty(str: String, default: => String) = if (str == null || str == "") default else str
override def postNotification(trigger: String, executionData: JMap[_,_], config: JMap[_,_]): Boolean = {
println(s"TelegramNotification: $trigger\\n\\nExecutionData: $executionData\\n\\nConfig: $config")
val tids = new TelegramIds(telegramProperties)
val proxy = if (proxyHost != null && proxyHost != "" ) {
println(s"Using proxy: $proxyHost:$proxyPort")
val addr = new InetSocketAddress(proxyHost, Option(proxyPort).getOrElse("80").toInt)
Some(new java.net.Proxy(java.net.Proxy.Type.HTTP, addr))
}
else None
val myHttp = new BaseHttp(proxy)
val httpNoProxy = new BaseHttp()
try {
val telegramAPi = get[String]("telegramApiBaseUrl", config)
val botAuthO = tids.lookupBot(ifEmpty(get[String]("botAuthToken", config, ""),
get[String]("projectBotAuthToken", config, missing("botAuthToken or projectBotAuthToken"))))
val chatO = tids.lookupChat(get[String]("chatId", config))
val templateMessage = get[String]("templateMessage", config, "")
val templatePath = get[String]("templatePath", config, "")
val templateProject = get[String]("templateProject", config, "")
val message = buildMessage(executionData)
(botAuthO, chatO) match {
  case (Some(botAuth), Some(chat)) =>
    val telegram = new TelegramMessenger(botAuth, telegramAPi, myHttp)
    val (code, response) = telegram.sendMessage(chat, message)
    if (resultOk(code)) {
      println("Telegram message sent")
      if (get[String]("includeJobLog", config, "false").toBoolean) {
        getJobLog(chat, executionData, config, httpNoProxy) match {
          case Some((log, fileName)) =>
            val (docCode, _) = telegram.sendDocument(chat, log.getBytes, fileName)
            resultOk(docCode)
          case _ =>
            false
        }
      } else true
    } else {
      System.err.println(s"Send failed: $code, $response")
      false
    }
  case _ =>
    System.err.println("Missing auth token or chat ID")
    false
}
}
catch {
case e @ (_: InvalidConfigException | _: NumberFormatException | _: FileNotFoundException) =>
System.err.println(s"Failed to send Telegram message - check config: $e")
e.printStackTrace()
false
case e: Throwable =>
System.err.println(s"Failed to send Telegram message: $e")
e.printStackTrace()
false
}
}
private def resultOk(code: Int): Boolean = (code >= 200 && code < 300)
private def getJobLog(chat: Long, executionData: JMap[_,_], config: JMap[_,_], http: BaseHttp): Option[(String, String)] = {
try {
val rundeckKey = get[String]("rundeckApiKey", config)
val context = get[JMap[_,_]]("context", executionData)
val job = get[JMap[_,_]]("job", context)
val serverUrl = get[String]("serverUrl", job)
val execId = get[String]("execid", job).toInt
val name = get[String]("name", job)
val fileName = s"${name}_$execId.txt"
getRundeckLog(execId, rundeckKey, serverUrl, http).map((_, fileName))
}
catch {
case e: Throwable =>
System.err.println(s"Failed to get execution log: $e")
None
}
}
private def getRundeckLog(execId: Int, authToken: String, baseUrl: String, http: BaseHttp) = {
val url = s"${baseUrl.trim}api/6/execution/$execId/output"
val request = http(url).params(Map("authtoken" -> authToken, "format" -> "text")).asString
if (resultOk(request.code))
Some(request.body)
else
None
}
def buildMessage(executionData: JMap[_,_]): String = {
println(s"templateDir: $templateDir")
if (true) { // debug: always dump the execution data
println("ExecutionData")
for((key,value) <- executionData.asScala) {
println(f" $key%-15s: $value")
}
}
val templateDirFile = new File(templateDir)
if (!templateDirFile.exists())
templateDirFile.mkdir()
if (templateDirFile.exists())
fmConfig.setDirectoryForTemplateLoading(templateDirFile)
for(dateType <- Seq("dateStarted", "dateEnded")) {
Option(executionData.get(dateType)).foreach{ date =>
val dateStr = isoFormatter.format(date)
executionData.asInstanceOf[JMap[String,String]].put(s"${dateType}IsoString", dateStr)
}
}
val template =
if (ifEmpty(templateMessage, "") != "") {
val stringLoader = new StringTemplateLoader()
stringLoader.putTemplate("message", templateMessage)
fmConfig.setTemplateLoader(stringLoader)
fmConfig.getTemplate("message")
}
else if (ifEmpty(templateName, "") != "") {
fmConfig.getTemplate(templateName)
}
else if (ifEmpty(templateNameProject, "") != "") {
fmConfig.getTemplate(templateNameProject)
}
else {
throw new InvalidConfigException("None of templateMessage, templateName, templateNameProject set")
}
val out = new StringWriter()
template.process(executionData, out)
val message = out.toString
if (emojiParsing)
EmojiParser.parseToUnicode(message)
else
message
}
}
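// Rendering sketch, standalone apart from the FreeMarker dependency already used
// above: buildMessage loads an in-memory template through StringTemplateLoader and
// processes it against the execution-data map; the template text here is hypothetical.
object TemplateRenderSketch {
  import java.io.StringWriter
  import java.util.{HashMap => JHashMap}
  import freemarker.cache.StringTemplateLoader
  import freemarker.template.Configuration
  def main(args: Array[String]): Unit = {
    val cfg = new Configuration
    val loader = new StringTemplateLoader
    loader.putTemplate("message", "Job ${name} finished")
    cfg.setTemplateLoader(loader)
    val model = new JHashMap[String, String]()
    model.put("name", "nightly-backup")
    val out = new StringWriter()
    cfg.getTemplate("message").process(model, out)
    assert(out.toString == "Job nightly-backup finished")
  }
}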
|
ajrnz/rundeck-telegram-plugin
|
plugin/src/ajr/rundeck/telegram/TelegramNotificationPlugin.scala
|
Scala
|
apache-2.0
| 10,799
|
object Test extends App {
def foo1(x: AnyRef) = x match { case x: Function0[_] => x() }
def foo2(x: AnyRef) = x match { case x: Function0[Any] => x() }
}
|
yusuke2255/dotty
|
tests/pos/t2168.scala
|
Scala
|
bsd-3-clause
| 158
|
package org.bowlerframework.extractors
import util.matching.Regex
import org.bowlerframework.{HttpMethod, Request}
class UriAndMethodMatches[T](item: T, method: HttpMethod, uri: Regex) extends UriMatches[T](item, uri) {
override def unapply(request: Request): Option[T] =
  if (request.getMethod == method) super.unapply(request)
  else None
}
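// Usage sketch (standalone; FakeRequest and the handler string are hypothetical):
// extractor classes with a custom unapply, like the one above, let routing code
// pattern-match requests directly.
object ExtractorUsageSketch {
  final case class FakeRequest(method: String, uri: String)
  class MethodMatches[T](item: T, method: String) {
    def unapply(request: FakeRequest): Option[T] =
      if (request.method == method) Some(item) else None
  }
  def main(args: Array[String]): Unit = {
    val GetUsers = new MethodMatches("usersHandler", "GET")
    val result = FakeRequest("GET", "/users") match {
      case GetUsers(handler) => s"dispatching to $handler"
      case _                 => "no route"
    }
    assert(result == "dispatching to usersHandler")
  }
}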
|
rkpandey/Bowler
|
core/src/main/scala/org/bowlerframework/extractors/UriAndMethodMatches.scala
|
Scala
|
bsd-3-clause
| 378
|
package kata.prop
import org.scalatest.prop._
import org.scalatest.{Matchers, PropSpec}
import kata.ConvertArabicToRoman._
import org.scalacheck.Gen
class ArabicToRomanPropCheckSpec
extends PropSpec
with GeneratorDrivenPropertyChecks
with Matchers {
//Generators
val allInts = for (n <- Gen.choose(0, 1000)) yield n
val endWith9 = allInts filter (_ % 10 == 9)
val endsWith4 = allInts filter (_ % 10 == 4)
override implicit val generatorDrivenConfig = PropertyCheckConfig(
minSuccessful = 1000,
maxDiscarded = 5000,
minSize = 10,
workers = Runtime.getRuntime.availableProcessors()
)
property("There can't be 3 consecutive I, X, C or M repeated") {
forAll(allInts) { x =>
convertToRoman(x) shouldNot include("XXXX")
convertToRoman(x) shouldNot include("IIII")
convertToRoman(x) shouldNot include("CCCC")
convertToRoman(x) shouldNot include("MMMM")
}
}
property("V, L, and D cannot be repeated, and there is no need to do so.") {
forAll(allInts) { x =>
convertToRoman(x) shouldNot include("VV")
convertToRoman(x) shouldNot include("LL")
convertToRoman(x) shouldNot include("DD")
}
}
property("Should end with IX, if the number ends with 9") {
forAll(endWith9, minSuccessful(10), maxDiscarded(100)) { x =>
convertToRoman(x) should endWith("IX")
}
}
property("Should end with IV, if the number ends with 4") {
forAll(endsWith4, minSuccessful(10), maxDiscarded(100)) { x =>
convertToRoman(x) should endWith("IV")
}
}
}
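// Why maxDiscarded is raised above: endWith9 keeps only values ending in 9, i.e.
// roughly 1 in 10 random draws, so the filtered generator discards ~90% of
// candidates. A standalone estimate of that rate (no ScalaCheck needed):
object DiscardRateSketch {
  def main(args: Array[String]): Unit = {
    val r = new scala.util.Random(42)
    val draws = Vector.fill(10000)(r.nextInt(1001)) // mirrors Gen.choose(0, 1000)
    val kept = draws.count(_ % 10 == 9)
    assert(kept > 700 && kept < 1300) // close to the expected ~10%
  }
}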
|
boseabhishek/scala_recursion
|
src/test/scala/kata/prop/ArabicToRomanPropCheckSpec.scala
|
Scala
|
mit
| 1,563
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.words
import org.scalatest.matchers._
import org.scalactic.Prettifier
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="../Matchers.html"><code>Matchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfAnWordToBePropertyMatcherApplication[T](val bePropertyMatcher: BePropertyMatcher[T]) {
override def toString: String = "an (" + Prettifier.default(bePropertyMatcher) + ")"
}
|
travisbrown/scalatest
|
src/main/scala/org/scalatest/words/ResultOfAnWordToBePropertyMatcherApplication.scala
|
Scala
|
apache-2.0
| 1,104
|
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.tools
import org.scalatest._
import java.util.regex.Pattern
import java.net.URL
import java.io.File
class RunnerSuite() extends Suite with PrivateMethodTester {
def testDeprecatedParseArgsIntoLists() {
// To reuse the val names (runpathList, reportersList, etc.) across cases, they are
// bound inside a local verify method that is invoked once per argument set.
def verify(
args: Array[String],
expectedRunpathList: List[String],
expectedReporterList: List[String],
expectedSuitesList: List[String],
expectedJunitsList: List[String],
expectedPropsList: List[String],
expectedIncludesList: List[String],
expectedExcludesList: List[String],
expectedConcurrentList: List[String],
expectedMemberOfList: List[String],
expectedBeginsWithList: List[String],
expectedTestNGList: List[String],
expectedSuffixes: Option[Pattern],
expectedChosenStyleList: List[String],
expectedScaleFactorList: List[String],
expectedTestSortingReporterTimeoutList: List[String]
) = {
val (
runpathList,
reportersList,
suitesList,
junitsList,
propsList,
includesList,
excludesList,
concurrentList,
memberOfList,
beginsWithList,
testNGList,
suffixes,
chosenStyleList,
spanScaleFactorList,
testSortingReporterTimeoutList
) = Runner.parseArgs(args)
assert(runpathList === expectedRunpathList)
assert(reportersList === expectedReporterList)
assert(suitesList === expectedSuitesList)
assert(junitsList === expectedJunitsList)
assert(propsList === expectedPropsList)
assert(includesList === expectedIncludesList)
assert(excludesList === expectedExcludesList)
assert(concurrentList === expectedConcurrentList)
assert(memberOfList === expectedMemberOfList)
assert(beginsWithList === expectedBeginsWithList)
assert(testNGList === expectedTestNGList)
assert(chosenStyleList === expectedChosenStyleList)
assert(spanScaleFactorList == expectedScaleFactorList)
assert(testSortingReporterTimeoutList == expectedTestSortingReporterTimeoutList)
if (expectedSuffixes.isEmpty) {
assert(suffixes.isEmpty)
} else {
assert(!suffixes.isEmpty)
assert(suffixes.get.toString === expectedSuffixes.get.toString)
}
}
verify(
Array("-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out", "-p"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-p"),
List("-g", "-g", "-f", "file.out"),
Nil,
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array("-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-s", "SuiteOne", "-s", "SuiteTwo"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array(),
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array("-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "JustOne", "-s", "SuiteOne", "-s", "SuiteTwo"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "JustOne"),
Nil,
Nil,
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array("-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-s", "SuiteTwo"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
Nil,
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array("-c", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-s", "SuiteTwo"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array("-c", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-s", "SuiteTwo", "-m", "com.example.webapp",
"-w", "com.example.root"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
Nil,
None,
Nil,
Nil,
Nil
)
// Try a TestNGSuite
verify(
Array("-c", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-s", "SuiteTwo", "-m", "com.example.webapp",
"-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
None,
Nil,
Nil,
Nil
)
// Try a junit Suite
verify(
Array("-c", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-j", "junitTest", "-j", "junitTest2",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne"),
List("-j", "junitTest", "-j", "junitTest2"),
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
None,
Nil,
Nil,
Nil
)
// Test -u option
verify(
Array("-c", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-u", "directory/",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-u", "directory/"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
None,
Nil,
Nil,
Nil
)
// Test -q option
verify(
Array("-c", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-u", "directory/",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-q", "Spec|Suite",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-u", "directory/"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
Some(Pattern.compile(".*(Spec|Suite)$")),
Nil,
Nil,
Nil
)
// Test -q option
verify(
Array("-c", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-u", "directory/",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-q", "Spec", "-q", "Suite",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-u", "directory/"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
Some(Pattern.compile(".*(Spec|Suite)$")),
Nil,
Nil,
Nil
)
// Test -Q option
verify(
Array("-c", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-u", "directory/",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-Q", "-q", "foo",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-u", "directory/"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
Some(Pattern.compile(".*(Spec|Suite|foo)$")),
Nil,
Nil,
Nil
)
}
def testParseArgsIntoLists() {
// To reuse the val names (runpathList, reportersList, etc.) across cases, they are
// bound inside a local verify method that is invoked once per argument set.
def verify(
args: Array[String],
expectedRunpathList: List[String],
expectedReporterList: List[String],
expectedSuitesList: List[String],
expectedJunitsList: List[String],
expectedPropsList: List[String],
expectedIncludesList: List[String],
expectedExcludesList: List[String],
expectedConcurrentList: List[String],
expectedMemberOfList: List[String],
expectedBeginsWithList: List[String],
expectedTestNGList: List[String],
expectedSuffixes: Option[Pattern],
expectedChosenStyleList: List[String],
expectedSpanScaleFactorList: List[String],
expectedTestSortingReporterTimeoutList: List[String]
) = {
val (
runpathList,
reportersList,
suitesList,
junitsList,
propsList,
includesList,
excludesList,
concurrentList,
memberOfList,
beginsWithList,
testNGList,
suffixes,
chosenStyleList,
spanScaleFactorList,
testSortingReporterTimeoutList
) = Runner.parseArgs(args)
assert(runpathList === expectedRunpathList)
assert(reportersList === expectedReporterList)
assert(suitesList === expectedSuitesList)
assert(junitsList === expectedJunitsList)
assert(propsList === expectedPropsList)
assert(includesList === expectedIncludesList)
assert(excludesList === expectedExcludesList)
assert(concurrentList === expectedConcurrentList)
assert(memberOfList === expectedMemberOfList)
assert(beginsWithList === expectedBeginsWithList)
assert(testNGList === expectedTestNGList)
assert(chosenStyleList === expectedChosenStyleList)
assert(spanScaleFactorList == expectedSpanScaleFactorList)
assert(testSortingReporterTimeoutList == expectedTestSortingReporterTimeoutList)
if (expectedSuffixes.isEmpty) {
assert(suffixes.isEmpty)
} else {
assert(!suffixes.isEmpty)
assert(suffixes.get.toString === expectedSuffixes.get.toString)
}
}
verify(
Array("-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out", "-R"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-p"),
List("-g", "-g", "-f", "file.out"),
Nil,
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array("-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-s", "SuiteOne", "-s", "SuiteTwo"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array(),
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array("-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "JustOne", "-s", "SuiteOne", "-s", "SuiteTwo"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "JustOne"),
Nil,
Nil,
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array("-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-s", "SuiteTwo"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
Nil,
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array("-P", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-s", "SuiteTwo"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
Nil,
Nil,
Nil,
None,
Nil,
Nil,
Nil
)
verify(
Array("-P", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-s", "SuiteTwo", "-m", "com.example.webapp",
"-w", "com.example.root"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
Nil,
None,
Nil,
Nil,
Nil
)
// Try a TestNGSuite
verify(
Array("-P", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-s", "SuiteTwo", "-m", "com.example.webapp",
"-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne", "-s", "SuiteTwo"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
None,
Nil,
Nil,
Nil
)
// Try a junit Suite
verify(
Array("-P", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-f", "file.out",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-j", "junitTest", "-j", "junitTest2",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-f", "file.out"),
List("-s", "SuiteOne"),
List("-j", "junitTest", "-j", "junitTest2"),
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
None,
Nil,
Nil,
Nil
)
// Test -u option
verify(
Array("-P", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-u", "directory/",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-u", "directory/"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
None,
Nil,
Nil,
Nil
)
// Test -q option
verify(
Array("-P", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-u", "directory/",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-q", "Spec|Suite",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-u", "directory/"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
Some(Pattern.compile(".*(Spec|Suite)$")),
Nil,
Nil,
Nil
)
// Test -q option
verify(
Array("-P", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-u", "directory/",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-q", "Spec", "-q", "Suite",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-u", "directory/"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
Some(Pattern.compile(".*(Spec|Suite)$")),
Nil,
Nil,
Nil
)
// Test -Q option
verify(
Array("-P", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-u", "directory/",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-Q", "-q", "foo",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-u", "directory/"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
Some(Pattern.compile(".*(Spec|Suite|foo)$")),
Nil,
Nil,
Nil
)
// Test -F option
verify(
Array("-P", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-u", "directory/",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-Q", "-q", "foo",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml", "-F", "200"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-u", "directory/"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
Some(Pattern.compile(".*(Spec|Suite|foo)$")),
Nil,
List("-F", "200"),
Nil
)
// Test -T option
verify(
Array("-P", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-R",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-u", "directory/",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne", "-Q", "-q", "foo",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml", "-T", "20"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-u", "directory/"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
Some(Pattern.compile(".*(Spec|Suite|foo)$")),
Nil,
Nil,
List("-T", "20")
)
// Test -h option
verify(
Array("-c", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-h", "directory/",
"-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-h", "directory/"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
None,
Nil,
Nil,
Nil
)
// Test -h -Y option
verify(
Array("-c", "-g", "-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188", "-p",
"\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\"", "-g", "-h", "directory/",
"-Y", "mystyles.css", "-n", "One Two Three", "-l", "SlowTests", "-s", "SuiteOne",
"-m", "com.example.webapp", "-w", "com.example.root", "-b", "some/path/file.xml"),
List("-p", "\\"serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar\\""),
List("-g", "-g", "-h", "directory/", "-Y", "mystyles.css"),
List("-s", "SuiteOne"),
Nil,
List("-Dincredible=whatshername", "-Ddbname=testdb", "-Dserver=192.168.1.188"),
List("-n", "One Two Three"),
List("-l", "SlowTests"),
List("-c"),
List("-m", "com.example.webapp"),
List("-w", "com.example.root"),
List("-b", "some/path/file.xml"),
None,
Nil,
Nil,
Nil
)
}
def testParseCompoundArgIntoSet() {
expectResult(Set("Cat", "Dog")) {
Runner.parseCompoundArgIntoSet(List("-n", "Cat Dog"), "-n")
}
}
def testParseConfigSet() {
val parseConfigSet = PrivateMethod[Set[ReporterConfigParam]]('parseConfigSet)
intercept[NullPointerException] {
Runner invokePrivate parseConfigSet(null)
}
intercept[IllegalArgumentException] {
Runner invokePrivate parseConfigSet("-fK")
}
intercept[IllegalArgumentException] {
Runner invokePrivate parseConfigSet("-uK")
}
intercept[IllegalArgumentException] {
Runner invokePrivate parseConfigSet("-oYZTFUPBISARG-")
}
intercept[IllegalArgumentException] {
Runner invokePrivate parseConfigSet("-")
}
intercept[IllegalArgumentException] {
Runner invokePrivate parseConfigSet("")
}
expectResult(Set(FilterTestStarting)) {
Runner invokePrivate parseConfigSet("-oN")
}
expectResult(Set(FilterTestSucceeded)) {
Runner invokePrivate parseConfigSet("-oC")
}
expectResult(Set(FilterTestIgnored)) {
Runner invokePrivate parseConfigSet("-oX")
}
expectResult(Set(FilterTestPending)) {
Runner invokePrivate parseConfigSet("-oE")
}
expectResult(Set(FilterSuiteStarting)) {
Runner invokePrivate parseConfigSet("-oH")
}
expectResult(Set(FilterSuiteCompleted)) {
Runner invokePrivate parseConfigSet("-oL")
}
expectResult(Set(FilterInfoProvided)) {
Runner invokePrivate parseConfigSet("-oO")
}
expectResult(Set(PresentWithoutColor)) {
Runner invokePrivate parseConfigSet("-oW")
}
expectResult(Set(PresentAllDurations)) {
Runner invokePrivate parseConfigSet("-oD")
}
expectResult(Set(PresentFullStackTraces)) {
Runner invokePrivate parseConfigSet("-oF")
}
expectResult(Set[ReporterConfigParam]()) {
Runner invokePrivate parseConfigSet("-f")
}
expectResult(Set[ReporterConfigParam]()) {
Runner invokePrivate parseConfigSet("-u")
}
expectResult(Set(FilterInfoProvided, PresentWithoutColor)) {
Runner invokePrivate parseConfigSet("-oOW")
}
expectResult(Set(FilterInfoProvided, PresentWithoutColor)) {
Runner invokePrivate parseConfigSet("-oWO") // Just reverse the order of the params
}
val allOpts = Set(
FilterInfoProvided,
FilterSuiteCompleted,
FilterSuiteStarting,
FilterTestIgnored,
FilterTestPending,
FilterTestStarting,
FilterTestSucceeded,
PresentAllDurations,
PresentWithoutColor,
PresentFullStackTraces
)
expectResult(allOpts) {
Runner invokePrivate parseConfigSet("-oNCXEHLOWDF")
}
}
def testParseReporterArgsIntoSpecs() {
intercept[NullPointerException] {
Runner.parseReporterArgsIntoConfigurations(null)
}
intercept[NullPointerException] {
Runner.parseReporterArgsIntoConfigurations(List("Hello", null, "World"))
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("Hello", "-", "World"))
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("Hello", "", "World"))
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-g", "-l", "-o"))
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("Hello", " there", " world!"))
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-g", "-o", "-g", "-e"))
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-o", "-o", "-g", "-e"))
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-e", "-o", "-g", "-e"))
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-f")) // Can't have -f last, because need a file name
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-u")) // Can't have -u last, because need a directory name
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-r")) // Can't have -r last, because need a reporter class
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-k")) // Can't have -k last, because need a host and port
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-k", "localhost")) // Can't have -k host last, because need a port
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-k", "localhost", "abc")) // -k port number must be integer.
}
expectResult(new ReporterConfigurations(None, Nil, Nil, Nil, Nil, None, None, Nil, Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(Nil)
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-C")) // Can't have -C last, because need a reporter class
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-h"))
}
intercept[IllegalArgumentException] {
Runner.parseReporterArgsIntoConfigurations(List("-h", "html", "-Y"))
}
expectResult(new ReporterConfigurations(Some(new GraphicReporterConfiguration(Set())), Nil, Nil, Nil, Nil, None, None, Nil, Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-g"))
}
expectResult(new ReporterConfigurations(Some(new GraphicReporterConfiguration(Set(FilterSuiteCompleted))), Nil, Nil, Nil, Nil, None, None, Nil, Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-gL"))
}
expectResult(new ReporterConfigurations(None, Nil, Nil, Nil, Nil, Some(new StandardOutReporterConfiguration(Set())), None, Nil, Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-o"))
}
expectResult(new ReporterConfigurations(None, Nil, Nil, Nil, Nil, Some(new StandardOutReporterConfiguration(Set(FilterTestSucceeded,FilterTestIgnored))), None, Nil, Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-oCX"))
}
expectResult(new ReporterConfigurations(None, Nil, Nil, Nil, Nil, None, Some(new StandardErrReporterConfiguration(Set())), Nil, Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-e"))
}
expectResult(new ReporterConfigurations(None, Nil, Nil, Nil, Nil, None, Some(new StandardErrReporterConfiguration(Set(PresentFullStackTraces))), Nil, Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-eF"))
}
expectResult(new ReporterConfigurations(None, List(new FileReporterConfiguration(Set(), "theFilename")), Nil, Nil, Nil, None, None, Nil, Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-f", "theFilename"))
}
expectResult(new ReporterConfigurations(None, Nil, List(new JunitXmlReporterConfiguration(Set(), "target")), Nil, Nil, None, None, Nil, Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-u", "target"))
}
expectResult(new ReporterConfigurations(None, Nil, List(new JunitXmlReporterConfiguration(Set(), "target")), Nil, Nil, None, None, Nil, Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-uN", "target"))
}
expectResult(new ReporterConfigurations(None, List(new FileReporterConfiguration(Set(FilterTestStarting), "theFilename")), Nil, Nil, Nil, None, None, Nil, Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-fN", "theFilename"))
}
expectResult(new ReporterConfigurations(None, Nil, Nil, Nil, Nil, None, None, Nil, List(new CustomReporterConfiguration(Set(), "the.reporter.Class")), Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-r", "the.reporter.Class"))
}
expectResult(new ReporterConfigurations(None, Nil, Nil, Nil, Nil, None, None, Nil, List(new CustomReporterConfiguration(Set(FilterTestPending), "the.reporter.Class")), Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-rE", "the.reporter.Class"))
}
expectResult(new ReporterConfigurations(None, Nil, Nil, Nil, Nil, None, None, Nil, Nil, List(new SocketReporterConfiguration("localhost", 8888)))) {
Runner.parseReporterArgsIntoConfigurations(List("-k", "localhost", "8888"))
}
expectResult(new ReporterConfigurations(None, Nil, Nil, Nil, Nil, None, None, Nil, Nil, List(new SocketReporterConfiguration("localhost", 8888), new SocketReporterConfiguration("another host", 1234)))) {
Runner.parseReporterArgsIntoConfigurations(List("-k", "localhost", "8888", "-k", "another host", "1234"))
}
expectResult(new ReporterConfigurations(None, Nil, Nil, Nil, Nil, None, None, List(new HtmlReporterConfiguration(Set(), "html", None)), Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-h", "html"))
}
expectResult(new ReporterConfigurations(None, Nil, Nil, Nil, Nil, None, None, List(new HtmlReporterConfiguration(Set(), "html", Some(new File("MyStyle.css").toURI.toURL))), Nil, Nil)) {
Runner.parseReporterArgsIntoConfigurations(List("-h", "html", "-Y", "MyStyle.css"))
}
}
def testParseSuiteArgsIntoClassNameStrings() {
intercept[NullPointerException] {
Runner.parseSuiteArgsIntoNameStrings(null, "-j")
}
intercept[NullPointerException] {
Runner.parseSuiteArgsIntoNameStrings(List("-j", null, "-j"), "-j")
}
intercept[IllegalArgumentException] {
Runner.parseSuiteArgsIntoNameStrings(List("-j", "SweetSuite", "-j"), "-j")
}
intercept[IllegalArgumentException] {
Runner.parseSuiteArgsIntoNameStrings(List("-j", "SweetSuite", "-j", "-j"), "-j")
}
expectResult(List("SweetSuite", "OKSuite")) {
Runner.parseSuiteArgsIntoNameStrings(List("-j", "SweetSuite", "-j", "OKSuite"), "-j")
}
expectResult(List("SweetSuite", "OKSuite", "SomeSuite")) {
Runner.parseSuiteArgsIntoNameStrings(List("-j", "SweetSuite", "-j", "OKSuite", "-j", "SomeSuite"), "-j")
}
}
def testParseRunpathArgIntoList() {
intercept[NullPointerException] {
Runner.parseRunpathArgIntoList(null)
}
intercept[NullPointerException] {
Runner.parseRunpathArgIntoList(List("-p", null))
}
intercept[NullPointerException] {
Runner.parseRunpathArgIntoList(List(null, "serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar"))
}
intercept[IllegalArgumentException] {
Runner.parseRunpathArgIntoList(List("-p"))
}
intercept[IllegalArgumentException] {
Runner.parseRunpathArgIntoList(List("-p", "bla", "bla"))
}
intercept[IllegalArgumentException] {
Runner.parseRunpathArgIntoList(List("-pX", "bla"))
}
intercept[IllegalArgumentException] {
Runner.parseRunpathArgIntoList(List("-p", " "))
}
intercept[IllegalArgumentException] {
Runner.parseRunpathArgIntoList(List("-p", "\\t"))
}
expectResult(List("bla")) {
Runner.parseRunpathArgIntoList(List("-p", "bla"))
}
expectResult(List("bla", "bla", "bla")) {
Runner.parseRunpathArgIntoList(List("-p", "bla bla bla"))
}
expectResult(List("serviceuitest-1.1beta4.jar", "myjini", "http://myhost:9998/myfile.jar")) {
Runner.parseRunpathArgIntoList(List("-p", "serviceuitest-1.1beta4.jar myjini http://myhost:9998/myfile.jar"))
}
expectResult(List("\\\\", "c:\\\\", "c:\\\\Program Files", "c:\\\\Documents and Settings", "\\\\", "myjini")) {
Runner.parseRunpathArgIntoList(List("-p", """\\ c:\\ c:\\Program\\ Files c:\\Documents\\ and\\ Settings \\ myjini"""))
}
}
def testParsePropertiesArgsIntoMap() {
intercept[NullPointerException] {
Runner.parsePropertiesArgsIntoMap(null)
}
intercept[NullPointerException] {
Runner.parsePropertiesArgsIntoMap(List("-Da=b", null))
}
intercept[IllegalArgumentException] {
Runner.parsePropertiesArgsIntoMap(List("-Dab")) // = sign missing
}
intercept[IllegalArgumentException] {
Runner.parsePropertiesArgsIntoMap(List("ab")) // needs to start with -D
}
intercept[IllegalArgumentException] {
Runner.parsePropertiesArgsIntoMap(List("-D=ab")) // no key
}
intercept[IllegalArgumentException] {
Runner.parsePropertiesArgsIntoMap(List("-Dab=")) // no value
}
expectResult(Map("a" -> "b", "cat" -> "dog", "Glorp" -> "Glib")) {
Runner.parsePropertiesArgsIntoMap(List("-Da=b", "-Dcat=dog", "-DGlorp=Glib"))
}
}
def testDeprecatedCheckArgsForValidity() {
intercept[NullPointerException] {
Runner.checkArgsForValidity(null)
}
expectResult(None) {
Runner.checkArgsForValidity(Array("-Ddbname=testdb", "-Dserver=192.168.1.188", "-p", "serviceuitest-1.1beta4.jar", "-g", "-eFBA", "-s", "MySuite"))
}
expectResult(None) {
Runner.checkArgsForValidity(Array("-Ddbname=testdb", "-Dserver=192.168.1.188", "-z", "testWildcard", "-g", "-eFBA", "-s", "MySuite"))
}
expectResult(None) {
Runner.checkArgsForValidity(Array("-Ddbname=testdb", "-Dserver=192.168.1.188", "-k", "hostname", "-g", "-eFBA", "-s", "MySuite"))
}
expectResult(None) {
Runner.checkArgsForValidity(Array("-Ddbname=testdb", "-Dserver=192.168.1.188", "-p", "serviceuitest-1.1beta4.jar", "-g", "-eFBA", "-s", "MySuite", "-c"))
}
}
def testParseSuiteArgsIntoSuiteParam() {
intercept[NullPointerException] {
Runner.parseSuiteArgsIntoSuiteParam(null, "-s")
}
intercept[NullPointerException] {
Runner.parseSuiteArgsIntoSuiteParam(List("-s", null, "-s", "suite2"), "-s")
}
intercept[IllegalArgumentException] {
Runner.parseSuiteArgsIntoSuiteParam(List("-s", "-s"), "-s")
}
intercept[IllegalArgumentException] {
Runner.parseSuiteArgsIntoSuiteParam(List("-s", "suite1", "-s"), "-s")
}
intercept[IllegalArgumentException] {
Runner.parseSuiteArgsIntoSuiteParam(List("-sG", "suite1"), "-s")
}
intercept[IllegalArgumentException] {
Runner.parseSuiteArgsIntoSuiteParam(List("-t", "test1"), "-s")
}
intercept[IllegalArgumentException] {
      // -i without -s should not be supported: if a command already contains -s -i, there is no way to tell
      // whether a following -i is meant to stand alone without a -s.
      // -i should only be used together with -s to select a nested suite.
Runner.parseSuiteArgsIntoSuiteParam(List("-i", "suite1"), "-s")
}
intercept[IllegalArgumentException] {
// -sX -t should not be supported, as -s -t should be used to select a specific test.
Runner.parseSuiteArgsIntoSuiteParam(List("-sX", "suite1", "-t", "test1"), "-s")
}
intercept[IllegalArgumentException] {
      // -iX should not be supported: a nested suite's own nested suites are not included, because including
      // them would require a way to specify inclusion arbitrarily far down the suite tree.
Runner.parseSuiteArgsIntoSuiteParam(List("-s", "suite1", "-iX", "nested1"), "-s")
}
val case1 = Runner.parseSuiteArgsIntoSuiteParam(List("-s", "suite1", "-s", "suite2"), "-s")
assert(case1.length === 2)
assert(case1(0).className === "suite1")
assert(case1(0).testNames.length === 0)
assert(case1(1).className === "suite2")
assert(case1(1).testNames.length === 0)
val case2 = Runner.parseSuiteArgsIntoSuiteParam(List("-s", "suite1", "-t", "test1", "-t", "test2", "-s", "suite2"), "-s")
assert(case2.length === 2)
assert(case2(0).className === "suite1")
assert(case2(0).testNames.length === 2)
assert(case2(0).testNames(0) === "test1")
assert(case2(0).testNames(1) === "test2")
assert(case2(1).className === "suite2")
assert(case2(1).testNames.length === 0)
val case3 = Runner.parseSuiteArgsIntoSuiteParam(List("-s", "suite1", "-i", "nested1"), "-s")
assert(case3.length === 1)
assert(case3(0).className === "suite1")
assert(case3(0).testNames.length === 0)
assert(case3(0).nestedSuites.length === 1)
assert(case3(0).nestedSuites(0).suiteId === "nested1")
assert(case3(0).nestedSuites(0).testNames.length === 0)
val case4 = Runner.parseSuiteArgsIntoSuiteParam(List("-s", "suite1", "-i", "nested1", "-t", "test1", "-t", "test2"), "-s")
assert(case4.length === 1)
assert(case4(0).className === "suite1")
assert(case4(0).testNames.length === 0)
assert(case4(0).nestedSuites.length === 1)
assert(case4(0).nestedSuites(0).suiteId === "nested1")
assert(case4(0).nestedSuites(0).testNames.length === 2)
assert(case4(0).nestedSuites(0).testNames(0) === "test1")
assert(case4(0).nestedSuites(0).testNames(1) === "test2")
val case5 = Runner.parseSuiteArgsIntoSuiteParam(List("-s", "suite1", "-z", "test1", "-z", "test2", "-s", "suite2"), "-s")
assert(case5.length === 2)
assert(case5(0).className === "suite1")
assert(case5(0).testNames.length === 0)
assert(case5(0).wildcardTestNames.length === 2)
assert(case5(0).wildcardTestNames(0) === "test1")
assert(case5(0).wildcardTestNames(1) === "test2")
assert(case5(1).className === "suite2")
assert(case5(1).wildcardTestNames.length === 0)
assert(case5(1).testNames.length === 0)
val case6 = Runner.parseSuiteArgsIntoSuiteParam(List("-s", "suite1", "-t", "test1", "-z", "test2", "-s", "suite2"), "-s")
assert(case6.length === 2)
assert(case6(0).className === "suite1")
assert(case6(0).testNames.length === 1)
assert(case6(0).testNames(0) === "test1")
assert(case6(0).wildcardTestNames.length === 1)
assert(case6(0).wildcardTestNames(0) === "test2")
assert(case6(1).className === "suite2")
assert(case6(1).wildcardTestNames.length === 0)
assert(case6(1).testNames.length === 0)
val case7 = Runner.parseSuiteArgsIntoSuiteParam(List("-s", "suite1", "-i", "nested1", "-z", "test1", "-z", "test2"), "-s")
assert(case7.length === 1)
assert(case7(0).className === "suite1")
assert(case7(0).testNames.length === 0)
assert(case7(0).nestedSuites.length === 1)
assert(case7(0).nestedSuites(0).suiteId === "nested1")
assert(case7(0).nestedSuites(0).testNames.length === 0)
assert(case7(0).nestedSuites(0).wildcardTestNames.length === 2)
assert(case7(0).nestedSuites(0).wildcardTestNames(0) === "test1")
assert(case7(0).nestedSuites(0).wildcardTestNames(1) === "test2")
val case8 = Runner.parseSuiteArgsIntoSuiteParam(List("-s", "suite1", "-i", "nested1", "-t", "test1", "-z", "test2"), "-s")
assert(case8.length === 1)
assert(case8(0).className === "suite1")
assert(case8(0).testNames.length === 0)
assert(case8(0).nestedSuites.length === 1)
assert(case8(0).nestedSuites(0).suiteId === "nested1")
assert(case8(0).nestedSuites(0).testNames.length === 1)
assert(case8(0).nestedSuites(0).testNames(0) === "test1")
assert(case8(0).nestedSuites(0).wildcardTestNames.length === 1)
assert(case8(0).nestedSuites(0).wildcardTestNames(0) === "test2")
}
def testCheckArgsForValidity() {
intercept[NullPointerException] {
Runner.checkArgsForValidity(null)
}
expectResult(None) {
Runner.checkArgsForValidity(Array("-Ddbname=testdb", "-Dserver=192.168.1.188", "-R", "serviceuitest-1.1beta4.jar", "-g", "-eFBA", "-s", "MySuite"))
}
expectResult(None) {
Runner.checkArgsForValidity(Array("-Ddbname=testdb", "-Dserver=192.168.1.188", "-z", "test name wildcard", "-g", "-eFBA", "-s", "MySuite"))
}
expectResult(None) {
Runner.checkArgsForValidity(Array("-Ddbname=testdb", "-Dserver=192.168.1.188", "-R", "serviceuitest-1.1beta4.jar", "-g", "-eFBA", "-s", "MySuite", "-P"))
}
}
def testParseChosenStylesIntoChosenStyleSet() {
intercept[IllegalArgumentException] {
Runner.parseChosenStylesIntoChosenStyleSet(List("-a", "aStyle"), "-y")
}
intercept[IllegalArgumentException] {
Runner.parseChosenStylesIntoChosenStyleSet(List("-y"), "-y")
}
intercept[IllegalArgumentException] {
Runner.parseChosenStylesIntoChosenStyleSet(List("-y", "aStyle", "-y"), "-y")
}
val singleStyle = Runner.parseChosenStylesIntoChosenStyleSet(List("-y", "aStyle"), "-y")
assert(singleStyle.size === 1)
assert(singleStyle.contains("aStyle"))
val multiStyle = Runner.parseChosenStylesIntoChosenStyleSet(List("-y", "aStyle", "-y", "bStyle", "-y", "cStyle"), "-y")
assert(multiStyle.size === 3)
assert(multiStyle.contains("aStyle"))
assert(multiStyle.contains("bStyle"))
assert(multiStyle.contains("cStyle"))
}
def testParseDoubleArgument() {
intercept[IllegalArgumentException] {
Runner.parseDoubleArgument(List("-a", "123"), "-F", 1.0)
}
intercept[IllegalArgumentException] {
Runner.parseDoubleArgument(List("-F", "abc"), "-F", 1.0)
}
intercept[IllegalArgumentException] {
Runner.parseDoubleArgument(List("-F"), "-F", 1.0)
}
intercept[IllegalArgumentException] {
Runner.parseDoubleArgument(List("-F", "123", "-F"), "-F", 1.0)
}
intercept[IllegalArgumentException] {
Runner.parseDoubleArgument(List("-F", "123", "-F", "456"), "-F", 1.0)
}
val spanScaleFactor = Runner.parseDoubleArgument(List("-F", "888"), "-F", 1.0)
assert(spanScaleFactor === 888)
intercept[IllegalArgumentException] {
Runner.parseDoubleArgument(List("-a", "123"), "-T", 15.0)
}
intercept[IllegalArgumentException] {
Runner.parseDoubleArgument(List("-T", "abc"), "-T", 15.0)
}
intercept[IllegalArgumentException] {
Runner.parseDoubleArgument(List("-T"), "-T", 15.0)
}
intercept[IllegalArgumentException] {
Runner.parseDoubleArgument(List("-T", "123", "-T"), "-T", 15.0)
}
intercept[IllegalArgumentException] {
Runner.parseDoubleArgument(List("-T", "123", "-T", "456"), "-T", 15.0)
}
val testSortingReporterTimeout = Runner.parseDoubleArgument(List("-T", "888"), "-T", 15.0)
    assert(testSortingReporterTimeout === 888)
}
def testParseConcurrentConfig() {
val emptyConcurrentConfig = Runner.parseConcurrentConfig(List.empty)
assert(emptyConcurrentConfig.numThreads === 0)
assert(emptyConcurrentConfig.enableSuiteSortingReporter === false)
val singleDashP = Runner.parseConcurrentConfig(List("-c"))
assert(singleDashP.numThreads === 0)
assert(singleDashP.enableSuiteSortingReporter === false)
val multiDashP = Runner.parseConcurrentConfig(List("-c", "-c"))
assert(multiDashP.numThreads === 0)
assert(multiDashP.enableSuiteSortingReporter === false)
val singleDashPThreadNum = Runner.parseConcurrentConfig(List("-c10"))
assert(singleDashPThreadNum.numThreads === 10)
assert(singleDashPThreadNum.enableSuiteSortingReporter === false)
val multiDashPThreadNum = Runner.parseConcurrentConfig(List("-c10", "-c5"))
assert(multiDashPThreadNum.numThreads === 10)
assert(multiDashPThreadNum.enableSuiteSortingReporter === false)
val singleDashPS = Runner.parseConcurrentConfig(List("-cS"))
assert(singleDashPS.numThreads === 0)
assert(singleDashPS.enableSuiteSortingReporter === true)
val multiDashPS = Runner.parseConcurrentConfig(List("-c", "-cS"))
assert(multiDashPS.numThreads === 0)
assert(multiDashPS.enableSuiteSortingReporter === true)
val singleDashPSThreadNum = Runner.parseConcurrentConfig(List("-cS8"))
assert(singleDashPSThreadNum.numThreads === 8)
assert(singleDashPSThreadNum.enableSuiteSortingReporter === true)
val multipDashPSThreadNum = Runner.parseConcurrentConfig(List("-cS8", "-c10"))
assert(multipDashPSThreadNum.numThreads === 8)
assert(multipDashPSThreadNum.enableSuiteSortingReporter === true)
}
/*
def testRunpathPropertyAddedToPropertiesMap() {
val a = new Suite {
var theProperties: Map[String, Any] = Map()
override def execute(testName: Option[String], reporter: Reporter, stopper: Stopper, includes: Set[String], excludes: Set[String],
properties: Map[String, Any], distributor: Option[Distributor]) {
theProperties = properties
}
}
val dispatchReporter = new DispatchReporter(Nil, System.out)
val suitesList = List("org.scalatest.usefulstuff.RunpathPropCheckerSuite")
// Runner.doRunRunRunADoRunRun(new DispatchReporter)
// Runner.doRunRunRunADoRunRun(dispatchReporter, suitesList, new Stopper {}, Filter(), Map(), false,
List(), List(), runpath: "build_tests", loader: ClassLoader,
doneListener: RunDoneListener) = {
()
}
}
package org.scalatest.usefulstuff {
class RunpathPropCheckerSuite extends Suite {
var theProperties: Map[String, Any] = Map()
override def execute(testName: Option[String], reporter: Reporter, stopper: Stopper, includes: Set[String], excludes: Set[String],
properties: Map[String, Any], distributor: Option[Distributor]) {
theProperties = properties
}
}
*/
}
|
hubertp/scalatest
|
src/test/scala/org/scalatest/tools/RunnerSuite.scala
|
Scala
|
apache-2.0
| 55,265
|
package lila.streamer
import lila.db.dsl._
import reactivemongo.api.bson._
private object BsonHandlers {
implicit val StreamerIdBSONHandler = stringAnyValHandler[Streamer.Id](_.value, Streamer.Id.apply)
implicit val StreamerListedBSONHandler =
booleanAnyValHandler[Streamer.Listed](_.value, Streamer.Listed.apply)
implicit val StreamerNameBSONHandler = stringAnyValHandler[Streamer.Name](_.value, Streamer.Name.apply)
implicit val StreamerHeadlineBSONHandler =
stringAnyValHandler[Streamer.Headline](_.value, Streamer.Headline.apply)
implicit val StreamerDescriptionBSONHandler =
stringAnyValHandler[Streamer.Description](_.value, Streamer.Description.apply)
import Streamer.{ Approval, Twitch, YouTube }
implicit val StreamerTwitchBSONHandler = Macros.handler[Twitch]
implicit val StreamerYouTubeBSONHandler = Macros.handler[YouTube]
implicit val StreamerApprovalBSONHandler = Macros.handler[Approval]
implicit val StreamerBSONHandler = Macros.handler[Streamer]
}
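// A minimal round-trip sketch (not part of the original file): with the
// implicits above in scope, a whole Streamer document is encoded and decoded
// by the macro-derived handler. The `writeTry`/`readTry` names are assumed
// from the reactivemongo BSON API version in use.
//
//   val doc     = BsonHandlers.StreamerBSONHandler.writeTry(streamer).get
//   val decoded = BsonHandlers.StreamerBSONHandler.readTry(doc).get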
|
luanlv/lila
|
modules/streamer/src/main/BsonHandlers.scala
|
Scala
|
mit
| 1,012
|
/*
* Copyright 2017 Zhang Di
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dizhang.seqspark.stat
import breeze.linalg.{*, CSCMatrix, DenseMatrix, DenseVector, SparseVector}
import org.dizhang.seqspark.stat.HypoTest.NullModel.{Fitted => SNM}
import org.dizhang.seqspark.util.General._
/**
 * Score test for regression models.
 *
 * Here we only compute the score vector and its covariance matrix,
 * leaving the p-value to the specific association method, e.g.
 * Burden, VT, or MetaAnalysis summary.
 *
 * For efficiency, avoid CSCMatrix * DenseMatrix;
 * instead, use DenseMatrix * CSCMatrix.
 *
 * The Sparse class uses a SparseMatrix to encode rare variants,
 * so this test is very fast for large-sample rare-variant association.
 *
 * Only linear and logistic models are implemented here;
 * they are unified into one model.
*/
object ScoreTest {
def apply(nm: SNM, x: CSCMatrix[Double]): ScoreTest = {
Sparse(nm, x)
}
def apply(nm: SNM, x: DenseMatrix[Double]): ScoreTest = {
Dense(nm, x)
}
def apply(nm: SNM, x: DenseVector[Double]): ScoreTest = {
Dense(nm, DenseVector.horzcat(x))
}
def apply(nm: SNM, x: SparseVector[Double]): ScoreTest = {
Sparse(nm, SparseVector.horzcat(x))
}
def apply(nm: SNM,
x1: DenseMatrix[Double],
x2: CSCMatrix[Double]): ScoreTest = {
Mixed(nm, x1, x2)
}
case class Sparse(nm: SNM,
x: CSCMatrix[Double]) extends ScoreTest {
val score = (nm.residuals.toDenseMatrix * x).toDenseVector / nm.a
lazy val variance = {
val c = nm.xs
val IccInv = nm.invInfo * nm.a
val Igg = (colMultiply(x, nm.b).t * x).toDense
val Icg = (c(::, *) *:* nm.b).t * x
val Igc = Icg.t
(Igg - Igc * IccInv * Icg) / nm.a
}
}
case class Dense(nm: SNM,
x: DenseMatrix[Double]) extends ScoreTest {
val score = x.t * nm.residuals / nm.a
lazy val variance = {
val c = nm.xs
val IccInv = nm.invInfo * nm.a
val Igg = (x(::, *) *:* nm.b).t * x
val Icg = (c(::, *) *:* nm.b).t * x
val Igc = Icg.t
(Igg - Igc * IccInv * Icg)/nm.a
}
}
case class Mixed(nm: SNM,
x1: DenseMatrix[Double],
x2: CSCMatrix[Double]) extends ScoreTest {
private val dense = Dense(nm, x1)
private val sparse = Sparse(nm, x2)
val score = DenseVector.vertcat(dense.score, sparse.score)
lazy val variance = {
val v1 = dense.variance
val v4 = sparse.variance
val v2 = {
val c = nm.xs
val IccInv = nm.invInfo * nm.a
val Igg = (x1(::, *) *:* nm.b).t * x2
val Icg = (c(::, *) *:* nm.b).t * x2
val Igc = x1.t * (c(::, *) *:* nm.b).t
(Igg - Igc * IccInv * Icg) / nm.a
}
val v3 = v2.t
val v12 = DenseMatrix.horzcat(v1, v2)
val v34 = DenseMatrix.horzcat(v3, v4)
DenseMatrix.vertcat(v12, v34)
}
}
case class Mock(score: DenseVector[Double],
variance: DenseMatrix[Double]) extends ScoreTest
}
@SerialVersionUID(7778780001L)
sealed trait ScoreTest extends HypoTest {
def score: DenseVector[Double]
def variance: DenseMatrix[Double]
}
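// A minimal usage sketch (not part of the original file), assuming a fitted
// null model `nm` is available: construct the test from a sparse genotype
// matrix and read off the score vector and its lazily computed covariance.
object ScoreTestExample {
  def run(nm: SNM, genotypes: CSCMatrix[Double]): (DenseVector[Double], DenseMatrix[Double]) = {
    val test = ScoreTest(nm, genotypes) // dispatches to the Sparse case
    (test.score, test.variance)
  }
}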
|
statgenetics/seqspark
|
src/main/scala/org/dizhang/seqspark/stat/ScoreTest.scala
|
Scala
|
apache-2.0
| 3,704
|
import sbt._
import sbt.Keys._
object BuildSettings {
val commonSettings = Defaults.coreDefaultSettings ++ Seq (
organization := "com.softwaremill.macwire",
version := "2.2.1",
scalaVersion := "2.11.7",
// Sonatype OSS deployment
publishTo <<= version { (v: String) =>
val nexus = "https://oss.sonatype.org/"
if (v.trim.endsWith("SNAPSHOT"))
Some("snapshots" at nexus + "content/repositories/snapshots")
else
Some("releases" at nexus + "service/local/staging/deploy/maven2")
},
credentials += Credentials(Path.userHome / ".ivy2" / ".credentials"),
publishMavenStyle := true,
publishArtifact in Test := false,
pomIncludeRepository := { _ => false },
pomExtra :=
<scm>
<url>git@github.com:adamw/macwire.git</url>
<connection>scm:git:git@github.com:adamw/macwire.git</connection>
</scm>
<developers>
<developer>
<id>adamw</id>
<name>Adam Warski</name>
<url>http://www.warski.org</url>
</developer>
</developers>,
licenses := ("Apache2", new java.net.URL("http://www.apache.org/licenses/LICENSE-2.0.txt")) :: Nil,
homepage := Some(new java.net.URL("http://www.softwaremill.com"))
)
val testSettings = commonSettings ++ Seq(
publishArtifact := false,
scalacOptions ++= Seq("-Ywarn-dead-code"),
// Otherwise when running tests in sbt, the macro is not visible
// (both macro and usages are compiled in the same compiler run)
fork in Test := true
)
}
object Dependencies {
val tagging = "com.softwaremill.common" %% "tagging" % "1.0.0"
val scalatest = "org.scalatest" %% "scalatest" % "2.2.5"
val javassist = "org.javassist" % "javassist" % "3.20.0-GA"
}
object MacwireBuild extends Build {
import BuildSettings._
import Dependencies._
lazy val root = project.in(file(".")).
settings(commonSettings).
settings(
publishArtifact := false).
aggregate(
util, macros, proxy, tests, tests2, testUtil, utilTests, examplesScalatra)
lazy val util = project.in(file("util")).
settings(
libraryDependencies += tagging).
settings(commonSettings)
lazy val macros = project.in(file("macros")).
settings(commonSettings).
settings(
libraryDependencies += "org.scala-lang" % "scala-reflect" % scalaVersion.value).
dependsOn(util % "provided")
lazy val proxy = project.in(file("proxy")).
settings(commonSettings).
settings(
libraryDependencies ++= Seq(javassist, scalatest)).
dependsOn(macros % "test")
lazy val testUtil = project.in(file("test-util")).
settings(testSettings).
settings(
libraryDependencies ++= Seq(
scalatest,
"org.scala-lang" % "scala-compiler" % scalaVersion.value))
lazy val tests = project.in(file("tests")).
settings(testSettings).
dependsOn(
macros % "provided",
testUtil % "test",
proxy)
lazy val utilTests = project.in(file("util-tests")).
settings(testSettings).
dependsOn(
macros % "provided",
util % "test",
testUtil % "test")
  // The point of these tests is simply that they compile.
lazy val tests2 = project.in(file("tests2")).
settings(testSettings).
settings(
libraryDependencies += scalatest).
dependsOn(
util, macros % "provided", proxy)
lazy val examplesScalatra: Project = {
val ScalatraVersion = "2.3.1"
val scalatraCore = "org.scalatra" %% "scalatra" % ScalatraVersion
val scalatraScalate = "org.scalatra" %% "scalatra-scalate" % ScalatraVersion
val logback = "ch.qos.logback" % "logback-classic" % "1.1.3"
val jetty = "org.eclipse.jetty" % "jetty-webapp" % "9.3.3.v20150827" % "compile"
val servletApi = "org.eclipse.jetty.orbit" % "javax.servlet" % "3.0.0.v201112011016" % "compile" artifacts (Artifact("javax.servlet", "jar", "jar"))
Project(
"examples-scalatra",
file("examples/scalatra"),
settings = commonSettings ++ Seq(
publishArtifact := false,
classpathTypes ~= (_ + "orbit"),
libraryDependencies ++= Seq(scalatraCore, scalatraScalate, jetty, servletApi, logback)
)
) dependsOn(util, macros % "provided", proxy)
}
// Enabling debug project-wide. Can't find a better way to pass options to scalac.
System.setProperty("macwire.debug", "")
}
|
numesmat/macwire
|
project/Build.scala
|
Scala
|
apache-2.0
| 4,402
|
/**
* Play HTML Compressor
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.md.
* It is also available through the world-wide-web at this URL:
* https://github.com/mohiva/play-html-compressor/blob/master/LICENSE.md
*/
package com.mohiva.play.htmlcompressor
import play.twirl.api.Html
import play.api.mvc._
import play.api.Play
import play.api.Play.current
import play.api.http.{ MimeTypes, HeaderNames }
import play.api.libs.iteratee.Enumerator
import com.googlecode.htmlcompressor.compressor.HtmlCompressor
import com.mohiva.play.compressor.CompressorFilter
/**
 * Uses Google's HtmlCompressor to compress the HTML code of a response.
*
* @param f Function which returns the configured HTML compressor.
*/
class HTMLCompressorFilter(f: => HtmlCompressor) extends CompressorFilter[HtmlCompressor](f) {
/**
   * Check if the given result is an HTML result.
   *
   * @param result The result to check.
   * @return True if the result is an HTML result, false otherwise.
*/
override protected def isCompressible(result: Result): Boolean = {
lazy val contentTypeHtml = result.header.headers.get(HeaderNames.CONTENT_TYPE).exists(_.contains(MimeTypes.HTML))
lazy val htmlEnumerator = manifest[Enumerator[Html]].runtimeClass.isInstance(result.body)
super.isCompressible(result) && contentTypeHtml && htmlEnumerator
}
}
/**
* Default implementation of the HTML compressor filter.
*/
object HTMLCompressorFilter {
/**
* Gets the default Google HTML compressor instance.
*/
lazy val default = {
val compressor = new HtmlCompressor()
if (Play.isDev) {
compressor.setPreserveLineBreaks(true)
}
compressor.setRemoveComments(true)
compressor.setRemoveIntertagSpaces(false)
compressor.setRemoveHttpProtocol(true)
compressor.setRemoveHttpsProtocol(true)
compressor
}
/**
* Creates the HTML compressor filter.
*
* @return The HTML compressor filter.
*/
def apply(): HTMLCompressorFilter = new HTMLCompressorFilter(default)
}
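/**
 * A minimal usage sketch (not part of the original file): enabling the filter
 * application-wide through a Play global object. `ExampleGlobal` is a
 * hypothetical name, and `WithFilters` is assumed to be available in the Play
 * version this library targets.
 */
object ExampleGlobal extends WithFilters(HTMLCompressorFilter())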
|
CapeSepias/play-html-compressor
|
app/com/mohiva/play/htmlcompressor/HTMLCompressorFilter.scala
|
Scala
|
bsd-3-clause
| 2,089
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.keras
import com.intel.analytics.bigdl.nn.{Cropping2D, _}
import com.intel.analytics.bigdl.nn.abstractnn.DataFormat
import com.intel.analytics.bigdl.utils.{Shape, TestUtils}
class Cropping3DSpec extends KerasBaseSpec {
"Cropping3D" should "with CHANNEL_FIRST work properly" in {
val kerasCode =
"""
|input_tensor = Input(shape=[3, 4, 5, 6])
|input = np.random.uniform(-1, 1, [2, 3, 4, 5, 6])
|output_tensor = Cropping3D(
| cropping=((1, 1), (1, 1), (1, 1)), dim_ordering='th')(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val model = Cropping3D[Float](Array(1, 1), Array(1, 1), Array(1, 1), Cropping3D.CHANNEL_FIRST)
checkOutputAndGrad(model, kerasCode)
}
"Cropping3D" should "with CHANNEL_LAST work properly" in {
val kerasCode =
"""
|input_tensor = Input(shape=[3, 4, 5, 6])
|input = np.random.uniform(-1, 1, [2, 3, 4, 5, 6])
|output_tensor = Cropping3D(
| cropping=((1, 1), (1, 1), (1, 1)), dim_ordering='tf')(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val model = Cropping3D[Float](Array(1, 1), Array(1, 1), Array(1, 1), Cropping3D.CHANNEL_LAST)
checkOutputAndGrad(model, kerasCode)
}
"Cropping3D computeOutputShape CHANNEL_FIRST" should "work properly" in {
val layer = Cropping3D[Float](Array(2, 3), Array(2, 4), Array(1, 2))
TestUtils.compareOutputShape(layer, Shape(3, 24, 28, 32)) should be (true)
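    // For reference: each spatial dimension shrinks by the sum of its two crop
    // sizes, so an input of Shape(3, 24, 28, 32) crops to (3, 19, 22, 29).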
}
"Cropping3D computeOutputShape CHANNEL_LAST" should "work properly" in {
val layer = Cropping3D[Float](Array(1, 3), Array(2, 1), Array(4, 2), Cropping3D.CHANNEL_LAST)
TestUtils.compareOutputShape(layer, Shape(32, 32, 32, 4)) should be (true)
}
}
|
yiheng/BigDL
|
spark/dl/src/test/scala/com/intel/analytics/bigdl/keras/Cropping3DSpec.scala
|
Scala
|
apache-2.0
| 2,450
|
package mesosphere.marathon
package integration.setup
import com.typesafe.scalalogging.Logger
import java.io.File
import java.net.{ URLDecoder, URLEncoder }
import java.nio.file.Files
import java.util.UUID
import java.util.concurrent.ConcurrentLinkedQueue
import akka.Done
import akka.actor.{ ActorSystem, Cancellable, Scheduler }
import akka.http.scaladsl.Http
import akka.http.scaladsl.client.RequestBuilding.Get
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, StatusCodes }
import akka.http.scaladsl.unmarshalling.FromRequestUnmarshaller
import akka.stream.Materializer
import akka.stream.scaladsl.Sink
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import com.typesafe.scalalogging.StrictLogging
import mesosphere.AkkaUnitTestLike
import mesosphere.marathon.api.RestResource
import mesosphere.marathon.integration.facades._
import mesosphere.marathon.raml.{ App, AppHostVolume, AppHealthCheck, Network, NetworkMode, PodState, PodStatus, ReadMode }
import mesosphere.marathon.state.PathId
import mesosphere.marathon.util.{ Lock, Retry, Timeout }
import mesosphere.util.PortAllocator
import org.apache.commons.io.FileUtils
import org.scalatest.concurrent.{ Eventually, ScalaFutures }
import org.scalatest.exceptions.TestFailedDueToTimeoutException
import org.scalatest.time.{ Milliseconds, Minutes, Seconds, Span }
import org.scalatest.{ BeforeAndAfterAll, Suite }
import play.api.libs.json.{ JsObject, Json }
import scala.annotation.tailrec
import scala.async.Async.{ async, await }
import scala.collection.mutable
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
import scala.sys.process.Process
import scala.util.Try
import scala.util.control.NonFatal
/**
* Runs a marathon server for the given test suite
* @param autoStart true if marathon should be started immediately
* @param suiteName The test suite that owns this marathon
* @param masterUrl The mesos master url
* @param zkUrl The ZK url
* @param conf any particular configuration
* @param mainClass The main class
*/
case class LocalMarathon(
autoStart: Boolean,
suiteName: String,
masterUrl: String,
zkUrl: String,
conf: Map[String, String] = Map.empty,
mainClass: String = "mesosphere.marathon.Main")(implicit
system: ActorSystem,
mat: Materializer,
ctx: ExecutionContext,
scheduler: Scheduler) extends AutoCloseable with StrictLogging {
system.registerOnTermination(close())
lazy val uuid = UUID.randomUUID.toString
lazy val httpPort = PortAllocator.ephemeralPort()
lazy val url = conf.get("https_port").fold(s"http://localhost:$httpPort")(httpsPort => s"https://localhost:$httpsPort")
lazy val client = new MarathonFacade(url, PathId.empty)
private val workDir = {
val f = Files.createTempDirectory(s"marathon-$httpPort").toFile
f.deleteOnExit()
f
}
private def write(dir: File, fileName: String, content: String): String = {
val file = File.createTempFile(fileName, "", dir)
file.deleteOnExit()
FileUtils.write(file, content)
file.setReadable(true)
file.getAbsolutePath
}
private val secretPath = write(workDir, fileName = "marathon-secret", content = "secret1")
val config = Map(
"master" -> masterUrl,
"mesos_authentication_principal" -> "principal",
"mesos_role" -> "foo",
"http_port" -> httpPort.toString,
"zk" -> zkUrl,
"zk_timeout" -> 20.seconds.toMillis.toString,
"zk_connection_timeout" -> 20.seconds.toMillis.toString,
"zk_session_timeout" -> 20.seconds.toMillis.toString,
"mesos_authentication_secret_file" -> s"$secretPath",
"access_control_allow_origin" -> "*",
"reconciliation_initial_delay" -> 5.minutes.toMillis.toString,
"min_revive_offers_interval" -> "100",
"hostname" -> "localhost",
"logging_level" -> "debug",
"offer_matching_timeout" -> 10.seconds.toMillis.toString // see https://github.com/mesosphere/marathon/issues/4920
) ++ conf
val args = config.flatMap {
case (k, v) =>
if (v.nonEmpty) {
Seq(s"--$k", v)
} else {
Seq(s"--$k")
}
}(collection.breakOut)
@volatile private var marathon = Option.empty[Process]
if (autoStart) {
start()
}
  // It'd be great to be able to run Marathon in-process, but we can't because GuiceFilter relies on static state :(
private lazy val processBuilder = {
val java = sys.props.get("java.home").fold("java")(_ + "/bin/java")
val cp = sys.props.getOrElse("java.class.path", "target/classes")
val cmd = Seq(java, "-Xmx1024m", "-Xms256m", "-XX:+UseConcMarkSweepGC", "-XX:ConcGCThreads=2",
// lower the memory pressure by limiting threads.
"-Dakka.actor.default-dispatcher.fork-join-executor.parallelism-min=2",
"-Dakka.actor.default-dispatcher.fork-join-executor.factor=1",
"-Dakka.actor.default-dispatcher.fork-join-executor.parallelism-max=4",
"-Dscala.concurrent.context.minThreads=2",
"-Dscala.concurrent.context.maxThreads=32",
s"-DmarathonUUID=$uuid -DtestSuite=$suiteName", "-classpath", cp, "-client", mainClass) ++ args
Process(cmd, workDir, sys.env.toSeq: _*)
}
def create(): Process = {
marathon.getOrElse {
val process = processBuilder.run(ProcessOutputToLogStream(s"$suiteName-LocalMarathon-$httpPort"))
marathon = Some(process)
process
}
}
def start(): Future[Done] = {
create()
val port = conf.get("http_port").orElse(conf.get("https_port")).map(_.toInt).getOrElse(httpPort)
val future = Retry(s"Waiting for Marathon on $port", maxAttempts = Int.MaxValue, minDelay = 1.milli, maxDelay = 5.seconds, maxDuration = 4.minutes) {
async {
val result = await(Http().singleRequest(Get(s"http://localhost:$port/v2/leader")))
result.discardEntityBytes() // forget about the body
if (result.status.isSuccess()) { // linter:ignore //async/await
Done
} else {
throw new Exception(s"Marathon on port=$port hasn't started yet. Giving up waiting..")
}
}
}
future
}
private def activePids: Seq[String] = {
val PIDRE = """^\\s*(\\d+)\\s+(\\S*)\\s*(.*)$""".r
Process("jps -lv").!!.split("\\n").collect {
case PIDRE(pid, main, jvmArgs) if main.contains(mainClass) && jvmArgs.contains(uuid) => pid
}(collection.breakOut)
}
def isRunning(): Boolean =
activePids.nonEmpty
def exitValue(): Option[Int] = marathon.map(_.exitValue())
def stop(): Future[Done] = {
marathon.fold(Future.successful(Done)){ p =>
p.destroy()
Timeout.blocking(30.seconds){ p.exitValue(); Done }
.recover {
case NonFatal(e) =>
logger.warn(s"Could not shutdown Marathon $suiteName in time", e)
val pids = activePids
if (pids.nonEmpty) {
Process(s"kill -9 ${pids.mkString(" ")}").!
}
Done
}
}.andThen {
case _ =>
marathon = Option.empty[Process]
}
}
def restart(): Future[Done] = {
logger.info(s"Restarting Marathon on $httpPort")
async {
await(stop())
val x = await(start())
logger.info(s"Restarted Marathon on $httpPort")
x
}
}
override def close(): Unit = {
stop()
Try(FileUtils.deleteDirectory(workDir))
}
}
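/**
 * A minimal usage sketch (not part of the original file): starting and
 * stopping a local Marathon. The master and ZK URLs are hypothetical, and the
 * implicit Akka machinery is assumed to be supplied by the enclosing suite,
 * as the constructor requires.
 */
object LocalMarathonExample {
  def run()(implicit system: ActorSystem, mat: Materializer, ctx: ExecutionContext, scheduler: Scheduler): Future[Done] = {
    val marathon = LocalMarathon(
      autoStart = false,
      suiteName = "ExampleSuite",
      masterUrl = "zk://127.0.0.1:2181/mesos",
      zkUrl = "zk://127.0.0.1:2181/marathon")
    marathon.start().andThen { case _ => marathon.close() }
  }
}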
trait HealthCheckEndpoint extends StrictLogging with ScalaFutures {
protected val healthChecks = Lock(mutable.ListBuffer.empty[IntegrationHealthCheck])
val registeredReadinessChecks = Lock(mutable.ListBuffer.empty[IntegrationReadinessCheck])
implicit val system: ActorSystem
implicit val mat: Materializer
/**
   * Note! This is declared as lazy in order to prevent eager evaluation of values on which it depends.
   * We initialize it during the before hook and wait for Marathon to respond.
*/
protected[setup] lazy val healthEndpoint = {
val route = {
import akka.http.scaladsl.server.Directives._
val mapper = new ObjectMapper() with ScalaObjectMapper
mapper.registerModule(DefaultScalaModule)
implicit val unmarshal = new FromRequestUnmarshaller[Map[String, Any]] {
override def apply(value: HttpRequest)(implicit ec: ExecutionContext, materializer: Materializer): Future[Map[String, Any]] = {
value.entity.toStrict(patienceConfig.timeout)(materializer).map { entity =>
mapper.readValue[Map[String, Any]](entity.data.utf8String)
}(ec)
}
}
get {
path(Segment / Segment / "health") { (uriEncodedAppId, versionId) =>
import PathId._
val appId = URLDecoder.decode(uriEncodedAppId, "UTF-8").toRootPath
def instance = healthChecks(_.find { c => c.appId == appId && c.versionId == versionId })
val state = instance.fold(true)(_.healthy)
logger.info(s"Received health check request: app=$appId, version=$versionId reply=$state")
if (state) {
complete(HttpResponse(status = StatusCodes.OK))
} else {
complete(HttpResponse(status = StatusCodes.InternalServerError))
}
} ~ path(Segment / Segment / Segment / "ready") { (uriEncodedAppId, versionId, taskId) =>
import PathId._
val appId = URLDecoder.decode(uriEncodedAppId, "UTF-8").toRootPath
          // Find a matching registered readiness check. If the check has no task id set, it matches any task.
def check: Option[IntegrationReadinessCheck] = registeredReadinessChecks(_.find { c =>
c.appId == appId && c.versionId == versionId && c.taskId.fold(true)(_ == taskId)
})
// An app is not ready by default to avoid race conditions.
val isReady = check.fold(false)(_.call)
logger.info(s"Received readiness check request: app=$appId, version=$versionId taskId=$taskId reply=$isReady")
if (isReady) {
complete(HttpResponse(status = StatusCodes.OK))
} else {
complete(HttpResponse(status = StatusCodes.InternalServerError))
}
} ~ path(Remaining) { path =>
require(false, s"$path was unmatched!")
complete(HttpResponse(status = StatusCodes.InternalServerError))
}
}
}
val port = PortAllocator.ephemeralPort()
logger.info(s"Starting health check endpoint on port $port.")
val server = Http().bindAndHandle(route, "0.0.0.0", port).futureValue
logger.info(s"Listening for health events on $port")
server
}
/**
* Add an integration health check to internal health checks. The integration health check is used to control the
* health check replies for our app mock.
*
* @param appId The app id of the app mock
* @param versionId The version of the app mock
* @param state The initial health status of the app mock
* @return The IntegrationHealthCheck object which is used to control the replies.
*/
def registerAppProxyHealthCheck(appId: PathId, versionId: String, state: Boolean): IntegrationHealthCheck = {
val check = new IntegrationHealthCheck(appId, versionId, state)
healthChecks { checks =>
checks.filter(c => c.appId == appId && c.versionId == versionId).foreach(checks -= _)
checks += check
}
check
}
/**
* Adds an integration readiness check to internal readiness checks. The behaviour is similar to integration health
* checks.
*
* @param appId The app id of the app mock
* @param versionId The version of the app mock
* @param taskId Optional task id to identify the task of the app mock.
* @return The IntegrationReadinessCheck object which is used to control replies.
*/
def registerProxyReadinessCheck(appId: PathId, versionId: String, taskId: Option[String] = None): IntegrationReadinessCheck = {
val check = new IntegrationReadinessCheck(appId, versionId, taskId)
registeredReadinessChecks { checks =>
checks.filter(c => c.appId == appId && c.versionId == versionId && c.taskId == taskId).foreach(checks -= _)
checks += check
}
check
}
}
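// A minimal usage sketch (not part of the original file): controlling the mock
// health endpoint from a test. That the returned check exposes a mutable
// `healthy` flag is an assumption based on the scaladoc above ("used to
// control the replies"):
//
//   val check = registerAppProxyHealthCheck(appId, "v1", state = true)
//   check.healthy = false // subsequent GET <app>/<version>/health calls now return 500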
/**
* Base trait for tests that need a marathon
*/
trait MarathonTest extends HealthCheckEndpoint with ScalaFutures with Eventually {
protected def logger: Logger
def marathonUrl: String
def marathon: MarathonFacade
def leadingMarathon: Future[LocalMarathon]
def mesos: MesosFacade
val testBasePath: PathId
def suiteName: String
implicit val system: ActorSystem
implicit val mat: Materializer
implicit val ctx: ExecutionContext
implicit val scheduler: Scheduler
case class CallbackEvent(eventType: String, info: Map[String, Any])
object CallbackEvent {
def apply(event: ITEvent): CallbackEvent = CallbackEvent(event.eventType, event.info)
}
implicit class CallbackEventToStatusUpdateEvent(val event: CallbackEvent) {
def taskStatus: String = event.info.get("taskStatus").map(_.toString).getOrElse("")
def message: String = event.info("message").toString
def id: String = event.info("id").toString
def running: Boolean = taskStatus == "TASK_RUNNING"
def finished: Boolean = taskStatus == "TASK_FINISHED"
def failed: Boolean = taskStatus == "TASK_FAILED"
}
object StatusUpdateEvent {
def unapply(event: CallbackEvent): Option[CallbackEvent] = {
if (event.eventType == "status_update_event") Some(event)
else None
}
}
protected val events = new ConcurrentLinkedQueue[ITSSEEvent]()
implicit class PathIdTestHelper(path: String) {
def toRootTestPath: PathId = testBasePath.append(path).canonicalPath()
def toTestPath: PathId = testBasePath.append(path)
}
/**
* Constructs the proper health proxy endpoint argument for the Python app mock.
*
* @param appId The app id whose health is checked
* @param versionId The version of the app
* @return URL to health check endpoint
*/
def healthEndpointFor(appId: PathId, versionId: String): String = {
val encodedAppId = URLEncoder.encode(appId.toString, "UTF-8")
s"http://$$HOST:${healthEndpoint.localAddress.getPort}/$encodedAppId/$versionId"
}
def appProxyHealthCheck(
gracePeriod: FiniteDuration = 1.seconds,
interval: FiniteDuration = 1.second,
maxConsecutiveFailures: Int = Int.MaxValue,
portIndex: Option[Int] = Some(0)): AppHealthCheck =
raml.AppHealthCheck(
gracePeriodSeconds = gracePeriod.toSeconds.toInt,
intervalSeconds = interval.toSeconds.toInt,
maxConsecutiveFailures = maxConsecutiveFailures,
portIndex = portIndex,
protocol = raml.AppHealthCheckProtocol.Http
)
def appProxy(appId: PathId, versionId: String, instances: Int,
healthCheck: Option[raml.AppHealthCheck] = Some(appProxyHealthCheck()),
dependencies: Set[PathId] = Set.empty): App = {
val projectDir = sys.props.getOrElse("user.dir", ".")
val appMock: File = new File(projectDir, "src/test/python/app_mock.py")
val cmd = Some(s"""echo APP PROXY $$MESOS_TASK_ID RUNNING; ${appMock.getAbsolutePath} """ +
s"""$$PORT0 $appId $versionId ${healthEndpointFor(appId, versionId)}""")
App(
id = appId.toString,
cmd = cmd,
executor = "//cmd",
instances = instances,
cpus = 0.01, mem = 32.0,
healthChecks = healthCheck.toSet,
dependencies = dependencies.map(_.toString)
)
}
def dockerAppProxy(appId: PathId, versionId: String, instances: Int, healthCheck: Option[AppHealthCheck] = Some(appProxyHealthCheck()), dependencies: Set[PathId] = Set.empty): App = {
val projectDir = sys.props.getOrElse("user.dir", ".")
val containerDir = "/opt/marathon"
val encodedAppId = URLEncoder.encode(appId.toString, "UTF-8")
val cmd = Some("""echo APP PROXY $$MESOS_TASK_ID RUNNING; /opt/marathon/python/app_mock.py """ +
s"""$$PORT0 $appId $versionId ${healthEndpointFor(appId, versionId)}""")
App(
id = appId.toString,
cmd = cmd,
container = Some(raml.Container(
`type` = raml.EngineType.Docker,
docker = Some(raml.DockerContainer(
image = "python:3.4.6-alpine"
)),
volumes = collection.immutable.Seq(
AppHostVolume(hostPath = s"$projectDir/src/test/python", containerPath = s"$containerDir/python", mode = ReadMode.Ro)
)
)),
instances = instances,
cpus = 0.5,
mem = 128,
healthChecks = healthCheck.toSet,
dependencies = dependencies.map(_.toString),
networks = Seq(Network(mode = NetworkMode.Host))
)
}
def waitForTasks(appId: PathId, num: Int, maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis)(implicit facade: MarathonFacade = marathon): List[ITEnrichedTask] = {
eventually(timeout(Span(maxWait.toMillis, Milliseconds))) {
val tasks = Try(facade.tasks(appId)).map(_.value).getOrElse(Nil).filter(_.launched)
logger.info(s"${tasks.size}/$num tasks launched for $appId")
require(tasks.size == num, s"Waiting for $num tasks to be launched")
tasks
}
}
  // We shouldn't eat exceptions in cleanUp() methods: it's a source of hard-to-find bugs if
  // we just move on to the next test, which expects a "clean state". We should fail loud and
  // proud here and find out why the clean-up fails.
def cleanUp(): Unit = {
logger.info(">>> Starting to CLEAN UP...")
events.clear()
// Wait for a clean slate in Marathon, if there is a running deployment or a runSpec exists
logger.info("Clean Marathon State")
//do not fail here, since the require statements will ensure a correct setup and fail otherwise
Try(waitForDeployment(eventually(marathon.deleteGroup(testBasePath, force = true))))
val cleanUpPatienceConfig = WaitTestSupport.PatienceConfig(timeout = Span(1, Minutes), interval = Span(1, Seconds))
WaitTestSupport.waitUntil("clean slate in Mesos") {
val occupiedAgents = mesos.state.value.agents.filter { agent => agent.usedResources.nonEmpty || agent.reservedResourcesByRole.nonEmpty }
occupiedAgents.foreach { agent =>
import mesosphere.marathon.integration.facades.MesosFormats._
val usedResources: String = Json.prettyPrint(Json.toJson(agent.usedResources))
val reservedResources: String = Json.prettyPrint(Json.toJson(agent.reservedResourcesByRole))
logger.info(s"""Waiting for blank slate Mesos...\\n "used_resources": "$usedResources"\\n"reserved_resources": "$reservedResources"""")
}
occupiedAgents.isEmpty
}(cleanUpPatienceConfig)
val apps = marathon.listAppsInBaseGroup
require(apps.value.isEmpty, s"apps weren't empty: ${apps.entityPrettyJsonString}")
val pods = marathon.listPodsInBaseGroup
require(pods.value.isEmpty, s"pods weren't empty: ${pods.entityPrettyJsonString}")
val groups = marathon.listGroupsInBaseGroup
require(groups.value.isEmpty, s"groups weren't empty: ${groups.entityPrettyJsonString}")
events.clear()
healthChecks(_.clear())
logger.info("... CLEAN UP finished <<<")
}
def waitForHealthCheck(check: IntegrationHealthCheck, maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis) = {
WaitTestSupport.waitUntil("Health check to get queried", maxWait) { check.pinged.get }
}
def waitForDeploymentId(deploymentId: String, maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): CallbackEvent = {
waitForEventWith("deployment_success", _.id == deploymentId, maxWait)
}
def waitForStatusUpdates(kinds: String*) = kinds.foreach { kind =>
logger.info(s"Wait for status update event with kind: $kind")
waitForEventWith("status_update_event", _.taskStatus == kind)
}
def waitForEvent(
kind: String,
maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): CallbackEvent =
waitForEventWith(kind, _ => true, maxWait)
def waitForEventWith(
kind: String,
fn: CallbackEvent => Boolean, maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): CallbackEvent = {
waitForEventMatching(s"event $kind to arrive", maxWait) { event =>
event.eventType == kind && fn(event)
}
}
/**
* Consumes the next event from the events queue within deadline. Does not throw. Returns None if unable to return an
* event by that time.
*
* @param deadline The time after which to stop attempting to get an event and return None
*/
private def nextEvent(deadline: Deadline): Option[ITSSEEvent] = try {
eventually(timeout(Span(deadline.timeLeft.toMillis, Milliseconds))) {
val r = Option(events.poll)
if (r.isEmpty)
throw new NoSuchElementException
r
}
} catch {
case _: NoSuchElementException =>
None
case _: TestFailedDueToTimeoutException =>
None
}
/**
* Waits for events and calls their callbacks independently of the order in which the events arrive. It receives a
* map of EventId -> Callback, e.g.:
* Map("deployment_failed" -> (_.id == deploymentId), "deployment_success" -> (_.id == rollbackId)),
* checks every event for its existence in the map and, if found, calls its callback method. If successful, the entry
* is removed from the map. Returns when the map is empty.
*/
def waitForEventsWith(
description: String,
eventsMap: Map[String, CallbackEvent => Boolean],
maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis) = {
val waitingFor = mutable.Map(eventsMap.toSeq: _*)
waitForEventMatching(description, maxWait) { event =>
if (waitingFor.get(event.eventType).fold(false)(fn => fn(event))) {
waitingFor -= event.eventType
}
waitingFor.isEmpty
}
}
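// Illustrative only (both ids are placeholders from earlier requests): wait for a
// deployment to fail and its rollback to succeed, in whichever order the two
// events arrive.
//
//   waitForEventsWith(
//     "deployment failed and rolled back",
//     Map[String, CallbackEvent => Boolean](
//       "deployment_failed" -> (_.id == deploymentId),
//       "deployment_success" -> (_.id == rollbackId)))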
/**
* Waits for ANY (and only one) of the given events. It receives a map of EventId -> Callback, e.g.:
* Map("deployment_failed" -> (_.id == deploymentId), "deployment_success" -> (_.id == rollbackId)),
* and checks every incoming event for its existence in the map and, if found, calls its callback method.
* Returns when a matching event is found and its callback returns true.
*/
def waitForAnyEventWith(
description: String,
eventsMap: Map[String, CallbackEvent => Boolean],
maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis) = {
val waitingForAny = mutable.Map(eventsMap.toSeq: _*)
waitForEventMatching(description, maxWait) { event =>
waitingForAny.get(event.eventType).fold(false)(fn => fn(event))
}
}
def waitForEventMatching(
description: String,
maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis)(fn: CallbackEvent => Boolean): CallbackEvent = {
val deadline = maxWait.fromNow
@tailrec
def iter(): CallbackEvent = {
nextEvent(deadline) match {
case Some(ITConnected) =>
throw new MarathonTest.UnexpectedConnect
case Some(event: ITEvent) =>
val cbEvent = CallbackEvent(event)
if (fn(cbEvent)) {
cbEvent
} else {
logger.info(s"Event $event did not match criteria skipping to next event")
iter()
}
case None =>
throw new RuntimeException(s"No events matched <$description>")
}
}
iter()
}
/**
* Blocks until a single connected event is consumed. Discards any events up to that point.
*
* Not reasoning about SSE connection state will lead to flaky tests. If a master is killed, you should wait for the
* SSE stream to reconnect before doing anything else, or you could miss events.
*/
def waitForSSEConnect(maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): Unit = {
val deadline = maxWait.fromNow
@tailrec
def iter(): Unit = {
nextEvent(deadline) match {
case Some(event: ITEvent) =>
logger.info(s"Event ${event} was not a connected event; skipping")
iter()
case Some(ITConnected) =>
logger.info("ITConnected event consumed")
case None =>
throw new RuntimeException("No connected events")
}
}
iter()
}
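// Sketch of the intended usage pattern (assumes a hypothetical killLeader() helper,
// which this file does not define): after killing the leading Marathon, wait for
// the SSE stream to reconnect before asserting on further events, so that none
// are silently dropped.
//
//   killLeader()
//   waitForSSEConnect()
//   waitForEvent("deployment_success")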
/**
* Wait for the events of the given kinds (=types).
*/
def waitForEvents(kinds: String*)(maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): Map[String, Seq[CallbackEvent]] = {
val deadline = maxWait.fromNow
/** Receive the events for the given kinds (duplicates allowed) in any order. */
val receivedEventsForKinds: Seq[CallbackEvent] = {
var eventsToWaitFor = kinds
val receivedEvents = Vector.newBuilder[CallbackEvent]
while (eventsToWaitFor.nonEmpty) {
val event = waitForEventMatching(s"event $eventsToWaitFor to arrive", deadline.timeLeft) { event =>
eventsToWaitFor.contains(event.eventType)
}
receivedEvents += event
// Remove received event kind. Only remove one element for duplicates.
val kindIndex = eventsToWaitFor.indexWhere(_ == event.eventType)
assert(kindIndex >= 0)
eventsToWaitFor = eventsToWaitFor.patch(kindIndex, Nil, 1)
}
receivedEvents.result()
}
receivedEventsForKinds.groupBy(_.eventType)
}
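// Illustrative call (the event kinds are examples): the duplicated kind must be
// observed twice before the method returns; the result groups received events
// by type.
//
//   val received = waitForEvents(
//     "status_update_event", "status_update_event", "deployment_success")()
//   assert(received("status_update_event").size == 2)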
def waitForDeployment(change: RestResult[_], maxWait: FiniteDuration = patienceConfig.timeout.toMillis.millis): CallbackEvent = {
require(change.success, s"Deployment request has not been successful. httpCode=${change.code} body=${change.entityString}")
val deploymentId = change.originalResponse.headers.find(_.name == RestResource.DeploymentHeader).getOrElse(throw new IllegalArgumentException("No deployment id found in Http Header"))
waitForDeploymentId(deploymentId.value, maxWait)
}
def waitForPod(podId: PathId): PodStatus = {
eventually {
Try(marathon.status(podId)).map(_.value).toOption.filter(_.status == PodState.Stable).get
}
}
protected[setup] def teardown(): Unit = {
Try {
val frameworkId = marathon.info.entityJson.as[JsObject].value("frameworkId").as[String]
mesos.teardown(frameworkId).futureValue
}
Try(healthEndpoint.unbind().futureValue)
}
/**
* Connects repeatedly to the Marathon SSE endpoint until cancelled.
* Yields each event in order.
*/
def startEventSubscriber(): Cancellable = {
@volatile var cancelled = false
def iter(): Unit = {
import akka.stream.scaladsl.Source
logger.info("SSEStream: Connecting")
Source.fromFuture(leadingMarathon)
.mapAsync(1) { leader =>
async {
logger.info(s"SSEStream: Acquiring connection to ${leader.url}")
val stream = await(leader.client.events())
logger.info(s"SSEStream: Connection acquired to ${leader.url}")
/* A potentially impossible edge case exists in which we query the leader, and then before we get a connection
* to that instance, it restarts and is no longer a leader.
*
* By checking the leader again once obtaining a connection to the SSE event stream, we have conclusive proof
* that we are consuming from the current leader, and we keep our connected events as deterministic as
* possible. */
val leaderAfterConnection = await(leadingMarathon)
logger.info(s"SSEStream: ${leader.url} is the leader")
if (leader != leaderAfterConnection) {
stream.runWith(Sink.cancelled)
throw new RuntimeException("Leader status changed since first connecting to stream")
} else {
stream
}
}
}
.flatMapConcat { stream =>
// We prepend the ITConnected event here in order to avoid emitting an ITConnected event on failed connections
stream.prepend(Source.single(ITConnected))
}
.runForeach { e: ITSSEEvent =>
e match {
case ITConnected =>
logger.info(s"SSEStream: Connected")
case event: ITEvent =>
logger.info(s"SSEStream: Received callback event: ${event.eventType} with props ${event.info}")
}
events.offer(e)
}
.onComplete {
case result =>
if (!cancelled) {
logger.info(s"SSEStream: Leader event stream was closed reason: ${result}")
logger.info("Reconnecting")
/* There is a small window between Jetty hanging up the event stream, and Jetty not accepting and
* responding to new requests. In the tests, under heavy load, retrying within 15 milliseconds is enough
* to hit this window.
*
* 10 times the interval would probably suffice. The timeout is way more time than we need. Half the timeout seems
* like an okay compromise.
*/
scheduler.scheduleOnce(patienceConfig.timeout / 2) { iter() }
}
}
}
iter()
new Cancellable {
override def cancel(): Boolean = {
cancelled = true
true
}
override def isCancelled: Boolean = cancelled
}
}
}
object MarathonTest extends StrictLogging {
class UnexpectedConnect extends Exception("Received an unexpected SSE event stream Connection event. This is " +
"considered an exception because not thinking about re-connection events properly can lead to race conditions in " +
"the tests. You should call waitForSSEConnect() after killing a Marathon leader to ensure no events are dropped.")
}
/**
* Fixture that can be used for a single test case.
*/
trait MarathonFixture extends AkkaUnitTestLike with MesosClusterTest with ZookeeperServerTest {
protected def logger: Logger
def withMarathon[T](suiteName: String, marathonArgs: Map[String, String] = Map.empty)(f: (LocalMarathon, MarathonTest) => T): T = {
val marathonServer = LocalMarathon(autoStart = false, suiteName = suiteName, masterUrl = mesosMasterUrl,
zkUrl = s"zk://${zkServer.connectUri}/marathon-$suiteName", conf = marathonArgs)
marathonServer.start().futureValue
val marathonTest = new MarathonTest {
override protected val logger: Logger = MarathonFixture.this.logger
override def marathonUrl: String = s"http://localhost:${marathonServer.httpPort}"
override def marathon: MarathonFacade = marathonServer.client
override def mesos: MesosFacade = MarathonFixture.this.mesos
override val testBasePath: PathId = PathId("/")
override implicit val system: ActorSystem = MarathonFixture.this.system
override implicit val mat: Materializer = MarathonFixture.this.mat
override implicit val ctx: ExecutionContext = MarathonFixture.this.ctx
override implicit val scheduler: Scheduler = MarathonFixture.this.scheduler
override val suiteName: String = MarathonFixture.this.suiteName
override implicit def patienceConfig: PatienceConfig = PatienceConfig(MarathonFixture.this.patienceConfig.timeout, MarathonFixture.this.patienceConfig.interval)
override def leadingMarathon = Future.successful(marathonServer)
}
val sseStream = marathonTest.startEventSubscriber()
try {
marathonTest.healthEndpoint
marathonTest.waitForSSEConnect()
f(marathonServer, marathonTest)
} finally {
sseStream.cancel()
if (marathonServer.isRunning()) marathonTest.cleanUp()
marathonTest.teardown()
marathonServer.stop()
}
}
}
object MarathonFixture extends MarathonFixture
/**
* base trait that spins up/tears down a marathon and has all of the original tooling from
* SingleMarathonIntegrationTest.
*/
trait MarathonSuite extends Suite with StrictLogging with ScalaFutures with BeforeAndAfterAll with Eventually with MarathonTest {
abstract override def afterAll(): Unit = {
teardown()
super.afterAll()
}
}
/**
* Base trait that starts a local marathon but doesn't have mesos/zookeeper yet
*/
trait LocalMarathonTest extends MarathonTest with ScalaFutures
with AkkaUnitTestLike with MesosTest with ZookeeperServerTest {
def marathonArgs: Map[String, String] = Map.empty
lazy val marathonServer = LocalMarathon(autoStart = false, suiteName = suiteName, masterUrl = mesosMasterUrl,
zkUrl = s"zk://${zkServer.connectUri}/marathon",
conf = marathonArgs)
lazy val marathonUrl = s"http://localhost:${marathonServer.httpPort}"
val testBasePath: PathId = PathId("/")
lazy val marathon = marathonServer.client
lazy val appMock: AppMockFacade = new AppMockFacade()
/**
* Return the current leading Marathon
* Expected to retry for a significant period of time until succeeds
*/
override def leadingMarathon: Future[LocalMarathon] =
Future.successful(marathonServer)
@volatile private var sseStream: Option[Cancellable] = None
abstract override def beforeAll(): Unit = {
super.beforeAll()
marathonServer.start().futureValue
sseStream = Some(startEventSubscriber())
waitForSSEConnect()
}
abstract override def afterAll(): Unit = {
sseStream.foreach(_.cancel)
teardown()
Try(marathonServer.close())
super.afterAll()
}
}
/**
* trait that has marathon, zk, and a mesos ready to go
*/
trait EmbeddedMarathonTest extends Suite with StrictLogging with ZookeeperServerTest with MesosClusterTest with LocalMarathonTest {
/* disable failover timeout to assist with cleanup ops; terminated marathons are immediately removed from mesos's
* list of frameworks
*
* Until https://issues.apache.org/jira/browse/MESOS-8171 is resolved, we cannot set this value to 0.
*/
override def marathonArgs: Map[String, String] = Map("failover_timeout" -> "1")
}
/**
* Trait that has a Marathon cluster, zk, and Mesos via mesos-local ready to go.
*
* It provides multiple Marathon instances. This allows e.g. leadership rotation.
*/
trait MarathonClusterTest extends Suite with StrictLogging with ZookeeperServerTest with MesosClusterTest with LocalMarathonTest {
val numAdditionalMarathons = 2
lazy val additionalMarathons = 0.until(numAdditionalMarathons).map { _ =>
LocalMarathon(autoStart = false, suiteName = suiteName, masterUrl = mesosMasterUrl,
zkUrl = s"zk://${zkServer.connectUri}/marathon",
conf = marathonArgs)
}
lazy val marathonFacades = marathon +: additionalMarathons.map(_.client)
lazy val allMarathonServers = marathonServer +: additionalMarathons
override def leadingMarathon: Future[LocalMarathon] = {
val leader = Retry("querying leader", maxAttempts = 50, maxDelay = 1.second, maxDuration = patienceConfig.timeout) {
Future.firstCompletedOf(marathonFacades.map(_.leaderAsync()))
}
leader.map { leader =>
allMarathonServers.find { _.httpPort == leader.value.port }.head
}
}
override def beforeAll(): Unit = {
super.beforeAll()
Future.sequence(additionalMarathons.map(_.start())).futureValue
}
override def afterAll(): Unit = {
Try(additionalMarathons.foreach(_.close()))
super.afterAll()
}
override def cleanUp(): Unit = {
Future.sequence(marathonServer.start() +: additionalMarathons.map(_.start())).futureValue
super.cleanUp()
}
}
|
janisz/marathon
|
src/test/scala/mesosphere/marathon/integration/setup/MarathonTest.scala
|
Scala
|
apache-2.0
| 35,193
|
package eventstore
package core
package operations
import OperationError._
import Inspection.Decision._
private[eventstore] final case class TransactionWriteInspection(out: TransactionWrite)
extends ErrorInspection[TransactionWriteCompleted, OperationError] {
def decision(error: OperationError) = {
error match {
case PrepareTimeout => Retry
case CommitTimeout => Retry
case ForwardTimeout => Retry
case WrongExpectedVersion => Unexpected
case StreamDeleted => Unexpected
case InvalidTransaction => Unexpected
case AccessDenied => Fail(AccessDeniedException("Write access denied"))
}
}
}
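/*
 * Illustrative summary, not part of the original file: the decision table above
 * retries transient timeouts (the transaction write may still complete on the
 * server), treats version/stream/transaction errors as Unexpected protocol
 * misuse, and fails AccessDenied with an AccessDeniedException.
 */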
|
EventStore/EventStore.JVM
|
core/src/main/scala/eventstore/core/operations/TransactionWriteInspection.scala
|
Scala
|
bsd-3-clause
| 686
|
import controllers.br.CustomQueries
import formatters.AttendanceLogFormatter
import models.AttendanceLog
import org.joda.time.DateTime
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.{GlobalSettings, Application, Logger}
import play.api.http.HeaderNames
import play.api.libs.json.{JsString, Json}
import play.api.test._
import play.api.test.Helpers._
import formatters.UserFormatter.JsonUserFormatter
import utils.{CustomQueriesGenerator, NamedParameterHelper}
import scala.concurrent.Future
import scala.util.Random
/**
* Add your spec here.
* You can mock out a whole application including requests, plugins etc.
* For more information, consult the wiki.
*/
@RunWith(classOf[JUnitRunner])
class ApplicationSpec extends PlaySpecification {
var access_token = "Bearer "
"Application" should {
"Authorization" in new WithApplication {
val result = route(FakeRequest(POST, "/oauth2/access_token").withFormUrlEncodedBody(("grant_type", "password"),
("client_id", "test_client_id"), ("client_secret", "test_client_secret"), ("username", "test_user"), ("password", "test_password"))).get
access_token += (contentAsJson(result) \ "access_token").as[String]
assert((contentAsJson(result) \ "token_type").as[String] == "Bearer")
}
"User validation" in new WithApplication {
val json = Json.obj(
"pin" -> JsString("111111111"),
"pwd" -> JsString("admin"),
"fp" -> JsString(""),
"card" -> JsString(""),
"face" -> JsString("")
)
val req = FakeRequest(POST,"/limbo/br/employee/validate").withBody(json).
withHeaders(HeaderNames.AUTHORIZATION->access_token,HeaderNames.CONTENT_TYPE -> "application/json")
val result = route(req).get
status(result) mustEqual OK
contentType(result) must beSome("application/json")
(contentAsJson(result) \ "first_name").as[String] must_== "Administrator"
}
"User details" in new WithApplication {
val req = FakeRequest(GET,"/limbo/br/employee/1").
withHeaders(HeaderNames.AUTHORIZATION->access_token)
val result = route(req).get
status(result) mustEqual OK
contentType(result) must beSome("application/json")
(contentAsJson(result) \ "first_name").as[String] must_== "Administrator"
}
// TODO: After the test, delete the added data!
"Attendance Log insertion" in new WithApplication{
val req = FakeRequest(POST,"/limbo/br/attendancelog").withBody(AttendanceLogFormatter.JsonAttendanceLogFormatter.writes(
new AttendanceLog(Random.nextLong(),1,1,3,DateTime.now().getMillis,1,"Work Time",0,"Alper's ZPAD"))).
withHeaders(HeaderNames.AUTHORIZATION->access_token)
val result = route(req).get
status(result) mustEqual OK
contentAsString(result) must_== "AttendanceLog added to database"
}
// TODO: Fix broken queries!
/*
"Generic queries without parameters" in new WithApplication {
CustomQueriesGenerator.queryMap.foreach {
case (k,v) =>
if(!NamedParameterHelper.isContainParameter(v)){
Logger.info("Query:"+k)
val result = route(FakeRequest(GET,"/br/query/"+k).
withHeaders(HeaderNames.AUTHORIZATION->access_token)).get
contentType(result) must beSome("application/json")
contentAsString(result) must not contain("errorCode")
}
}
} */
}
}
|
ZKTecoEu/ZKRestApi
|
ZKRestServer/test/ApplicationSpec.scala
|
Scala
|
mit
| 3,498
|
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail.internal
import cats.effect.Async
import monix.catnap.ConsumerF
import monix.tail.Iterant
import monix.tail.Iterant.{haltS, suspendS, Next, NextBatch}
import monix.tail.batches.Batch
private[tail] object IterantFromConsumer {
/**
* Implementation for [[Iterant.fromConsumer]].
*/
def apply[F[_], A](consumer: ConsumerF[F, Option[Throwable], A], maxBatchSize: Int)(
implicit F: Async[F]): Iterant[F, A] = {
suspendS(
if (maxBatchSize > 1)
loopMany(consumer, maxBatchSize)
else
loopOne(consumer)
)
}
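/* A minimal, hedged usage sketch (not part of this file); the maxBatchSize
 * parameter of the public Iterant.fromConsumer is assumed from the internal
 * signature above.
 *
 * {{{
 *   import cats.effect.IO
 *   import monix.catnap.ConsumerF
 *
 *   def drain(consumer: ConsumerF[IO, Option[Throwable], Int]): IO[List[Int]] =
 *     Iterant.fromConsumer(consumer, maxBatchSize = 16).toListL
 * }}}
 */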
private def loopOne[F[_], A](consumer: ConsumerF[F, Option[Throwable], A])(implicit F: Async[F]): F[Iterant[F, A]] = {
F.map(consumer.pull) {
case Left(e) => haltS(e)
case Right(a) => Next(a, loopOne(consumer))
}
}
private def loopMany[F[_], A](consumer: ConsumerF[F, Option[Throwable], A], maxBatchSize: Int)(
implicit F: Async[F]): F[Iterant[F, A]] = {
F.map(consumer.pullMany(1, maxBatchSize)) {
case Left(e) => haltS(e)
case Right(seq) =>
NextBatch(Batch.fromSeq(seq), loopMany(consumer, maxBatchSize))
}
}
}
|
alexandru/monifu
|
monix-tail/shared/src/main/scala/monix/tail/internal/IterantFromConsumer.scala
|
Scala
|
apache-2.0
| 1,826
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.container
import java.io.File
import java.net.{URL, UnknownHostException}
import java.nio.file.Path
import java.util
import java.util.Base64
import java.util.concurrent.{ScheduledExecutorService, ExecutorService, Executors, TimeUnit}
import com.google.common.util.concurrent.ThreadFactoryBuilder
import org.apache.samza.checkpoint.{CheckpointListener, CheckpointManagerFactory, OffsetManager, OffsetManagerMetrics}
import org.apache.samza.config.JobConfig.Config2Job
import org.apache.samza.config.MetricsConfig.Config2Metrics
import org.apache.samza.config.SerializerConfig.Config2Serializer
import org.apache.samza.config.StorageConfig.Config2Storage
import org.apache.samza.config.StreamConfig.Config2Stream
import org.apache.samza.config.SystemConfig.Config2System
import org.apache.samza.config.TaskConfig.Config2Task
import org.apache.samza.config._
import org.apache.samza.container.disk.DiskSpaceMonitor.Listener
import org.apache.samza.container.disk.{DiskQuotaPolicyFactory, DiskSpaceMonitor, NoThrottlingDiskQuotaPolicyFactory, PollingScanDiskSpaceMonitor}
import org.apache.samza.container.host.{StatisticsMonitorImpl, SystemMemoryStatistics, SystemStatisticsMonitor}
import org.apache.samza.coordinator.stream.{CoordinatorStreamManager, CoordinatorStreamSystemProducer}
import org.apache.samza.job.model.JobModel
import org.apache.samza.metrics.{JmxServer, JvmMetrics, MetricsRegistryMap, MetricsReporter}
import org.apache.samza.serializers._
import org.apache.samza.serializers.model.SamzaObjectMapper
import org.apache.samza.storage.{StorageEngineFactory, TaskStorageManager}
import org.apache.samza.system._
import org.apache.samza.system.chooser.{DefaultChooser, MessageChooserFactory, RoundRobinChooserFactory}
import org.apache.samza.table.TableManager
import org.apache.samza.task._
import org.apache.samza.util.Util.asScalaClock
import org.apache.samza.util._
import org.apache.samza.{SamzaContainerStatus, SamzaException}
import scala.collection.JavaConverters._
object SamzaContainer extends Logging {
val DEFAULT_READ_JOBMODEL_DELAY_MS = 100
val DISK_POLL_INTERVAL_KEY = "container.disk.poll.interval.ms"
/**
* Fetches config, task:SSP assignments, and task:changelog partition
* assignments, and returns objects to be used for SamzaContainer's
* constructor.
*/
def readJobModel(url: String, initialDelayMs: Int = scala.util.Random.nextInt(DEFAULT_READ_JOBMODEL_DELAY_MS) + 1) = {
info("Fetching configuration from: %s" format url)
SamzaObjectMapper
.getObjectMapper
.readValue(
Util.read(
url = new URL(url),
retryBackoff = new ExponentialSleepStrategy(initialDelayMs = initialDelayMs)),
classOf[JobModel])
}
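/* Hedged example, not part of the original source (the URL is a placeholder for
 * the coordinator endpoint that is normally supplied via the environment):
 *
 *   val jobModel = SamzaContainer.readJobModel("http://localhost:5555/")
 *   val containerModel = jobModel.getContainers.get("0")
 */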
def apply(
containerId: String,
jobModel: JobModel,
config: Config,
customReporters: Map[String, MetricsReporter] = Map[String, MetricsReporter](),
taskFactory: Object) = {
val containerModel = jobModel.getContainers.get(containerId)
val containerName = "samza-container-%s" format containerId
val maxChangeLogStreamPartitions = jobModel.maxChangeLogStreamPartitions
var coordinatorStreamManager: CoordinatorStreamManager = null
var localityManager: LocalityManager = null
if (new ClusterManagerConfig(config).getHostAffinityEnabled()) {
val registryMap = new MetricsRegistryMap(containerName)
val coordinatorStreamSystemProducer = new CoordinatorStreamSystemProducer(config, new SamzaContainerMetrics(containerName, registryMap).registry)
coordinatorStreamManager = new CoordinatorStreamManager(coordinatorStreamSystemProducer)
localityManager = new LocalityManager(coordinatorStreamManager)
}
val containerPID = Util.getContainerPID
info("Setting up Samza container: %s" format containerName)
startupLog("Samza container PID: %s" format containerPID)
println("Container PID: %s" format containerPID)
startupLog("Using configuration: %s" format config)
startupLog("Using container model: %s" format containerModel)
val registry = new MetricsRegistryMap(containerName)
val samzaContainerMetrics = new SamzaContainerMetrics(containerName, registry)
val systemProducersMetrics = new SystemProducersMetrics(registry)
val systemConsumersMetrics = new SystemConsumersMetrics(registry)
val offsetManagerMetrics = new OffsetManagerMetrics(registry)
val clock = if (config.getMetricsTimerEnabled) {
new HighResolutionClock {
override def nanoTime(): Long = System.nanoTime()
}
} else {
new HighResolutionClock {
override def nanoTime(): Long = 0L
}
}
val inputSystemStreamPartitions = containerModel
.getTasks
.values
.asScala
.flatMap(_.getSystemStreamPartitions.asScala)
.toSet
val inputSystemStreams = inputSystemStreamPartitions
.map(_.getSystemStream)
.toSet
val inputSystems = inputSystemStreams
.map(_.getSystem)
.toSet
val systemNames = config.getSystemNames
info("Got system names: %s" format systemNames)
val serdeStreams = systemNames.foldLeft(Set[SystemStream]())(_ ++ config.getSerdeStreams(_))
info("Got serde streams: %s" format serdeStreams)
val systemFactories = systemNames.map(systemName => {
val systemFactoryClassName = config
.getSystemFactory(systemName)
.getOrElse(throw new SamzaException("A stream uses system %s, which is missing from the configuration." format systemName))
(systemName, Util.getObj[SystemFactory](systemFactoryClassName))
}).toMap
info("Got system factories: %s" format systemFactories.keys)
val systemAdmins = new SystemAdmins(config)
info("Got system admins: %s" format systemAdmins.getSystemAdminsMap().keySet())
val streamMetadataCache = new StreamMetadataCache(systemAdmins)
val inputStreamMetadata = streamMetadataCache.getStreamMetadata(inputSystemStreams)
info("Got input stream metadata: %s" format inputStreamMetadata)
val consumers = inputSystems
.map(systemName => {
val systemFactory = systemFactories(systemName)
try {
(systemName, systemFactory.getConsumer(systemName, config, samzaContainerMetrics.registry))
} catch {
case e: Exception =>
error("Failed to create a consumer for %s, so skipping." format systemName, e)
(systemName, null)
}
})
.filter(_._2 != null)
.toMap
info("Got system consumers: %s" format consumers.keys)
val producers = systemFactories
.map {
case (systemName, systemFactory) =>
try {
(systemName, systemFactory.getProducer(systemName, config, samzaContainerMetrics.registry))
} catch {
case e: Exception =>
error("Failed to create a producer for %s, so skipping." format systemName, e)
(systemName, null)
}
}
.filter(_._2 != null)
info("Got system producers: %s" format producers.keys)
val serdesFromFactories = config.getSerdeNames.map(serdeName => {
val serdeClassName = config
.getSerdeClass(serdeName)
.getOrElse(Util.defaultSerdeFactoryFromSerdeName(serdeName))
val serde = Util.getObj[SerdeFactory[Object]](serdeClassName)
.getSerde(serdeName, config)
(serdeName, serde)
}).toMap
info("Got serdes from factories: %s" format serdesFromFactories.keys)
val serializableSerde = new SerializableSerde[Serde[Object]]()
val serdesFromSerializedInstances = config.subset(SerializerConfig.SERIALIZER_PREFIX format "").asScala
.filter { case (key, value) => key.endsWith(SerializerConfig.SERIALIZED_INSTANCE_SUFFIX) }
.flatMap { case (key, value) =>
val serdeName = key.replace(SerializerConfig.SERIALIZED_INSTANCE_SUFFIX, "")
debug(s"Trying to deserialize serde instance for $serdeName")
try {
val bytes = Base64.getDecoder.decode(value)
val serdeInstance = serializableSerde.fromBytes(bytes)
debug(s"Returning serialized instance for $serdeName")
Some((serdeName, serdeInstance))
} catch {
case e: Exception =>
warn(s"Ignoring invalid serialized instance for $serdeName: $value", e)
None
}
}
info("Got serdes from serialized instances: %s" format serdesFromSerializedInstances.keys)
val serdes = serdesFromFactories ++ serdesFromSerializedInstances
/*
* A Helper function to build a Map[String, Serde] (systemName -> Serde) for systems defined
* in the config. This is useful to build both key and message serde maps.
*/
val buildSystemSerdeMap = (getSerdeName: (String) => Option[String]) => {
systemNames
.filter(systemName => getSerdeName(systemName).isDefined)
.flatMap(systemName => {
val serdeName = getSerdeName(systemName).get
val serde = serdes.getOrElse(serdeName,
throw new SamzaException("buildSystemSerdeMap: No class defined for serde: %s." format serdeName))
// this shouldn't happen since system level serdes can't be set programmatically using the high level
// API, but adding this for safety.
Option(serde)
.filter(!_.isInstanceOf[NoOpSerde[Any]])
.map(serde => (systemName, serde))
}).toMap
}
/*
* A Helper function to build a Map[SystemStream, Serde] for streams defined in the config.
* This is useful to build both key and message serde maps.
*/
val buildSystemStreamSerdeMap = (getSerdeName: (SystemStream) => Option[String]) => {
(serdeStreams ++ inputSystemStreamPartitions)
.filter(systemStream => getSerdeName(systemStream).isDefined)
.flatMap(systemStream => {
val serdeName = getSerdeName(systemStream).get
val serde = serdes.getOrElse(serdeName,
throw new SamzaException("buildSystemStreamSerdeMap: No serde found for name: %s." format serdeName))
// respect explicitly set no-op serdes in high level API
Option(serde)
.filter(!_.isInstanceOf[NoOpSerde[Any]])
.map(serde => (systemStream, serde))
}).toMap
}
val systemKeySerdes = buildSystemSerdeMap(systemName => config.getSystemKeySerde(systemName))
debug("Got system key serdes: %s" format systemKeySerdes)
val systemMessageSerdes = buildSystemSerdeMap(systemName => config.getSystemMsgSerde(systemName))
debug("Got system message serdes: %s" format systemMessageSerdes)
val systemStreamKeySerdes = buildSystemStreamSerdeMap(systemStream => config.getStreamKeySerde(systemStream))
debug("Got system stream key serdes: %s" format systemStreamKeySerdes)
val systemStreamMessageSerdes = buildSystemStreamSerdeMap(systemStream => config.getStreamMsgSerde(systemStream))
debug("Got system stream message serdes: %s" format systemStreamMessageSerdes)
val changeLogSystemStreams = config
.getStoreNames
.filter(config.getChangelogStream(_).isDefined)
.map(name => (name, config.getChangelogStream(name).get)).toMap
.mapValues(Util.getSystemStreamFromNames(_))
info("Got change log system streams: %s" format changeLogSystemStreams)
val intermediateStreams = config
.getStreamIds
.filter(config.getIsIntermediate(_))
.toList
info("Got intermediate streams: %s" format intermediateStreams)
val controlMessageKeySerdes = intermediateStreams
.flatMap(streamId => {
val systemStream = config.streamIdToSystemStream(streamId)
systemStreamKeySerdes.get(systemStream)
.orElse(systemKeySerdes.get(systemStream.getSystem))
.map(serde => (systemStream, new StringSerde("UTF-8")))
}).toMap
val intermediateStreamMessageSerdes = intermediateStreams
.flatMap(streamId => {
val systemStream = config.streamIdToSystemStream(streamId)
systemStreamMessageSerdes.get(systemStream)
.orElse(systemMessageSerdes.get(systemStream.getSystem))
.map(serde => (systemStream, new IntermediateMessageSerde(serde)))
}).toMap
val serdeManager = new SerdeManager(
serdes = serdes,
systemKeySerdes = systemKeySerdes,
systemMessageSerdes = systemMessageSerdes,
systemStreamKeySerdes = systemStreamKeySerdes,
systemStreamMessageSerdes = systemStreamMessageSerdes,
changeLogSystemStreams = changeLogSystemStreams.values.toSet,
controlMessageKeySerdes = controlMessageKeySerdes,
intermediateMessageSerdes = intermediateStreamMessageSerdes)
info("Setting up JVM metrics.")
val jvm = new JvmMetrics(samzaContainerMetrics.registry)
info("Setting up message chooser.")
val chooserFactoryClassName = config.getMessageChooserClass.getOrElse(classOf[RoundRobinChooserFactory].getName)
val chooserFactory = Util.getObj[MessageChooserFactory](chooserFactoryClassName)
val chooser = DefaultChooser(inputStreamMetadata, chooserFactory, config, samzaContainerMetrics.registry, systemAdmins)
info("Setting up metrics reporters.")
val reporters = MetricsReporterLoader.getMetricsReporters(config, containerName).asScala.toMap ++ customReporters
info("Got metrics reporters: %s" format reporters.keys)
val securityManager = config.getSecurityManagerFactory match {
case Some(securityManagerFactoryClassName) =>
Util
.getObj[SecurityManagerFactory](securityManagerFactoryClassName)
.getSecurityManager(config)
case _ => null
}
info("Got security manager: %s" format securityManager)
val checkpointManager = config.getCheckpointManagerFactory()
.filterNot(_.isEmpty)
.map(Util.getObj[CheckpointManagerFactory](_).getCheckpointManager(config, samzaContainerMetrics.registry))
.orNull
info("Got checkpoint manager: %s" format checkpointManager)
// create a map of consumers with callbacks to pass to the OffsetManager
val checkpointListeners = consumers.filter(_._2.isInstanceOf[CheckpointListener])
.map { case (system, consumer) => (system, consumer.asInstanceOf[CheckpointListener])}
info("Got checkpointListeners : %s" format checkpointListeners)
val offsetManager = OffsetManager(inputStreamMetadata, config, checkpointManager, systemAdmins, checkpointListeners, offsetManagerMetrics)
info("Got offset manager: %s" format offsetManager)
val dropDeserializationError = config.getDropDeserialization match {
case Some(dropError) => dropError.toBoolean
case _ => false
}
val dropSerializationError = config.getDropSerialization match {
case Some(dropError) => dropError.toBoolean
case _ => false
}
val pollIntervalMs = config
.getPollIntervalMs
.getOrElse(SystemConsumers.DEFAULT_POLL_INTERVAL_MS.toString)
.toInt
val consumerMultiplexer = new SystemConsumers(
chooser = chooser,
consumers = consumers,
serdeManager = serdeManager,
metrics = systemConsumersMetrics,
dropDeserializationError = dropDeserializationError,
pollIntervalMs = pollIntervalMs,
clock = clock)
val producerMultiplexer = new SystemProducers(
producers = producers,
serdeManager = serdeManager,
metrics = systemProducersMetrics,
dropSerializationError = dropSerializationError)
val storageEngineFactories = config
.getStoreNames
.map(storeName => {
val storageFactoryClassName = config
.getStorageFactoryClassName(storeName)
.getOrElse(throw new SamzaException("Missing storage factory for %s." format storeName))
(storeName, Util.getObj[StorageEngineFactory[Object, Object]](storageFactoryClassName))
}).toMap
info("Got storage engines: %s" format storageEngineFactories.keys)
val singleThreadMode = config.getSingleThreadMode
info("Got single thread mode: " + singleThreadMode)
val threadPoolSize = config.getThreadPoolSize
info("Got thread pool size: " + threadPoolSize)
val taskThreadPool = if (!singleThreadMode && threadPoolSize > 0) {
Executors.newFixedThreadPool(threadPoolSize,
new ThreadFactoryBuilder().setNameFormat("Samza Container Thread-%d").build())
} else {
null
}
val finalTaskFactory = TaskFactoryUtil.finalizeTaskFactory(
taskFactory,
singleThreadMode,
taskThreadPool)
// Wire up all task-instance-level (unshared) objects.
val taskNames = containerModel
.getTasks
.values
.asScala
.map(_.getTaskName)
.toSet
val containerContext = new SamzaContainerContext(containerId, config, taskNames.asJava, samzaContainerMetrics.registry)
// TODO: Decide whether this should be config-based. It's a bit awkward, since some of the
// directories are dynamic when used with YARN.
val defaultStoreBaseDir = new File(System.getProperty("user.dir"), "state")
info("Got default storage engine base directory: %s" format defaultStoreBaseDir)
val storeWatchPaths = new util.HashSet[Path]()
val timerExecutor = Executors.newSingleThreadScheduledExecutor
val taskInstances: Map[TaskName, TaskInstance] = containerModel.getTasks.values.asScala.map(taskModel => {
debug("Setting up task instance: %s" format taskModel)
val taskName = taskModel.getTaskName
val task = finalTaskFactory match {
case tf: AsyncStreamTaskFactory => tf.asInstanceOf[AsyncStreamTaskFactory].createInstance()
case tf: StreamTaskFactory => tf.asInstanceOf[StreamTaskFactory].createInstance()
}
val taskInstanceMetrics = new TaskInstanceMetrics("TaskName-%s" format taskName)
val collector = new TaskInstanceCollector(producerMultiplexer, taskInstanceMetrics)
val storeConsumers = changeLogSystemStreams
.map {
case (storeName, changeLogSystemStream) =>
val systemConsumer = systemFactories
.getOrElse(changeLogSystemStream.getSystem,
throw new SamzaException("Changelog system %s for store %s does not " +
"exist in the config." format (changeLogSystemStream, storeName)))
.getConsumer(changeLogSystemStream.getSystem, config, taskInstanceMetrics.registry)
samzaContainerMetrics.addStoreRestorationGauge(taskName, storeName)
(storeName, systemConsumer)
}
info("Got store consumers: %s" format storeConsumers)
var loggedStorageBaseDir: File = null
if(System.getenv(ShellCommandConfig.ENV_LOGGED_STORE_BASE_DIR) != null) {
val jobNameAndId = Util.getJobNameAndId(config)
loggedStorageBaseDir = new File(System.getenv(ShellCommandConfig.ENV_LOGGED_STORE_BASE_DIR)
+ File.separator + jobNameAndId._1 + "-" + jobNameAndId._2)
} else {
warn("No override was provided for logged store base directory. This disables local state re-use on " +
"application restart. If you want to enable this feature, set LOGGED_STORE_BASE_DIR as an environment " +
"variable in all machines running the Samza container")
loggedStorageBaseDir = defaultStoreBaseDir
}
info("Got base directory for logged data stores: %s" format loggedStorageBaseDir)
val taskStores = storageEngineFactories
.map {
case (storeName, storageEngineFactory) =>
val changeLogSystemStreamPartition = if (changeLogSystemStreams.contains(storeName)) {
new SystemStreamPartition(changeLogSystemStreams(storeName), taskModel.getChangelogPartition)
} else {
null
}
val keySerde = config.getStorageKeySerde(storeName) match {
case Some(keySerde) => serdes.getOrElse(keySerde,
throw new SamzaException("StorageKeySerde: No class defined for serde: %s." format keySerde))
case _ => null
}
val msgSerde = config.getStorageMsgSerde(storeName) match {
case Some(msgSerde) => serdes.getOrElse(msgSerde,
throw new SamzaException("StorageMsgSerde: No class defined for serde: %s." format msgSerde))
case _ => null
}
val storeDir = if (changeLogSystemStreamPartition != null) {
TaskStorageManager.getStorePartitionDir(loggedStorageBaseDir, storeName, taskName)
} else {
TaskStorageManager.getStorePartitionDir(defaultStoreBaseDir, storeName, taskName)
}
storeWatchPaths.add(storeDir.toPath)
val storageEngine = storageEngineFactory.getStorageEngine(
storeName,
storeDir,
keySerde,
msgSerde,
collector,
taskInstanceMetrics.registry,
changeLogSystemStreamPartition,
containerContext)
(storeName, storageEngine)
}
info("Got task stores: %s" format taskStores)
val storageManager = new TaskStorageManager(
taskName = taskName,
taskStores = taskStores,
storeConsumers = storeConsumers,
changeLogSystemStreams = changeLogSystemStreams,
maxChangeLogStreamPartitions,
streamMetadataCache = streamMetadataCache,
storeBaseDir = defaultStoreBaseDir,
loggedStoreBaseDir = loggedStorageBaseDir,
partition = taskModel.getChangelogPartition,
systemAdmins = systemAdmins,
new StorageConfig(config).getChangeLogDeleteRetentionsInMs,
new SystemClock)
val tableManager = new TableManager(config, serdes.asJava)
info("Got table manager")
val systemStreamPartitions = taskModel
.getSystemStreamPartitions
.asScala
.toSet
info("Retrieved SystemStreamPartitions " + systemStreamPartitions + " for " + taskName)
def createTaskInstance(task: Any): TaskInstance = new TaskInstance(
task = task,
taskName = taskName,
config = config,
metrics = taskInstanceMetrics,
systemAdmins = systemAdmins,
consumerMultiplexer = consumerMultiplexer,
collector = collector,
containerContext = containerContext,
offsetManager = offsetManager,
storageManager = storageManager,
tableManager = tableManager,
reporters = reporters,
systemStreamPartitions = systemStreamPartitions,
exceptionHandler = TaskInstanceExceptionHandler(taskInstanceMetrics, config),
jobModel = jobModel,
streamMetadataCache = streamMetadataCache,
timerExecutor = timerExecutor)
val taskInstance = createTaskInstance(task)
(taskName, taskInstance)
}).toMap
val maxThrottlingDelayMs = config.getLong("container.disk.quota.delay.max.ms", TimeUnit.SECONDS.toMillis(1))
val runLoop = RunLoopFactory.createRunLoop(
taskInstances,
consumerMultiplexer,
taskThreadPool,
maxThrottlingDelayMs,
samzaContainerMetrics,
config,
clock)
val memoryStatisticsMonitor : SystemStatisticsMonitor = new StatisticsMonitorImpl()
memoryStatisticsMonitor.registerListener(new SystemStatisticsMonitor.Listener {
override def onUpdate(sample: SystemMemoryStatistics): Unit = {
val physicalMemoryBytes : Long = sample.getPhysicalMemoryBytes
val physicalMemoryMb : Double = physicalMemoryBytes / (1024.0 * 1024.0)
logger.debug("Container physical memory utilization (mb): " + physicalMemoryMb)
samzaContainerMetrics.physicalMemoryMb.set(physicalMemoryMb)
}
})
val diskQuotaBytes = config.getLong("container.disk.quota.bytes", Long.MaxValue)
samzaContainerMetrics.diskQuotaBytes.set(diskQuotaBytes)
val diskQuotaPolicyFactoryString = config.get("container.disk.quota.policy.factory",
classOf[NoThrottlingDiskQuotaPolicyFactory].getName)
val diskQuotaPolicyFactory = Util.getObj[DiskQuotaPolicyFactory](diskQuotaPolicyFactoryString)
val diskQuotaPolicy = diskQuotaPolicyFactory.create(config)
var diskSpaceMonitor: DiskSpaceMonitor = null
val diskPollMillis = config.getInt(DISK_POLL_INTERVAL_KEY, 0)
if (diskPollMillis != 0) {
diskSpaceMonitor = new PollingScanDiskSpaceMonitor(storeWatchPaths, diskPollMillis)
diskSpaceMonitor.registerListener(new Listener {
override def onUpdate(diskUsageBytes: Long): Unit = {
val newWorkRate = diskQuotaPolicy.apply(1.0 - (diskUsageBytes.toDouble / diskQuotaBytes))
runLoop.asInstanceOf[Throttleable].setWorkFactor(newWorkRate)
samzaContainerMetrics.executorWorkFactor.set(runLoop.asInstanceOf[Throttleable].getWorkFactor)
samzaContainerMetrics.diskUsageBytes.set(diskUsageBytes)
}
})
info("Initialized disk space monitor watch paths to: %s" format storeWatchPaths)
} else {
info(s"Disk quotas disabled because polling interval is not set ($DISK_POLL_INTERVAL_KEY)")
}
info("Samza container setup complete.")
new SamzaContainer(
containerContext = containerContext,
taskInstances = taskInstances,
runLoop = runLoop,
systemAdmins = systemAdmins,
consumerMultiplexer = consumerMultiplexer,
producerMultiplexer = producerMultiplexer,
offsetManager = offsetManager,
coordinatorStreamManager = coordinatorStreamManager,
localityManager = localityManager,
securityManager = securityManager,
metrics = samzaContainerMetrics,
reporters = reporters,
jvm = jvm,
diskSpaceMonitor = diskSpaceMonitor,
hostStatisticsMonitor = memoryStatisticsMonitor,
taskThreadPool = taskThreadPool,
timerExecutor = timerExecutor)
}
}
class SamzaContainer(
containerContext: SamzaContainerContext,
taskInstances: Map[TaskName, TaskInstance],
runLoop: Runnable,
systemAdmins: SystemAdmins,
consumerMultiplexer: SystemConsumers,
producerMultiplexer: SystemProducers,
metrics: SamzaContainerMetrics,
diskSpaceMonitor: DiskSpaceMonitor = null,
hostStatisticsMonitor: SystemStatisticsMonitor = null,
offsetManager: OffsetManager = new OffsetManager,
coordinatorStreamManager: CoordinatorStreamManager = null,
localityManager: LocalityManager = null,
securityManager: SecurityManager = null,
reporters: Map[String, MetricsReporter] = Map(),
jvm: JvmMetrics = null,
taskThreadPool: ExecutorService = null,
timerExecutor: ScheduledExecutorService = Executors.newSingleThreadScheduledExecutor) extends Runnable with Logging {
val shutdownMs = containerContext.config.getShutdownMs.getOrElse(TaskConfigJava.DEFAULT_TASK_SHUTDOWN_MS)
var shutdownHookThread: Thread = null
var jmxServer: JmxServer = null
val isAutoCommitEnabled = containerContext.config.isAutoCommitEnabled
@volatile private var status = SamzaContainerStatus.NOT_STARTED
private var exceptionSeen: Throwable = null
private var paused: Boolean = false
private var containerListener: SamzaContainerListener = null
def getStatus(): SamzaContainerStatus = status
def getTaskInstances() = taskInstances
def setContainerListener(listener: SamzaContainerListener): Unit = {
containerListener = listener
}
def run {
try {
info("Starting container.")
val startTime = System.nanoTime()
status = SamzaContainerStatus.STARTING
jmxServer = new JmxServer()
startMetrics
startAdmins
startOffsetManager
startLocalityManager
startStores
startTableManager
startDiskSpaceMonitor
startHostStatisticsMonitor
startProducers
startTask
startConsumers
startSecurityManger
addShutdownHook
info("Entering run loop.")
status = SamzaContainerStatus.STARTED
if (containerListener != null) {
containerListener.onContainerStart()
}
metrics.containerStartupTime.update(System.nanoTime() - startTime)
runLoop.run
} catch {
case e: Throwable =>
if (status.equals(SamzaContainerStatus.STARTED)) {
error("Caught exception/error in run loop.", e)
} else {
error("Caught exception/error while initializing container.", e)
}
status = SamzaContainerStatus.FAILED
exceptionSeen = e
}
try {
info("Shutting down.")
removeShutdownHook
jmxServer.stop
shutdownConsumers
shutdownTask
shutdownTableManager
shutdownStores
shutdownDiskSpaceMonitor
shutdownHostStatisticsMonitor
shutdownProducers
shutdownLocalityManager
shutdownOffsetManager
shutdownMetrics
shutdownSecurityManger
shutdownAdmins
if (!status.equals(SamzaContainerStatus.FAILED)) {
status = SamzaContainerStatus.STOPPED
}
info("Shutdown complete.")
} catch {
case e: Throwable =>
error("Caught exception/error while shutting down container.", e)
if (exceptionSeen == null) {
exceptionSeen = e
}
status = SamzaContainerStatus.FAILED
}
status match {
case SamzaContainerStatus.STOPPED =>
if (containerListener != null) {
containerListener.onContainerStop(paused)
}
case SamzaContainerStatus.FAILED =>
if (containerListener != null) {
containerListener.onContainerFailed(exceptionSeen)
}
}
}
// TODO: We want to introduce a "PAUSED" state for SamzaContainer in the future so that StreamProcessor can pause and
// unpause the container when the jobmodel changes.
/**
* Marks the [[SamzaContainer]] as having been paused by the caller due to a change in the [[JobModel]] and then
* asynchronously shuts down this [[SamzaContainer]].
*/
def pause(): Unit = {
paused = true
shutdown()
}
/**
* <p>
* Asynchronously shuts down this [[SamzaContainer]]
* </p>
* <br>
* <b>Implementation</b>: Stops the [[RunLoop]], which will eventually transition the container from
* [[SamzaContainerStatus.STARTED]] to either [[SamzaContainerStatus.STOPPED]] or [[SamzaContainerStatus.FAILED]].
* Based on the final `status`, [[SamzaContainerListener#onContainerStop(boolean)]] or
* [[SamzaContainerListener#onContainerFailed(Throwable)]] will be invoked respectively.
*
* @throws IllegalContainerStateException Thrown when the container has already been stopped or has failed.
*/
def shutdown(): Unit = {
if (status == SamzaContainerStatus.STOPPED || status == SamzaContainerStatus.FAILED) {
throw new IllegalContainerStateException("Cannot shutdown a container with status " + status)
}
shutdownRunLoop()
}
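/* Illustrative caller-side wiring (an assumption about runner code, not part of
 * this file); the listener signatures are inferred from the calls in run() above:
 *
 *   container.setContainerListener(new SamzaContainerListener {
 *     override def onContainerStart(): Unit = println("container started")
 *     override def onContainerStop(pausedByJm: Boolean): Unit = println(s"stopped, paused=$pausedByJm")
 *     override def onContainerFailed(t: Throwable): Unit = t.printStackTrace()
 *   })
 *   new Thread(container).start()
 */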
// Shutdown Runloop
def shutdownRunLoop() = {
runLoop match {
case runLoop: RunLoop => runLoop.shutdown
case asyncRunLoop: AsyncRunLoop => asyncRunLoop.shutdown()
}
}
def startDiskSpaceMonitor: Unit = {
if (diskSpaceMonitor != null) {
info("Starting disk space monitor")
diskSpaceMonitor.start()
}
}
def startHostStatisticsMonitor: Unit = {
if (hostStatisticsMonitor != null) {
info("Starting host statistics monitor")
hostStatisticsMonitor.start()
}
}
def startMetrics {
info("Registering task instances with metrics.")
taskInstances.values.foreach(_.registerMetrics)
info("Starting JVM metrics.")
if (jvm != null) {
jvm.start
}
info("Starting metrics reporters.")
reporters.values.foreach(reporter => {
reporter.register(metrics.source, metrics.registry)
reporter.start
})
}
def startOffsetManager {
info("Registering task instances with offsets.")
taskInstances.values.foreach(_.registerOffsets)
info("Starting offset manager.")
offsetManager.start
}
def startLocalityManager {
if(localityManager != null) {
if(coordinatorStreamManager == null) {
// This should never happen.
throw new IllegalStateException("Cannot start LocalityManager without a CoordinatorStreamManager")
}
val containerName = "SamzaContainer-" + String.valueOf(containerContext.id)
info("Registering %s with the coordinator stream manager." format containerName)
coordinatorStreamManager.start
coordinatorStreamManager.register(containerName)
info("Writing container locality and JMX address to Coordinator Stream")
try {
val hostInet = Util.getLocalHost
val jmxUrl = if (jmxServer != null) jmxServer.getJmxUrl else ""
val jmxTunnelingUrl = if (jmxServer != null) jmxServer.getTunnelingJmxUrl else ""
localityManager.writeContainerToHostMapping(containerContext.id, hostInet.getHostName, jmxUrl, jmxTunnelingUrl)
} catch {
case uhe: UnknownHostException =>
warn("Received UnknownHostException when persisting locality info for container %s: " +
"%s" format (containerContext.id, uhe.getMessage)) //No-op
case unknownException: Throwable =>
warn("Received an exception when persisting locality info for container %s: " +
"%s" format (containerContext.id, unknownException.getMessage))
}
}
}
def startStores {
taskInstances.values.foreach(taskInstance => {
val startTime = System.currentTimeMillis()
info("Starting stores in task instance %s" format taskInstance.taskName)
taskInstance.startStores
// Measuring the time to restore the stores
val timeToRestore = System.currentTimeMillis() - startTime
val taskGauge = metrics.taskStoreRestorationMetrics.asScala.getOrElse(taskInstance.taskName, null)
if (taskGauge != null) {
taskGauge.set(timeToRestore)
}
})
}
def startTableManager: Unit = {
taskInstances.values.foreach(taskInstance => {
info("Starting table manager in task instance %s" format taskInstance.taskName)
taskInstance.startTableManager
})
}
def startTask {
info("Initializing stream tasks.")
taskInstances.values.foreach(_.initTask)
}
def startAdmins {
info("Starting admin multiplexer.")
systemAdmins.start
}
def startProducers {
info("Registering task instances with producers.")
taskInstances.values.foreach(_.registerProducers)
info("Starting producer multiplexer.")
producerMultiplexer.start
}
def startConsumers {
info("Registering task instances with consumers.")
taskInstances.values.foreach(_.registerConsumers)
info("Starting consumer multiplexer.")
consumerMultiplexer.start
}
def startSecurityManger {
if (securityManager != null) {
info("Starting security manager.")
securityManager.start
}
}
def addShutdownHook {
val runLoopThread = Thread.currentThread()
shutdownHookThread = new Thread("Samza Container Shutdown Hook Thread") {
override def run() = {
info("Shutting down, will wait up to %s ms." format shutdownMs)
shutdownRunLoop() //TODO: Pull out shutdown hook to LocalContainerRunner or SP
try {
runLoopThread.join(shutdownMs)
} catch {
case e: Throwable => // Ignore to avoid deadlock with uncaughtExceptionHandler. See SAMZA-1220
error("Did not shut down within %s ms, exiting." format shutdownMs, e)
}
if (!runLoopThread.isAlive) {
info("Shutdown complete")
} else {
error("Did not shut down within %s ms, exiting." format shutdownMs)
Util.logThreadDump("Thread dump from Samza Container Shutdown Hook.")
}
}
}
Runtime.getRuntime().addShutdownHook(shutdownHookThread)
}
def removeShutdownHook = {
try {
if (shutdownHookThread != null) {
Runtime.getRuntime.removeShutdownHook(shutdownHookThread)
}
} catch {
case e: IllegalStateException => {
// Thrown when then JVM is already shutting down, so safe to ignore.
}
}
}
def shutdownConsumers {
info("Shutting down consumer multiplexer.")
consumerMultiplexer.stop
}
def shutdownAdmins {
info("Shutting down admin multiplexer.")
systemAdmins.stop
}
def shutdownProducers {
info("Shutting down producer multiplexer.")
producerMultiplexer.stop
}
def shutdownTask {
info("Shutting down task instance stream tasks.")
if (taskThreadPool != null) {
info("Shutting down task thread pool")
try {
taskThreadPool.shutdown()
// awaitTermination returns false when the timeout elapses before termination, so only force shutdown then.
if (!taskThreadPool.awaitTermination(shutdownMs, TimeUnit.MILLISECONDS)) {
taskThreadPool.shutdownNow()
}
} catch {
case e: Exception => error(e.getMessage, e)
}
}
if (timerExecutor != null) {
info("Shutting down timer executor")
try {
timerExecutor.shutdown()
// As above, only force shutdown when the timeout elapses before termination.
if (!timerExecutor.awaitTermination(shutdownMs, TimeUnit.MILLISECONDS)) {
timerExecutor.shutdownNow()
}
} catch {
case e: Exception => error("Ignoring exception shutting down timer executor", e)
}
}
if (isAutoCommitEnabled) {
info("Committing offsets for all task instances")
taskInstances.values.foreach(_.commit)
}
taskInstances.values.foreach(_.shutdownTask)
}
def shutdownStores {
info("Shutting down task instance stores.")
taskInstances.values.foreach(_.shutdownStores)
}
def shutdownTableManager: Unit = {
info("Shutting down task instance table manager.")
taskInstances.values.foreach(_.shutdownTableManager)
}
def shutdownLocalityManager {
if(coordinatorStreamManager != null) {
info("Shutting down coordinator stream manager used by locality manager.")
coordinatorStreamManager.stop
}
}
def shutdownOffsetManager {
info("Shutting down offset manager.")
offsetManager.stop
}
def shutdownMetrics {
info("Shutting down metrics reporters.")
reporters.values.foreach(_.stop)
if (jvm != null) {
info("Shutting down JVM metrics.")
jvm.stop
}
}
def shutdownSecurityManger: Unit = {
if (securityManager != null) {
info("Shutting down security manager.")
securityManager.stop
}
}
def shutdownDiskSpaceMonitor: Unit = {
if (diskSpaceMonitor != null) {
info("Shutting down disk space monitor.")
diskSpaceMonitor.stop()
}
}
def shutdownHostStatisticsMonitor: Unit = {
if (hostStatisticsMonitor != null) {
info("Shutting down host statistics monitor.")
hostStatisticsMonitor.stop()
}
}
}
/**
* Exception thrown when the SamzaContainer tries to transition to an illegal state.
* {@link SamzaContainerStatus} has more details on the state transitions.
*
* @param s String, Message associated with the exception
* @param t Throwable, Wrapped error/exception thrown, if any.
*/
class IllegalContainerStateException(s: String, t: Throwable) extends SamzaException(s, t) {
def this(s: String) = this(s, null)
}
|
TiVo/samza
|
samza-core/src/main/scala/org/apache/samza/container/SamzaContainer.scala
|
Scala
|
apache-2.0
| 40,036
|
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.onlinetechvision.spark.hazelcast.connector.rdd
import java.util.Properties
import com.hazelcast.core._
import com.onlinetechvision.spark.hazelcast.connector.config.SparkHazelcastService
import com.onlinetechvision.spark.hazelcast.connector.validator.SparkHazelcastValidator._
import org.apache.spark.rdd.RDD
/**
* Created by eren.avsarogullari on 2/8/16.
*/
package object implicits {
protected trait HazelcastWriter[T] extends Serializable {
protected val rdd: RDD[T]
protected def write(iterator: Iterator[T], distributedObject: DistributedObject)
protected def writeToHazelcast(properties: Properties) {
rdd.sparkContext.runJob(rdd, ((iterator: Iterator[T]) => new HazelcastTask(properties).execute(iterator)))
}
private class HazelcastTask(properties: Properties) extends Serializable {
def execute(iterator: Iterator[T]) {
val sparkHazelcastData = SparkHazelcastService.getSparkHazelcastData(properties)
write(iterator, sparkHazelcastData.getDistributedObject())
}
}
}
implicit class HazelcastItemWriter[T](receivedRDD: RDD[T]) extends HazelcastWriter[T] {
override val rdd: RDD[T] = receivedRDD
override protected def write(iterator: Iterator[T], distributedObject: DistributedObject) {
distributedObject match {
case hzList: IList[T] => iterator.foreach(item => hzList.add(item))
case hzSet: ISet[T] => iterator.foreach(item => hzSet.add(item))
case hzQueue: IQueue[T] => iterator.foreach(item => hzQueue.add(item))
case _ => throw new IllegalStateException("Unexpected Distributed Object Type Found!")
}
}
def writeItemToHazelcast(properties: Properties) {
validateProperties(properties)
writeToHazelcast(properties)
}
}
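/* Hedged usage sketch (not part of the original source). The Properties keys
 * required by SparkHazelcastService are connector-specific and omitted here;
 * hzProperties stands for a fully populated instance that points at an IList,
 * ISet or IQueue.
 *
 *   import org.apache.spark.SparkContext
 *   def writeExample(sc: SparkContext, hzProperties: Properties): Unit =
 *     sc.parallelize(Seq("a", "b", "c")).writeItemToHazelcast(hzProperties)
 */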
implicit class HazelcastMessageWriter[T](receivedRDD: RDD[T]) extends HazelcastWriter[T] {
override val rdd: RDD[T] = receivedRDD
override protected def write(iterator: Iterator[T], distributedObject: DistributedObject) {
distributedObject match {
case hzTopic: ITopic[T] => iterator.foreach(message => hzTopic.publish(message))
case _ => throw new IllegalStateException("Unexpected Distributed Object Type Found!")
}
}
def writeMessageToHazelcast(properties: Properties) {
validateProperties(properties)
writeToHazelcast(properties)
}
}
implicit class HazelcastEntryWriter[K, V](receivedRDD: RDD[(K, V)]) extends HazelcastWriter[(K, V)] {
override val rdd: RDD[(K, V)] = receivedRDD
override protected def write(iterator: Iterator[(K, V)], distributedObject: DistributedObject) {
distributedObject match {
case hzMap: IMap[K, V] => iterator.foreach(tuple => hzMap.put(tuple._1, tuple._2))
case hzMultiMap: MultiMap[K, V] => iterator.foreach(tuple => hzMultiMap.put(tuple._1, tuple._2))
case hzReplicatedMap: ReplicatedMap[K, V] => iterator.foreach(tuple => hzReplicatedMap.put(tuple._1, tuple._2))
case _ => throw new IllegalStateException("Unexpected Distributed Object Type Found!")
}
}
def writeEntryToHazelcast(properties: Properties) {
validateProperties(properties)
writeToHazelcast(properties)
}
}
}
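
For illustration, a minimal usage sketch of the implicit writers above. The property keys shown are hypothetical stand-ins -- the real keys are defined by SparkHazelcastService and its config module -- and a reachable Hazelcast instance plus a local Spark master are assumed:

import java.util.Properties
import org.apache.spark.{SparkConf, SparkContext}
import com.onlinetechvision.spark.hazelcast.connector.rdd.implicits._

object HazelcastItemWriterSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("hz-writer-sketch").setMaster("local[2]"))
    val properties = new Properties()
    // Hypothetical keys -- substitute whatever SparkHazelcastService actually expects.
    properties.setProperty("hazelcast.xml.config.file.path", "src/test/resources/hazelcast.xml")
    properties.setProperty("hazelcast.distributed.object.name", "example_list")
    // HazelcastItemWriter is picked up implicitly for any RDD[T];
    // validateProperties runs before the distributed write job is submitted.
    sc.parallelize(Seq("a", "b", "c")).writeItemToHazelcast(properties)
    sc.stop()
  }
}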
|
erenavsarogullari/spark-hazelcast-connector
|
src/main/scala/com/onlinetechvision/spark/hazelcast/connector/rdd/implicits/package.scala
|
Scala
|
apache-2.0
| 3,879
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.plugins.transformer
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.FlatSpec
import de.fuberlin.wiwiss.silk.plugins.Plugins
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import de.fuberlin.wiwiss.silk.plugins.transformer.normalize.RemoveSpecialCharsTransformer
@RunWith(classOf[JUnitRunner])
class RemoveSpecialCharsTransformerTest extends FlatSpec with ShouldMatchers {
Plugins.register()
val transformer = new RemoveSpecialCharsTransformer()
"RemoveSpecialCharsTransformer" should "return 'abc'" in {
transformer.evaluate("a.b.c-") should equal("abc")
}
}
|
fusepoolP3/p3-silk
|
silk-core/src/test/scala/de/fuberlin/wiwiss/silk/plugins/transformer/RemoveSpecialCharsTransformerTest.scala
|
Scala
|
apache-2.0
| 1,213
|
package com.productfoundry.akka.cqrs.process
/**
* Exception indicating an internal problem with the process, probably caused by a programming error.
*/
case class ProcessManagerInternalException(message: String)
extends ProcessManagerException(message)
|
odd/akka-cqrs
|
core/src/main/scala/com/productfoundry/akka/cqrs/process/ProcessManagerInternalException.scala
|
Scala
|
apache-2.0
| 259
|
/*
* Copyright 2013-2014 IQ TECH <http://www.iqtech.pl>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.abyss.test.graph
import akka.actor.ActorSystem
import akka.util.Timeout
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
/**
* Created by cane, 12/1/13 12:15 PM
* $Id: PersistenceTestBase.scala,v 1.2 2013-12-31 21:09:28 cane Exp $
*/
trait PersistenceTestBase {
  System.setProperty("config.resource", "/persistence-test.conf")
implicit val ec = ExecutionContext.global
implicit val system = ActorSystem("test")
def awaitTermination(s: Int) = try {
println("Awaiting termination...")
    val timeout = Timeout(s.seconds)
val duration = timeout.duration
system.awaitTermination(duration)
} catch {
case e: Throwable =>
println("EOT -> %s" format e.getMessage)
e.printStackTrace()
}
}
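
A sketch of how the trait might be mixed in; the object name and the 10-second bound are illustrative, not part of the original suite. It assumes the classic (pre-2.4) Akka API the trait itself targets, where system.shutdown and awaitTermination still exist, and that persistence-test.conf is on the classpath:

object PersistenceSmokeTest extends App with PersistenceTestBase {
  println(s"Actor system '${system.name}' is up")
  system.shutdown()    // ask the system to stop...
  awaitTermination(10) // ...and block for up to 10 seconds
}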
|
iqtech/abyss
|
abyss-graph/src/test/scala/io/abyss/test/graph/PersistenceTestBase.scala
|
Scala
|
apache-2.0
| 1,367
|
import sbt._
import Keys._
import java.net.URL
object ScalaWurfl extends Build {
val project = (Project("scalawurfl", file(".")) settings(
organization := "org.scalawurfl",
name := "scala-wurfl",
version := "1.0-SNAPSHOT",
scalaVersion := "2.10.2",
crossScalaVersions := Seq("2.10.0", "2.10.2"),
    licenses := Seq("Apache License, Version 2.0" -> new URL("http://www.apache.org/licenses/LICENSE-2.0.html")),
libraryDependencies ++= dependencies,
autoCompilerPlugins := true
) settings(publishSettings:_*))
def publishSettings: Seq[Setting[_]] = Seq(
    // If we want to publish to Maven Central, we need to use Maven style.
publishMavenStyle := true,
publishArtifact in Test := false,
// The Nexus repo we're publishing to.
publishTo <<= version { (v: String) =>
val nexus = "https://oss.sonatype.org/"
if (v.trim.endsWith("SNAPSHOT")) {
Some("snapshots" at nexus + "content/repositories/snapshots")
}
else {
Some("releases" at nexus + "service/local/staging/deploy/maven2")
}
},
credentials += Credentials(Path.userHome / ".ivy2" / ".credentials"),
    // Maven Central does not allow other repos. We're ok here because the artifacts
    // we use externally are *optional* dependencies.
pomIncludeRepository := { x => false },
// Maven central wants some extra metadata to keep things 'clean'.
pomExtra := (
<licenses>
<license>
<name>Apache License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.html</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<url>git@github.com:filosganga/scala-wurfl.git</url>
<connection>scm:git:git@github.com:filosganga/scala-wurfl.git</connection>
</scm>
<developers>
<developer>
<id>filosganga</id>
<name>Filippo De Luca</name>
<url>http://filippodeluca.com</url>
</developer>
</developers>)
)
def dependencies = Seq(
"com.typesafe" % "config" % "1.0.0",
"com.jsuereth" %% "scala-arm" % "1.3",
"org.specs2" %% "specs2" % "1.14" % "test",
"org.mockito" % "mockito-all" % "1.9.0" % "test"
)
}
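
For reference, the `credentials` setting above reads a Java-properties-style file from ~/.ivy2/.credentials. An equivalent inline form is sketched below; the realm and host are the usual Sonatype OSS values and should be verified against the target Nexus, and the user/password are placeholders:

  // Inline alternative to the file-based credentials lookup.
  credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", "<username>", "<password>")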
|
filosganga/scala-wurfl
|
project/Build.scala
|
Scala
|
apache-2.0
| 2,304
|
package com.overviewdocs.blobstorage
import akka.stream.scaladsl.{Sink,Source}
import akka.util.ByteString
import java.io.{ File, IOException, InputStream }
import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path }
import scala.concurrent.Future
import com.overviewdocs.test.ActorSystemContext
class FileStrategySpec extends StrategySpecification {
trait FileBaseScope extends BaseScope {
val tmpDir: Path = Files.createTempDirectory("overview-file-strategy-spec")
def rimraf(path: Path): Unit = {
import java.nio.file._
import java.nio.file.attribute.BasicFileAttributes
val deleteVisitor = new SimpleFileVisitor[Path] {
override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
Files.delete(file)
FileVisitResult.CONTINUE
}
override def postVisitDirectory(file: Path, ioe: IOException): FileVisitResult = {
Files.delete(file)
FileVisitResult.CONTINUE
}
}
Files.walkFileTree(path, deleteVisitor)
}
override def after = {
if (tmpDir.toFile.exists) { // We delete it before this sometimes
rimraf(tmpDir)
}
super.after
}
val mockConfig = mock[BlobStorageConfig]
mockConfig.fileBaseDirectory returns tmpDir.toString
object TestFileStrategy extends FileStrategy {
override val config = mockConfig
}
def invalidLocationThrowsException[T](f: String => T) = {
(f("fil:BUCKET:KEY") must throwA[IllegalArgumentException]) and
(f("file::key") must throwA[IllegalArgumentException]) and
(f("file:bucket:") must throwA[IllegalArgumentException])
}
// Create bucket1
val bucket = "bucket1"
val bucketFile = new File(tmpDir.toString, bucket)
bucketFile.mkdir()
}
trait ExistingFileScope extends FileBaseScope with ActorSystemContext {
// Create key1
val key = "key1"
val keyFile = new File(bucketFile.toString, key)
Files.write(keyFile.toPath, "data1".getBytes("utf-8"))
def readSource(s: Source[ByteString, akka.NotUsed]): Array[Byte] = {
val sink = Sink.fold[ByteString, ByteString](ByteString.empty)(_ ++ _)
val futureByteString: Future[ByteString] = s.runWith(sink)
await(futureByteString).toArray
}
}
"#get" should {
"throw an exception when get location does not look like file:BUCKET:KEY" in new ExistingFileScope {
invalidLocationThrowsException(TestFileStrategy.get)
}
"throw a delayed exception when the key does not exist in the bucket which does" in new ExistingFileScope {
val source = TestFileStrategy.get(s"file:$bucket:x$key")
readSource(source) must throwA[IOException]
}
"throw a delayed exception when the bucket does not exist" in new ExistingFileScope {
val source = TestFileStrategy.get(s"file:x$bucket:$key")
readSource(source) must throwA[IOException]
}
"throw a delayed exception when the base directory does not exist" in new ExistingFileScope {
rimraf(tmpDir)
val source = TestFileStrategy.get(s"file:$bucket:$key")
readSource(source) must throwA[IOException]
}
"stream the file" in new ExistingFileScope {
val source = TestFileStrategy.get(s"file:$bucket:$key")
val byteArray = readSource(source)
new String(byteArray, "utf-8") must beEqualTo("data1")
}
}
"#getUrl" should {
"throw an exception when delete location does not look like file:BUCKET:KEY" in new ExistingFileScope {
(TestFileStrategy.getUrl("fil:BUCKET:KEY", "image/png") must throwA[IllegalArgumentException]) and
(TestFileStrategy.getUrl("file::key", "image/png") must throwA[IllegalArgumentException]) and
(TestFileStrategy.getUrl("file:bucket:", "image/png") must throwA[IllegalArgumentException])
}
"throw a delayed exception when the key does not exist in the bucket which does" in new ExistingFileScope {
val future = TestFileStrategy.getUrl(s"file:$bucket:x$key", "image/png")
await(future) must throwA[IOException]
}
"throw a delayed exception when the bucket does not exist" in new ExistingFileScope {
val future = TestFileStrategy.getUrl(s"file:x$bucket:$key", "image/png")
await(future) must throwA[IOException]
}
"throw a delayed exception when the base directory does not exist" in new ExistingFileScope {
rimraf(tmpDir)
val future = TestFileStrategy.getUrl(s"file:$bucket:$key", "image/png")
await(future) must throwA[IOException]
}
"create a data: URL" in new ExistingFileScope {
val future = TestFileStrategy.getUrl(s"file:$bucket:$key", "image/png")
await(future) must beEqualTo("data:image/png;base64,ZGF0YTE=")
}
}
"#delete" should {
"throw an exception when delete location does not look like file:BUCKET:KEY" in new ExistingFileScope {
invalidLocationThrowsException(TestFileStrategy.delete)
}
"succeed when the key does not exist in the bucket which does" in new ExistingFileScope {
val future = TestFileStrategy.delete(s"file:$bucket:x$key")
await(future) must beEqualTo(())
}
"succeed when the bucket does not exist" in new ExistingFileScope {
val future = TestFileStrategy.delete(s"file:x$bucket:$key")
await(future) must beEqualTo(())
}
"succeed when the base directory does not exist" in new ExistingFileScope {
rimraf(tmpDir)
val future = TestFileStrategy.delete(s"file:$bucket:$key")
await(future) must beEqualTo(())
}
"delete the file" in new ExistingFileScope {
val future = TestFileStrategy.delete(s"file:$bucket:$key")
await(future) must beEqualTo(())
keyFile.exists must beFalse
}
}
"#create" should {
trait CreateScope extends FileBaseScope {
val locationRegex = s"^file:$bucket:([-\\w]+)$$".r
val toCreate: Path = tempFile("foo")
def fileAtLocation(location: String): Path = location match {
case locationRegex(key) => new File(bucketFile, key).toPath
}
}
"throw an exception when create location does not look like file:BUCKET:KEY" in new CreateScope {
invalidLocationThrowsException(TestFileStrategy.create(_, toCreate))
}
"return location" in new CreateScope {
val future = TestFileStrategy.create(s"file:$bucket", toCreate)
val location = await(future)
location must beMatching(locationRegex)
}
"write to the file" in new CreateScope {
val location = await(TestFileStrategy.create(s"file:$bucket", toCreate))
val path = fileAtLocation(location)
path.toFile.exists must beTrue
new String(Files.readAllBytes(path), "utf-8") must beEqualTo("foo")
path must not(beEqualTo(toCreate))
}
"create any missing directories in path" in new CreateScope {
rimraf(tmpDir)
val future = TestFileStrategy.create(s"file:$bucket", toCreate)
val location = await(future)
fileAtLocation(location).toFile.exists must beTrue
}
}
}
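
As a quick sanity check of the payload expected by the "#getUrl" test above: base64-encoding the five bytes of "data1" does yield "ZGF0YTE=". A standalone verification (object name is made up):

import java.util.Base64

object DataUrlCheck extends App {
  // Encode the same bytes the spec writes to keyFile.
  val encoded = Base64.getEncoder.encodeToString("data1".getBytes("utf-8"))
  println(s"data:image/png;base64,$encoded") // prints data:image/png;base64,ZGF0YTE=
  assert(encoded == "ZGF0YTE=")
}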
|
overview/overview-server
|
common/src/test/scala/com/overviewdocs/blobstorage/FileStrategySpec.scala
|
Scala
|
agpl-3.0
| 7,071
|