code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
package neilw4.c4scala.controller

import neilw4.c4scala.state._

/** Controls the internal logic of the application.
  *
  * Registers itself and a logging listener on the shared state, and reacts to
  * state-change callbacks by driving the board and starting/stopping the AI task.
  */
class Controller(state: State) extends StateListener {
  state.attachListener(this)

  val loggingListener = new LoggingListener
  state.attachListener(loggingListener)

  // The AI task being run if it exists.
  var asyncAi: Option[AsyncAi] = None

  /** Plays a piece in the given column, cancelling any in-flight AI move first. */
  def makeMove(col: Int) = {
    state.board.stopAiThinking()
    val gameStillRunning = state.board.winner.isEmpty
    if (gameStillRunning) {
      state.board.add(col)
      state.board.checkWinner(col)
    }
  }

  /** Detaches both listeners; call when the owning component is torn down. */
  def onDestroy() = {
    state.removeListener(this)
    state.removeListener(loggingListener)
  }

  /** If the difficulty changes while the AI is playing, restart the AI. */
  override def onDifficultyChanged(difficulty: Int) = {
    if (state.board.aiThinking) {
      state.board.startAiThinking()
    }
  }

  /** If the current player has changed between human and AI, stop/start the AI. */
  override def onPlayerAiChanged(player: Player, isAi: Boolean) = {
    if (player == state.board.nextPlayer) {
      if (isAi) state.board.startAiThinking()
      else state.board.stopAiThinking()
    }
  }

  /** When a move is made, check whether the next player is an AI and should play. */
  override def onBoardPieceChanged(x: Int, y: Int) = {
    if (state.playerAi(state.board.nextPlayer)) {
      state.board.startAiThinking()
    }
  }

  /** Starts an AsyncTask for the AI to run. */
  override def onStartAiThinking() = {
    val aiShouldPlay = state.board.winner.isEmpty && state.playerAi(state.board.nextPlayer)
    if (aiShouldPlay) {
      val ai = new NativeAi(state.board, this)
      asyncAi = Some(ai)
      ai.execute(state.difficulty)
    }
  }

  /** Stops the AsyncTask the AI is using, if any. */
  override def onStopAiThinking() = {
    asyncAi.foreach(_.cancel(true))
    asyncAi = None
  }

  /** Once the game is over there is nothing left for the AI to compute. */
  override def onGameEnd(winner: Winner) = state.board.stopAiThinking()
}
neilw4/connect4scala
src/main/scala/neilw4/c4scala/controller/Controller.scala
Scala
mit
1,979
package io.getquill.ast

/** Root marker trait for all operators in the quill AST. */
sealed trait Operator
/** An operator applied to a single operand. */
sealed trait UnaryOperator extends Operator
/** A unary operator written before its operand (e.g. `!x`). */
sealed trait PrefixUnaryOperator extends UnaryOperator
/** A unary operator written after its operand (e.g. `x.isEmpty`). */
sealed trait PostfixUnaryOperator extends UnaryOperator
/** An operator applied to two operands. */
sealed trait BinaryOperator extends Operator

/** Equality comparisons. */
object EqualityOperator {
  case object `==` extends BinaryOperator
  case object `!=` extends BinaryOperator
}

/** Boolean logic operators. */
object BooleanOperator {
  case object `!` extends PrefixUnaryOperator
  case object `&&` extends BinaryOperator
  case object `||` extends BinaryOperator
}

/** String operations: concatenation plus postfix conversions/case changes. */
object StringOperator {
  case object `+` extends BinaryOperator
  case object `toUpperCase` extends PostfixUnaryOperator
  case object `toLowerCase` extends PostfixUnaryOperator
  case object `toLong` extends PostfixUnaryOperator
  case object `toInt` extends PostfixUnaryOperator
}

/** Numeric operations.
  * Note that `-` is both binary subtraction and prefix negation.
  */
object NumericOperator {
  case object `-` extends BinaryOperator with PrefixUnaryOperator
  case object `+` extends BinaryOperator
  case object `*` extends BinaryOperator
  case object `>` extends BinaryOperator
  case object `>=` extends BinaryOperator
  case object `<` extends BinaryOperator
  case object `<=` extends BinaryOperator
  case object `/` extends BinaryOperator
  case object `%` extends BinaryOperator
}

/** Collection membership and emptiness checks. */
object SetOperator {
  case object `contains` extends BinaryOperator
  case object `nonEmpty` extends PostfixUnaryOperator
  case object `isEmpty` extends PostfixUnaryOperator
}

/** Operators that aggregate a whole collection/query into a single value. */
sealed trait AggregationOperator extends Operator
object AggregationOperator {
  case object `min` extends AggregationOperator
  case object `max` extends AggregationOperator
  case object `avg` extends AggregationOperator
  case object `sum` extends AggregationOperator
  case object `size` extends AggregationOperator
}
jcranky/quill
quill-core/src/main/scala/io/getquill/ast/Operator.scala
Scala
apache-2.0
1,731
package thistle.examples.webevent

/** Base trait for all web events: every event records where the visitor came
  * from, the page URL, and a page-view identifier.
  */
trait WebEvent {
  val referrer: String
  val url: String
  val pageId: Long

  /** Unqualified class name of the concrete event, e.g. "SearchEvent".
    *
    * Fix: the dot metacharacter must be regex-escaped as "\\.". The previous
    * pattern "\\\\." denoted a literal backslash followed by any character,
    * which never occurs in a JVM class name, so `split` returned the whole
    * fully-qualified name unchanged.
    */
  def name = this.getClass.getName.split("\\.").last
}

/** Mixin for events that show a list of listings. */
trait ListingsDisplay {
  val listingIds: Seq[Long]
}

/** Mixin for events associated with a single shop. */
trait HasShop {
  val shopId: Long
}

/** A search results page: the query plus the listings displayed for it. */
case class SearchEvent(
    referrer: String,
    url: String,
    pageId: Long,
    query: String,
    listingIds: Seq[Long]) extends WebEvent with ListingsDisplay

/** A single listing's page, which also belongs to a shop. */
case class ListingEvent(
    referrer: String,
    url: String,
    pageId: Long,
    listingId: Long,
    shopId: Long) extends WebEvent with HasShop

/** A shop's page. */
case class ShopEvent(
    referrer: String,
    url: String,
    pageId: Long,
    shopId: Long) extends WebEvent with HasShop

/** A purchase confirmation showing the purchased listings. */
case class PurchaseEvent(
    referrer: String,
    url: String,
    pageId: Long,
    listingIds: Seq[Long]) extends WebEvent with ListingsDisplay

/** Fallback event carrying only the common WebEvent fields. */
case class DefaultWebEvent(
    referrer: String,
    url: String,
    pageId: Long) extends WebEvent
smarden1/thistle
src/main/scala/thistle/examples/webevent/WebEvent.scala
Scala
mit
931
package com.wavesplatform.it.sync.smartcontract.smartasset

import scala.concurrent.duration._
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils.EitherExt2
import com.wavesplatform.it.api.SyncHttpApi._
import com.wavesplatform.it.sync._
import com.wavesplatform.it.transactions.BaseTransactionSuite
import com.wavesplatform.lang.v1.estimator.v2.ScriptEstimatorV2
import com.wavesplatform.state.IntegerDataEntry
import com.wavesplatform.transaction.Asset.{IssuedAsset, Waves}
import com.wavesplatform.transaction.smart.script.ScriptCompiler
import com.wavesplatform.transaction.transfer.MassTransferTransaction.Transfer
import com.wavesplatform.transaction.transfer.TransferTransaction

/** Node integration tests covering which transaction types a smart (scripted)
  * asset allows or forbids. Each test compiles a Ride asset script, installs it
  * on the shared `asset` via SetAssetScript, and then checks that transfers,
  * burns, reissues and mass-transfers are accepted or rejected accordingly.
  *
  * NOTE(review): fee/amount constants (smartMinFee, issueFee, someAssetAmount,
  * scriptBase64, errNotAllowedByTokenApiError, ...) and the sender/miner/nodes
  * handles come from the `com.wavesplatform.it.sync` package object and
  * BaseTransactionSuite — not visible in this file.
  */
class AssetSupportedTransactionsSuite extends BaseTransactionSuite {
  private val estimator = ScriptEstimatorV2
  // Id of the scripted asset shared by most tests; (re)scripted per test.
  var asset = ""

  protected override def beforeAll(): Unit = {
    super.beforeAll()
    // Issue the shared smart asset with the default allow-all-ish scriptBase64.
    asset = sender
      .issue(
        firstKeyPair,
        "MyAsset",
        "Test Asset",
        someAssetAmount,
        0,
        reissuable = true,
        issueFee,
        2,
        Some(scriptBase64),
        waitForTx = true
      )
      .id
  }

  test("transfer verification with asset script") {
    val firstAssetBalance = sender.assetBalance(firstAddress, asset).balance
    val secondAssetBalance = sender.assetBalance(secondAddress, asset).balance
    val thirdAssetBalance = sender.assetBalance(thirdAddress, asset).balance

    sender.transfer(firstKeyPair, secondAddress, 100, smartMinFee, Some(asset), waitForTx = true)

    miner.assertAssetBalance(firstAddress, asset, firstAssetBalance - 100)
    miner.assertAssetBalance(secondAddress, asset, secondAssetBalance + 100)

    sender.transfer(secondKeyPair, thirdAddress, 100, smartMinFee, Some(asset), waitForTx = true)

    //deprecate transfers with amount > 99
    val scr = ScriptCompiler(
      s"""
         |match tx {
         | case _ : SetAssetScriptTransaction => true
         | case t: TransferTransaction => t.amount <= 99
         | case _ => false
         |}
        """.stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scr), waitForTx = true)

    // Amount 100 now violates the script, regardless of sender.
    assertApiError(sender.transfer(firstKeyPair, secondAddress, 100, smartMinFee, Some(asset)), errNotAllowedByTokenApiError)

    assertApiError(sender.transfer(thirdKeyPair, secondAddress, 100, smartMinFee, Some(asset)), errNotAllowedByTokenApiError)

    // Amount 99 is still allowed.
    sender.transfer(thirdKeyPair, secondAddress, 99, smartMinFee, Some(asset), waitForTx = true)

    miner.assertAssetBalance(secondAddress, asset, secondAssetBalance + 99)
    miner.assertAssetBalance(thirdAddress, asset, thirdAssetBalance + 1)
  }

  test("transfer goes only to addresses from list (white or black)") {
    // Whitelist: only the second account may receive the asset.
    val scr = ScriptCompiler(
      s"""
         |match tx {
         | case _: SetAssetScriptTransaction => true
         | case t: TransferTransaction => t.recipient == addressFromPublicKey(base58'${secondKeyPair.publicKey}')
         | case _ => false
         |}
        """.stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scr), waitForTx = true)

    sender.transfer(firstKeyPair, secondAddress, 100, smartMinFee, Some(asset), waitForTx = true)

    assertApiError(sender.transfer(firstKeyPair, thirdAddress, 100, smartMinFee, Some(asset)), errNotAllowedByTokenApiError)

    assertApiError(sender.transfer(firstKeyPair, firstAddress, 1, smartMinFee, Some(asset)), errNotAllowedByTokenApiError)

    // Blacklist: first and second accounts may NOT receive the asset.
    val scr1 = ScriptCompiler(
      s"""
         |match tx {
         | case _: SetAssetScriptTransaction => true
         | case t: TransferTransaction => t.recipient != addressFromPublicKey(base58'${secondKeyPair.publicKey}') && t.recipient != addressFromPublicKey(base58'${firstKeyPair.publicKey}')
         | case _ => false
         |}
        """.stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scr1), waitForTx = true)

    sender.transfer(firstKeyPair, thirdAddress, 100, smartMinFee, Some(asset), waitForTx = true)

    assertApiError(sender.transfer(firstKeyPair, secondAddress, 100, smartMinFee, Some(asset)), errNotAllowedByTokenApiError)

    assertApiError(sender.transfer(firstKeyPair, firstAddress, 1, smartMinFee, Some(asset)), errNotAllowedByTokenApiError)
  }

  test("smart asset requires fee in other asset") {
    // Issue a plain (non-smart) asset and sponsor it so it can pay fees.
    val feeAsset = sender
      .issue(firstKeyPair, "FeeAsset", "Asset for fee of Smart Asset", someAssetAmount, 2, reissuable = false, issueFee, waitForTx = true)
      .id

    sender.sponsorAsset(firstKeyPair, feeAsset, baseFee = 2, fee = sponsorReducedFee + smartFee, waitForTx = true)

    // Script demands that transfer fees are paid in feeAsset.
    val scr = ScriptCompiler(
      s"""
         |match tx {
         | case _: SetAssetScriptTransaction => true
         | case t: TransferTransaction => t.feeAssetId == base58'$feeAsset'
         | case _ => false
         |}
        """.stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scr), waitForTx = true)

    // Fee of 2 in the sponsored asset is below the required minimum.
    assertApiError(sender.transfer(firstKeyPair, thirdAddress, 100, 2, Some(asset), feeAssetId = Some(feeAsset))) { error =>
      error.message should include("does not exceed minimal value")
    }

    sender.transfer(firstKeyPair, thirdAddress, 100, 10, Some(asset), feeAssetId = Some(feeAsset), waitForTx = true)

    // Paying the fee in Waves is rejected by the script.
    assertApiError(sender.transfer(firstKeyPair, firstAddress, 1, smartMinFee, Some(asset)), errNotAllowedByTokenApiError)
  }

  test("token that can be only transferred with the issuer's permission - black label") {
    val blackAsset = sender
      .issue(
        firstKeyPair,
        "BlackAsset",
        "Test Asset",
        someAssetAmount,
        0,
        reissuable = false,
        issueFee,
        2,
        Some(scriptBase64),
        waitForTx = true
      )
      .id

    sender.transfer(firstKeyPair, secondAddress, 100, smartMinFee, Some(blackAsset), waitForTx = true)

    // A transfer is valid only if the issuer has published an integer data
    // entry keyed by the transfer's own transaction id.
    val scr = ScriptCompiler(
      s"""
         |match tx {
         | case _: SetAssetScriptTransaction => true
         | case t: TransferTransaction => let issuer = extract(addressFromString("$firstAddress"))
         | isDefined(getInteger(issuer,toBase58String(t.id))) == true
         | case _ => false
         |}
        """.stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(blackAsset, firstKeyPair, setAssetScriptFee + smartFee, Some(scr), waitForTx = true)

    // Signed but not yet broadcast; its id will be whitelisted below.
    val blackTx = TransferTransaction.selfSigned(
        2.toByte,
        secondKeyPair,
        thirdKeyPair.toAddress,
        IssuedAsset(ByteStr.decodeBase58(blackAsset).get),
        1,
        Waves,
        smartMinFee,
        ByteStr.empty,
        System.currentTimeMillis + 1.minutes.toMillis
      )
      .explicitGet()

    // Identical transfer with a different timestamp => different id, never whitelisted.
    val incorrectTx = TransferTransaction
      .selfSigned(
        2.toByte,
        secondKeyPair,
        thirdKeyPair.toAddress,
        IssuedAsset(ByteStr.decodeBase58(blackAsset).get),
        1,
        Waves,
        smartMinFee,
        ByteStr.empty,
        System.currentTimeMillis + 10.minutes.toMillis
      )
      .explicitGet()

    // Issuer whitelists blackTx's id via a data transaction.
    val dataTx = sender.putData(firstKeyPair, List(IntegerDataEntry(s"${blackTx.id()}", 42)), minFee).id
    nodes.waitForHeightAriseAndTxPresent(dataTx)

    sender.signedBroadcast(blackTx.json(), waitForTx = true)

    assertApiError(sender.signedBroadcast(incorrectTx.json()), errNotAllowedByTokenApiError)
  }

  test("burner is from the list (white or black)") {
    // Reset the shared asset's script before handing out balances.
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scriptBase64), waitForTx = true)

    sender.transfer(firstKeyPair, secondAddress, 100, smartMinFee, Some(asset), waitForTx = true)
    sender.transfer(firstKeyPair, thirdAddress, 100, smartMinFee, Some(asset), waitForTx = true)

    // Whitelist: only the second account may burn.
    val scr = ScriptCompiler(
      s"""
         |match tx {
         | case _ : SetAssetScriptTransaction => true
         | case b: BurnTransaction => b.sender == addressFromPublicKey(base58'${secondKeyPair.publicKey}')
         | case _ => false
         |}
        """.stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scr), waitForTx = true)

    sender.burn(secondKeyPair, asset, 10, smartMinFee, waitForTx = true)

    assertApiError(sender.burn(firstKeyPair, asset, 10, smartMinFee), errNotAllowedByTokenApiError)

    // Blacklist: everyone but the second account may burn.
    val scr1 = ScriptCompiler(
      s"""
         |match tx {
         | case _: SetAssetScriptTransaction => true
         | case b: BurnTransaction => b.sender != addressFromPublicKey(base58'${secondKeyPair.publicKey}')
         | case _ => false
         |}
        """.stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scr1), waitForTx = true)

    sender.burn(thirdKeyPair, asset, 10, smartMinFee, waitForTx = true)

    sender.burn(firstKeyPair, asset, 10, smartMinFee, waitForTx = true)

    assertApiError(sender.burn(secondKeyPair, asset, 10, smartMinFee), errNotAllowedByTokenApiError)
  }

  // Disabled: depends on block height parity, which races with other suites.
  ignore("burn by some height") {
    val scr = ScriptCompiler(
      s"""
         |match tx {
         | case _: SetAssetScriptTransaction => true
         | case _: BurnTransaction => height % 2 == 0
         | case _ => false
         |}
        """.stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scr), waitForTx = true)

    // Wait for an even height so the burn is allowed...
    if (nodes.map(_.height).max % 2 != 0) nodes.waitForHeightArise()

    sender.burn(firstKeyPair, asset, 10, smartMinFee, waitForTx = true)

    // ...then move to an odd height so the next burn is rejected.
    if (nodes.map(_.height).max % 2 == 0) {
      nodes.waitForHeightArise()
    }

    assertApiError(sender.burn(firstKeyPair, asset, 10, smartMinFee), errNotAllowedByTokenApiError)
  }

  test("unburnable asset") {
    // Asset whose script forbids burns and allows everything else.
    val unBurnable = sender
      .issue(
        firstKeyPair,
        "Unburnable",
        "Test Asset",
        someAssetAmount,
        0,
        reissuable = false,
        issueFee,
        2,
        Some(
          ScriptCompiler(
            s"""
               |match tx {
               | case _: BurnTransaction => false
               | case _ => true
               |}
              """.stripMargin,
            isAssetScript = true,
            estimator
          ).explicitGet()._1.bytes().base64
        ),
        waitForTx = true
      )
      .id

    assertApiError(sender.burn(firstKeyPair, unBurnable, 10, smartMinFee).id, errNotAllowedByTokenApiError)
  }

  test("masstransfer - taxation") {
    // Exactly two transfers, the first going back to the issuer and worth at
    // least 10% of the second ("tax").
    val scr = ScriptCompiler(
      s"""
         |match tx {
         | case _: SetAssetScriptTransaction => true
         | case m: MassTransferTransaction =>
         | let twoTransfers = size(m.transfers) == 2
         | let issuerIsRecipient = m.transfers[0].recipient == addressFromString("$firstAddress")
         | let taxesPaid = m.transfers[0].amount >= m.transfers[1].amount / 10
         | twoTransfers && issuerIsRecipient && taxesPaid
         | case _ => false
         |}
        """.stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scr), waitForTx = true)

    val transfers = List(Transfer(firstAddress, 10), Transfer(secondAddress, 100))
    val massTransferFee = calcMassTransferFee(transfers.size)
    sender.massTransfer(firstKeyPair, transfers, massTransferFee + smartFee, assetId = Some(asset), waitForTx = true)

    // Tax of 9 < 100/10 => rejected.
    val transfers2 = List(Transfer(firstAddress, 9), Transfer(secondAddress, 100))
    assertApiError(sender.massTransfer(firstKeyPair, transfers2, massTransferFee + smartFee, assetId = Some(asset)), errNotAllowedByTokenApiError)
  }

  test("masstransfer - transferCount <=2") {
    val scr = ScriptCompiler(
      s"""
         |match tx {
         | case _: SetAssetScriptTransaction => true
         | case m: MassTransferTransaction =>
         | m.transferCount <= 2
         | case _ => false
         |}
        """.stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scr), waitForTx = true)

    // Three transfers exceed the allowed count.
    val transfers = List(Transfer(firstAddress, 10), Transfer(secondAddress, 100), Transfer(firstAddress, 10))
    val massTransferTransactionFee = calcMassTransferFee(transfers.size)
    assertApiError(
      sender.massTransfer(firstKeyPair, transfers, massTransferTransactionFee + smartFee, assetId = Some(asset)),
      errNotAllowedByTokenApiError
    )
  }

  test("reissue by non-issuer") {
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scriptBase64), waitForTx = true)

    // Node-level check rejects reissue from a non-issuer regardless of script.
    assertApiError(sender.reissue(secondKeyPair, asset, someAssetAmount, reissuable = true, fee = issueFee + smartFee)) { error =>
      error.message should include("Reason: Asset was issued by other address")
    }

    // Even a script that explicitly allows the second account cannot override it.
    val scr = ScriptCompiler(
      s"""
         |match tx {
         | case _: SetAssetScriptTransaction => true
         | case r: ReissueTransaction => r.sender == addressFromPublicKey(base58'${secondKeyPair.publicKey}')
         | case _ => false
         |}
        """.stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(asset, firstKeyPair, setAssetScriptFee + smartFee, Some(scr), waitForTx = true)

    assertApiError(sender.reissue(secondKeyPair, asset, someAssetAmount, reissuable = true, fee = issueFee + smartFee)) { error =>
      error.message should include("Reason: Asset was issued by other address")
    }
  }

  test("reissue by issuer and non-issuer non-re issuable smart asset ") {
    val assetNonReissue = sender
      .issue(
        firstKeyPair,
        "MyAsset",
        "Test Asset",
        someAssetAmount,
        0,
        reissuable = false,
        issueFee,
        2,
        Some(scriptBase64),
        waitForTx = true
      )
      .id

    val scr = ScriptCompiler(
      s"""
         |match tx {
         | case _: SetAssetScriptTransaction => true
         | case r: ReissueTransaction => r.sender == addressFromPublicKey(base58'${secondKeyPair.publicKey}')
         | case _ => false
         |}""".stripMargin,
      isAssetScript = true,
      estimator
    ).explicitGet()._1.bytes().base64
    sender.setAssetScript(assetNonReissue, firstKeyPair, setAssetScriptFee + smartFee, Some(scr), waitForTx = true)

    // Non-issuer fails on ownership; issuer fails on reissuable=false.
    assertApiError(
      sender.reissue(secondKeyPair, assetNonReissue, someAssetAmount, reissuable = true, fee = issueFee + smartFee),
      AssertiveApiError(112, "State check failed. Reason: Asset was issued by other address")
    )

    assertApiError(
      sender.reissue(firstKeyPair, assetNonReissue, someAssetAmount, reissuable = true, fee = issueFee + smartFee),
      AssertiveApiError(112, "State check failed. Reason: Asset is not reissuable")
    )
  }

  test("try to send transactions forbidden by the asset's script") {
    // Asset whose script is the constant `false`: every scripted action fails.
    val assetWOSupport = sender
      .issue(
        firstKeyPair,
        "assetWOSuppor",
        "Test coin for SetAssetScript tests",
        someAssetAmount,
        0,
        reissuable = false,
        issueFee,
        2,
        script = Some(ScriptCompiler(s"false".stripMargin, isAssetScript = true, estimator).explicitGet()._1.bytes().base64),
        waitForTx = true
      )
      .id

    assertApiError(sender.setAssetScript(assetWOSupport, firstKeyPair, setAssetScriptFee, Some(scriptBase64)), errNotAllowedByTokenApiError)
    assertApiError(sender.transfer(firstKeyPair, secondAddress, 100, smartMinFee, Some(assetWOSupport)), errNotAllowedByTokenApiError)
    assertApiError(sender.burn(firstKeyPair, assetWOSupport, 10, smartMinFee), errNotAllowedByTokenApiError)
    assertApiError(
      sender.reissue(firstKeyPair, assetWOSupport, someAssetAmount, true, issueFee + smartFee),
      AssertiveApiError(112, "State check failed. Reason: Asset is not reissuable")
    )

    val transfers = List(Transfer(firstAddress, 10))
    assertApiError(
      sender.massTransfer(firstKeyPair, transfers, calcMassTransferFee(transfers.size) + smartFee, assetId = Some(assetWOSupport)),
      errNotAllowedByTokenApiError
    )
  }
}
wavesplatform/Waves
node-it/src/test/scala/com/wavesplatform/it/sync/smartcontract/smartasset/AssetSupportedTransactionsSuite.scala
Scala
mit
16,956
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources.parquet import scala.reflect.ClassTag import scala.reflect.runtime.universe.TypeTag import org.apache.parquet.column.ColumnDescriptor import org.apache.parquet.io.ParquetDecodingException import org.apache.parquet.schema._ import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName import org.apache.parquet.schema.Type._ import org.apache.spark.SparkException import org.apache.spark.sql.catalyst.ScalaReflection import org.apache.spark.sql.execution.QueryExecutionException import org.apache.spark.sql.execution.datasources.SchemaColumnConvertNotSupportedException import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf.ParquetOutputTimestampType._ import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types._ abstract class ParquetSchemaTest extends ParquetTest with SharedSparkSession { /** * Checks whether the reflected Parquet message type for product type `T` conforms `messageType`. 
*/ protected def testSchemaInference[T <: Product: ClassTag: TypeTag]( testName: String, messageType: String, binaryAsString: Boolean, int96AsTimestamp: Boolean, writeLegacyParquetFormat: Boolean, expectedParquetColumn: Option[ParquetColumn] = None): Unit = { testSchema( testName, StructType.fromAttributes(ScalaReflection.attributesFor[T]), messageType, binaryAsString, int96AsTimestamp, writeLegacyParquetFormat, expectedParquetColumn = expectedParquetColumn) } protected def testParquetToCatalyst( testName: String, sqlSchema: StructType, parquetSchema: String, binaryAsString: Boolean, int96AsTimestamp: Boolean, caseSensitive: Boolean = false, sparkReadSchema: Option[StructType] = None, expectedParquetColumn: Option[ParquetColumn] = None): Unit = { val converter = new ParquetToSparkSchemaConverter( assumeBinaryIsString = binaryAsString, assumeInt96IsTimestamp = int96AsTimestamp, caseSensitive = caseSensitive) test(s"sql <= parquet: $testName") { val actualParquetColumn = converter.convertParquetColumn( MessageTypeParser.parseMessageType(parquetSchema), sparkReadSchema) val actual = actualParquetColumn.sparkType val expected = sqlSchema assert( actual === expected, s"""Schema mismatch. 
|Expected schema: ${expected.json} |Actual schema: ${actual.json} """.stripMargin) if (expectedParquetColumn.isDefined) { compareParquetColumn(actualParquetColumn, expectedParquetColumn.get) } } } protected def testCatalystToParquet( testName: String, sqlSchema: StructType, parquetSchema: String, writeLegacyParquetFormat: Boolean, outputTimestampType: SQLConf.ParquetOutputTimestampType.Value = SQLConf.ParquetOutputTimestampType.INT96): Unit = { val converter = new SparkToParquetSchemaConverter( writeLegacyParquetFormat = writeLegacyParquetFormat, outputTimestampType = outputTimestampType) test(s"sql => parquet: $testName") { val actual = converter.convert(sqlSchema) val expected = MessageTypeParser.parseMessageType(parquetSchema) actual.checkContains(expected) expected.checkContains(actual) } } protected def testSchema( testName: String, sqlSchema: StructType, parquetSchema: String, binaryAsString: Boolean, int96AsTimestamp: Boolean, writeLegacyParquetFormat: Boolean, outputTimestampType: SQLConf.ParquetOutputTimestampType.Value = SQLConf.ParquetOutputTimestampType.INT96, expectedParquetColumn: Option[ParquetColumn] = None): Unit = { testCatalystToParquet( testName, sqlSchema, parquetSchema, writeLegacyParquetFormat, outputTimestampType) testParquetToCatalyst( testName, sqlSchema, parquetSchema, binaryAsString, int96AsTimestamp, expectedParquetColumn = expectedParquetColumn) } protected def compareParquetColumn(actual: ParquetColumn, expected: ParquetColumn): Unit = { assert(actual.sparkType == expected.sparkType, "sparkType mismatch: " + s"actual = ${actual.sparkType}, expected = ${expected.sparkType}") assert(actual.descriptor === expected.descriptor, "column descriptor mismatch: " + s"actual = ${actual.descriptor}, expected = ${expected.descriptor})") // since Parquet ColumnDescriptor equals only compares path, we'll need to compare other // fields explicitly here if (actual.descriptor.isDefined && expected.descriptor.isDefined) { val actualDesc = 
actual.descriptor.get val expectedDesc = expected.descriptor.get assert(actualDesc.getMaxRepetitionLevel == expectedDesc.getMaxRepetitionLevel) assert(actualDesc.getMaxRepetitionLevel == expectedDesc.getMaxRepetitionLevel) assert(actualDesc.getPrimitiveType === expectedDesc.getPrimitiveType) } assert(actual.repetitionLevel == expected.repetitionLevel, "repetition level mismatch: " + s"actual = ${actual.repetitionLevel}, expected = ${expected.repetitionLevel}") assert(actual.definitionLevel == expected.definitionLevel, "definition level mismatch: " + s"actual = ${actual.definitionLevel}, expected = ${expected.definitionLevel}") assert(actual.required == expected.required, "required mismatch: " + s"actual = ${actual.required}, expected = ${expected.required}") assert(actual.path == expected.path, "path mismatch: " + s"actual = ${actual.path}, expected = ${expected.path}") assert(actual.children.size == expected.children.size, "size of children mismatch: " + s"actual = ${actual.children.size}, expected = ${expected.children.size}") actual.children.zip(expected.children).foreach { case (actualChild, expectedChild) => compareParquetColumn(actualChild, expectedChild) } } protected def primitiveParquetColumn( sparkType: DataType, parquetTypeName: PrimitiveTypeName, repetition: Repetition, repetitionLevel: Int, definitionLevel: Int, path: Seq[String], logicalTypeAnnotation: Option[LogicalTypeAnnotation] = None): ParquetColumn = { var typeBuilder = repetition match { case Repetition.REQUIRED => Types.required(parquetTypeName) case Repetition.OPTIONAL => Types.optional(parquetTypeName) case Repetition.REPEATED => Types.repeated(parquetTypeName) } if (logicalTypeAnnotation.isDefined) { typeBuilder = typeBuilder.as(logicalTypeAnnotation.get) } ParquetColumn( sparkType = sparkType, descriptor = Some(new ColumnDescriptor(path.toArray, typeBuilder.named(path.last), repetitionLevel, definitionLevel)), repetitionLevel = repetitionLevel, definitionLevel = definitionLevel, required = 
repetition != Repetition.OPTIONAL, path = path, children = Seq.empty) } } class ParquetSchemaInferenceSuite extends ParquetSchemaTest { testSchemaInference[(Boolean, Int, Long, Float, Double, Array[Byte])]( "basic types", """ |message root { | required boolean _1; | required int32 _2; | required int64 _3; | required float _4; | required double _5; | optional binary _6; |} """.stripMargin, binaryAsString = false, int96AsTimestamp = true, writeLegacyParquetFormat = true, expectedParquetColumn = Some( ParquetColumn( sparkType = StructType.fromAttributes( ScalaReflection.attributesFor[(Boolean, Int, Long, Float, Double, Array[Byte])]), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq( primitiveParquetColumn(BooleanType, PrimitiveTypeName.BOOLEAN, Repetition.REQUIRED, 0, 0, Seq("_1")), primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED, 0, 0, Seq("_2")), primitiveParquetColumn(LongType, PrimitiveTypeName.INT64, Repetition.REQUIRED, 0, 0, Seq("_3")), primitiveParquetColumn(FloatType, PrimitiveTypeName.FLOAT, Repetition.REQUIRED, 0, 0, Seq("_4")), primitiveParquetColumn(DoubleType, PrimitiveTypeName.DOUBLE, Repetition.REQUIRED, 0, 0, Seq("_5")), primitiveParquetColumn(BinaryType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL, 0, 1, Seq("_6")) ))) ) testSchemaInference[(Byte, Short, Int, Long, java.sql.Date)]( "logical integral types", """ |message root { | required int32 _1 (INT_8); | required int32 _2 (INT_16); | required int32 _3 (INT_32); | required int64 _4 (INT_64); | optional int32 _5 (DATE); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = true, expectedParquetColumn = Some( ParquetColumn( sparkType = StructType.fromAttributes( ScalaReflection.attributesFor[(Byte, Short, Int, Long, java.sql.Date)]), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq( 
primitiveParquetColumn(ByteType, PrimitiveTypeName.INT32, Repetition.REQUIRED, 0, 0, Seq("_1"), logicalTypeAnnotation = Some(LogicalTypeAnnotation.intType(8, true))), primitiveParquetColumn(ShortType, PrimitiveTypeName.INT32, Repetition.REQUIRED, 0, 0, Seq("_2"), logicalTypeAnnotation = Some(LogicalTypeAnnotation.intType(16, true))), primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED, 0, 0, Seq("_3"), logicalTypeAnnotation = Some(LogicalTypeAnnotation.intType(32, true))), primitiveParquetColumn(LongType, PrimitiveTypeName.INT64, Repetition.REQUIRED, 0, 0, Seq("_4"), logicalTypeAnnotation = Some(LogicalTypeAnnotation.intType(64, true))), primitiveParquetColumn(DateType, PrimitiveTypeName.INT32, Repetition.OPTIONAL, 0, 1, Seq("_5"), logicalTypeAnnotation = Some(LogicalTypeAnnotation.dateType())) )))) testSchemaInference[Tuple1[String]]( "string", """ |message root { | optional binary _1 (UTF8); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = true, expectedParquetColumn = Some( ParquetColumn( sparkType = StructType.fromAttributes( ScalaReflection.attributesFor[Tuple1[String]]), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq( primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL, 0, 1, Seq("_1"), logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())) )))) testSchemaInference[Tuple1[String]]( "binary enum as string", """ |message root { | optional binary _1 (ENUM); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = true, expectedParquetColumn = Some( ParquetColumn( sparkType = StructType.fromAttributes( ScalaReflection.attributesFor[Tuple1[String]]), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq( primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL, 0, 1, Seq("_1"), 
logicalTypeAnnotation = Some(LogicalTypeAnnotation.enumType())) )))) testSchemaInference[Tuple1[Seq[Int]]]( "non-nullable array - non-standard", """ |message root { | optional group _1 (LIST) { | repeated int32 array; | } |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = true, expectedParquetColumn = Some(ParquetColumn( sparkType = StructType(Seq( StructField( "_1", ArrayType(IntegerType, containsNull = false)))), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq(ParquetColumn( sparkType = ArrayType(IntegerType, containsNull = false), descriptor = None, repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_1"), children = Seq( primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REPEATED, 1, 2, Seq("_1", "array"))) ))))) testSchemaInference[Tuple1[Seq[Int]]]( "non-nullable array - standard", """ |message root { | optional group _1 (LIST) { | repeated group list { | required int32 element; | } | } |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = false, expectedParquetColumn = Some(ParquetColumn( sparkType = StructType(Seq( StructField( "_1", ArrayType(IntegerType, containsNull = false)))), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq(ParquetColumn( sparkType = ArrayType(IntegerType, containsNull = false), descriptor = None, repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_1"), children = Seq( primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED, 1, 2, Seq("_1", "list", "element")) )))))) testSchemaInference[Tuple1[Seq[Integer]]]( "nullable array - non-standard", """ |message root { | optional group _1 (LIST) { | repeated group bag { | optional int32 array; | } | } |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = true, 
expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "_1", ArrayType(IntegerType, containsNull = true)))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        sparkType = ArrayType(IntegerType, containsNull = true),
        descriptor = None,
        repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_1"),
        children = Seq(
          primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.OPTIONAL,
            1, 3, Seq("_1", "bag", "array"))
        ))))))

  // Standard 3-level LIST encoding with an optional (nullable) element.
  testSchemaInference[Tuple1[Seq[Integer]]](
    "nullable array - standard",
    """ |message root { | optional group _1 (LIST) { | repeated group list { | optional int32 element; | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    writeLegacyParquetFormat = false,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "_1", ArrayType(IntegerType, containsNull = true)))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        sparkType = ArrayType(IntegerType, containsNull = true),
        descriptor = None,
        repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_1"),
        children = Seq(
          primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.OPTIONAL,
            1, 3, Seq("_1", "list", "element"))
        ))))))

  // Standard MAP encoding: repeated "key_value" group with required key, optional value.
  testSchemaInference[Tuple1[Map[Int, String]]](
    "map - standard",
    """ |message root { | optional group _1 (MAP) { | repeated group key_value { | required int32 key; | optional binary value (UTF8); | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    writeLegacyParquetFormat = false,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "_1", MapType(IntegerType, StringType, valueContainsNull = true),
          nullable = true))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(
        ParquetColumn(
sparkType = MapType(IntegerType, StringType, valueContainsNull = true),
          descriptor = None,
          repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_1"),
          children = Seq(
            primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED,
              1, 2, Seq("_1", "key_value", "key")),
            primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL,
              1, 3, Seq("_1", "key_value", "value"),
              logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())))
        )))))

  // Legacy MAP encoding using the deprecated MAP_KEY_VALUE annotation on the repeated group.
  testSchemaInference[Tuple1[Map[Int, String]]](
    "map - non-standard",
    """ |message root { | optional group _1 (MAP) { | repeated group key_value (MAP_KEY_VALUE) { | required int32 key; | optional binary value (UTF8); | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    writeLegacyParquetFormat = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "_1", MapType(IntegerType, StringType, valueContainsNull = true),
          nullable = true))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(
        ParquetColumn(
          sparkType = MapType(IntegerType, StringType, valueContainsNull = true),
          descriptor = None,
          repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_1"),
          children = Seq(
            primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED,
              1, 2, Seq("_1", "key_value", "key")),
            primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL,
              1, 3, Seq("_1", "key_value", "value"),
              logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())))
        )))))

  // MAP whose key is itself a group (struct) type rather than a primitive.
  testSchemaInference[Tuple1[Map[(String, String), String]]](
    "map - group type key",
    """ |message root { | optional group _1 (MAP) { | repeated group key_value (MAP_KEY_VALUE) { | required group key { | optional binary _1 (UTF8); | optional binary _2 (UTF8); | } | optional binary value (UTF8); | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
writeLegacyParquetFormat = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "_1",
          MapType(StructType(Seq(StructField("_1", StringType), StructField("_2", StringType))),
            StringType, valueContainsNull = true),
          nullable = true))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(
        ParquetColumn(
          sparkType = MapType(
            StructType(Seq(StructField("_1", StringType), StructField("_2", StringType))),
            StringType, valueContainsNull = true),
          descriptor = None,
          repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_1"),
          children = Seq(
            // The struct-typed key column itself.
            ParquetColumn(
              sparkType = StructType(Seq(StructField("_1", StringType), StructField("_2", StringType))),
              descriptor = None,
              repetitionLevel = 1, definitionLevel = 2, required = true,
              path = Seq("_1", "key_value", "key"),
              children = Seq(
                primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL,
                  1, 3, Seq("_1", "key_value", "key", "_1"),
                  logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())),
                primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL,
                  1, 3, Seq("_1", "key_value", "key", "_2"),
                  logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType()))) ),
            primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL,
              1, 3, Seq("_1", "key_value", "value"),
              logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())))
        )))))

  // A plain (unannotated) Parquet group maps to a Catalyst struct.
  testSchemaInference[Tuple1[(Int, String)]](
    "struct",
    """ |message root { | optional group _1 { | required int32 _1; | optional binary _2 (UTF8); | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    writeLegacyParquetFormat = false,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "_1",
          StructType(Seq(
            StructField("_1", IntegerType, nullable = false),
            StructField("_2", StringType)))))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false,
      path =
Seq(),
      children = Seq(
        ParquetColumn(
          sparkType = StructType(Seq(
            StructField("_1", IntegerType, nullable = false),
            StructField("_2", StringType))),
          descriptor = None,
          repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_1"),
          children = Seq(
            primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED,
              0, 1, Seq("_1", "_1")),
            primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL,
              0, 2, Seq("_1", "_2"),
              logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())))
        )))))

  // Deeply nested map-of-struct-of-list in the legacy (bag/array) encoding.
  testSchemaInference[Tuple1[Map[Int, (String, Seq[(Int, Double)])]]](
    "deeply nested type - non-standard",
    """ |message root { | optional group _1 (MAP_KEY_VALUE) { | repeated group key_value { | required int32 key; | optional group value { | optional binary _1 (UTF8); | optional group _2 (LIST) { | repeated group bag { | optional group array { | required int32 _1; | required double _2; | } | } | } | } | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    writeLegacyParquetFormat = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "_1",
          MapType(IntegerType,
            StructType(Seq(
              StructField("_1", StringType),
              StructField("_2", ArrayType(
                StructType(Seq(
                  StructField("_1", IntegerType, nullable = false),
                  StructField("_2", DoubleType, nullable = false))))))),
            valueContainsNull = true)))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(
        ParquetColumn(
          sparkType = MapType(IntegerType,
            StructType(Seq(
              StructField("_1", StringType),
              StructField("_2", ArrayType(
                StructType(Seq(
                  StructField("_1", IntegerType, nullable = false),
                  StructField("_2", DoubleType, nullable = false))))))),
            valueContainsNull = true),
          descriptor = None,
          repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_1"),
          children = Seq(
            primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED,
              1, 2, Seq("_1",
"key_value", "key")),
            // The struct-typed map value and its nested list child.
            ParquetColumn(
              sparkType = StructType(Seq(
                StructField("_1", StringType),
                StructField("_2", ArrayType(
                  StructType(Seq(
                    StructField("_1", IntegerType, nullable = false),
                    StructField("_2", DoubleType, nullable = false))))))),
              descriptor = None,
              repetitionLevel = 1, definitionLevel = 3, required = false,
              path = Seq("_1", "key_value", "value"),
              children = Seq(
                primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL,
                  1, 4, Seq("_1", "key_value", "value", "_1"),
                  logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())),
                ParquetColumn(
                  sparkType = ArrayType(
                    StructType(Seq(
                      StructField("_1", IntegerType, nullable = false),
                      StructField("_2", DoubleType, nullable = false)))),
                  descriptor = None,
                  repetitionLevel = 1, definitionLevel = 4, required = false,
                  path = Seq("_1", "key_value", "value", "_2"),
                  children = Seq(
                    ParquetColumn(
                      sparkType = StructType(Seq(
                        StructField("_1", IntegerType, nullable = false),
                        StructField("_2", DoubleType, nullable = false))),
                      descriptor = None,
                      repetitionLevel = 2, definitionLevel = 6, required = false,
                      path = Seq("_1", "key_value", "value", "_2", "bag", "array"),
                      children = Seq(
                        primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32,
                          Repetition.REQUIRED, 2, 6,
                          Seq("_1", "key_value", "value", "_2", "bag", "array", "_1")),
                        primitiveParquetColumn(DoubleType, PrimitiveTypeName.DOUBLE,
                          Repetition.REQUIRED, 2, 6,
                          Seq("_1", "key_value", "value", "_2", "bag", "array", "_2")) ))))))
        ))))))

  // Same deeply nested shape using the standard (list/element, key_value) encoding.
  testSchemaInference[Tuple1[Map[Int, (String, Seq[(Int, Double)])]]](
    "deeply nested type - standard",
    """ |message root { | optional group _1 (MAP) { | repeated group key_value { | required int32 key; | optional group value { | optional binary _1 (UTF8); | optional group _2 (LIST) { | repeated group list { | optional group element { | required int32 _1; | required double _2; | } | } | } | } | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    writeLegacyParquetFormat = false,
expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "_1",
          MapType(IntegerType,
            StructType(Seq(
              StructField("_1", StringType),
              StructField("_2", ArrayType(
                StructType(Seq(
                  StructField("_1", IntegerType, nullable = false),
                  StructField("_2", DoubleType, nullable = false))))))),
            valueContainsNull = true)))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(
        ParquetColumn(
          sparkType = MapType(IntegerType,
            StructType(Seq(
              StructField("_1", StringType),
              StructField("_2", ArrayType(
                StructType(Seq(
                  StructField("_1", IntegerType, nullable = false),
                  StructField("_2", DoubleType, nullable = false))))))),
            valueContainsNull = true),
          descriptor = None,
          repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_1"),
          children = Seq(
            primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED,
              1, 2, Seq("_1", "key_value", "key")),
            // Struct-typed map value with nested string and list-of-struct children.
            ParquetColumn(
              sparkType = StructType(Seq(
                StructField("_1", StringType),
                StructField("_2", ArrayType(
                  StructType(Seq(
                    StructField("_1", IntegerType, nullable = false),
                    StructField("_2", DoubleType, nullable = false))))))),
              descriptor = None,
              repetitionLevel = 1, definitionLevel = 3, required = false,
              path = Seq("_1", "key_value", "value"),
              children = Seq(
                primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL,
                  1, 4, Seq("_1", "key_value", "value", "_1"),
                  logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())),
                ParquetColumn(
                  sparkType = ArrayType(
                    StructType(Seq(
                      StructField("_1", IntegerType, nullable = false),
                      StructField("_2", DoubleType, nullable = false)))),
                  descriptor = None,
                  repetitionLevel = 1, definitionLevel = 4, required = false,
                  path = Seq("_1", "key_value", "value", "_2"),
                  children = Seq(
                    ParquetColumn(
                      sparkType = StructType(Seq(
                        StructField("_1", IntegerType, nullable = false),
                        StructField("_2", DoubleType, nullable = false))),
                      descriptor = None,
                      repetitionLevel = 2,
definitionLevel = 6, required = false,
                      path = Seq("_1", "key_value", "value", "_2", "list", "element"),
                      children = Seq(
                        primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32,
                          Repetition.REQUIRED, 2, 6,
                          Seq("_1", "key_value", "value", "_2", "list", "element", "_1")),
                        primitiveParquetColumn(DoubleType, PrimitiveTypeName.DOUBLE,
                          Repetition.REQUIRED, 2, 6,
                          Seq("_1", "key_value", "value", "_2", "list", "element", "_2"))))))))
        ))))))

  // Optional primitive and optional map-with-nullable-values; the primitive leaf carries
  // a concrete ColumnDescriptor, whereas group columns have descriptor = None.
  testSchemaInference[(Option[Int], Map[Int, Option[Double]])](
    "optional types",
    """ |message root { | optional int32 _1; | optional group _2 (MAP) { | repeated group key_value { | required int32 key; | optional double value; | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    writeLegacyParquetFormat = false,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField("_1", IntegerType),
        StructField("_2", MapType(IntegerType, DoubleType, valueContainsNull = true)))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(
        ParquetColumn(
          sparkType = IntegerType,
          descriptor = Some(new ColumnDescriptor(Array("_1"),
            Types.optional(PrimitiveTypeName.INT32).named("_1"), 0, 1)),
          repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_1"),
          children = Seq()),
        ParquetColumn(
          sparkType = MapType(IntegerType, DoubleType, valueContainsNull = true),
          descriptor = None,
          repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("_2"),
          children = Seq(
            primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED,
              1, 2, Seq("_2", "key_value", "key")),
            primitiveParquetColumn(DoubleType, PrimitiveTypeName.DOUBLE, Repetition.OPTIONAL,
              1, 3, Seq("_2", "key_value", "value"))))) )))
}

// Schema round-trip and compatibility tests for the Parquet data source.
class ParquetSchemaSuite extends ParquetSchemaTest {
  test("DataType string parser compatibility") {
    // This is the generated string from previous versions of the Spark SQL, using the following:
    // val schema = StructType(List(
    //
//   StructField("c1", IntegerType, false),
    //   StructField("c2", BinaryType, true)))
    val caseClassString =
      "StructType(List(StructField(c1,IntegerType,false), StructField(c2,BinaryType,true)))"
    // scalastyle:off
    val jsonString = """{"type":"struct","fields":[{"name":"c1","type":"integer","nullable":false,"metadata":{}},{"name":"c2","type":"binary","nullable":true,"metadata":{}}]}"""
    // scalastyle:on
    val fromCaseClassString = StructType.fromString(caseClassString)
    val fromJson = StructType.fromString(jsonString)
    // Both legacy formats must parse to the same fields.
    (fromCaseClassString, fromJson).zipped.foreach { (a, b) =>
      assert(a.name == b.name)
      assert(a.dataType === b.dataType)
      assert(a.nullable === b.nullable)
    }
  }

  test("schema merging failure error message") {
    import testImplicits._
    withTempPath { dir =>
      val path = dir.getCanonicalPath
      // Two partitions with incompatible column types for `id` (long vs int).
      spark.range(3).write.parquet(s"$path/p=1")
      spark.range(3).select('id cast IntegerType as 'id).write.parquet(s"$path/p=2")
      val message = intercept[SparkException] {
        spark.read.option("mergeSchema", "true").parquet(path).schema
      }.getMessage
      assert(message.contains("Failed merging schema"))
    }
  }

  // =======================================
  // Tests for parquet schema mismatch error
  // =======================================
  // Writes two parquet files with swapped column types into the same folder and
  // returns the SparkException raised when reading them back.
  def testSchemaMismatch(path: String, vectorizedReaderEnabled: Boolean): SparkException = {
    import testImplicits._
    var e: SparkException = null
    withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> vectorizedReaderEnabled.toString) {
      // Create two parquet files with different schemas in the same folder
      Seq(("bcd", 2)).toDF("a", "b").coalesce(1).write.mode("overwrite").parquet(s"$path/parquet")
      Seq((1, "abc")).toDF("a", "b").coalesce(1).write.mode("append").parquet(s"$path/parquet")
      e = intercept[SparkException] {
        spark.read.parquet(s"$path/parquet").collect()
      }
    }
    e
  }

  test("schema mismatch failure error message for parquet reader") {
    withTempPath { dir =>
      val e = testSchemaMismatch(dir.getCanonicalPath, vectorizedReaderEnabled = false)
      val expectedMessage = "Encountered
error while reading file"
      assert(e.getCause.isInstanceOf[QueryExecutionException])
      assert(e.getCause.getCause.isInstanceOf[ParquetDecodingException])
      assert(e.getCause.getMessage.contains(expectedMessage))
    }
  }

  test("schema mismatch failure error message for parquet vectorized reader") {
    withTempPath { dir =>
      val e = testSchemaMismatch(dir.getCanonicalPath, vectorizedReaderEnabled = true)
      assert(e.getCause.isInstanceOf[QueryExecutionException])
      assert(e.getCause.getCause.isInstanceOf[SchemaColumnConvertNotSupportedException])
      // Check if the physical type is reporting correctly
      val errMsg = e.getCause.getMessage
      assert(errMsg.startsWith("Parquet column cannot be converted in file"))
      val file = errMsg.substring("Parquet column cannot be converted in file ".length,
        errMsg.indexOf(". "))
      val col = spark.read.parquet(file).schema.fields.filter(_.name == "a")
      assert(col.length == 1)
      // Which file fails first is nondeterministic, so accept either direction of mismatch.
      if (col(0).dataType == StringType) {
        assert(errMsg.contains("Column: [a], Expected: int, Found: BINARY"))
      } else {
        assert(errMsg.endsWith("Column: [a], Expected: string, Found: INT32"))
      }
    }
  }

  // =======================================================
  // Tests for converting Parquet LIST to Catalyst ArrayType
  // =======================================================
  testParquetToCatalyst(
    "Backwards-compatibility: LIST with nullable element type - 1 - standard",
    StructType(Seq(
      StructField( "f1", ArrayType(IntegerType, containsNull = true), nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated group list { | optional int32 element; | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1", ArrayType(IntegerType, containsNull = true)))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        sparkType = ArrayType(IntegerType, containsNull = true),
        descriptor = None,
        repetitionLevel = 0,
definitionLevel = 1, required = false, path = Seq("f1"),
        children = Seq(
          primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.OPTIONAL,
            1, 3, Seq("f1", "list", "element"))))
      )))

  // Non-standard element group name ("element"/"num") still decodes as a nullable list.
  testParquetToCatalyst(
    "Backwards-compatibility: LIST with nullable element type - 2",
    StructType(Seq(
      StructField( "f1", ArrayType(IntegerType, containsNull = true), nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated group element { | optional int32 num; | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1", ArrayType(IntegerType, containsNull = true)))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        sparkType = ArrayType(IntegerType, containsNull = true),
        descriptor = None,
        repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"),
        children = Seq(
          primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.OPTIONAL,
            1, 3, Seq("f1", "element", "num"))))
      ))))

  // Standard 3-level LIST with a required element maps to containsNull = false.
  testParquetToCatalyst(
    "Backwards-compatibility: LIST with non-nullable element type - 1 - standard",
    StructType(Seq(
      StructField("f1", ArrayType(IntegerType, containsNull = false), nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated group list { | required int32 element; | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1", ArrayType(IntegerType, containsNull = false)))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        sparkType = ArrayType(IntegerType, containsNull = false),
        descriptor = None,
        repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"),
        children = Seq(
          primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32,
Repetition.REQUIRED, 1, 2, Seq("f1", "list", "element"))))
      )))

  // Non-standard element group name with a required element.
  testParquetToCatalyst(
    "Backwards-compatibility: LIST with non-nullable element type - 2",
    StructType(Seq(
      StructField("f1", ArrayType(IntegerType, containsNull = false), nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated group element { | required int32 num; | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1", ArrayType(IntegerType, containsNull = false)))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        sparkType = ArrayType(IntegerType, containsNull = false),
        descriptor = None,
        repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"),
        children = Seq(
          primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED,
            1, 2, Seq("f1", "element", "num"))))
      ))))

  // 2-level LIST: the repeated primitive is the element itself.
  testParquetToCatalyst(
    "Backwards-compatibility: LIST with non-nullable element type - 3",
    StructType(Seq(
      StructField("f1", ArrayType(IntegerType, containsNull = false), nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated int32 element; | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1", ArrayType(IntegerType, containsNull = false)))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        sparkType = ArrayType(IntegerType, containsNull = false),
        descriptor = None,
        repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"),
        children = Seq(
          primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REPEATED,
            1, 2, Seq("f1", "element"))))
      ))))

  // 2-level LIST whose repeated group is a multi-field struct element.
  testParquetToCatalyst(
    "Backwards-compatibility: LIST with non-nullable element type - 4",
    StructType(Seq(
      StructField( "f1",
ArrayType(
          StructType(Seq(
            StructField("str", StringType, nullable = false),
            StructField("num", IntegerType, nullable = false))),
          containsNull = false),
        nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated group element { | required binary str (UTF8); | required int32 num; | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1",
          ArrayType(
            StructType(Seq(
              StructField("str", StringType, nullable = false),
              StructField("num", IntegerType, nullable = false))),
            containsNull = false)))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        ArrayType(
          StructType(Seq(
            StructField("str", StringType, nullable = false),
            StructField("num", IntegerType, nullable = false))),
          containsNull = false),
        descriptor = None,
        repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"),
        children = Seq(ParquetColumn(
          sparkType = StructType(Seq(
            StructField("str", StringType, nullable = false),
            StructField("num", IntegerType, nullable = false))),
          descriptor = None,
          repetitionLevel = 1, definitionLevel = 2, required = false, path = Seq("f1", "element"),
          children = Seq(
            primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.REQUIRED,
              1, 2, Seq("f1", "element", "str"),
              logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())),
            primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED,
              1, 2, Seq("f1", "element", "num"))))) )))))

  // parquet-avro writes the repeated struct group under the name "array".
  testParquetToCatalyst(
    "Backwards-compatibility: LIST with non-nullable element type - 5 - parquet-avro style",
    StructType(Seq(
      StructField( "f1",
        ArrayType(
          StructType(Seq(
            StructField("str", StringType, nullable = false))),
          containsNull = false),
        nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated group array { | required binary str (UTF8); | } | } |} """.stripMargin,
binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1",
          ArrayType(
            StructType(Seq(
              StructField("str", StringType, nullable = false))),
            containsNull = false),
          nullable = true))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        sparkType = ArrayType(
          StructType(Seq(
            StructField("str", StringType, nullable = false))),
          containsNull = false),
        descriptor = None,
        repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"),
        children = Seq(ParquetColumn(
          sparkType = StructType(Seq(
            StructField("str", StringType, nullable = false))),
          descriptor = None,
          repetitionLevel = 1, definitionLevel = 2, required = false, path = Seq("f1", "array"),
          children = Seq(
            primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.REQUIRED,
              1, 2, Seq("f1", "array", "str"),
              logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType()))))))
      ))))

  // parquet-thrift writes the repeated struct group under the name "<field>_tuple".
  testParquetToCatalyst(
    "Backwards-compatibility: LIST with non-nullable element type - 6 - parquet-thrift style",
    StructType(Seq(
      StructField( "f1",
        ArrayType(
          StructType(Seq(
            StructField("str", StringType, nullable = false))),
          containsNull = false),
        nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated group f1_tuple { | required binary str (UTF8); | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1",
          ArrayType(
            StructType(Seq(
              StructField("str", StringType, nullable = false))),
            containsNull = false),
          nullable = true))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        sparkType = ArrayType(
          StructType(Seq(
            StructField("str", StringType, nullable = false))),
          containsNull = false),
        descriptor = None,
        repetitionLevel = 0, definitionLevel = 1, required = false,
path = Seq("f1"),
        children = Seq(ParquetColumn(
          sparkType = StructType(Seq(
            StructField("str", StringType, nullable = false))),
          descriptor = None,
          repetitionLevel = 1, definitionLevel = 2, required = false,
          path = Seq("f1", "f1_tuple"),
          children = Seq(
            primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.REQUIRED,
              1, 2, Seq("f1", "f1_tuple", "str"),
              logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType()))))))
      ))))

  // parquet-protobuf writes primitive lists as a bare repeated primitive field.
  testParquetToCatalyst(
    "Backwards-compatibility: LIST with non-nullable element type 7 - " +
      "parquet-protobuf primitive lists",
    new StructType()
      .add("f1", ArrayType(IntegerType, containsNull = false), nullable = false),
    """message root { | repeated int32 f1; |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1", ArrayType(IntegerType, containsNull = false), nullable = false))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        sparkType = ArrayType(IntegerType, containsNull = false),
        descriptor = None,
        repetitionLevel = 0, definitionLevel = 0, required = true, path = Seq("f1"),
        children = Seq(
          primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REPEATED,
            1, 1, Seq("f1"))))) )))

  // parquet-protobuf writes struct lists as a bare repeated group field.
  testParquetToCatalyst(
    "Backwards-compatibility: LIST with non-nullable element type 8 - " +
      "parquet-protobuf non-primitive lists",
    {
      val elementType = new StructType()
        .add("c1", StringType, nullable = true)
        .add("c2", IntegerType, nullable = false)
      new StructType()
        .add("f1", ArrayType(elementType, containsNull = false), nullable = false)
    },
    """message root { | repeated group f1 { | optional binary c1 (UTF8); | required int32 c2; | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1",
          ArrayType(
            new StructType()
              .add("c1", StringType, nullable = true)
.add("c2", IntegerType, nullable = false),
            containsNull = false),
          nullable = false))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(ParquetColumn(
        sparkType = ArrayType(
          new StructType()
            .add("c1", StringType, nullable = true)
            .add("c2", IntegerType, nullable = false),
          containsNull = false),
        descriptor = None,
        repetitionLevel = 0, definitionLevel = 0, required = true, path = Seq("f1"),
        children = Seq(
          // Note: the repeated group is both the list and its element, hence the shared path.
          ParquetColumn(
            sparkType = new StructType()
              .add("c1", StringType, nullable = true)
              .add("c2", IntegerType, nullable = false),
            descriptor = None,
            repetitionLevel = 1, definitionLevel = 1, required = true, path = Seq("f1"),
            children = Seq(
              primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL,
                1, 2, Seq("f1", "c1"),
                logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())),
              primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED,
                1, 1, Seq("f1", "c2"))))) )))))

  // =======================================================
  // Tests for converting Catalyst ArrayType to Parquet LIST
  // =======================================================
  testCatalystToParquet(
    "Backwards-compatibility: LIST with nullable element type - 1 - standard",
    StructType(Seq(
      StructField( "f1", ArrayType(IntegerType, containsNull = true), nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated group list { | optional int32 element; | } | } |} """.stripMargin,
    writeLegacyParquetFormat = false)

  testCatalystToParquet(
    "Backwards-compatibility: LIST with nullable element type - 2 - prior to 1.4.x",
    StructType(Seq(
      StructField( "f1", ArrayType(IntegerType, containsNull = true), nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated group bag { | optional int32 array; | } | } |} """.stripMargin,
    writeLegacyParquetFormat = true)

  testCatalystToParquet(
    "Backwards-compatibility: LIST with non-nullable element type - 1 - standard",
StructType(Seq(
      StructField( "f1", ArrayType(IntegerType, containsNull = false), nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated group list { | required int32 element; | } | } |} """.stripMargin,
    writeLegacyParquetFormat = false)

  testCatalystToParquet(
    "Backwards-compatibility: LIST with non-nullable element type - 2 - prior to 1.4.x",
    StructType(Seq(
      StructField( "f1", ArrayType(IntegerType, containsNull = false), nullable = true))),
    """message root { | optional group f1 (LIST) { | repeated int32 array; | } |} """.stripMargin,
    writeLegacyParquetFormat = true)

  // ====================================================
  // Tests for converting Parquet Map to Catalyst MapType
  // ====================================================
  testParquetToCatalyst(
    "Backwards-compatibility: MAP with non-nullable value type - 1 - standard",
    StructType(Seq(
      StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = false),
        nullable = true))),
    """message root { | optional group f1 (MAP) { | repeated group key_value { | required int32 key; | required binary value (UTF8); | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = false),
          nullable = true))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(
        ParquetColumn(
          sparkType = MapType(IntegerType, StringType, valueContainsNull = false),
          descriptor = None,
          repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"),
          children = Seq(
            primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED,
              1, 2, Seq("f1", "key_value", "key")),
            primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.REQUIRED,
              1, 2, Seq("f1", "key_value", "value"),
              logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())) ))))))
// Legacy MAP_KEY_VALUE annotation with non-standard key/value field names.
  testParquetToCatalyst(
    "Backwards-compatibility: MAP with non-nullable value type - 2",
    StructType(Seq(
      StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = false),
        nullable = true))),
    """message root { | optional group f1 (MAP_KEY_VALUE) { | repeated group key_value { | required int32 num; | required binary str (UTF8); | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = false),
          nullable = true))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(
        ParquetColumn(
          sparkType = MapType(IntegerType, StringType, valueContainsNull = false),
          descriptor = None,
          repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"),
          children = Seq(
            primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED,
              1, 2, Seq("f1", "key_value", "num")),
            primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.REQUIRED,
              1, 2, Seq("f1", "key_value", "str"),
              logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())) ))))))

  // Pre-1.4.x writer: MAP_KEY_VALUE annotation on the repeated key_value group.
  testParquetToCatalyst(
    "Backwards-compatibility: MAP with non-nullable value type - 3 - prior to 1.4.x",
    StructType(Seq(
      StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = false),
        nullable = true))),
    """message root { | optional group f1 (MAP) { | repeated group key_value (MAP_KEY_VALUE) { | required int32 key; | required binary value (UTF8); | } | } |} """.stripMargin,
    binaryAsString = true,
    int96AsTimestamp = true,
    expectedParquetColumn = Some(ParquetColumn(
      sparkType = StructType(Seq(
        StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = false),
          nullable = true))),
      descriptor = None,
      repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(),
      children = Seq(
        ParquetColumn(
          sparkType = MapType(IntegerType, StringType,
valueContainsNull = false), descriptor = None, repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"), children = Seq( primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED, 1, 2, Seq("f1", "key_value", "key")), primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.REQUIRED, 1, 2, Seq("f1", "key_value", "value"), logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType()))) ))))) testParquetToCatalyst( "Backwards-compatibility: MAP with nullable value type - 1 - standard", StructType(Seq( StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = true), nullable = true))), """message root { | optional group f1 (MAP) { | repeated group key_value { | required int32 key; | optional binary value (UTF8); | } | } |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, expectedParquetColumn = Some(ParquetColumn( sparkType = StructType(Seq( StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = true), nullable = true))), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq( ParquetColumn( sparkType = MapType(IntegerType, StringType, valueContainsNull = true), descriptor = None, repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"), children = Seq( primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED, 1, 2, Seq("f1", "key_value", "key")), primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL, 1, 3, Seq("f1", "key_value", "value"), logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType()))) ))))) testParquetToCatalyst( "Backwards-compatibility: MAP with nullable value type - 2", StructType(Seq( StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = true), nullable = true))), """message root { | optional group f1 (MAP_KEY_VALUE) { | repeated group key_value { | required int32 num; | optional binary str (UTF8); 
| } | } |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, expectedParquetColumn = Some(ParquetColumn( sparkType = StructType(Seq( StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = true), nullable = true))), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq( ParquetColumn( sparkType = MapType(IntegerType, StringType, valueContainsNull = true), descriptor = None, repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"), children = Seq( primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED, 1, 2, Seq("f1", "key_value", "num")), primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL, 1, 3, Seq("f1", "key_value", "str"), logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType()))) ))))) testParquetToCatalyst( "Backwards-compatibility: MAP with nullable value type - 3 - parquet-avro style", StructType(Seq( StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = true), nullable = true))), """message root { | optional group f1 (MAP) { | repeated group key_value (MAP_KEY_VALUE) { | required int32 key; | optional binary value (UTF8); | } | } |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, expectedParquetColumn = Some(ParquetColumn( sparkType = StructType(Seq( StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = true), nullable = true))), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq( ParquetColumn( sparkType = MapType(IntegerType, StringType, valueContainsNull = true), descriptor = None, repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"), children = Seq( primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.REQUIRED, 1, 2, Seq("f1", "key_value", "key")), primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL, 1, 3, 
Seq("f1", "key_value", "value"), logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType()))) ))))) // ==================================================== // Tests for converting Catalyst MapType to Parquet Map // ==================================================== testCatalystToParquet( "Backwards-compatibility: MAP with non-nullable value type - 1 - standard", StructType(Seq( StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = false), nullable = true))), """message root { | optional group f1 (MAP) { | repeated group key_value { | required int32 key; | required binary value (UTF8); | } | } |} """.stripMargin, writeLegacyParquetFormat = false) testCatalystToParquet( "Backwards-compatibility: MAP with non-nullable value type - 2 - prior to 1.4.x", StructType(Seq( StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = false), nullable = true))), """message root { | optional group f1 (MAP) { | repeated group key_value (MAP_KEY_VALUE) { | required int32 key; | required binary value (UTF8); | } | } |} """.stripMargin, writeLegacyParquetFormat = true) testCatalystToParquet( "Backwards-compatibility: MAP with nullable value type - 1 - standard", StructType(Seq( StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = true), nullable = true))), """message root { | optional group f1 (MAP) { | repeated group key_value { | required int32 key; | optional binary value (UTF8); | } | } |} """.stripMargin, writeLegacyParquetFormat = false) testCatalystToParquet( "Backwards-compatibility: MAP with nullable value type - 3 - prior to 1.4.x", StructType(Seq( StructField( "f1", MapType(IntegerType, StringType, valueContainsNull = true), nullable = true))), """message root { | optional group f1 (MAP) { | repeated group key_value (MAP_KEY_VALUE) { | required int32 key; | optional binary value (UTF8); | } | } |} """.stripMargin, writeLegacyParquetFormat = true) testParquetToCatalyst( "SPARK-36935: test case insensitive when 
converting Parquet schema", StructType(Seq(StructField("F1", ShortType))), """message root { | optional int32 f1; |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, sparkReadSchema = Some(StructType(Seq(StructField("F1", ShortType)))), expectedParquetColumn = Some(ParquetColumn( sparkType = StructType(Seq(StructField("F1", ShortType))), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq( primitiveParquetColumn(ShortType, PrimitiveTypeName.INT32, Repetition.OPTIONAL, 0, 1, Seq("f1")) ) ))) testParquetToCatalyst( "SPARK-36935: test case sensitive when converting Parquet schema", StructType(Seq(StructField("f1", IntegerType))), """message root { | optional int32 f1; |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, caseSensitive = true, sparkReadSchema = Some(StructType(Seq(StructField("F1", ShortType)))), expectedParquetColumn = Some(ParquetColumn( sparkType = StructType(Seq(StructField("f1", IntegerType))), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq( primitiveParquetColumn(IntegerType, PrimitiveTypeName.INT32, Repetition.OPTIONAL, 0, 1, Seq("f1")) ) ))) testParquetToCatalyst( "SPARK-36935: test Spark read schema with case sensitivity", StructType(Seq( StructField( "F1", MapType(ShortType, StructType(Seq( StructField("G1", StringType), StructField("G2", ArrayType( StructType(Seq( StructField("H1", ByteType, nullable = false), StructField("H2", FloatType, nullable = false))))))), valueContainsNull = true)))), """message root { | optional group f1 (MAP_KEY_VALUE) { | repeated group key_value { | required int32 key; | optional group value { | optional binary g1 (UTF8); | optional group g2 (LIST) { | repeated group list { | optional group element { | required int32 h1; | required double h2; | } | } | } | } | } | } |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, sparkReadSchema = 
Some(StructType(Seq( StructField( "F1", MapType(ShortType, StructType(Seq( StructField("G1", StringType), StructField("G2", ArrayType( StructType(Seq( StructField("H1", ByteType, nullable = false), StructField("H2", FloatType, nullable = false))))))), valueContainsNull = true))))), expectedParquetColumn = Some(ParquetColumn( sparkType = StructType(Seq( StructField( "F1", MapType(ShortType, StructType(Seq( StructField("G1", StringType), StructField("G2", ArrayType( StructType(Seq( StructField("H1", ByteType, nullable = false), StructField("H2", FloatType, nullable = false))))))), valueContainsNull = true)))), descriptor = None, repetitionLevel = 0, definitionLevel = 0, required = false, path = Seq(), children = Seq( ParquetColumn( sparkType = MapType(ShortType, StructType(Seq( StructField("G1", StringType), StructField("G2", ArrayType( StructType(Seq( StructField("H1", ByteType, nullable = false), StructField("H2", FloatType, nullable = false))))))), valueContainsNull = true), descriptor = None, repetitionLevel = 0, definitionLevel = 1, required = false, path = Seq("f1"), children = Seq( primitiveParquetColumn(ShortType, PrimitiveTypeName.INT32, Repetition.REQUIRED, 1, 2, Seq("f1", "key_value", "key")), ParquetColumn( sparkType = StructType(Seq( StructField("G1", StringType), StructField("G2", ArrayType( StructType(Seq( StructField("H1", ByteType, nullable = false), StructField("H2", FloatType, nullable = false))))))), descriptor = None, repetitionLevel = 1, definitionLevel = 3, required = false, path = Seq("f1", "key_value", "value"), children = Seq( primitiveParquetColumn(StringType, PrimitiveTypeName.BINARY, Repetition.OPTIONAL, 1, 4, Seq("f1", "key_value", "value", "g1"), logicalTypeAnnotation = Some(LogicalTypeAnnotation.stringType())), ParquetColumn( sparkType = ArrayType( StructType(Seq( StructField("H1", ByteType, nullable = false), StructField("H2", FloatType, nullable = false)))), descriptor = None, repetitionLevel = 1, definitionLevel = 4, required = 
false, path = Seq("f1", "key_value", "value", "g2"), children = Seq( ParquetColumn( sparkType = StructType(Seq( StructField("H1", ByteType, nullable = false), StructField("H2", FloatType, nullable = false))), descriptor = None, repetitionLevel = 2, definitionLevel = 6, required = false, path = Seq("f1", "key_value", "value", "g2", "list", "element"), children = Seq( primitiveParquetColumn(ByteType, PrimitiveTypeName.INT32, Repetition.REQUIRED, 2, 6, Seq("f1", "key_value", "value", "g2", "list", "element", "h1")), primitiveParquetColumn(FloatType, PrimitiveTypeName.DOUBLE, Repetition.REQUIRED, 2, 6, Seq("f1", "key_value", "value", "g2", "list", "element", "h2")))))))) )))))) // ================================= // Tests for conversion for decimals // ================================= testSchema( "DECIMAL(1, 0) - standard", StructType(Seq(StructField("f1", DecimalType(1, 0)))), """message root { | optional int32 f1 (DECIMAL(1, 0)); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = false) testSchema( "DECIMAL(8, 3) - standard", StructType(Seq(StructField("f1", DecimalType(8, 3)))), """message root { | optional int32 f1 (DECIMAL(8, 3)); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = false) testSchema( "DECIMAL(9, 3) - standard", StructType(Seq(StructField("f1", DecimalType(9, 3)))), """message root { | optional int32 f1 (DECIMAL(9, 3)); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = false) testSchema( "DECIMAL(18, 3) - standard", StructType(Seq(StructField("f1", DecimalType(18, 3)))), """message root { | optional int64 f1 (DECIMAL(18, 3)); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = false) testSchema( "DECIMAL(19, 3) - standard", StructType(Seq(StructField("f1", DecimalType(19, 3)))), """message root { | optional fixed_len_byte_array(9) f1 (DECIMAL(19, 3)); |} """.stripMargin, 
binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = false) testSchema( "DECIMAL(1, 0) - prior to 1.4.x", StructType(Seq(StructField("f1", DecimalType(1, 0)))), """message root { | optional fixed_len_byte_array(1) f1 (DECIMAL(1, 0)); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = true) testSchema( "DECIMAL(8, 3) - prior to 1.4.x", StructType(Seq(StructField("f1", DecimalType(8, 3)))), """message root { | optional fixed_len_byte_array(4) f1 (DECIMAL(8, 3)); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = true) testSchema( "DECIMAL(9, 3) - prior to 1.4.x", StructType(Seq(StructField("f1", DecimalType(9, 3)))), """message root { | optional fixed_len_byte_array(5) f1 (DECIMAL(9, 3)); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = true) testSchema( "DECIMAL(18, 3) - prior to 1.4.x", StructType(Seq(StructField("f1", DecimalType(18, 3)))), """message root { | optional fixed_len_byte_array(8) f1 (DECIMAL(18, 3)); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = true, writeLegacyParquetFormat = true) testSchema( "Timestamp written and read as INT64 with TIMESTAMP_MILLIS", StructType(Seq(StructField("f1", TimestampType))), """message root { | optional INT64 f1 (TIMESTAMP_MILLIS); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = false, writeLegacyParquetFormat = true, outputTimestampType = SQLConf.ParquetOutputTimestampType.TIMESTAMP_MILLIS) testSchema( "Timestamp written and read as INT64 with TIMESTAMP_MICROS", StructType(Seq(StructField("f1", TimestampType))), """message root { | optional INT64 f1 (TIMESTAMP_MICROS); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = false, writeLegacyParquetFormat = true, outputTimestampType = SQLConf.ParquetOutputTimestampType.TIMESTAMP_MICROS) testCatalystToParquet( "SPARK-36825: Year-month interval written and read as INT32", 
StructType(Seq(StructField("f1", YearMonthIntervalType()))), """message root { | optional INT32 f1; |} """.stripMargin, writeLegacyParquetFormat = false) testCatalystToParquet( "SPARK-36825: Day-time interval written and read as INT64", StructType(Seq(StructField("f1", DayTimeIntervalType()))), """message root { | optional INT64 f1; |} """.stripMargin, writeLegacyParquetFormat = false) // The behavior of reading/writing TimestampNTZ type is independent of the configurations // SQLConf.PARQUET_INT96_AS_TIMESTAMP and SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE Seq(true, false).foreach { int96AsTimestamp => Seq(INT96, TIMESTAMP_MILLIS, TIMESTAMP_MICROS).foreach { outputTsType => testSchema( s"TimestampNTZ written and read as INT64 with TIMESTAMP_MICROS - " + s"int96AsTimestamp as $int96AsTimestamp, outputTimestampType: $outputTsType", StructType(Seq(StructField("f1", TimestampNTZType))), """message root { | optional INT64 f1 (TIMESTAMP(MICROS,false)); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = int96AsTimestamp, writeLegacyParquetFormat = true, outputTimestampType = outputTsType) } testParquetToCatalyst( s"TimestampNTZ read as INT64 with TIMESTAMP_MILLIS - " + s"int96AsTimestamp as $int96AsTimestamp", StructType(Seq(StructField("f1", TimestampNTZType))), """message root { | optional INT64 f1 (TIMESTAMP(MILLIS,false)); |} """.stripMargin, binaryAsString = true, int96AsTimestamp = int96AsTimestamp) } private def testSchemaClipping( testName: String, parquetSchema: String, catalystSchema: StructType, expectedSchema: String, caseSensitive: Boolean = true): Unit = { testSchemaClipping(testName, parquetSchema, catalystSchema, MessageTypeParser.parseMessageType(expectedSchema), caseSensitive) } private def testSchemaClipping( testName: String, parquetSchema: String, catalystSchema: StructType, expectedSchema: MessageType, caseSensitive: Boolean): Unit = { test(s"Clipping - $testName") { val actual = ParquetReadSupport.clipParquetSchema( 
MessageTypeParser.parseMessageType(parquetSchema), catalystSchema, caseSensitive) try { expectedSchema.checkContains(actual) actual.checkContains(expectedSchema) } catch { case cause: Throwable => fail( s"""Expected clipped schema: |$expectedSchema |Actual clipped schema: |$actual """.stripMargin, cause) } } } testSchemaClipping( "simple nested struct", parquetSchema = """message root { | required group f0 { | optional int32 f00; | optional int32 f01; | } |} """.stripMargin, catalystSchema = { val f0Type = new StructType().add("f00", IntegerType, nullable = true) new StructType() .add("f0", f0Type, nullable = false) .add("f1", IntegerType, nullable = true) }, expectedSchema = """message root { | required group f0 { | optional int32 f00; | } | optional int32 f1; |} """.stripMargin) testSchemaClipping( "parquet-protobuf style array", parquetSchema = """message root { | required group f0 { | repeated binary f00 (UTF8); | repeated group f01 { | optional int32 f010; | optional double f011; | } | } |} """.stripMargin, catalystSchema = { val f00Type = ArrayType(StringType, containsNull = false) val f01Type = ArrayType( new StructType() .add("f011", DoubleType, nullable = true), containsNull = false) val f0Type = new StructType() .add("f00", f00Type, nullable = false) .add("f01", f01Type, nullable = false) val f1Type = ArrayType(IntegerType, containsNull = true) new StructType() .add("f0", f0Type, nullable = false) .add("f1", f1Type, nullable = true) }, expectedSchema = """message root { | required group f0 { | repeated binary f00 (UTF8); | repeated group f01 { | optional double f011; | } | } | | optional group f1 (LIST) { | repeated group list { | optional int32 element; | } | } |} """.stripMargin) testSchemaClipping( "parquet-thrift style array", parquetSchema = """message root { | required group f0 { | optional group f00 (LIST) { | repeated binary f00_tuple (UTF8); | } | | optional group f01 (LIST) { | repeated group f01_tuple { | optional int32 f010; | optional double 
f011; | } | } | } |} """.stripMargin, catalystSchema = { val f01ElementType = new StructType() .add("f011", DoubleType, nullable = true) .add("f012", LongType, nullable = true) val f0Type = new StructType() .add("f00", ArrayType(StringType, containsNull = false), nullable = true) .add("f01", ArrayType(f01ElementType, containsNull = false), nullable = true) new StructType().add("f0", f0Type, nullable = false) }, expectedSchema = """message root { | required group f0 { | optional group f00 (LIST) { | repeated binary f00_tuple (UTF8); | } | | optional group f01 (LIST) { | repeated group f01_tuple { | optional double f011; | optional int64 f012; | } | } | } |} """.stripMargin) testSchemaClipping( "parquet-avro style array", parquetSchema = """message root { | required group f0 { | optional group f00 (LIST) { | repeated binary array (UTF8); | } | | optional group f01 (LIST) { | repeated group array { | optional int32 f010; | optional double f011; | } | } | } |} """.stripMargin, catalystSchema = { val f01ElementType = new StructType() .add("f011", DoubleType, nullable = true) .add("f012", LongType, nullable = true) val f0Type = new StructType() .add("f00", ArrayType(StringType, containsNull = false), nullable = true) .add("f01", ArrayType(f01ElementType, containsNull = false), nullable = true) new StructType().add("f0", f0Type, nullable = false) }, expectedSchema = """message root { | required group f0 { | optional group f00 (LIST) { | repeated binary array (UTF8); | } | | optional group f01 (LIST) { | repeated group array { | optional double f011; | optional int64 f012; | } | } | } |} """.stripMargin) testSchemaClipping( "parquet-hive style array", parquetSchema = """message root { | optional group f0 { | optional group f00 (LIST) { | repeated group bag { | optional binary array_element; | } | } | | optional group f01 (LIST) { | repeated group bag { | optional group array_element { | optional int32 f010; | optional double f011; | } | } | } | } |} """.stripMargin, 
catalystSchema = { val f01ElementType = new StructType() .add("f011", DoubleType, nullable = true) .add("f012", LongType, nullable = true) val f0Type = new StructType() .add("f00", ArrayType(StringType, containsNull = true), nullable = true) .add("f01", ArrayType(f01ElementType, containsNull = true), nullable = true) new StructType().add("f0", f0Type, nullable = true) }, expectedSchema = """message root { | optional group f0 { | optional group f00 (LIST) { | repeated group bag { | optional binary array_element; | } | } | | optional group f01 (LIST) { | repeated group bag { | optional group array_element { | optional double f011; | optional int64 f012; | } | } | } | } |} """.stripMargin) testSchemaClipping( "2-level list of required struct", parquetSchema = s"""message root { | required group f0 { | required group f00 (LIST) { | repeated group element { | required int32 f000; | optional int64 f001; | } | } | } |} """.stripMargin, catalystSchema = { val f00ElementType = new StructType() .add("f001", LongType, nullable = true) .add("f002", DoubleType, nullable = false) val f00Type = ArrayType(f00ElementType, containsNull = false) val f0Type = new StructType().add("f00", f00Type, nullable = false) new StructType().add("f0", f0Type, nullable = false) }, expectedSchema = s"""message root { | required group f0 { | required group f00 (LIST) { | repeated group element { | optional int64 f001; | required double f002; | } | } | } |} """.stripMargin) testSchemaClipping( "standard array", parquetSchema = """message root { | required group f0 { | optional group f00 (LIST) { | repeated group list { | required binary element (UTF8); | } | } | | optional group f01 (LIST) { | repeated group list { | required group element { | optional int32 f010; | optional double f011; | } | } | } | } |} """.stripMargin, catalystSchema = { val f01ElementType = new StructType() .add("f011", DoubleType, nullable = true) .add("f012", LongType, nullable = true) val f0Type = new StructType() .add("f00", 
ArrayType(StringType, containsNull = false), nullable = true) .add("f01", ArrayType(f01ElementType, containsNull = false), nullable = true) new StructType().add("f0", f0Type, nullable = false) }, expectedSchema = """message root { | required group f0 { | optional group f00 (LIST) { | repeated group list { | required binary element (UTF8); | } | } | | optional group f01 (LIST) { | repeated group list { | required group element { | optional double f011; | optional int64 f012; | } | } | } | } |} """.stripMargin) testSchemaClipping( "empty requested schema", parquetSchema = """message root { | required group f0 { | required int32 f00; | required int64 f01; | } |} """.stripMargin, catalystSchema = new StructType(), expectedSchema = ParquetSchemaConverter.EMPTY_MESSAGE, caseSensitive = true) testSchemaClipping( "disjoint field sets", parquetSchema = """message root { | required group f0 { | required int32 f00; | required int64 f01; | } |} """.stripMargin, catalystSchema = new StructType() .add( "f0", new StructType() .add("f02", FloatType, nullable = true) .add("f03", DoubleType, nullable = true), nullable = true), expectedSchema = """message root { | required group f0 { | optional float f02; | optional double f03; | } |} """.stripMargin) testSchemaClipping( "parquet-avro style map", parquetSchema = """message root { | required group f0 (MAP) { | repeated group key_value (MAP_KEY_VALUE) { | required int32 key; | required group value { | required int32 value_f0; | required int64 value_f1; | } | } | } |} """.stripMargin, catalystSchema = { val valueType = new StructType() .add("value_f1", LongType, nullable = false) .add("value_f2", DoubleType, nullable = false) val f0Type = MapType(IntegerType, valueType, valueContainsNull = false) new StructType().add("f0", f0Type, nullable = false) }, expectedSchema = """message root { | required group f0 (MAP) { | repeated group key_value (MAP_KEY_VALUE) { | required int32 key; | required group value { | required int64 value_f1; | 
required double value_f2; | } | } | } |} """.stripMargin) testSchemaClipping( "standard map", parquetSchema = """message root { | required group f0 (MAP) { | repeated group key_value { | required int32 key; | required group value { | required int32 value_f0; | required int64 value_f1; | } | } | } |} """.stripMargin, catalystSchema = { val valueType = new StructType() .add("value_f1", LongType, nullable = false) .add("value_f2", DoubleType, nullable = false) val f0Type = MapType(IntegerType, valueType, valueContainsNull = false) new StructType().add("f0", f0Type, nullable = false) }, expectedSchema = """message root { | required group f0 (MAP) { | repeated group key_value { | required int32 key; | required group value { | required int64 value_f1; | required double value_f2; | } | } | } |} """.stripMargin) testSchemaClipping( "standard map with complex key", parquetSchema = """message root { | required group f0 (MAP) { | repeated group key_value { | required group key { | required int32 value_f0; | required int64 value_f1; | } | required int32 value; | } | } |} """.stripMargin, catalystSchema = { val keyType = new StructType() .add("value_f1", LongType, nullable = false) .add("value_f2", DoubleType, nullable = false) val f0Type = MapType(keyType, IntegerType, valueContainsNull = false) new StructType().add("f0", f0Type, nullable = false) }, expectedSchema = """message root { | required group f0 (MAP) { | repeated group key_value { | required group key { | required int64 value_f1; | required double value_f2; | } | required int32 value; | } | } |} """.stripMargin) testSchemaClipping( "case-insensitive resolution: no ambiguity", parquetSchema = """message root { | required group A { | optional int32 B; | } | optional int32 c; |} """.stripMargin, catalystSchema = { val nestedType = new StructType().add("b", IntegerType, nullable = true) new StructType() .add("a", nestedType, nullable = true) .add("c", IntegerType, nullable = true) }, expectedSchema = """message root { | 
required group A { | optional int32 B; | } | optional int32 c; |} """.stripMargin, caseSensitive = false) test("Clipping - case-insensitive resolution: more than one field is matched") { val parquetSchema = """message root { | required group A { | optional int32 B; | } | optional int32 c; | optional int32 a; |} """.stripMargin val catalystSchema = { val nestedType = new StructType().add("b", IntegerType, nullable = true) new StructType() .add("a", nestedType, nullable = true) .add("c", IntegerType, nullable = true) } assertThrows[RuntimeException] { ParquetReadSupport.clipParquetSchema( MessageTypeParser.parseMessageType(parquetSchema), catalystSchema, caseSensitive = false) } } }
shaneknapp/spark
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetSchemaSuite.scala
Scala
apache-2.0
95,371
/*start*/ <elem/>/*end*/; //Elem
ilinum/intellij-scala
testdata/typeInference/xml/EmptyElement.scala
Scala
apache-2.0
32
package frameless
package ml
package classification

import frameless.ml.internals.TreesInputsChecker
import frameless.ml.params.trees.FeatureSubsetStrategy
import org.apache.spark.ml.classification.{RandomForestClassificationModel, RandomForestClassifier}
import org.apache.spark.ml.linalg.Vector

/**
 * <a href="http://en.wikipedia.org/wiki/Random_forest">Random Forest</a> learning algorithm for
 * classification.
 * It supports both binary and multiclass labels, as well as both continuous and categorical
 * features.
 *
 * This is a typed wrapper around Spark ML's [[RandomForestClassifier]]: every `set*` method
 * delegates to the underlying Spark estimator and returns a fresh immutable copy of the wrapper.
 *
 * @param rf          the underlying Spark estimator being configured
 * @param labelCol    name of the input label column
 * @param featuresCol name of the input features column
 */
final class TypedRandomForestClassifier[Inputs] private[ml](
  rf: RandomForestClassifier,
  labelCol: String,
  featuresCol: String
) extends TypedEstimator[Inputs, TypedRandomForestClassifier.Outputs, RandomForestClassificationModel] {

  // Configure the Spark estimator with the checked input columns; output columns are
  // written to temporary names managed by AppendTransformer.
  val estimator: RandomForestClassifier =
    rf
      .setLabelCol(labelCol)
      .setFeaturesCol(featuresCol)
      .setPredictionCol(AppendTransformer.tempColumnName)
      .setRawPredictionCol(AppendTransformer.tempColumnName2)
      .setProbabilityCol(AppendTransformer.tempColumnName3)

  // Each setter below forwards to the Spark estimator and returns a new wrapper instance,
  // preserving the immutable builder style of the typed API.

  def setNumTrees(value: Int): TypedRandomForestClassifier[Inputs] = copy(rf.setNumTrees(value))

  def setMaxDepth(value: Int): TypedRandomForestClassifier[Inputs] = copy(rf.setMaxDepth(value))

  def setMinInfoGain(value: Double): TypedRandomForestClassifier[Inputs] = copy(rf.setMinInfoGain(value))

  def setMinInstancesPerNode(value: Int): TypedRandomForestClassifier[Inputs] = copy(rf.setMinInstancesPerNode(value))

  def setMaxMemoryInMB(value: Int): TypedRandomForestClassifier[Inputs] = copy(rf.setMaxMemoryInMB(value))

  def setSubsamplingRate(value: Double): TypedRandomForestClassifier[Inputs] = copy(rf.setSubsamplingRate(value))

  // FeatureSubsetStrategy is a typed enum; its sparkValue is the string Spark expects.
  def setFeatureSubsetStrategy(value: FeatureSubsetStrategy): TypedRandomForestClassifier[Inputs] =
    copy(rf.setFeatureSubsetStrategy(value.sparkValue))

  def setMaxBins(value: Int): TypedRandomForestClassifier[Inputs] = copy(rf.setMaxBins(value))

  // Rebuild the wrapper around a reconfigured Spark estimator, keeping the column names.
  private def copy(newRf: RandomForestClassifier): TypedRandomForestClassifier[Inputs] =
    new TypedRandomForestClassifier[Inputs](newRf, labelCol, featuresCol)
}

object TypedRandomForestClassifier {
  /**
   * Columns appended by the fitted model.
   *
   * @param rawPrediction raw (unnormalized) prediction scores per class
   * @param probability   class-conditional probabilities
   * @param prediction    predicted class label
   */
  case class Outputs(rawPrediction: Vector, probability: Vector, prediction: Double)

  /**
   * Creates a classifier whose label and features columns are derived from the
   * compile-time checked `Inputs` schema.
   */
  def apply[Inputs](implicit inputsChecker: TreesInputsChecker[Inputs]): TypedRandomForestClassifier[Inputs] = {
    new TypedRandomForestClassifier(new RandomForestClassifier(), inputsChecker.labelCol, inputsChecker.featuresCol)
  }
}
adelbertc/frameless
ml/src/main/scala/frameless/ml/classification/TypedRandomForestClassifier.scala
Scala
apache-2.0
2,485
package com.seanshubin.web.sync.domain

import java.nio.file.{Path, Paths}

/** Locations of generated sample data used by the integration tests. */
object IntegrationTestUtil {
  /** Root directory under which integration-test data is generated. */
  val sampleDataPath = Paths.get("target", "test", "integration", "generated")

  /** Resolves `target` against the sample-data root directory. */
  def pathFor(target: String): Path = sampleDataPath.resolve(target)
}
SeanShubin/web-sync
domain/src/test/scala/com/seanshubin/web/sync/domain/IntegrationTestUtil.scala
Scala
unlicense
266
/*
 * Copyright 2011-2018 GatlingCorp (http://gatling.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.gatling.recorder.scenario

import java.util.Locale

import io.gatling.http.HeaderNames

private[scenario] object ProtocolDefinition {

  /**
   * Well-known HTTP request headers mapped to the protocol-configuration
   * property each one is rendered as. Keys are lower-cased with the ROOT
   * locale so header lookups are independent of the captured casing.
   */
  val BaseHeaders: Map[String, String] =
    Seq(
      HeaderNames.Accept                  -> "acceptHeader",
      HeaderNames.AcceptCharset           -> "acceptCharsetHeader",
      HeaderNames.AcceptEncoding          -> "acceptEncodingHeader",
      HeaderNames.AcceptLanguage          -> "acceptLanguageHeader",
      HeaderNames.Authorization           -> "authorizationHeader",
      HeaderNames.Connection              -> "connectionHeader",
      HeaderNames.ContentType             -> "contentTypeHeader",
      HeaderNames.DNT                     -> "doNotTrackHeader",
      HeaderNames.UserAgent               -> "userAgentHeader",
      HeaderNames.UpgradeInsecureRequests -> "upgradeInsecureRequestsHeader"
    ).map { case (headerName, property) =>
      headerName.toLowerCase(Locale.ROOT) -> property
    }.toMap
}

/** Base URL and recorded default headers for a generated scenario. */
private[scenario] case class ProtocolDefinition(baseUrl: String, headers: Map[String, String])
wiacekm/gatling
gatling-recorder/src/main/scala/io/gatling/recorder/scenario/ProtocolDefinition.scala
Scala
apache-2.0
1,679
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.streaming.sources import javax.annotation.concurrent.GuardedBy import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import scala.util.control.NonFatal import org.apache.spark.internal.Logging import org.apache.spark.sql.Row import org.apache.spark.sql.catalyst.expressions.Attribute import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, Statistics} import org.apache.spark.sql.catalyst.streaming.InternalOutputModes.{Append, Complete, Update} import org.apache.spark.sql.execution.streaming.Sink import org.apache.spark.sql.sources.v2.{DataSourceV2, DataSourceV2Options} import org.apache.spark.sql.sources.v2.streaming.{ContinuousWriteSupport, MicroBatchWriteSupport} import org.apache.spark.sql.sources.v2.streaming.writer.ContinuousWriter import org.apache.spark.sql.sources.v2.writer._ import org.apache.spark.sql.streaming.OutputMode import org.apache.spark.sql.types.StructType /** * A sink that stores the results in memory. This [[Sink]] is primarily intended for use in unit * tests and does not provide durability. 
*/ class MemorySinkV2 extends DataSourceV2 with MicroBatchWriteSupport with ContinuousWriteSupport with Logging { override def createMicroBatchWriter( queryId: String, batchId: Long, schema: StructType, mode: OutputMode, options: DataSourceV2Options): java.util.Optional[DataSourceV2Writer] = { java.util.Optional.of(new MemoryWriter(this, batchId, mode)) } override def createContinuousWriter( queryId: String, schema: StructType, mode: OutputMode, options: DataSourceV2Options): java.util.Optional[ContinuousWriter] = { java.util.Optional.of(new ContinuousMemoryWriter(this, mode)) } private case class AddedData(batchId: Long, data: Array[Row]) /** An order list of batches that have been written to this [[Sink]]. */ @GuardedBy("this") private val batches = new ArrayBuffer[AddedData]() /** Returns all rows that are stored in this [[Sink]]. */ def allData: Seq[Row] = synchronized { batches.flatMap(_.data) } def latestBatchId: Option[Long] = synchronized { batches.lastOption.map(_.batchId) } def latestBatchData: Seq[Row] = synchronized { batches.lastOption.toSeq.flatten(_.data) } def toDebugString: String = synchronized { batches.map { case AddedData(batchId, data) => val dataStr = try data.mkString(" ") catch { case NonFatal(e) => "[Error converting to string]" } s"$batchId: $dataStr" }.mkString("\\n") } def write(batchId: Long, outputMode: OutputMode, newRows: Array[Row]): Unit = { val notCommitted = synchronized { latestBatchId.isEmpty || batchId > latestBatchId.get } if (notCommitted) { logDebug(s"Committing batch $batchId to $this") outputMode match { case Append | Update => val rows = AddedData(batchId, newRows) synchronized { batches += rows } case Complete => val rows = AddedData(batchId, newRows) synchronized { batches.clear() batches += rows } case _ => throw new IllegalArgumentException( s"Output mode $outputMode is not supported by MemorySink") } } else { logDebug(s"Skipping already committed batch: $batchId") } } def clear(): Unit = synchronized { 
batches.clear() } override def toString(): String = "MemorySink" } case class MemoryWriterCommitMessage(partition: Int, data: Seq[Row]) extends WriterCommitMessage {} class MemoryWriter(sink: MemorySinkV2, batchId: Long, outputMode: OutputMode) extends DataSourceV2Writer with Logging { override def createWriterFactory: MemoryWriterFactory = MemoryWriterFactory(outputMode) def commit(messages: Array[WriterCommitMessage]): Unit = { val newRows = messages.flatMap { case message: MemoryWriterCommitMessage => message.data } sink.write(batchId, outputMode, newRows) } override def abort(messages: Array[WriterCommitMessage]): Unit = { // Don't accept any of the new input. } } class ContinuousMemoryWriter(val sink: MemorySinkV2, outputMode: OutputMode) extends ContinuousWriter { override def createWriterFactory: MemoryWriterFactory = MemoryWriterFactory(outputMode) override def commit(epochId: Long, messages: Array[WriterCommitMessage]): Unit = { val newRows = messages.flatMap { case message: MemoryWriterCommitMessage => message.data } sink.write(epochId, outputMode, newRows) } override def abort(messages: Array[WriterCommitMessage]): Unit = { // Don't accept any of the new input. } } case class MemoryWriterFactory(outputMode: OutputMode) extends DataWriterFactory[Row] { def createDataWriter(partitionId: Int, attemptNumber: Int): DataWriter[Row] = { new MemoryDataWriter(partitionId, outputMode) } } class MemoryDataWriter(partition: Int, outputMode: OutputMode) extends DataWriter[Row] with Logging { private val data = mutable.Buffer[Row]() override def write(row: Row): Unit = { data.append(row) } override def commit(): MemoryWriterCommitMessage = { val msg = MemoryWriterCommitMessage(partition, data.clone()) data.clear() msg } override def abort(): Unit = {} } /** * Used to query the data that has been written into a [[MemorySink]]. 
*/ case class MemoryPlanV2(sink: MemorySinkV2, override val output: Seq[Attribute]) extends LeafNode { private val sizePerRow = output.map(_.dataType.defaultSize).sum override def computeStats(): Statistics = Statistics(sizePerRow * sink.allData.size) }
saltstar/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/sources/memoryV2.scala
Scala
apache-2.0
6,471
package im.actor.server.migrations import akka.actor.ActorSystem import akka.util.Timeout import im.actor.server.commons.KeyValueMappings import im.actor.server.group.GroupErrors.NoBotFound import im.actor.server.group.{ GroupExtension, GroupOffice, GroupViewRegion } import im.actor.server.persist import shardakka.keyvalue.SimpleKeyValue import shardakka.{ IntCodec, ShardakkaExtension } import slick.driver.PostgresDriver import scala.concurrent.duration._ import scala.concurrent.{ ExecutionContext, Future } object IntegrationTokenMigrator extends Migration { override protected def migrationName: String = "2015-08-21-IntegrationTokenMigration" override protected def migrationTimeout: Duration = 1.hour protected override def startMigration()( implicit system: ActorSystem, db: PostgresDriver.api.Database, ec: ExecutionContext ): Future[Unit] = { implicit val kv = ShardakkaExtension(system).simpleKeyValue[Int](KeyValueMappings.IntegrationTokens, IntCodec) implicit val viewRegion = GroupExtension(system).viewRegion db.run(persist.Group.allIds) flatMap { ids ⇒ system.log.debug("Going to migrate integration tokens for groups: {}", ids) Future.sequence(ids map (groupId ⇒ migrateSingle(groupId) recover { case NoBotFound ⇒ system.log.warning("No bot found for groupId: {}", groupId) case e ⇒ system.log.error(e, "Failed to migrate token for groupId: {}", groupId) throw e })) } map (_ ⇒ ()) } private def migrateSingle(groupId: Int)( implicit system: ActorSystem, ec: ExecutionContext, viewRegion: GroupViewRegion, kv: SimpleKeyValue[Int] ): Future[Unit] = { implicit val timeout = Timeout(40.seconds) for { optToken ← GroupOffice.getIntegrationToken(groupId) _ ← optToken map { token ⇒ kv.upsert(token, groupId) } getOrElse { system.log.warning("Could not find integration token in group {}", groupId) Future.successful(()) } } yield { system.log.info("Integration token migrated for group {}", groupId) () } } }
winiceo/actor-platform
actor-server/actor-core/src/main/scala/im/actor/server/migrations/IntegrationTokenMigrator.scala
Scala
mit
2,169
/* * La Trobe University - Distributed Deep Learning System * Copyright 2016 Matthias Langer (t3l@threelights.de) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.latrobe import org.json4s.JsonAST._ import scala.collection._ import scala.util.hashing._ final class TensorLayoutBuffer(override val banks: SortedMap[Int, TensorLayoutBank]) extends BufferEx[TensorLayoutBuffer, TensorLayoutBank, IndependentTensorLayout] { require(!banks.exists(_._2 == null)) override def toString : String = s"LayoutBuffer[${banks.size}]" override def hashCode() : Int = MurmurHash3.mix(super.hashCode(), banks.hashCode()) override def canEqual(that: Any) : Boolean = that.isInstanceOf[TensorLayoutBuffer] override protected def doEquals(other: Equatable) : Boolean = super.doEquals(other) && (other match { case other: TensorLayoutBuffer => banks == other.banks case _ => false }) def noValues : Long = foldLeftBanks( 0L )(_ + _.noValues) /* def apply(bankNo: Int) : TensorLayoutBank = banks.getOrElse( bankNo, TensorLayoutBank.empty ) */ // --------------------------------------------------------------------------- // Operations // --------------------------------------------------------------------------- /* def foreachSegment(fn: (Int, Int, Int) => Unit) : Unit = foreachGroup( (i, g) => g.foreachSegment(fn(i, _, _)) ) */ /* def +(other: ParameterBufferLayout) : ParameterBufferLayout = ParameterBufferLayout( MapEx.zipValuesEx(groups, other.groups)( (a, b) => a + b, a => a, b => b ) ) def 
+(other: (Int, ParameterGroupLayout)): ParameterBufferLayout = { val kv = groups.get(other._1) if (kv.isDefined) { val newGroup = (other._1, kv.get + other._2) ParameterBufferLayout(groups + newGroup) } else { ParameterBufferLayout(groups + other) } } */ // --------------------------------------------------------------------------- // Conversion // --------------------------------------------------------------------------- override protected def doCreateView(banks: SortedMap[Int, TensorLayoutBank]) : TensorLayoutBuffer = TensorLayoutBuffer(banks) def toParameterBuffer(convertFn: IndependentTensorLayout => RealTensor) : ValueTensorBuffer = { val result = mapBanks(_.toRealTensorBank(convertFn)) ValueTensorBuffer(result) } } object TensorLayoutBuffer { final def apply(banks: SortedMap[Int, TensorLayoutBank]) : TensorLayoutBuffer = new TensorLayoutBuffer(banks) final def derive(bankNo: Int, layout: TensorLayoutBank) : TensorLayoutBuffer = derive((bankNo, layout)) final def derive(bank0: (Int, TensorLayoutBank)) : TensorLayoutBuffer = apply(SortedMap(bank0)) final def derive(json: JValue) : TensorLayoutBuffer = derive(json.asInstanceOf[JObject]) final def derive(json: JObject) : TensorLayoutBuffer = { val fields = json.obj.toMap val result = Json.toSortedMap( fields("banks"), (json: JValue) => Json.toInt(json), (json: JValue) => TensorLayoutBank.derive(json) ) apply(result) } final val empty : TensorLayoutBuffer = apply(SortedMap.empty) } final class TensorLayoutBufferBuilder extends BufferExBuilder[TensorLayoutBuffer, TensorLayoutBank, IndependentTensorLayout] { override protected def doRegister(bankNo: Int, segmentNo: Int, item: IndependentTensorLayout) : Int = { val bank = banks.getOrElseUpdate(bankNo, TensorLayoutBankBuilder()) bank.register(segmentNo, item) } override def result() : TensorLayoutBuffer = TensorLayoutBuffer(toSortedMap) } object TensorLayoutBufferBuilder { final def apply() : TensorLayoutBufferBuilder = new TensorLayoutBufferBuilder }
bashimao/ltudl
base/src/main/scala/edu/latrobe/TensorLayoutBuffer.scala
Scala
apache-2.0
4,389
package aecor.kafkadistributedprocessing import cats.effect.{ Sync, Timer } import fs2.Stream.retry import scala.concurrent.duration._ object Supervision { type Supervision[F[_]] = F[Unit] => F[Unit] def exponentialBackoff[F[_]: Timer: Sync](minBackoff: FiniteDuration = 2.seconds, maxBackoff: FiniteDuration = 10.seconds, randomFactor: Double = 0.2, maxAttempts: Int = Int.MaxValue): Supervision[F] = { def nextDelay(in: FiniteDuration): FiniteDuration = FiniteDuration((in.toMillis * (1 + randomFactor)).toLong, MILLISECONDS).min(maxBackoff) fa => retry(fa, minBackoff, nextDelay, maxAttempts, Function.const(true)).compile.drain } def noop[F[_]]: Supervision[F] = identity }
notxcain/aecor
modules/kafka-distributed-processing/src/main/scala/aecor/kafkadistributedprocessing/Supervision.scala
Scala
mit
840
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest.matchers import org.scalatest._ import org.scalatest.prop.Checkers import org.scalacheck._ import Arbitrary._ import Prop._ import org.scalatest.exceptions.TestFailedException class ShouldFullyMatchSpec extends Spec with ShouldMatchers with Checkers with ReturnsNormallyThrowsAssertion { /* s should include substring t s should include regex t s should startWith substring t s should startWith regex t s should endWith substring t s should endWith regex t s should fullyMatch regex t */ object `The fullyMatch regex syntax` { val decimal = """(-)?(\\d+)(\\.\\d*)?""" val decimalRegex = """(-)?(\\d+)(\\.\\d*)?""".r object `(when the regex is specified by a string)` { def `should do nothing if the string fully matches the regular expression specified as a string` { "1.7" should fullyMatch regex ("1.7") "1.7" should fullyMatch regex (decimal) "-1.8" should fullyMatch regex (decimal) "8" should fullyMatch regex (decimal) "1." 
should fullyMatch regex (decimal) } def `should do nothing if the string does not fully match the regular expression specified as a string when used with not` { "eight" should not { fullyMatch regex (decimal) } "1.eight" should not { fullyMatch regex (decimal) } "one.8" should not { fullyMatch regex (decimal) } "eight" should not fullyMatch regex (decimal) "1.eight" should not fullyMatch regex (decimal) "one.8" should not fullyMatch regex (decimal) "1.8-" should not fullyMatch regex (decimal) } def `should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-and expression` { "1.7" should (fullyMatch regex (decimal) and (fullyMatch regex (decimal))) "1.7" should ((fullyMatch regex (decimal)) and (fullyMatch regex (decimal))) "1.7" should (fullyMatch regex (decimal) and fullyMatch regex (decimal)) } def `should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-or expression` { "1.7" should (fullyMatch regex ("hello") or (fullyMatch regex (decimal))) "1.7" should ((fullyMatch regex ("hello")) or (fullyMatch regex (decimal))) "1.7" should (fullyMatch regex ("hello") or fullyMatch regex (decimal)) } def `should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-and expression with not` { "fred" should (not (fullyMatch regex ("bob")) and not (fullyMatch regex (decimal))) "fred" should ((not fullyMatch regex ("bob")) and (not fullyMatch regex (decimal))) "fred" should (not fullyMatch regex ("bob") and not fullyMatch regex (decimal)) } def `should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-or expression with not` { "fred" should (not (fullyMatch regex ("fred")) or not (fullyMatch regex (decimal))) "fred" should ((not fullyMatch regex ("fred")) or (not fullyMatch regex (decimal))) "fred" should (not fullyMatch regex 
("fred") or not fullyMatch regex (decimal)) } def `should throw TestFailedException if the string does not match the regular expression specified as a string` { val caught1 = intercept[TestFailedException] { "1.7" should fullyMatch regex ("1.78") } assert(caught1.getMessage === "\\"1.7\\" did not fully match the regular expression 1.78") val caught2 = intercept[TestFailedException] { "1.7" should fullyMatch regex ("21.7") } assert(caught2.getMessage === "\\"1.7\\" did not fully match the regular expression 21.7") val caught3 = intercept[TestFailedException] { "-1.eight" should fullyMatch regex (decimal) } assert(caught3.getMessage === "\\"-1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught6 = intercept[TestFailedException] { "eight" should fullyMatch regex (decimal) } assert(caught6.getMessage === "\\"eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught7 = intercept[TestFailedException] { "1.eight" should fullyMatch regex (decimal) } assert(caught7.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught8 = intercept[TestFailedException] { "one.8" should fullyMatch regex (decimal) } assert(caught8.getMessage === "\\"one.8\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught9 = intercept[TestFailedException] { "1.8-" should fullyMatch regex (decimal) } assert(caught9.getMessage === "\\"1.8-\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") } def `should throw TestFailedException if the string does matches the regular expression specified as a string when used with not` { val caught1 = intercept[TestFailedException] { "1.7" should not { fullyMatch regex ("1.7") } } assert(caught1.getMessage === "\\"1.7\\" fully matched the regular expression 1.7") val caught2 = intercept[TestFailedException] { "1.7" should not { fullyMatch regex (decimal) } } assert(caught2.getMessage === 
"\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught3 = intercept[TestFailedException] { "-1.8" should not { fullyMatch regex (decimal) } } assert(caught3.getMessage === "\\"-1.8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught4 = intercept[TestFailedException] { "8" should not { fullyMatch regex (decimal) } } assert(caught4.getMessage === "\\"8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught5 = intercept[TestFailedException] { "1." should not { fullyMatch regex (decimal) } } assert(caught5.getMessage === "\\"1.\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught11 = intercept[TestFailedException] { "1.7" should not fullyMatch regex ("1.7") } assert(caught11.getMessage === "\\"1.7\\" fully matched the regular expression 1.7") val caught12 = intercept[TestFailedException] { "1.7" should not fullyMatch regex (decimal) } assert(caught12.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught13 = intercept[TestFailedException] { "-1.8" should not fullyMatch regex (decimal) } assert(caught13.getMessage === "\\"-1.8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught14 = intercept[TestFailedException] { "8" should not fullyMatch regex (decimal) } assert(caught14.getMessage === "\\"8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught15 = intercept[TestFailedException] { "1." 
should not fullyMatch regex (decimal) } assert(caught15.getMessage === "\\"1.\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") } def `should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-and expression` { val caught1 = intercept[TestFailedException] { "1.7" should (fullyMatch regex (decimal) and (fullyMatch regex ("1.8"))) } assert(caught1.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8") val caught2 = intercept[TestFailedException] { "1.7" should ((fullyMatch regex (decimal)) and (fullyMatch regex ("1.8"))) } assert(caught2.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8") val caught3 = intercept[TestFailedException] { "1.7" should (fullyMatch regex (decimal) and fullyMatch regex ("1.8")) } assert(caught3.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8") // Check to make sure the error message "short circuits" (i.e., just reports the left side's failure) val caught4 = intercept[TestFailedException] { "1.eight" should (fullyMatch regex (decimal) and (fullyMatch regex ("1.8"))) } assert(caught4.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught5 = intercept[TestFailedException] { "1.eight" should ((fullyMatch regex (decimal)) and (fullyMatch regex ("1.8"))) } assert(caught5.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught6 = intercept[TestFailedException] { "1.eight" should (fullyMatch regex (decimal) and fullyMatch regex ("1.8")) } assert(caught6.getMessage === "\\"1.eight\\" did not fully match the regular expression 
(-)?(\\\\d+)(\\\\.\\\\d*)?") } def `should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-or expression` { val caught1 = intercept[TestFailedException] { "1.seven" should (fullyMatch regex (decimal) or (fullyMatch regex ("1.8"))) } assert(caught1.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8") val caught2 = intercept[TestFailedException] { "1.seven" should ((fullyMatch regex (decimal)) or (fullyMatch regex ("1.8"))) } assert(caught2.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8") val caught3 = intercept[TestFailedException] { "1.seven" should (fullyMatch regex (decimal) or fullyMatch regex ("1.8")) } assert(caught3.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8") } def `should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-and expression used with not` { val caught1 = intercept[TestFailedException] { "1.7" should (not fullyMatch regex ("1.8") and (not fullyMatch regex (decimal))) } assert(caught1.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught2 = intercept[TestFailedException] { "1.7" should ((not fullyMatch regex ("1.8")) and (not fullyMatch regex (decimal))) } assert(caught2.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught3 = intercept[TestFailedException] { "1.7" should (not fullyMatch regex ("1.8") and not fullyMatch regex (decimal)) 
} assert(caught3.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") } def `should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-or expression used with not` { val caught1 = intercept[TestFailedException] { "1.7" should (not fullyMatch regex (decimal) or (not fullyMatch regex ("1.7"))) } assert(caught1.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7") val caught2 = intercept[TestFailedException] { "1.7" should ((not fullyMatch regex (decimal)) or (not fullyMatch regex ("1.7"))) } assert(caught2.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7") val caught3 = intercept[TestFailedException] { "1.7" should (not fullyMatch regex (decimal) or not fullyMatch regex ("1.7")) } assert(caught3.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7") val caught4 = intercept[TestFailedException] { "1.7" should (not (fullyMatch regex (decimal)) or not (fullyMatch regex ("1.7"))) } assert(caught4.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7") } } object `(when the regex is specified by an actual Regex)` { def `should do nothing if the string fully matches the regular expression specified as a string` { "1.7" should fullyMatch regex ("1.7") "1.7" should fullyMatch regex (decimalRegex) "-1.8" should fullyMatch regex (decimalRegex) "8" should fullyMatch regex (decimalRegex) "1." 
should fullyMatch regex (decimalRegex) } def `should do nothing if the string does not fully match the regular expression specified as a string when used with not` { "eight" should not { fullyMatch regex (decimalRegex) } "1.eight" should not { fullyMatch regex (decimalRegex) } "one.8" should not { fullyMatch regex (decimalRegex) } "eight" should not fullyMatch regex (decimalRegex) "1.eight" should not fullyMatch regex (decimalRegex) "one.8" should not fullyMatch regex (decimalRegex) "1.8-" should not fullyMatch regex (decimalRegex) } def `should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-and expression` { "1.7" should (fullyMatch regex (decimalRegex) and (fullyMatch regex (decimalRegex))) "1.7" should ((fullyMatch regex (decimalRegex)) and (fullyMatch regex (decimalRegex))) "1.7" should (fullyMatch regex (decimalRegex) and fullyMatch regex (decimalRegex)) } def `should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-or expression` { "1.7" should (fullyMatch regex ("hello") or (fullyMatch regex (decimalRegex))) "1.7" should ((fullyMatch regex ("hello")) or (fullyMatch regex (decimalRegex))) "1.7" should (fullyMatch regex ("hello") or fullyMatch regex (decimalRegex)) } def `should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-and expression with not` { "fred" should (not (fullyMatch regex ("bob")) and not (fullyMatch regex (decimalRegex))) "fred" should ((not fullyMatch regex ("bob")) and (not fullyMatch regex (decimalRegex))) "fred" should (not fullyMatch regex ("bob") and not fullyMatch regex (decimalRegex)) } def `should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-or expression with not` { "fred" should (not (fullyMatch regex ("fred")) or not (fullyMatch regex (decimalRegex))) "fred" should 
((not fullyMatch regex ("fred")) or (not fullyMatch regex (decimalRegex))) "fred" should (not fullyMatch regex ("fred") or not fullyMatch regex (decimalRegex)) } def `should throw TestFailedException if the string does not match the regular expression specified as a string` { val caught1 = intercept[TestFailedException] { "1.7" should fullyMatch regex ("1.78") } assert(caught1.getMessage === "\\"1.7\\" did not fully match the regular expression 1.78") val caught2 = intercept[TestFailedException] { "1.7" should fullyMatch regex ("21.7") } assert(caught2.getMessage === "\\"1.7\\" did not fully match the regular expression 21.7") val caught3 = intercept[TestFailedException] { "-1.eight" should fullyMatch regex (decimalRegex) } assert(caught3.getMessage === "\\"-1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught6 = intercept[TestFailedException] { "eight" should fullyMatch regex (decimalRegex) } assert(caught6.getMessage === "\\"eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught7 = intercept[TestFailedException] { "1.eight" should fullyMatch regex (decimalRegex) } assert(caught7.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught8 = intercept[TestFailedException] { "one.8" should fullyMatch regex (decimalRegex) } assert(caught8.getMessage === "\\"one.8\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught9 = intercept[TestFailedException] { "1.8-" should fullyMatch regex (decimalRegex) } assert(caught9.getMessage === "\\"1.8-\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") } def `should throw TestFailedException if the string does matches the regular expression specified as a string when used with not` { val caught1 = intercept[TestFailedException] { "1.7" should not { fullyMatch regex ("1.7") } } assert(caught1.getMessage === "\\"1.7\\" fully matched the regular 
expression 1.7") val caught2 = intercept[TestFailedException] { "1.7" should not { fullyMatch regex (decimalRegex) } } assert(caught2.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught3 = intercept[TestFailedException] { "-1.8" should not { fullyMatch regex (decimalRegex) } } assert(caught3.getMessage === "\\"-1.8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught4 = intercept[TestFailedException] { "8" should not { fullyMatch regex (decimalRegex) } } assert(caught4.getMessage === "\\"8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught5 = intercept[TestFailedException] { "1." should not { fullyMatch regex (decimalRegex) } } assert(caught5.getMessage === "\\"1.\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught11 = intercept[TestFailedException] { "1.7" should not fullyMatch regex ("1.7") } assert(caught11.getMessage === "\\"1.7\\" fully matched the regular expression 1.7") val caught12 = intercept[TestFailedException] { "1.7" should not fullyMatch regex (decimalRegex) } assert(caught12.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught13 = intercept[TestFailedException] { "-1.8" should not fullyMatch regex (decimalRegex) } assert(caught13.getMessage === "\\"-1.8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught14 = intercept[TestFailedException] { "8" should not fullyMatch regex (decimalRegex) } assert(caught14.getMessage === "\\"8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught15 = intercept[TestFailedException] { "1." 
should not fullyMatch regex (decimalRegex) } assert(caught15.getMessage === "\\"1.\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") } def `should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-and expression` { val caught1 = intercept[TestFailedException] { "1.7" should (fullyMatch regex (decimalRegex) and (fullyMatch regex ("1.8"))) } assert(caught1.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8") val caught2 = intercept[TestFailedException] { "1.7" should ((fullyMatch regex (decimalRegex)) and (fullyMatch regex ("1.8"))) } assert(caught2.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8") val caught3 = intercept[TestFailedException] { "1.7" should (fullyMatch regex (decimalRegex) and fullyMatch regex ("1.8")) } assert(caught3.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8") // Check to make sure the error message "short circuits" (i.e., just reports the left side's failure) val caught4 = intercept[TestFailedException] { "1.eight" should (fullyMatch regex (decimalRegex) and (fullyMatch regex ("1.8"))) } assert(caught4.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught5 = intercept[TestFailedException] { "1.eight" should ((fullyMatch regex (decimalRegex)) and (fullyMatch regex ("1.8"))) } assert(caught5.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught6 = intercept[TestFailedException] { "1.eight" should (fullyMatch regex (decimalRegex) and fullyMatch regex ("1.8")) } assert(caught6.getMessage === "\\"1.eight\\" did not fully match the regular 
expression (-)?(\\\\d+)(\\\\.\\\\d*)?") } def `should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-or expression` { val caught1 = intercept[TestFailedException] { "1.seven" should (fullyMatch regex (decimalRegex) or (fullyMatch regex ("1.8"))) } assert(caught1.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8") val caught2 = intercept[TestFailedException] { "1.seven" should ((fullyMatch regex (decimalRegex)) or (fullyMatch regex ("1.8"))) } assert(caught2.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8") val caught3 = intercept[TestFailedException] { "1.seven" should (fullyMatch regex (decimalRegex) or fullyMatch regex ("1.8")) } assert(caught3.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8") } def `should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-and expression used with not` { val caught1 = intercept[TestFailedException] { "1.7" should (not fullyMatch regex ("1.8") and (not fullyMatch regex (decimalRegex))) } assert(caught1.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught2 = intercept[TestFailedException] { "1.7" should ((not fullyMatch regex ("1.8")) and (not fullyMatch regex (decimalRegex))) } assert(caught2.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") val caught3 = intercept[TestFailedException] { "1.7" should (not fullyMatch regex ("1.8") 
and not fullyMatch regex (decimalRegex)) } assert(caught3.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?") } def `should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-or expression used with not` { val caught1 = intercept[TestFailedException] { "1.7" should (not fullyMatch regex (decimalRegex) or (not fullyMatch regex ("1.7"))) } assert(caught1.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7") val caught2 = intercept[TestFailedException] { "1.7" should ((not fullyMatch regex (decimalRegex)) or (not fullyMatch regex ("1.7"))) } assert(caught2.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7") val caught3 = intercept[TestFailedException] { "1.7" should (not fullyMatch regex (decimalRegex) or not fullyMatch regex ("1.7")) } assert(caught3.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7") val caught4 = intercept[TestFailedException] { "1.7" should (not (fullyMatch regex (decimalRegex)) or not (fullyMatch regex ("1.7"))) } assert(caught4.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7") } } } }
travisbrown/scalatest
src/test/scala/org/scalatest/matchers/ShouldFullyMatchSpec.scala
Scala
apache-2.0
26,860
import scala.quoted._ import scala.quoted.staging._ object Test { given Toolbox = Toolbox.make(getClass.getClassLoader) def main(args: Array[String]): Unit = run { def test[T: Type](clazz: java.lang.Class[T]) = { val lclazz = Expr(clazz) val name = '{ ($lclazz).getCanonicalName } println(name.show) '{ println($name) } } // class Array[Object] '{ ${test(classOf[Array[Any]])} ${test(classOf[Array[AnyVal]])} ${test(classOf[Array[AnyRef]])} ${test(classOf[Array[Object]])} } } } class Foo
som-snytt/dotty
tests/run-staging/i3947f.scala
Scala
apache-2.0
570
package zzb.xmpp.ebus import java.util.concurrent.ConcurrentHashMap import akka.actor.ActorRef import akka.event.{EventBus, SubchannelClassification} import akka.util.Subclassification import org.jivesoftware.smack.Chat import org.jivesoftware.smack.packet.Message import zzb.xmpp.XPresence /** * Created by Simon on 2014/8/8 */ object XmppEventBus { private val xmppEventBus = new XmppEventBus implicit def default = xmppEventBus def apply(name: String): XmppEventBus = if (namedBus.contains(name)) namedBus.get(name) else { val ebus = new XmppEventBus namedBus.put(name, ebus) ebus } private val namedBus = new ConcurrentHashMap[String, XmppEventBus]() } /** * Xmpp 消息的事件总线 */ class XmppEventBus private[ebus] extends EventBus with SubchannelClassification { type Event = BusEvent type Classifier = String type Subscriber = ActorRef override protected implicit def subclassification: Subclassification[Classifier] = new PathSubclassification override protected def publish(event: Event, subscriber: Subscriber): Unit = subscriber ! event override protected def classify(event: Event): Classifier = event.path } class PathSubclassification extends Subclassification[String] { override def isEqual(x: String, y: String): Boolean = x == y override def isSubclass(x: String, y: String): Boolean = x.startsWith(y) || { val pattern = y.r pattern.findFirstIn(x) match { case Some(v) if v == x => true case _ => false } } } trait BusEvent { val path: String } case class XStatus(path: String, jid: String, status: XPresence) extends BusEvent case class XMessageIn(path: String, from: String, to: String, body: String, subject: String = null, thread: String = null) extends BusEvent private[xmpp] case class XMessageOut(from: String, to: String, body: String, subject: String = null, thread: String = null) private[xmpp] case class XRoomPrivateOut(from: String, to: String, body: String, subject: String = null, thread: String = null) private[xmpp] case class ChatIn(chat: Chat, msg: Message)
stepover/zzb-xmpp
src/main/scala/zzb/xmpp/ebus/XmppEventBus.scala
Scala
mit
2,128
package com.atomist.util.yaml import java.util import java.util.Properties import org.scalatest.{FlatSpec, Matchers} class PropertiesToMapStructureParserTest extends FlatSpec with Matchers { import PropertiesToMapStructureParser._ import YamlTestUtils._ "YamlUtils" should "construct a valid structure for a valid single period-scoped property" in { val properties = new Properties properties.put(namePropertyKeyPath, namePropertyValue) val resultingYamlMap = constructYamlMapForProperties(properties) val springEntry = resultingYamlMap.get(springPropertyPathElement) val applicationEntry = springEntry.asInstanceOf[util.HashMap[String, Object]].get(applicationPropertyPathElement) val name = applicationEntry.asInstanceOf[util.HashMap[String, Object]].get(namePropertyPathElement) assertResult(namePropertyValue)(name) } "YamlUtils" should "construct a valid structure for a couple of nested period-scoped properties" in { val properties = new Properties properties.put(namePropertyKeyPath, namePropertyValue) properties.put(idPropertyKeyPath, idPropertyValue) val resultingYamlMap = constructYamlMapForProperties(properties) val springEntry = resultingYamlMap.get(springPropertyPathElement) val applicationEntry = springEntry.asInstanceOf[util.HashMap[String, Object]].get(applicationPropertyPathElement) val name = applicationEntry.asInstanceOf[util.HashMap[String, Object]].get(namePropertyPathElement) assertResult(namePropertyValue)(name) val id = applicationEntry.asInstanceOf[util.HashMap[String, Object]].get("id") assertResult(idPropertyValue)(id) } "YamlUtils" should "construct a valid structure for no period-scoped properties at all" in { val properties = new Properties properties.put(singleUnNestedPropertyKey, singleUnNestedPropertyValue) val resultingYamlMap = constructYamlMapForProperties(properties) val singlePropertyEntryValue = resultingYamlMap.get(singleUnNestedPropertyKey) assertResult(singleUnNestedPropertyValue)(singlePropertyEntryValue) } }
atomist/rug
src/test/scala/com/atomist/util/yaml/PropertiesToMapStructureParserTest.scala
Scala
gpl-3.0
2,083
/** benchmark for testing equality. * Mix: == between non-numbers ith Object.equals as equality: 66% * 50% of these are tests where eq is true. * == between boxed integers: 17% * == between boxed characters: 5% * == between boxed bytes: 5% * == between boxed longs: 5% * == between boxed shorts: < 1% * == between boxed floats: < 1% * == between boxed doubles: < 1% * In all cases 50% of the tests return true. */ object eqeq extends testing.Benchmark { def eqeqtest[T](creator: Int => T, n: Int): Int = { val elems = Array.tabulate[AnyRef](n)(i => creator(i % 2).asInstanceOf[AnyRef]) var sum = 0 var i = 0 while (i < n) { var j = 0 while (j < n) { if (elems(i) == elems(j)) sum += 1 j += 1 } i += 1 } sum } val obj1 = new Object val obj2 = new Object def run() { var sum = 0 sum += eqeqtest(x => if (x == 0) obj1 else obj2, 2000) sum += eqeqtest(x => x, 1000) sum += eqeqtest(x => x.toChar, 550) sum += eqeqtest(x => x.toByte, 550) sum += eqeqtest(x => x.toLong, 550) sum += eqeqtest(x => x.toShort, 100) sum += eqeqtest(x => x.toFloat, 100) sum += eqeqtest(x => x.toDouble, 100) assert(sum == 2968750) } }
felixmulder/scala
test/files/bench/equality/eqeq.scala
Scala
bsd-3-clause
1,292
package org.jetbrains.plugins.scala package codeInspection.booleans import com.intellij.codeInspection.{ProblemHighlightType, ProblemsHolder} import com.intellij.openapi.project.Project import com.intellij.psi.PsiElement import org.jetbrains.plugins.scala.codeInspection.{AbstractFixOnPsiElement, AbstractInspection} import org.jetbrains.plugins.scala.extensions.PsiElementExt import org.jetbrains.plugins.scala.lang.completion.ScalaKeyword import org.jetbrains.plugins.scala.lang.psi.api.base.ScLiteral import org.jetbrains.plugins.scala.lang.psi.api.expr._ import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.{createExpressionFromText, createExpressionWithContextFromText} import org.jetbrains.plugins.scala.lang.psi.types.result._ import org.jetbrains.plugins.scala.lang.psi.types.{ScTypeExt, api} import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaRefactoringUtil.getShortText /** * Nikolay.Tropin * 4/23/13 * */ class SimplifyBooleanInspection extends AbstractInspection("SimplifyBoolean", "Simplify boolean expression") { override protected def actionFor(implicit holder: ProblemsHolder): PartialFunction[PsiElement, Any] = { case _: ScParenthesisedExpr => //do nothing to avoid many similar expressions case expr: ScExpression if SimplifyBooleanUtil.canBeSimplified(expr) => holder.registerProblem(expr, "Simplify boolean expression", ProblemHighlightType.GENERIC_ERROR_OR_WARNING, new SimplifyBooleanQuickFix(expr)) } } class SimplifyBooleanQuickFix(expr: ScExpression) extends AbstractFixOnPsiElement("Simplify " + getShortText(expr), expr) { def doApplyFix(project: Project) { val scExpr = getElement if (scExpr.isValid && SimplifyBooleanUtil.canBeSimplified(scExpr)) { val simplified = SimplifyBooleanUtil.simplify(scExpr) scExpr.replaceExpression(simplified, removeParenthesis = true) } } } object SimplifyBooleanUtil { val boolInfixOperations = Set("==", "!=", "&&", "&", "||", "|", "^") def canBeSimplified(expr: ScExpression, isTopLevel: Boolean = 
true): Boolean = { expr match { case _: ScLiteral if !isTopLevel => booleanConst(expr).isDefined case ScParenthesisedExpr(e) => canBeSimplified(e, isTopLevel) case expression: ScExpression => val children = getScExprChildren(expr) isBooleanOperation(expression) && isOfBooleanType(expr) && children.exists(canBeSimplified(_, isTopLevel = false)) } } def simplify(expr: ScExpression, isTopLevel: Boolean = true): ScExpression = { if (canBeSimplified(expr, isTopLevel) && booleanConst(expr).isEmpty) { val exprCopy = createExpressionWithContextFromText(expr.getText, expr.getContext, expr) val children = getScExprChildren(exprCopy) children.foreach(child => exprCopy.getNode.replaceChild(child.getNode, simplify(child, isTopLevel = false).getNode)) simplifyTrivially(exprCopy) } else expr } def isBooleanOperation(expression: ScExpression): Boolean = expression match { case ScPrefixExpr(operation, operand) => operation.refName == "!" && isOfBooleanType(operand) case ScInfixExpr(left, oper, right) => boolInfixOperations.contains(oper.refName) && isOfBooleanType(left) && isOfBooleanType(right) case _ => false } def isOfBooleanType(expr: ScExpression): Boolean = { import expr.projectContext expr.`type`().getOrAny.weakConforms(api.Boolean) } private def getScExprChildren(expr: ScExpression) = expr.children.collect { case expr: ScExpression => expr }.toList private def booleanConst(expr: ScExpression): Option[Boolean] = expr match { case literal: ScLiteral => literal.getText match { case "true" => Some(true) case "false" => Some(false) case _ => None } case _ => None } private def simplifyTrivially(expr: ScExpression): ScExpression = expr match { case parenthesized: ScParenthesisedExpr => val copy = parenthesized.copy.asInstanceOf[ScParenthesisedExpr] copy.replaceExpression(copy.expr.getOrElse(copy), removeParenthesis = true) case ScPrefixExpr(operation, operand) => if (operation.refName != "!") expr else { booleanConst(operand) match { case Some(bool: Boolean) => 
createExpressionFromText((!bool).toString)(expr.getManager) case None => expr } } case ScInfixExpr(leftExpr, operation, rightExpr) => val operName = operation.refName if (!boolInfixOperations.contains(operName)) expr else { booleanConst(leftExpr) match { case Some(bool: Boolean) => simplifyInfixWithLiteral(bool, operName, rightExpr) case None => booleanConst(rightExpr) match { case Some(bool: Boolean) => simplifyInfixWithLiteral(bool, operName, leftExpr) case None => expr } } } case _ => expr } private def simplifyInfixWithLiteral(value: Boolean, operation: String, expr: ScExpression): ScExpression = { implicit val projectContext = expr.projectContext val text: String = booleanConst(expr) match { case Some(bool: Boolean) => val result: Boolean = operation match { case "==" => bool == value case "!=" | "^" => bool != value case "&&" | "&" => bool && value case "||" | "|" => bool || value } result.toString case _ => (value, operation) match { case (true, "==") | (false, "!=") | (false, "^") | (true, "&&") | (true, "&") | (false, "||") | (false, "|") => expr.getText case (false, "==") | (true, "!=") | (true, "^") => val negated: ScPrefixExpr = createExpressionFromText("!a").asInstanceOf[ScPrefixExpr] val copyExpr = expr.copy.asInstanceOf[ScExpression] negated.operand.replaceExpression(copyExpr, removeParenthesis = true) negated.getText case (true, "||") | (true, "|") => ScalaKeyword.TRUE case (false, "&&") | (false, "&") => ScalaKeyword.FALSE case _ => throw new IllegalArgumentException("Wrong operation") } } createExpressionFromText(text) } }
triplequote/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/booleans/SimplifyBooleanInspection.scala
Scala
apache-2.0
6,185
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.master.ui import javax.servlet.http.HttpServletRequest import scala.concurrent.Await import scala.xml.Node import akka.pattern.ask import org.apache.spark.deploy.ExecutorState import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState} import org.apache.spark.deploy.master.ExecutorDesc import org.apache.spark.ui.{UIUtils, WebUIPage} import org.apache.spark.util.Utils private[ui] class ApplicationPage(parent: MasterWebUI) extends WebUIPage("app") { private val master = parent.masterActorRef private val timeout = parent.timeout /** Executor details for a particular application */ def render(request: HttpServletRequest): Seq[Node] = { val appId = request.getParameter("appId") val stateFuture = (master ? 
RequestMasterState)(timeout).mapTo[MasterStateResponse] val state = Await.result(stateFuture, timeout) val app = state.activeApps.find(_.id == appId).getOrElse({ state.completedApps.find(_.id == appId).getOrElse(null) }) if (app == null) { val msg = <div class="row-fluid">No running application with ID {appId}</div> return UIUtils.basicSparkPage(msg, "Not Found") } val executorHeaders = Seq("ExecutorID", "Worker", "Cores", "Memory", "State", "Logs") val allExecutors = (app.executors.values ++ app.removedExecutors).toSet.toSeq // This includes executors that are either still running or have exited cleanly val executors = allExecutors.filter { exec => !ExecutorState.isFinished(exec.state) || exec.state == ExecutorState.EXITED } val removedExecutors = allExecutors.diff(executors) val executorsTable = UIUtils.listingTable(executorHeaders, executorRow, executors) val removedExecutorsTable = UIUtils.listingTable(executorHeaders, executorRow, removedExecutors) val content = <div class="row-fluid"> <div class="span12"> <ul class="unstyled"> <li><strong>ID:</strong> {app.id}</li> <li><strong>Name:</strong> {app.desc.name}</li> <li><strong>User:</strong> {app.desc.user}</li> <li><strong>Cores:</strong> { if (app.desc.maxCores.isEmpty) { "Unlimited (%s granted)".format(app.coresGranted) } else { "%s (%s granted, %s left)".format( app.desc.maxCores.get, app.coresGranted, app.coresLeft) } } </li> <li> <strong>Executor Memory:</strong> {Utils.megabytesToString(app.desc.memoryPerExecutorMB)} </li> <li><strong>Submit Date:</strong> {app.submitDate}</li> <li><strong>State:</strong> {app.state}</li> <li><strong><a href={app.desc.appUiUrl}>Application Detail UI</a></strong></li> </ul> </div> </div> <div class="row-fluid"> <!-- Executors --> <div class="span12"> <h4> Executor Summary </h4> {executorsTable} { if (removedExecutors.nonEmpty) { <h4> Removed Executors </h4> ++ removedExecutorsTable } } </div> </div>; UIUtils.basicSparkPage(content, "Application: " + app.desc.name) } 
private def executorRow(executor: ExecutorDesc): Seq[Node] = { <tr> <td>{executor.id}</td> <td> <a href={executor.worker.webUiAddress}>{executor.worker.id}</a> </td> <td>{executor.cores}</td> <td>{executor.memory}</td> <td>{executor.state}</td> <td> <a href={"%s/logPage?appId=%s&executorId=%s&logType=stdout" .format(executor.worker.webUiAddress, executor.application.id, executor.id)}>stdout</a> <a href={"%s/logPage?appId=%s&executorId=%s&logType=stderr" .format(executor.worker.webUiAddress, executor.application.id, executor.id)}>stderr</a> </td> </tr> } }
andrewor14/iolap
core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
Scala
apache-2.0
4,776
/* * Copyright 2010 LinkedIn * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.server import kafka.utils.TestUtils import java.io.File import kafka.utils.Utils import kafka.api.FetchRequest import kafka.integration.ProducerConsumerTestHarness import kafka.producer.{SyncProducer, SyncProducerConfig} import kafka.consumer.SimpleConsumer import java.util.Properties import org.scalatest.junit.JUnitSuite import junit.framework.{Assert, TestCase} import org.junit.{After, Before, Test} import junit.framework.Assert._ import kafka.message.{NoCompressionCodec, Message, ByteBufferMessageSet} class ServerShutdownTest extends JUnitSuite { val port = 9999 @Test def testCleanShutdown() { val props = TestUtils.createBrokerConfig(0, port) val config = new KafkaConfig(props) { override val enableZookeeper = false } val host = "localhost" val topic = "test" val sent1 = new ByteBufferMessageSet(NoCompressionCodec, new Message("hello".getBytes()), new Message("there".getBytes())) val sent2 = new ByteBufferMessageSet(NoCompressionCodec, new Message("more".getBytes()), new Message("messages".getBytes())) { val producer = new SyncProducer(getProducerConfig(host, port, 64*1024, 100000, 10000)) val consumer = new SimpleConsumer(host, port, 1000000, 64*1024) val server = new KafkaServer(config) server.startup() // send some messages producer.send(topic, sent1) sent1.getBuffer.rewind Thread.sleep(200) // do a clean shutdown server.shutdown() val cleanShutDownFile = new File(new 
File(config.logDir), server.CLEAN_SHUTDOWN_FILE) assertTrue(cleanShutDownFile.exists) } { val producer = new SyncProducer(getProducerConfig(host, port, 64*1024, 100000, 10000)) val consumer = new SimpleConsumer(host, port, 1000000, 64*1024) val server = new KafkaServer(config) server.startup() // bring the server back again and read the messages var fetched: ByteBufferMessageSet = null while(fetched == null || fetched.validBytes == 0) fetched = consumer.fetch(new FetchRequest(topic, 0, 0, 10000)) TestUtils.checkEquals(sent1.iterator, fetched.iterator) val newOffset = fetched.validBytes // send some more messages producer.send(topic, sent2) sent2.getBuffer.rewind Thread.sleep(200) fetched = null while(fetched == null || fetched.validBytes == 0) fetched = consumer.fetch(new FetchRequest(topic, 0, newOffset, 10000)) TestUtils.checkEquals(sent2.map(m => m.message).iterator, fetched.map(m => m.message).iterator) server.shutdown() Utils.rm(server.config.logDir) } } private def getProducerConfig(host: String, port: Int, bufferSize: Int, connectTimeout: Int, reconnectInterval: Int): SyncProducerConfig = { val props = new Properties() props.put("host", host) props.put("port", port.toString) props.put("buffer.size", bufferSize.toString) props.put("connect.timeout.ms", connectTimeout.toString) props.put("reconnect.interval", reconnectInterval.toString) new SyncProducerConfig(props) } }
tcrayford/hafka
kafka/core/src/test/scala/unit/kafka/server/ServerShutdownTest.scala
Scala
bsd-3-clause
4,424
package tuner.gui.util import java.awt.Graphics2D import javax.media.opengl.{GL,GL2,DebugGL2,GL2GL3,GL2ES1} import javax.media.opengl.fixedfunc.GLMatrixFunc import tuner.gui.P5Panel import com.jogamp.opengl.util.awt.TextRenderer import scala.collection.mutable.ListBuffer object TextAlign { sealed trait TextVAlign case object Top extends TextVAlign case object Middle extends TextVAlign case object Bottom extends TextVAlign sealed trait TextHAlign case object Left extends TextHAlign case object Center extends TextHAlign case object Right extends TextHAlign } object FontLib { import TextAlign._ val horizStrings = new ListBuffer[(String,Int,Int)] val vertStrings = new ListBuffer[(String,Int,Int)] def textWidth(g:Graphics2D, str:String) = { val metrics = g.getFontMetrics(g.getFont) metrics.stringWidth(str) } def begin(renderer:TextRenderer) = { horizStrings.clear vertStrings.clear } def end(gl2:GL2, renderer:TextRenderer, screenW:Int, screenH:Int) = { renderer.beginRendering(screenW, screenH) horizStrings.foreach {case (str,x,y) => renderer.draw(str, x, y)} renderer.flush // vertical strings are more complex vertStrings.foreach {case (str,x,y) => gl2.glMatrixMode(GLMatrixFunc.GL_MODELVIEW) gl2.glPushMatrix gl2.glLoadIdentity gl2.glTranslatef(x, y, 0) gl2.glRotatef(90, 0, 0, 1) renderer.draw(str, 0, 0) renderer.flush gl2.glPopMatrix } renderer.endRendering } def drawString(g:Graphics2D, str:String, x:Int, y:Int, hAlign:TextHAlign, vAlign:TextVAlign) = { val metrics = g.getFontMetrics(g.getFont) val height = metrics.getAscent val width = metrics.stringWidth(str) val xx = hAlign match { case Left => x case Center => x - (width / 2) case Right => x - width } val yy = vAlign match { case Top => y + height case Middle => y + (height / 2) case Bottom => y } g.drawString(str, xx, yy) } def drawString(renderer:TextRenderer, str:String, x:Int, y:Int, hAlign:TextHAlign, vAlign:TextVAlign, screenW:Int, screenH:Int) = { val stringBounds = renderer.getFont.getStringBounds( str, 
renderer.getFontRenderContext) val (width, height) = (stringBounds.getWidth, stringBounds.getHeight) val xx = hAlign match { case Left => x case Center => x - (width / 2) case Right => x - width } val yy = vAlign match { case Top => y + height case Middle => y + (height / 2) case Bottom => y } horizStrings.append((str, xx.toInt, screenH-yy.toInt)) } def drawVString(g:Graphics2D, str:String, x:Int, y:Int, hAlign:TextHAlign, vAlign:TextVAlign) = { // Need to compute these before rotation val metrics = g.getFontMetrics(g.getFont) val height = metrics.getAscent val width = metrics.stringWidth(str) val xx = hAlign match { case Left => x + height case Center => x + (height / 2) case Right => x } val yy = vAlign match { case Top => y case Middle => y + (width / 2) case Bottom => y + width } val oldFont = g.getFont val rotFont = oldFont.deriveFont( java.awt.geom.AffineTransform.getRotateInstance(-3.1415926 / 2.0)) g.setFont(rotFont) g.drawString(str, xx, yy) g.setFont(oldFont) } def drawVString(gl2:GL2, renderer:TextRenderer, str:String, x:Int, y:Int, hAlign:TextHAlign, vAlign:TextVAlign, screenW:Int, screenH:Int) = { // Need to compute these before rotation val stringBounds = renderer.getFont.getStringBounds( str, renderer.getFontRenderContext) val (width, height) = (stringBounds.getWidth, stringBounds.getHeight) val xx = hAlign match { case Left => x + height case Center => x + (height / 2) case Right => x } val yy = vAlign match { case Top => y case Middle => y + (width / 2) case Bottom => y + width } vertStrings.append((str, xx.toInt, (screenH-yy).toInt)) } }
gabysbrain/tuner
src/main/scala/tuner/gui/util/FontLib.scala
Scala
mit
4,215
package stepping object ForComprehensionListObject { def main(args: Array[String]): Unit = { val l= List(new Object(), "deux", "quatre", "huit") for (n <- l) { n } foo(l) new ForComprehensionListObject(l).bar } def foo(l: List[Object]): Unit = { for (n <- l) { n } } } class ForComprehensionListObject (l: List[Object]) { for (n <- l) { n } def bar(): Unit = { for (n <- l) { n } } }
Kwestor/scala-ide
org.scala-ide.sdt.debug.tests/test-workspace/debug/src/stepping/ForComprehensionListObject.scala
Scala
bsd-3-clause
472
/* * Copyright 2009-2010 WorldWide Conferencing, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.liftweb { package json { import _root_.org.scalacheck._ import _root_.org.scalacheck.Prop._ import _root_.org.specs.Specification import _root_.org.specs.runner.{Runner, JUnit} import _root_.org.specs.ScalaCheck class ParserTest extends Runner(ParserSpec) with JUnit object ParserSpec extends Specification with JValueGen with ScalaCheck { import JsonAST._ import JsonParser._ import Printer._ "Any valid json can be parsed" in { val parsing = (json: JValue) => { parse(Printer.pretty(render(json))); true } forAll(parsing) must pass } "Buffer size does not change parsing result" in { val bufSize = Gen.choose(2, 64) val parsing = (x: JValue, s1: Int, s2: Int) => { parseVal(x, s1) == parseVal(x, s2) } forAll(genObject, bufSize, bufSize)(parsing) must pass } "Parsing is thread safe" in { import java.util.concurrent._ val json = Examples.person val executor = Executors.newFixedThreadPool(100) val results = (0 to 100).map(_ => executor.submit(new Callable[JValue] { def call = parse(json) })).toList.map(_.get) results.zip(results.tail).forall(pair => pair._1 == pair._2) mustEqual true } "All valid string escape characters can be parsed" in { parse("[\"abc\\\"\\\\\\/\\b\\f\\n\\r\\t\\u00a0\"]") must_== JArray(JString("abc\"\\/\b\f\n\r\t\u00a0")::Nil) } implicit def arbJValue: Arbitrary[JValue] = Arbitrary(genObject) private def parseVal(json: JValue, bufSize: Int) = { val 
existingSize = JsonParser.Segments.segmentSize try { JsonParser.Segments.segmentSize = bufSize JsonParser.Segments.clear JsonParser.parse(compact(render(json))) } finally { JsonParser.Segments.segmentSize = existingSize } } } } }
jeppenejsum/liftweb
framework/lift-base/lift-json/src/test/scala/net/liftweb/json/ParserTest.scala
Scala
apache-2.0
2,369
/** * Licensed to Big Data Genomics (BDG) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The BDG licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bdgenomics.utils.instrumentation import com.netflix.servo.monitor.Monitor import com.netflix.servo.tag.Tags import java.io.{ PrintWriter, StringWriter } import org.scalatest.FunSuite class MonitorTableSuite extends FunSuite { test("Table is rendered correctly") { val headers = Array( new TableHeader(name = "Col1", valueExtractor = ValueExtractor.forTagValueWithKey("TagKey1"), None, alignment = Alignment.Left), new TableHeader(name = "Col2", valueExtractor = ValueExtractor.forMonitorMatchingTag(ServoTimer.TotalTimeTag), formatFunction = Some(formatFunction1)), new TableHeader(name = "Col3", valueExtractor = ValueExtractor.forMonitorValue())) val rows = Array[Monitor[_]]( new ServoTimer("timer1"), new ServoTimer("timer2")) rows(0).asInstanceOf[ServoTimer].addTag(Tags.newTag("TagKey1", "Col1Value1")) rows(1).asInstanceOf[ServoTimer].addTag(Tags.newTag("TagKey1", "Col1Value2 A Bit Longer")) rows(0).asInstanceOf[ServoTimer].recordNanos(100) rows(1).asInstanceOf[ServoTimer].recordNanos(200000) val monitorTable = new MonitorTable(headers, rows) val renderedTable = getRenderedTable(monitorTable) println(renderedTable) assert(renderedTable === expectedTable) } private def getRenderedTable(table: 
MonitorTable): String = { val stringWriter = new StringWriter() val out = new PrintWriter(stringWriter) table.print(out) out.flush() stringWriter.getBuffer.toString } private def formatFunction1(value: Any): String = { value.toString + " nanoseconds" } val expectedTable = """+-------------------------+--------------------+--------+ #| Col1 | Col2 | Col3 | #+-------------------------+--------------------+--------+ #| Col1Value1 | 100 nanoseconds | 100 | #| Col1Value2 A Bit Longer | 200000 nanoseconds | 200000 | #+-------------------------+--------------------+--------+ #""".stripMargin('#') }
tdanford/bdg-utils
utils-metrics/src/test/scala/org/bdgenomics/utils/instrumentation/MonitorTableSuite.scala
Scala
apache-2.0
2,829
/* * Copyright 2015 Foundational Development * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package pro.foundev.commons.test_support import pro.foundev.commons.benchmarking.Timer class MockTimer extends Timer { private var duration: Long = 0 def setDuration(nanos: Long) = { duration = nanos } def profile[R](callback: () => R):R = { _lastProfile = duration callback() } }
rssvihla/datastax_work
spark_commons/commons/src/test/scala/pro/foundev/commons/test_support/MockTimer.scala
Scala
apache-2.0
915
package com.bstek.designer.editor import com.bstek.designer.core.surface.DoradoDesignerEditorPanel import com.bstek.designer.core.{DoradoToggleEditorModeAction, DoradoComponentTreeToolWindowContent, DoradoComponentTreeToolWindowManager, DoradoToolWindowContent} import com.intellij.openapi.actionSystem.{DefaultActionGroup, ActionGroup} import com.intellij.openapi.fileEditor.FileEditorManager import com.intellij.openapi.project.Project import com.intellij.openapi.wm.ToolWindowAnchor /** * Created by robin on 14-7-24. */ //--伴生对象 object ViewComponentTreeToolWindowManager { def getInstance(designer: DoradoDesignerEditorPanel): DoradoComponentTreeToolWindowContent = { val manager: ViewComponentTreeToolWindowManager = getInstance(designer.getProject) if (manager.isEditorMode) { return manager.getContent(designer).asInstanceOf[DoradoComponentTreeToolWindowContent] } return manager.doradoToolWindowContent } def getInstance(project: Project): ViewComponentTreeToolWindowManager = { return project.getComponent(classOf[ViewComponentTreeToolWindowManager]) } } class ViewComponentTreeToolWindowManager(project: Project, fileEditorManager: FileEditorManager) extends DoradoComponentTreeToolWindowManager(project, fileEditorManager) { override def createDoradoToolWindowContent: DoradoComponentTreeToolWindowContent = { return new ViewComponentTreeToolWindowContent(project, true) } override def createRightDoradoToggleEditorModeAction: DoradoToggleEditorModeAction = { return new ViewToggleEditorModeAction(this, project, ToolWindowAnchor.RIGHT) } override def createLeftDoradoToggleEditorModeAction: DoradoToggleEditorModeAction = { return new ViewToggleEditorModeAction(this, project, ToolWindowAnchor.LEFT) } }
OuYuBin/IDEADorado
dorado-editor/src/com/bstek/designer/editor/ViewComponentTreeToolWindowManager.scala
Scala
apache-2.0
1,793
package com.toolkit.parser.apache

import java.io.File
import java.util.Scanner
import java.util.regex.{Matcher, Pattern}

/**
 * Parses Apache combined-format access log lines into ApacheAccessLogRecord
 * instances via a single precompiled regex.
 *
 * Created by rahul kumar on 09/12/14.
 */
class ApacheAccessLogParser extends Serializable {

  // Regex fragments, one per field of the combined log format.
  private val ddd = "\\d{1,3}"
  private val ip = s"($ddd\\.$ddd\\.$ddd\\.$ddd)?"
  private val client = "(\\S+)"
  private val user = "(\\S+)"
  private val dateTime = "(\\[.+?\\])"
  private val request = "\"(.*?)\""
  private val status = "(\\d{3})"
  private val bytes = "(\\S+)"
  private val referer = "\"(.*?)\""
  private val agent = "\"(.*?)\""
  private val regex = s"$ip $client $user $dateTime $request $status $bytes $referer $agent"
  // Compiled once per parser instance and reused for every record.
  private val p = Pattern.compile(regex)

  /**
   * Parses one log line.
   *
   * @param record a raw access-log line
   * @return Some(record) when the line matches the combined format, else None
   */
  def parseRecord(record: String): Option[ApacheAccessLogRecord] = {
    val matcher = p.matcher(record)
    if (matcher.find) {
      Some(buildAccessLogRecord(matcher))
    } else {
      None
    }
  }

  /**
   * Like parseRecord, but substitutes the shared null-object record for
   * unparseable lines so downstream code never sees None.
   * FIX: dropped the stray debug println and reused parseRecord instead of
   * duplicating the matching logic.
   */
  def parseRecordReturningNullObjectOnFailure(record: String): Option[ApacheAccessLogRecord] =
    parseRecord(record).orElse(Some(ApacheAccessLogParser.nullObjectAccessLogRecord))

  // Maps the nine capture groups onto the record's fields, in order.
  private def buildAccessLogRecord(matcher: Matcher) = {
    ApacheAccessLogRecord(
      matcher.group(1),
      matcher.group(2),
      matcher.group(3),
      matcher.group(4),
      matcher.group(5),
      matcher.group(6),
      matcher.group(7),
      matcher.group(8),
      matcher.group(9))
  }

  /**
   * Opens a comma-delimited Scanner over the given file.
   * FIX: the original opened the literal path "file" instead of the `file`
   * parameter and discarded the resulting Scanner; it now uses the parameter
   * and returns the Scanner (callers previously got Unit, so returning a
   * value is backward-compatible).
   *
   * @throws java.io.FileNotFoundException if the file does not exist
   */
  def newLogParser(file: File): Scanner =
    new Scanner(file).useDelimiter(",")
}

object ApacheAccessLogParser {
  // Null-object record: every field is "-", mirroring Apache's own
  // placeholder for missing values.
  val nullObjectAccessLogRecord = ApacheAccessLogRecord("-", "-", "-", "-", "-", "-", "-", "-", "-")
}
rahulkumar-aws/logManagementToolkit
src/main/scala/com/toolkit/parser/apache/ApacheAccessLogParser.scala
Scala
apache-2.0
1,777
package org.scalajs.core.ir

import java.security.{MessageDigest, DigestOutputStream}
import java.io.{OutputStream, DataOutputStream}
import java.util.Arrays

import Trees._
import Types._
import Tags._

/** Computes SHA-1 based hashes of Scala.js IR trees.
 *
 *  Tree structure and position information are mixed into two separate
 *  digests, so callers can compare trees with or without positions
 *  (see `hashesEqual`).
 */
object Hashers {

  /** Returns a copy of `methodDef` carrying its hash; a no-op if the hash is
   *  already present. The hash covers position, staticness, name, params,
   *  result type, body and optimizer hints — in that exact order.
   */
  def hashMethodDef(methodDef: MethodDef): MethodDef = {
    if (methodDef.hash.isDefined) methodDef
    else {
      val hasher = new TreeHasher()
      val MethodDef(static, name, args, resultType, body) = methodDef

      hasher.mixPos(methodDef.pos)
      hasher.mixBoolean(static)
      hasher.mixPropertyName(name)
      hasher.mixTrees(args)
      hasher.mixType(resultType)
      hasher.mixTree(body)
      hasher.mixInt(methodDef.optimizerHints.bits)

      val hash = hasher.finalizeHash()

      MethodDef(static, name, args, resultType, body)(
          methodDef.optimizerHints, Some(hash))(methodDef.pos)
    }
  }

  /** Hash definitions from a ClassDef where applicable */
  def hashDefs(defs: List[Tree]): List[Tree] = defs map {
    case methodDef: MethodDef => hashMethodDef(methodDef)
    case otherDef             => otherDef
  }

  /** Hash the definitions in a ClassDef (where applicable) */
  def hashClassDef(classDef: ClassDef): ClassDef = {
    classDef.copy(defs = hashDefs(classDef.defs))(
        classDef.optimizerHints)(classDef.pos)
  }

  /** Compares two hashes; position hashes are compared only when
   *  `considerPos` is true.
   */
  def hashesEqual(x: TreeHash, y: TreeHash, considerPos: Boolean): Boolean = {
    Arrays.equals(x.treeHash, y.treeHash) &&
    (!considerPos || Arrays.equals(x.posHash, y.posHash))
  }

  /** Renders a hash as a hex string (tree hash, then position hash when
   *  `considerPos` is true), suitable for use as a version identifier.
   */
  def hashAsVersion(hash: TreeHash, considerPos: Boolean): String = {
    // 2 chars per byte, 20 bytes per hash
    val size = 2 * (if (considerPos) 2 else 1) * 20
    val builder = new StringBuilder(size)

    def hexDigit(digit: Int): Char = Character.forDigit(digit, 16)

    def append(hash: Array[Byte]): Unit = {
      for (b <- hash)
        builder.append(hexDigit(b >> 4)).append(hexDigit(b & 0xF))
    }

    append(hash.treeHash)
    if (considerPos) append(hash.posHash)

    builder.toString
  }

  /** Stateful helper that feeds trees into two SHA-1 digests: one for tree
   *  structure (`treeStream`) and one for positions (`posStream`).
   *  The mix order in every method below defines the hash format — do not
   *  reorder calls.
   */
  private final class TreeHasher {
    private def newDigest = MessageDigest.getInstance("SHA-1")

    // Wraps a digest in a DataOutputStream; the inner OutputStream discards
    // bytes so only the digest observes the data.
    private def newDigestStream(digest: MessageDigest) = {
      val out = new OutputStream {
        def write(b: Int): Unit = ()
      }
      val digOut = new DigestOutputStream(out, digest)
      new DataOutputStream(digOut)
    }

    private[this] val treeDigest = newDigest
    private[this] val treeStream = newDigestStream(treeDigest)
    private[this] val posDigest = newDigest
    private[this] val posStream = newDigestStream(posDigest)

    /** Finishes both digests and packages them as a TreeHash. */
    def finalizeHash(): TreeHash =
      new TreeHash(treeDigest.digest(), posDigest.digest())

    /** Mixes one tree: its position first, then a per-node tag followed by
     *  the node's children/fields in declaration order.
     */
    def mixTree(tree: Tree): Unit = {
      mixPos(tree.pos)
      tree match {
        case EmptyTree =>
          mixTag(TagEmptyTree)

        case VarDef(ident, vtpe, mutable, rhs) =>
          mixTag(TagVarDef)
          mixIdent(ident)
          mixType(vtpe)
          mixBoolean(mutable)
          mixTree(rhs)

        case ParamDef(ident, ptpe, mutable, rest) =>
          mixTag(TagParamDef)
          mixIdent(ident)
          mixType(ptpe)
          mixBoolean(mutable)
          /* TODO Remove this test in the next major release.
           * In 0.6.x we need this test so that the hash of a non-rest ParamDef
           * emitted in 0.6.3 format is the same as an (implicitly non-rest)
           * ParamDef emitted in 0.6.0 format.
           */
          if (rest)
            mixBoolean(rest)

        case Skip() =>
          mixTag(TagSkip)

        case Block(stats) =>
          mixTag(TagBlock)
          mixTrees(stats)

        case Labeled(label, tpe, body) =>
          mixTag(TagLabeled)
          mixIdent(label)
          mixType(tpe)
          mixTree(body)

        case Assign(lhs, rhs) =>
          mixTag(TagAssign)
          mixTree(lhs)
          mixTree(rhs)

        case Return(expr, label) =>
          mixTag(TagReturn)
          mixTree(expr)
          mixOptIdent(label)

        case If(cond, thenp, elsep) =>
          mixTag(TagIf)
          mixTree(cond)
          mixTree(thenp)
          mixTree(elsep)
          mixType(tree.tpe)

        case While(cond, body, label) =>
          mixTag(TagWhile)
          mixTree(cond)
          mixTree(body)
          mixOptIdent(label)

        case DoWhile(body, cond, label) =>
          mixTag(TagDoWhile)
          mixTree(body)
          mixTree(cond)
          mixOptIdent(label)

        case Try(block, errVar, handler, finalizer) =>
          mixTag(TagTry)
          mixTree(block)
          mixIdent(errVar)
          mixTree(handler)
          mixTree(finalizer)
          mixType(tree.tpe)

        case Throw(expr) =>
          mixTag(TagThrow)
          mixTree(expr)

        case Continue(label) =>
          mixTag(TagContinue)
          mixOptIdent(label)

        case Match(selector, cases, default) =>
          mixTag(TagMatch)
          mixTree(selector)
          cases foreach {
            case (patterns, body) =>
              mixTrees(patterns)
              mixTree(body)
          }
          mixTree(default)
          mixType(tree.tpe)

        case Debugger() =>
          mixTag(TagDebugger)

        case New(cls, ctor, args) =>
          mixTag(TagNew)
          mixType(cls)
          mixIdent(ctor)
          mixTrees(args)

        case LoadModule(cls) =>
          mixTag(TagLoadModule)
          mixType(cls)

        case StoreModule(cls, value) =>
          mixTag(TagStoreModule)
          mixType(cls)
          mixTree(value)

        case Select(qualifier, item) =>
          mixTag(TagSelect)
          mixTree(qualifier)
          mixIdent(item)
          mixType(tree.tpe)

        case Apply(receiver, method, args) =>
          mixTag(TagApply)
          mixTree(receiver)
          mixIdent(method)
          mixTrees(args)
          mixType(tree.tpe)

        case ApplyStatically(receiver, cls, method, args) =>
          mixTag(TagApplyStatically)
          mixTree(receiver)
          mixType(cls)
          mixIdent(method)
          mixTrees(args)
          mixType(tree.tpe)

        case ApplyStatic(cls, method, args) =>
          mixTag(TagApplyStatic)
          mixType(cls)
          mixIdent(method)
          mixTrees(args)
          mixType(tree.tpe)

        case UnaryOp(op, lhs) =>
          mixTag(TagUnaryOp)
          mixInt(op)
          mixTree(lhs)

        case BinaryOp(op, lhs, rhs) =>
          mixTag(TagBinaryOp)
          mixInt(op)
          mixTree(lhs)
          mixTree(rhs)

        case NewArray(tpe, lengths) =>
          mixTag(TagNewArray)
          mixType(tpe)
          mixTrees(lengths)

        case ArrayValue(tpe, elems) =>
          mixTag(TagArrayValue)
          mixType(tpe)
          mixTrees(elems)

        case ArrayLength(array) =>
          mixTag(TagArrayLength)
          mixTree(array)

        case ArraySelect(array, index) =>
          mixTag(TagArraySelect)
          mixTree(array)
          mixTree(index)
          mixType(tree.tpe)

        case RecordValue(tpe, elems) =>
          mixTag(TagRecordValue)
          mixType(tpe)
          mixTrees(elems)

        case IsInstanceOf(expr, cls) =>
          mixTag(TagIsInstanceOf)
          mixTree(expr)
          mixType(cls)

        case AsInstanceOf(expr, cls) =>
          mixTag(TagAsInstanceOf)
          mixTree(expr)
          mixType(cls)

        case Unbox(expr, charCode) =>
          mixTag(TagUnbox)
          mixTree(expr)
          mixInt(charCode)

        case GetClass(expr) =>
          mixTag(TagGetClass)
          mixTree(expr)

        case CallHelper(helper, args) =>
          mixTag(TagCallHelper)
          mixString(helper)
          mixTrees(args)
          mixType(tree.tpe)

        case JSNew(ctor, args) =>
          mixTag(TagJSNew)
          mixTree(ctor)
          mixTrees(args)

        case JSDotSelect(qualifier, item) =>
          mixTag(TagJSDotSelect)
          mixTree(qualifier)
          mixIdent(item)

        case JSBracketSelect(qualifier, item) =>
          mixTag(TagJSBracketSelect)
          mixTree(qualifier)
          mixTree(item)

        case JSFunctionApply(fun, args) =>
          mixTag(TagJSFunctionApply)
          mixTree(fun)
          mixTrees(args)

        case JSDotMethodApply(receiver, method, args) =>
          mixTag(TagJSDotMethodApply)
          mixTree(receiver)
          mixIdent(method)
          mixTrees(args)

        case JSBracketMethodApply(receiver, method, args) =>
          mixTag(TagJSBracketMethodApply)
          mixTree(receiver)
          mixTree(method)
          mixTrees(args)

        case JSSuperBracketSelect(cls, qualifier, item) =>
          mixTag(TagJSSuperBracketSelect)
          mixType(cls)
          mixTree(qualifier)
          mixTree(item)

        case JSSuperBracketCall(cls, receiver, method, args) =>
          mixTag(TagJSSuperBracketCall)
          mixType(cls)
          mixTree(receiver)
          mixTree(method)
          mixTrees(args)

        case JSSuperConstructorCall(args) =>
          mixTag(TagJSSuperConstructorCall)
          mixTrees(args)

        case LoadJSConstructor(cls) =>
          mixTag(TagLoadJSConstructor)
          mixType(cls)

        case LoadJSModule(cls) =>
          mixTag(TagLoadJSModule)
          mixType(cls)

        case JSSpread(items) =>
          mixTag(TagJSSpread)
          mixTree(items)

        case JSDelete(prop) =>
          mixTag(TagJSDelete)
          mixTree(prop)

        case JSUnaryOp(op, lhs) =>
          mixTag(TagJSUnaryOp)
          mixInt(op)
          mixTree(lhs)

        case JSBinaryOp(op, lhs, rhs) =>
          mixTag(TagJSBinaryOp)
          mixInt(op)
          mixTree(lhs)
          mixTree(rhs)

        case JSArrayConstr(items) =>
          mixTag(TagJSArrayConstr)
          mixTrees(items)

        case JSObjectConstr(fields) =>
          mixTag(TagJSObjectConstr)
          fields foreach {
            case (pn, value) =>
              mixPropertyName(pn)
              mixTree(value)
          }

        case JSEnvInfo() =>
          mixTag(TagJSEnvInfo)

        case JSLinkingInfo() =>
          mixTag(TagJSLinkingInfo)

        case Undefined() =>
          mixTag(TagUndefined)

        case UndefinedParam() =>
          mixTag(TagUndefinedParam)
          mixType(tree.tpe)

        case Null() =>
          mixTag(TagNull)

        case BooleanLiteral(value) =>
          mixTag(TagBooleanLiteral)
          mixBoolean(value)

        case IntLiteral(value) =>
          mixTag(TagIntLiteral)
          mixInt(value)

        case LongLiteral(value) =>
          mixTag(TagLongLiteral)
          mixLong(value)

        case FloatLiteral(value) =>
          mixTag(TagFloatLiteral)
          mixFloat(value)

        case DoubleLiteral(value) =>
          mixTag(TagDoubleLiteral)
          mixDouble(value)

        case StringLiteral(value) =>
          mixTag(TagStringLiteral)
          mixString(value)

        case ClassOf(cls) =>
          mixTag(TagClassOf)
          mixType(cls)

        case VarRef(ident) =>
          mixTag(TagVarRef)
          mixIdent(ident)
          mixType(tree.tpe)

        case This() =>
          mixTag(TagThis)
          mixType(tree.tpe)

        case Closure(captureParams, params, body, captureValues) =>
          mixTag(TagClosure)
          mixTrees(captureParams)
          mixTrees(params)
          mixTree(body)
          mixTrees(captureValues)

        case _ =>
          sys.error(s"Unable to hash tree of class ${tree.getClass}")
      }
    }

    def mixTrees(trees: List[Tree]): Unit =
      trees.foreach(mixTree)

    /** Mixes a type: a tag for primitive types, plus name/dimension/field
     *  data for class, array and record types.
     */
    def mixType(tpe: Type): Unit = tpe match {
      case AnyType     => mixTag(TagAnyType)
      case NothingType => mixTag(TagNothingType)
      case UndefType   => mixTag(TagUndefType)
      case BooleanType => mixTag(TagBooleanType)
      case IntType     => mixTag(TagIntType)
      case LongType    => mixTag(TagLongType)
      case FloatType   => mixTag(TagFloatType)
      case DoubleType  => mixTag(TagDoubleType)
      case StringType  => mixTag(TagStringType)
      case NullType    => mixTag(TagNullType)
      case NoType      => mixTag(TagNoType)

      case tpe: ClassType =>
        mixTag(TagClassType)
        mixString(tpe.className)

      case tpe: ArrayType =>
        mixTag(TagArrayType)
        mixString(tpe.baseClassName)
        mixInt(tpe.dimensions)

      case RecordType(fields) =>
        mixTag(TagRecordType)
        for (RecordType.Field(name, originalName, tpe, mutable) <- fields) {
          mixString(name)
          originalName.foreach(mixString)
          mixType(tpe)
          mixBoolean(mutable)
        }
    }

    // Idents contribute their position, name and (when present) original name.
    def mixIdent(ident: Ident): Unit = {
      mixPos(ident.pos)
      mixString(ident.name)
      ident.originalName.foreach(mixString)
    }

    def mixOptIdent(optIdent: Option[Ident]): Unit =
      optIdent.foreach(mixIdent)

    def mixPropertyName(name: PropertyName): Unit = name match {
      case name: Ident         => mixIdent(name)
      case name: StringLiteral => mixTree(name)
    }

    // Positions go into the separate position digest, never the tree digest.
    def mixPos(pos: Position): Unit = {
      posStream.writeUTF(pos.source.toString)
      posStream.writeInt(pos.line)
      posStream.writeInt(pos.column)
    }

    @inline final def mixTag(tag: Int): Unit = mixInt(tag)

    @inline final def mixString(str: String): Unit = treeStream.writeUTF(str)

    @inline final def mixInt(i: Int): Unit = treeStream.writeInt(i)

    @inline final def mixLong(l: Long): Unit = treeStream.writeLong(l)

    @inline final def mixBoolean(b: Boolean): Unit = treeStream.writeBoolean(b)

    @inline final def mixFloat(f: Float): Unit = treeStream.writeFloat(f)

    @inline final def mixDouble(d: Double): Unit = treeStream.writeDouble(d)
  }
}
CapeSepias/scala-js
ir/src/main/scala/org/scalajs/core/ir/Hashers.scala
Scala
bsd-3-clause
13,742
package com.alexitc.coinalerts.models

import play.api.libs.json._

/** Enumeration of supported exchanges; unrecognized database values are
  * preserved via the UNKNOWN escape hatch. */
sealed abstract class Exchange(val string: String)

object Exchange {

  case object BINANCE extends Exchange("BINANCE")
  case object BITSO extends Exchange("BITSO")
  case object BITTREX extends Exchange("BITTREX")
  case object COINMARKETCAP extends Exchange("COINMARKETCAP")
  case object HITBTC extends Exchange("HITBTC")
  case object KUCOIN extends Exchange("KUCOIN")
  case class UNKNOWN(override val string: String) extends Exchange(string)

  // Maps every known exchange name to its case object; undefined elsewhere.
  private val fromStringPF: PartialFunction[String, Exchange] = {
    case BINANCE.string => BINANCE
    case BITSO.string => BITSO
    case BITTREX.string => BITTREX
    case COINMARKETCAP.string => COINMARKETCAP
    case HITBTC.string => HITBTC
    case KUCOIN.string => KUCOIN
  }

  /** Parses a known exchange name; None for anything unrecognized. */
  def fromString(string: String): Option[Exchange] =
    fromStringPF.lift(string)

  /** Parses a stored exchange name, wrapping unrecognized values in UNKNOWN
    * so old rows never fail to load. */
  def fromDatabaseString(string: String): Exchange =
    fromStringPF.applyOrElse(string, UNKNOWN.apply)

  // JSON reads accept only known names; unknown names yield a validation error.
  implicit val reads: Reads[Exchange] =
    JsPath.read[String].collect(JsonValidationError("error.exchange.unknown"))(fromStringPF)

  // JSON writes serialize an exchange as its canonical name string.
  implicit val writes: Writes[Exchange] = Writes[Exchange] { exchange =>
    JsString(exchange.string)
  }
}
AlexITC/crypto-coin-alerts
alerts-server/app/com/alexitc/coinalerts/models/Exchange.scala
Scala
gpl-3.0
1,375
package blended.updater

import blended.util.config.Implicits._
import com.typesafe.config.Config

object UpdaterConfig {

  /** Fallback configuration: service-info publishing disabled (both values 0). */
  val default: UpdaterConfig = UpdaterConfig(
    serviceInfoIntervalMSec = 0,
    serviceInfoLifetimeMSec = 0
  )

  /** Reads an [[UpdaterConfig]] from the given Typesafe config, falling back
    * to [[default]] for each missing key. */
  def fromConfig(cfg: Config): UpdaterConfig = UpdaterConfig(
    serviceInfoIntervalMSec = cfg.getLong("serviceInfoIntervalMSec", default.serviceInfoIntervalMSec),
    serviceInfoLifetimeMSec = cfg.getLong("serviceInfoLifetimeMSec", default.serviceInfoLifetimeMSec)
  )
}

/**
 * Configuration for [Updater] actor.
 *
 * @param serviceInfoIntervalMSec Interval in milliseconds to publish a ServiceInfo message to the Akka event stream.
 *                                A value of zero (0) or below indicates that no such information should be published.
 * @param serviceInfoLifetimeMSec The lifetime a serviceInfo message should be valid.
 */
case class UpdaterConfig(
  serviceInfoIntervalMSec: Long,
  serviceInfoLifetimeMSec: Long
)
woq-blended/blended
blended.updater/src/main/scala/blended/updater/UpdaterConfig.scala
Scala
apache-2.0
994
package org.dele.text.maen.matchers

/**
  * Created by jiaji on 2016-02-25.
  */
/** Strategy interface for computing a distance between two atom index ranges.
  * NOTE(review): the semantics of TAtomDistance are defined elsewhere in the
  * project — confirm whether larger values mean "farther apart".
  */
trait TAtomDistanceCalc {
  // Computes the distance between the two atom ranges.
  def calc(atomRange1: Range, atomRange2: Range): TAtomDistance
}
new2scala/text-util
maen/src/main/scala/org/dele/text/maen/matchers/TAtomDistanceCalc.scala
Scala
apache-2.0
171
/*
 * Copyright 2022 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package iht.views.registration.kickout

import iht.views.ViewTestHelper
import iht.views.html.registration.kickout.kickout_template_simple

/** View tests for the simple kick-out template: title, body content, the
  * exit button and the "change your answer" return link.
  */
class KickoutTemplateSimpleViewTest extends ViewTestHelper {

  // Target of the "change your answer" link rendered by the template.
  val returnLinkUrl = iht.controllers.registration.applicant.routes.ExecutorOfEstateController.onPageLoad
  // Expected body text, resolved from the message bundle.
  val Contents = messagesApi("page.iht.registration.notAnExecutor.kickout.p1")

  lazy val kickoutTemplateSimpleView: kickout_template_simple = app.injector.instanceOf[kickout_template_simple]

  // Renders the template with a fake request and returns it as a parsed document.
  def kickOutTemplateView() = {
    implicit val request = createFakeRequest()
    val view = kickoutTemplateSimpleView(returnLinkUrl, "Change your answer")(Contents).toString()
    asDocument(view)
  }

  "KickoutTemplateView View" must {

    "have the correct title" in {
      val view = kickOutTemplateView()
      titleShouldBeCorrect(view.toString, messagesApi("iht.notPossibleToUseService"))
      browserTitleShouldBeCorrect(view.toString, messagesApi("iht.notPossibleToUseService"))
      messagesShouldBePresent(view.toString, Contents)
    }

    "have the contents" in {
      val view = kickOutTemplateView
      view.toString must include(Contents)
    }

    "have details are correct button " in {
      val view = kickOutTemplateView
      // The exit button carries the id "finish" and the gov.uk exit label.
      val detailsAreCorrectButton = view.getElementById("finish")
      detailsAreCorrectButton.attr("value") mustBe messagesApi("iht.exitToGovUK")
    }

    "have return link with correct text" in {
      val view = kickOutTemplateView
      // The return link carries the id "return-button" and points at returnLinkUrl.
      val changeYourAnswerLink = view.getElementById("return-button")
      changeYourAnswerLink.attr("href") mustBe returnLinkUrl.url
      changeYourAnswerLink.text mustBe "Change your answer"
    }
  }
}
hmrc/iht-frontend
test/iht/views/registration/kickout/KickoutTemplateSimpleViewTest.scala
Scala
apache-2.0
2,308
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql

import java.net.URI

import org.apache.spark.SparkContext
import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
import org.apache.spark.sql.carbondata.execution.datasources.CarbonFileIndexReplaceRule
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, ExternalCatalogWithListener, SessionCatalog}
import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeReference, AttributeSet, Expression, ExpressionSet, ExprId, NamedExpression, ScalaUDF, SubqueryExpression}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.optimizer.Optimizer
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.command.ExplainCommand
import org.apache.spark.sql.hive.{CarbonMVRules, HiveExternalCatalog}
import org.apache.spark.sql.optimizer.{CarbonIUDRule, CarbonUDFTransformRule}
import org.apache.spark.sql.secondaryindex.optimizer.CarbonSITransformationRule
import org.apache.spark.sql.types.{DataType, Metadata}

/** Version-specific adapter that shields Carbon code from Spark 2.4 Catalyst
  * API differences (attribute/alias construction, codegen, catalog access).
  */
object CarbonToSparkAdapter {

  /** Registers a listener that clears the default SparkSession when the
    * application ends. */
  def addSparkListener(sparkContext: SparkContext) = {
    sparkContext.addSparkListener(new SparkListener {
      override def onApplicationEnd(applicationEnd: SparkListenerApplicationEnd): Unit = {
        SparkSession.setDefaultSession(null)
      }
    })
  }

  /** Builds an AttributeReference from an Option-based qualifier.
    * NOTE(review): `attrRef` is accepted but unused — presumably kept for
    * cross-version signature compatibility; confirm against other adapters. */
  def createAttributeReference(
      name: String,
      dataType: DataType,
      nullable: Boolean,
      metadata: Metadata,
      exprId: ExprId,
      qualifier: Option[String],
      attrRef : NamedExpression = null): AttributeReference = {
    val qf = if (qualifier.nonEmpty) Seq(qualifier.get) else Seq.empty
    AttributeReference(
      name,
      dataType,
      nullable,
      metadata)(exprId, qf)
  }

  /** Builds an AttributeReference from a Seq-based qualifier. */
  def createAttributeReference(
      name: String,
      dataType: DataType,
      nullable: Boolean,
      metadata: Metadata,
      exprId: ExprId,
      qualifier: Seq[String]): AttributeReference = {
    AttributeReference(
      name,
      dataType,
      nullable,
      metadata)(exprId, qualifier)
  }

  /** Rebuilds an attribute under a new name, re-qualified by the lines of
    * `newSubsume` (split on newline, trimmed). */
  def createAttributeReference(attr: AttributeReference,
      attrName: String,
      newSubsume: String): AttributeReference = {
    AttributeReference(attrName, attr.dataType)(
      exprId = attr.exprId,
      qualifier = newSubsume.split("\\n").map(_.trim))
  }

  /** Rebuilds a ScalaUDF with the given attribute as its single input. */
  def createScalaUDF(s: ScalaUDF, reference: AttributeReference) = {
    ScalaUDF(s.function, s.dataType, Seq(reference), s.inputsNullSafe, s.inputTypes)
  }

  /** Wraps raw codegen strings into Spark 2.4's typed ExprCode. */
  def createExprCode(code: String, isNull: String, value: String, dataType: DataType) = {
    ExprCode(
      code"$code",
      JavaCode.isNullVariable(isNull),
      JavaCode.variable(value, dataType))
  }

  /** Creates an Alias; `namedExpr` is unused in the 2.4 variant. */
  def createAliasRef(
      child: Expression,
      name: String,
      exprId: ExprId = NamedExpression.newExprId,
      qualifier: Seq[String] = Seq.empty,
      explicitMetadata: Option[Metadata] = None,
      namedExpr: Option[NamedExpression] = None) : Alias = {
    Alias(child, name)(exprId, qualifier, explicitMetadata)
  }

  /** Creates an Alias from an Option-based qualifier. */
  def createAliasRef(
      child: Expression,
      name: String,
      exprId: ExprId,
      qualifier: Option[String]) : Alias = {
    Alias(child, name)(exprId,
      if (qualifier.isEmpty) Seq.empty else Seq(qualifier.get),
      None)
  }

  // Create the aliases using two plan outputs mappings.
  def createAliases(mappings: Seq[(NamedExpression, NamedExpression)]): Seq[NamedExpression] = {
    mappings.map { case (o1, o2) =>
      o2 match {
        // Same name but different exprId: re-anchor the alias on o1's id.
        case al: Alias if o1.name == o2.name && o1.exprId != o2.exprId =>
          Alias(al.child, o1.name)(exprId = o1.exprId)
        case other =>
          if (o1.name != o2.name || o1.exprId != o2.exprId) {
            Alias(o2, o1.name)(exprId = o1.exprId)
          } else {
            o2
          }
      }
    }
  }

  /** Builds an ExplainCommand over an empty (one-row) relation. */
  def getExplainCommandObj() : ExplainCommand = {
    ExplainCommand(OneRowRelation())
  }

  /**
   * As a part of SPARK-24085 Hive tables supports scala subquery for
   * parition tables, so Carbon also needs to supports
   * @param partitionSet
   * @param filterPredicates
   * @return
   */
  def getPartitionKeyFilter(
      partitionSet: AttributeSet,
      filterPredicates: Seq[Expression]): ExpressionSet = {
    ExpressionSet(
      ExpressionSet(filterPredicates)
        .filterNot(SubqueryExpression.hasSubquery)
        .filter(_.references.subsetOf(partitionSet)))
  }

  // As per SPARK-22520 OptimizeCodegen is removed in 2.3.1
  def getOptimizeCodegenRule(): Seq[Rule[LogicalPlan]] = {
    Seq.empty
  }

  /** Returns a copy of the storage format with the given properties and a
    * location URI derived from `tablePath`. */
  def getUpdatedStorageFormat(storageFormat: CatalogStorageFormat,
      map: Map[String, String],
      tablePath: String): CatalogStorageFormat = {
    storageFormat.copy(properties = map, locationUri = Some(new URI(tablePath)))
  }

  /** Unwraps the session's external catalog down to the HiveExternalCatalog. */
  def getHiveExternalCatalog(sparkSession: SparkSession) =
    sparkSession.sessionState.catalog.externalCatalog
      .asInstanceOf[ExternalCatalogWithListener]
      .unwrapped
      .asInstanceOf[HiveExternalCatalog]
}

/** Catalyst optimizer that appends Carbon's IUD and secondary-index rule
  * batches after the batches converted from the wrapped optimizer.
  */
class CarbonOptimizer(
    session: SparkSession,
    catalog: SessionCatalog,
    optimizer: Optimizer) extends Optimizer(catalog) {

  // Carbon insert/update/delete rules, run to fixed point.
  private lazy val iudRule = Batch("IUD Optimizers", fixedPoint,
    Seq(new CarbonIUDRule(), new CarbonUDFTransformRule(), new CarbonFileIndexReplaceRule()): _*)

  // Secondary-index transformation, run once.
  private lazy val secondaryIndexRule = Batch("SI Optimizers", Once,
    Seq(new CarbonSITransformationRule(session)): _*)

  override def defaultBatches: Seq[Batch] = {
    convertedBatch() :+ iudRule :+ secondaryIndexRule
  }

  /** Re-wraps the delegate optimizer's batches so their strategies use this
    * optimizer's Once/fixedPoint instances. */
  def convertedBatch(): Seq[Batch] = {
    optimizer.batches.map { batch =>
      Batch(
        batch.name,
        batch.strategy match {
          case optimizer.Once => Once
          case _: optimizer.FixedPoint => fixedPoint
        },
        batch.rules: _*
      )
    }
  }
}
jackylk/incubator-carbondata
integration/spark/src/main/spark2.4/org/apache/spark/sql/CarbonToSparkAdapter.scala
Scala
apache-2.0
6,798
package scalaandroid

import android.app.{ Service => AService }
import android.content.{ Intent => AIntent }
import android.os.IBinder

import collection.mutable.ListBuffer

/**
 * Mixin for Android Services that lets subclasses register partial-function
 * bind handlers instead of overriding onBind directly.
 */
trait Service { self: AService =>

  // A handler that may produce a binder for a subset of intents.
  type BindCallback = PartialFunction[AIntent, IBinder]

  // Registered handlers, consulted in registration order.
  lazy val bindCallbacks = new ListBuffer[BindCallback]

  /** Registers a bind handler; earlier registrations take precedence. */
  def bind(f: BindCallback) = bindCallbacks += f

  /** Fallback handler: no binder for the given intent (Android contract
    * allows a null return from onBind). */
  def notBound: PartialFunction[AIntent, IBinder] = { case _ => null}

  /**
   * Dispatches to the first registered callback defined at `intent`, falling
   * back to [[notBound]].
   * FIX: the original used `reduceLeft`, which throws
   * UnsupportedOperationException when no callback was ever registered;
   * `foldRight` keeps the same left-to-right precedence but tolerates an
   * empty callback list.
   */
  def onBind(intent: AIntent): IBinder =
    bindCallbacks.foldRight(notBound)(_ orElse _)(intent)
}
sdb/cloudr
sdroid/src/Service.scala
Scala
gpl-3.0
558
package net.sansa_stack.owl.spark.writers

import java.io.{BufferedWriter, ByteArrayOutputStream, OutputStreamWriter, PrintWriter}
import java.util.Collections

import net.sansa_stack.owl.spark.rdd.OWLAxiomsRDD
import org.semanticweb.owlapi.apibinding.OWLManager
import org.semanticweb.owlapi.formats.RDFXMLDocumentFormat
import org.semanticweb.owlapi.model._
import org.semanticweb.owlapi.rdf.rdfxml.renderer.RDFXMLRenderer

import scala.collection.JavaConverters._

/** RDF/XML renderer stripped of document framing: all header, banner and
  * comment output is suppressed so that each axiom renders as a bare
  * fragment suitable for line-oriented Spark output.
  */
protected class SANSARDFXMLRenderer(
    ont: OWLOntology,
    writer: PrintWriter,
    docFormat: OWLDocumentFormat) extends RDFXMLRenderer(ont, writer, docFormat) {

  // Suppress document begin/end output; only clear pending state.
  override def beginDocument(): Unit = pending.clear()

  override def endDocument(): Unit = pending.clear()

  // All banner/comment hooks are disabled (evaluate to Unit via None).
  override def writeBanner(name: String): Unit = None

  override def writeAnnotationPropertyComment(prop: OWLAnnotationProperty): Unit = None

  override def writeClassComment(cls: OWLClass): Unit = None

  override def writeDataPropertyComment(prop: OWLDataProperty): Unit = None

  override def writeIndividualComments(ind: OWLNamedIndividual): Unit = None

  override def writeDatatypeComment(datatype: OWLDatatype): Unit = None

  override def writeObjectPropertyComment(prop: OWLObjectProperty): Unit = None

  override def renderOntologyHeader(): Unit = None
}

/** Saves an RDD of OWL axioms as RDF/XML text files, one rendered partition
  * per output file.
  */
object RDFXMLWriter extends OWLWriterBase {
  override def save(filePath: String, owlAxioms: OWLAxiomsRDD): Unit =
    owlAxioms.mapPartitions(partition =>
      if (partition.hasNext) {
        // All axioms of the partition are rendered into one in-memory buffer.
        val os = new ByteArrayOutputStream()
        val osWriter = new OutputStreamWriter(os)
        val buffPrintWriter = new PrintWriter(new BufferedWriter(osWriter))

        partition.foreach(axiom => {
          // A fresh manager/ontology per axiom keeps renderings independent.
          val man = OWLManager.createOWLOntologyManager()

          // Writer configuration: no banners, no indenting, no labels,
          // no namespace entities.
          val config = new OWLOntologyWriterConfiguration
          config.withBannersEnabled(false)
          config.withIndenting(false)
          config.withLabelsAsBanner(false)
          config.withUseNamespaceEntities(false)

          man.setOntologyWriterConfiguration(config)
          assert(man.getOntologyWriterConfiguration == config)

          val ont = man.createOntology(Seq(axiom).asJava)
          assert(ont.getOWLOntologyManager.getOntologyWriterConfiguration == config)

          val renderer = new SANSARDFXMLRenderer(
            ont, buffPrintWriter, new RDFXMLDocumentFormat)

          renderer.render()
        })
        buffPrintWriter.flush()

        // Emit the whole partition's rendering as a single string element.
        Collections.singleton(os.toString("UTF-8").trim).iterator().asScala
      } else {
        Iterator()
      }).saveAsTextFile(filePath)
}
SANSA-Stack/SANSA-RDF
sansa-owl/sansa-owl-spark/src/main/scala/net/sansa_stack/owl/spark/writers/RDFXMLWriter.scala
Scala
apache-2.0
2,654
package calc

import scala.collection.immutable.IndexedSeq

/** Searches the permutations of the digits 0-9 for an arrangement in which
  * three fixed index-triples all have the same product, and prints the first
  * match (or None).
  */
object Calc2 extends App {

  val nums: Iterator[IndexedSeq[Int]] = (0 to 9).permutations

  /** True when the three triple-products coincide. */
  def rule(set: IndexedSeq[Int]): Boolean = {
    val products = Seq(
      set(0) * set(1) * set(2),
      set(1) * set(6) * set(4),
      set(3) * set(4) * set(5))
    // All three products equal <=> exactly one distinct value.
    products.distinct.size == 1
  }

  println(nums.find(rule))
}
ebowman/calc
src/main/scala/calc/Calc2.scala
Scala
unlicense
364
/*
 JPA Scala Support for Play Framework 2
 Copyright (C) 2014 Radim Kolar

 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU Affero General Public License as
 published by the Free Software Foundation, either version 3 of the
 License, or (at your option) any later version.

 This program is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 GNU Affero General Public License for more details.

 You should have received a copy of the GNU Affero General Public License
 along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/
package com.filez.play2.scalajpa

import _root_.org.scala_libs.jpa.{ScalaEMFactory}
import javax.persistence.{EntityManager,EntityManagerFactory}

/**
 * Scala-friendly wrapper around a JPA EntityManagerFactory, adapting it to
 * the ScalaEMFactory lifecycle hooks.
 */
class ScalaEntityManagerFactory(val name: String, val factory: EntityManagerFactory)
  extends ScalaEMFactory {

  /** Opens a fresh EntityManager from the wrapped factory. */
  override def openEM() = factory.createEntityManager()

  /** Closes the given EntityManager. */
  override def closeEM(em: EntityManager) = em.close()

  /** Persistence-unit name supplied at construction. */
  override def getUnitName = name
}
hsn10/playjpa
src/main/scala/scalajpa/ScalaEntityManagerFactory.scala
Scala
agpl-3.0
1,248
/*
 * Specs2 integration test: boots a Netty server whose pipeline stacks several
 * cycle.MultiPartDecoder, async.MultiPartDecoder and cycle.Planify plans, then
 * exercises multipart file uploads through the disk / streamed / memory bindings,
 * plan pass-through (MultipartPlan.Pass falling to a later plan), and 404 behavior
 * when no downstream plan matches — for both the cycle and async plan flavours.
 * NOTE(review): this row stores the file with whitespace-collapsed lines; the code
 * below is intentionally left byte-identical, with only this header added.
 */
package unfiltered.netty.request import org.specs2.mutable.Specification import unfiltered.netty.{async, cycle} import unfiltered.request.{&, POST, Path => UFPath} import unfiltered.response.{Html, ResponseString} import unfiltered.specs2.netty.Served import java.io.{File => JFile} import okhttp3.MediaType object MixedPlanSpec extends Specification with Served { val html = <html> <head><title>unfiltered file netty uploads test</title></head> <body> <p>hello</p> </body> </html> def setup = { _.plan(cycle.MultiPartDecoder { case POST(UFPath("/cycle/passnotfound")) => cycle.MultipartPlan.Pass case POST(UFPath("/cycle/pass")) => cycle.MultipartPlan.Pass case POST(UFPath("/cycle/disk") & MultiPart(req)) => { case Decode(binding) => val disk = MultiPartParams.Disk(binding) (disk.files("f"), disk.params("p")) match { case (Seq(f, _*), p) => ResponseString( "cycle disk read file f named %s with content type %s and param p %s" format( f.name, f.contentType, p)) case _ => ResponseString("what's f?") } } case POST(UFPath("/cycle/stream") & MultiPart(req)) => { case Decode(binding) => val stream = MultiPartParams.Streamed(binding) (stream.files("f"), stream.params("p")) match { case (Seq(f, _*), p) => ResponseString( "cycle stream read file f is named %s with content type %s and param p %s" format( f.name, f.contentType, p)) case _ => ResponseString("what's f?") } } case POST(UFPath("/cycle/mem") & MultiPart(req)) => { case Decode(binding) => val mem = MultiPartParams.Memory(binding) (mem.files("f"), mem.params("p")) match { case (Seq(f, _*), p) => ResponseString( "cycle memory read file f is named %s with content type %s and param p %s" format( f.name, f.contentType, p)) case _ => ResponseString("what's f?") } } }).plan(cycle.MultiPartDecoder { case POST(UFPath("/cycle/pass")) & MultiPart(req) => { case Decode(binding) => val disk = MultiPartParams.Disk(binding) (disk.files("f"), disk.params("p")) match { case (Seq(f, _*), p) => ResponseString( "cycle disk read file f named 
%s with content type %s and param p %s" format( f.name, f.contentType, p)) case _ => ResponseString("what's f?") } } }).plan(async.MultiPartDecoder { case POST(UFPath("/async/passnotfound")) => async.MultipartPlan.Pass case POST(UFPath("/async/pass")) => async.MultipartPlan.Pass case POST(UFPath("/async/disk") & MultiPart(req)) => { case Decode(binding) => val disk = MultiPartParams.Disk(binding) (disk.files("f"), disk.params("p")) match { case (Seq(f, _*), p) => binding.respond(ResponseString( "async disk read file f named %s with content type %s and param p" format( f.name, f.contentType, p))) case _ => binding.respond(ResponseString("what's f?")) } } case POST(UFPath("/async/stream") & MultiPart(req)) => { case Decode(binding) => val stream = MultiPartParams.Streamed(binding) (stream.files("f"), stream.params("p")) match { case (Seq(f, _*), p) => binding.respond(ResponseString( "async stream read file f is named %s with content type %s and param p %s" format( f.name, f.contentType, p))) case _ => binding.respond(ResponseString("what's f?")) } } case POST(UFPath("/async/mem") & MultiPart(req)) => { case Decode(binding) => val mem = MultiPartParams.Memory(binding) (mem.files("f"), mem.params("p")) match { case (Seq(f, _*), p) => binding.respond(ResponseString( "async memory read file f is named %s with content type %s and param p %s" format( f.name, f.contentType, p))) case _ => binding.respond(ResponseString("what's f?")) } } }).plan(async.MultiPartDecoder{ case POST(UFPath("/async/pass") & MultiPart(req)) => { case Decode(binding) => val disk = MultiPartParams.Disk(binding) (disk.files("f"), disk.params("p")) match { case (Seq(f, _*), p) => binding.respond(ResponseString( "async disk read file f named %s with content type %s and param p" format( f.name, f.contentType, p))) case _ => binding.respond(ResponseString("what's f?")) } } }).plan(cycle.Planify{ case POST(UFPath("/end")) & MultiPart(req) => ResponseString("") case UFPath("/") => Html(html) }) } "Netty 
MultiPartDecoder cycle and async plans, when used in the same pipeline" should { step { val out = new JFile("netty-upload-test-out.txt") if (out.exists) out.delete } "pass GET request upstream" in { http(host).as_string must_== html.toString } // General "respond with a 404 when passing non-parameterised content type value" in { val code = httpx(req(host / "ignored").POST("f", MediaType.parse("application/x-www-form-urlencoded"))).code code must_== 404 } // Cycle "respond with 404 when posting to a non-existent url" in { val file = new JFile(getClass.getResource("/netty-upload-big-text-test.txt").toURI) file.exists must_==true val code = httpx(req(host / "cycle" / "notexists") <<* ("f", file, "text/plain")).code code must_== 404 } "handle cycle file uploads disk" in { val file = new JFile(getClass.getResource("/netty-upload-big-text-test.txt").toURI) file.exists must_==true http(req(host / "cycle" / "disk") <<* ("f", file, "text/plain")).as_string must_== "cycle disk read file f named netty-upload-big-text-test.txt with content type text/plain and param p List()" } "handle cycle file uploads streamed" in { val file = new JFile(getClass.getResource("/netty-upload-big-text-test.txt").toURI) file.exists must_==true http(req(host / "cycle" / "stream") <<* ("f", file, "text/plain")).as_string must_== "cycle stream read file f is named netty-upload-big-text-test.txt with content type text/plain and param p List()" } "handle cycle file uploads memory" in { val file = new JFile(getClass.getResource("/netty-upload-big-text-test.txt").toURI) file.exists must_==true http(req(host / "cycle" / "mem") <<* ("f", file, "text/plain")).as_string must_== "cycle memory read file f is named netty-upload-big-text-test.txt with content type text/plain and param p List()" } "handle passed cycle file uploads to disk" in { val file = new JFile(getClass.getResource("/netty-upload-big-text-test.txt").toURI) file.exists must_==true http(req(host / "cycle" / "pass") <<* ("f", file, 
"text/plain")).as_string must_== "cycle disk read file f named netty-upload-big-text-test.txt with content type text/plain and param p List()" } "respond with a 404 when passing in a cycle plan with no matching intent" in { val file = new JFile(getClass.getResource("/netty-upload-big-text-test.txt").toURI) file.exists must_==true val code = httpx(req(host / "cycle" / "passnotfound") <<* ("f", file, "text/plain")).code code must_== 404 } // Async "handle async file uploads to disk" in { val file = new JFile(getClass.getResource("/netty-upload-big-text-test.txt").toURI) file.exists must_==true http(req(host / "async" / "disk") <<* ("f", file, "text/plain")).as_string must_== "async disk read file f named netty-upload-big-text-test.txt with content type text/plain and param p" } "handle async file uploads streamed" in { val file = new JFile(getClass.getResource("/netty-upload-big-text-test.txt").toURI) file.exists must_==true http(req(host / "async" / "stream") <<* ("f", file, "text/plain")).as_string must_== "async stream read file f is named netty-upload-big-text-test.txt with content type text/plain and param p List()" } "handle async file uploads memory" in { val file = new JFile(getClass.getResource("/netty-upload-big-text-test.txt").toURI) file.exists must_==true http(req(host / "async" / "mem") <<* ("f", file, "text/plain")).as_string must_== "async memory read file f is named netty-upload-big-text-test.txt with content type text/plain and param p List()" } "handle passed async file uploads to disk" in { val file = new JFile(getClass.getResource("/netty-upload-big-text-test.txt").toURI) file.exists must_==true http(req(host / "async" / "pass") <<* ("f", file, "text/plain")).as_string must_== "async disk read file f named netty-upload-big-text-test.txt with content type text/plain and param p" } "respond with a 404 when passing in an async plan with no matching intent" in { val file = new JFile(getClass.getResource("/netty-upload-big-text-test.txt").toURI) 
file.exists must_==true val code = httpx(req(host / "async" / "passnotfound") <<* ("f", file, "text/plain")).code code must_== 404 } } }
hamnis/unfiltered
netty-uploads/src/test/scala/MixedPlanSpec.scala
Scala
mit
9,574
/*
 * Expression-tree assembler: models expressions (Expr and its subclasses Name,
 * Call, Save, Value) for a small scripting language and lowers them to JVM
 * bytecode with ObjectWeb ASM, emitting INVOKEVIRTUAL/INVOKESTATIC calls against
 * the runtime Variable and Storage classes (operator methods use Scala's encoded
 * names, e.g. "+" -> "$plus"). assembleClass generates a class holding a single
 * static `eval(ICommandSender): Variable` method and loads it by reflectively
 * invoking the protected ClassLoader.defineClass.
 * NOTE(review): the visit* call sequences are strictly order-sensitive and this
 * row stores the file with whitespace-collapsed lines, so the code below is left
 * byte-identical with only this header added.
 */
package org.oxygen.morecommands.compiler import org.objectweb.asm.{Label, ClassWriter, MethodVisitor, Opcodes} import scala.collection.immutable import scala.language.postfixOps object PAssembler { final val Int : Int = 0 final val Float : Int = 1 final val String : Int = 2 final val Boolean : Int = 3 final val OP_SET : Int = 0 final val OP_ADD : Int = 1 final val OP_SUB : Int = 2 final val OP_MUL : Int = 3 final val OP_DIV : Int = 4 final val OP_MOD : Int = 5 final val OP_AND : Int = 6 final val OP_OR : Int = 7 final val OP_XOR : Int = 8 final val OP_SHL : Int = 9 final val OP_SHR : Int = 10 final val OP_USHR : Int = 11 final val Sender : String = "net/minecraft/command/ICommandSender" final val Storage : String = "org/oxygen/morecommands/runtime/Storage" final val Variable : String = "org/oxygen/morecommands/runtime/Variable" final val UNARIES = immutable.HashMap[String, String]( "$~" -> "unary_$tilde", "$!" -> "unary_$bang", "$-" -> "unary_$minus") final val BINARIES = immutable.HashMap[String, String]( "+" -> "$plus", "-" -> "$minus", "*" -> "$times", "/" -> "$div", "%" -> "$percent", "&" -> "$amp", "|" -> "$bar", "^" -> "$up", "**" -> "$times$times", "<<" -> "$less$less", ">>" -> "$greater$greater", ">>>" -> "$greater$greater$greater", ">" -> "$greater", "<" -> "$less", "==" -> "$eq$eq", "!=" -> "$bang$eq", ">=" -> "$greater$eq", "<=" -> "$less$eq") final val OPERATORS = immutable.HashMap[String, Int]( "=" -> OP_SET, "+=" -> OP_ADD, "-=" -> OP_SUB, "*=" -> OP_MUL, "/=" -> OP_DIV, "%=" -> OP_MOD, "&=" -> OP_AND, "|=" -> OP_OR, "^=" -> OP_XOR, "<<=" -> OP_SHL, ">>=" -> OP_SHR, ">>>=" -> OP_USHR ) class Expr(val left: Expr, val op: String, val right: Expr) { def +(v: Expr): Expr = new Expr(this, "+", v) def -(v: Expr): Expr = new Expr(this, "-", v) def *(v: Expr): Expr = new Expr(this, "*", v) def /(v: Expr): Expr = new Expr(this, "/", v) def %(v: Expr): Expr = new Expr(this, "%", v) def &(v: Expr): Expr = new Expr(this, "&", v) def |(v: Expr): Expr = new 
Expr(this, "|", v) def ^(v: Expr): Expr = new Expr(this, "^", v) def **(v: Expr): Expr = new Expr(this, "**", v) def &&(v: Expr): Expr = new Expr(this, "&&", v) def ||(v: Expr): Expr = new Expr(this, "||", v) def <<(v: Expr): Expr = new Expr(this, "<<", v) def >>(v: Expr): Expr = new Expr(this, ">>", v) def >>>(v: Expr): Expr = new Expr(this, ">>>", v) def >(v: Expr): Expr = new Expr(this, ">", v) def <(v: Expr): Expr = new Expr(this, "<", v) def ==(v: Expr): Expr = new Expr(this, "==", v) def !=(v: Expr): Expr = new Expr(this, "!=", v) def >=(v: Expr): Expr = new Expr(this, ">=", v) def <=(v: Expr): Expr = new Expr(this, "<=", v) def unary_~ : Expr = new Expr(null, "$~", this) def unary_! : Expr = new Expr(null, "$!", this) def unary_- : Expr = new Expr(null, "$-", this) def assemble(method: MethodVisitor): Unit = op match { case "&&" | "||" => val label = new Label left.assemble(method) method.visitInsn(Opcodes.DUP) method.visitMethodInsn(Opcodes.INVOKEVIRTUAL, Variable, "isTrue", "()Z", false) method.visitJumpInsn(if (op == "&&") Opcodes.IFEQ else Opcodes.IFNE, label) method.visitInsn(Opcodes.POP) right.assemble(method) method.visitLabel(label) case _ => left.eq(null) match { case true => right.assemble(method) applyUnary(method, op) case false => left.assemble(method) right.assemble(method) applyBinary(method, op) } } override def toString: String = s"{Expr $left $op $right}" } class Name(val name: String) extends Expr(null, "$=>", null) { def <++ : Expr = new Save(name, "+=", new Value(1)) def <-- : Expr = new Save(name, "-=", new Value(1)) def >++ : Expr = new Expr(new Save(name, "+=", new Value(1)), "-", new Value(1)) def >-- : Expr = new Expr(new Save(name, "-=", new Value(1)), "+", new Value(1)) override def toString: String = s"{Name $name}" override def assemble(method: MethodVisitor): Unit = { method.visitLdcInsn(name) method.visitMethodInsn(Opcodes.INVOKESTATIC, PAssembler.Storage, "resolveName", s"(Ljava/lang/String;)L$Variable;", false) } } class 
Call(val name: String, val args: List[Expr]) extends Expr(null, "$()", null) { override def toString: String = args.isEmpty match { case true => s"{Call $name}" case false => s"{Call $name [${args.map(_.toString).reduceLeft(_ + ", " + _)}}]}" } override def assemble(method: MethodVisitor): Unit = { method.visitLdcInsn(name) method.visitVarInsn(Opcodes.ALOAD, 0) method.visitLdcInsn(args.length) method.visitTypeInsn(Opcodes.ANEWARRAY, PAssembler.Variable) for ((expr, i) <- args zipWithIndex) { method.visitInsn(Opcodes.DUP) method.visitLdcInsn(i) args(i).assemble(method) method.visitInsn(Opcodes.AASTORE) } method.visitMethodInsn(Opcodes.INVOKESTATIC, Storage, "invokeFunction", s"(Ljava/lang/String;L${PAssembler.Sender};[L${PAssembler.Variable};)L${PAssembler.Variable};", false) } } class Save(val name: String, val action: String, val value: Expr) extends Expr(null, "$<=", value) { override def toString: String = s"{$action $name $value}" override def assemble(method: MethodVisitor): Unit = { method.visitLdcInsn(name) method.visitLdcInsn(OPERATORS(action)) value.assemble(method) method.visitMethodInsn(Opcodes.INVOKESTATIC, PAssembler.Storage, "replaceName", s"(Ljava/lang/String;IL$Variable;)L$Variable;", false) } } class Value extends Expr(null, "$=", null) { var vtype : Int = 0 var intValue : Long = 0 var floatValue : Double = 0.0 var stringValue : String = "" var booleanValue: Boolean = false def this(value: Long) = { this() vtype = Int intValue = value } def this(value: Double) = { this() vtype = Float floatValue = value } def this(value: String) = { this() vtype = String stringValue = value } def this(value: Boolean) = { this() vtype = Boolean booleanValue = value } override def toString: String = vtype match { case Int => s"{=i $intValue}" case Float => s"{=f $floatValue}" case String => s"{=s $stringValue}" case Boolean => s"{=b $booleanValue}" } override def assemble(method: MethodVisitor): Unit = { method.visitTypeInsn(Opcodes.NEW, PAssembler.Variable) 
method.visitInsn(Opcodes.DUP) vtype match { case Int => method.visitLdcInsn(intValue) method.visitMethodInsn(Opcodes.INVOKESPECIAL, PAssembler.Variable, "<init>", "(J)V", false) case Float => method.visitLdcInsn(floatValue) method.visitMethodInsn(Opcodes.INVOKESPECIAL, PAssembler.Variable, "<init>", "(D)V", false) case String => method.visitLdcInsn(stringValue) method.visitMethodInsn(Opcodes.INVOKESPECIAL, PAssembler.Variable, "<init>", "(Ljava/lang/String;)V", false) case Boolean => method.visitLdcInsn(booleanValue) method.visitMethodInsn(Opcodes.INVOKESPECIAL, PAssembler.Variable, "<init>", "(Z)V", false) } } } def applyUnary(method: MethodVisitor, operator: String): Unit = UNARIES.get(operator) match { case None => throw new RuntimeException(s"Invalid unary operator '$operator'") case Some(func) => method.visitMethodInsn(Opcodes.INVOKEVIRTUAL, Variable, func, s"()L$Variable;", false) } def applyBinary(method: MethodVisitor, operator: String): Unit = BINARIES.get(operator) match { case None => throw new RuntimeException(s"Invalid binary operator '$operator'") case Some(func) => method.visitMethodInsn(Opcodes.INVOKEVIRTUAL, Variable, func, s"(L$Variable;)L$Variable;", false) } def assembleClass(name: String, assemble: => (MethodVisitor) => Unit): Class[_] = { val writter = new ClassWriter(ClassWriter.COMPUTE_MAXS | ClassWriter.COMPUTE_FRAMES) writter.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC, name.replace('.', '/'), null, "java/lang/Object", null) val method = writter.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "eval", s"(L$Sender;)L$Variable;", null, null) method.visitCode() assemble(method) method.visitMaxs(0, 0) method.visitEnd() writter.visitEnd() val code = writter.toByteArray val loader = getClass.getClassLoader val injector = classOf[ClassLoader].getDeclaredMethod("defineClass", classOf[String], classOf[Array[Byte]], classOf[Int], classOf[Int]) injector.setAccessible(true) injector.invoke(loader, name, code, 0: Integer, code.length: 
Integer).asInstanceOf[Class[_]] } }
chenzhuoyu/MoreCommands
src/main/scala/org/oxygen/morecommands/compiler/PAssembler.scala
Scala
lgpl-2.1
8,638
package zamblauskas.csv.parser

import zamblauskas.functional._

/**
 * Builds typed readers for a single named CSV column.
 *
 * @param name header name of the column to extract
 */
final case class ColumnBuilder(name: String) {

  /** Reads the column as a required value; absence of the column is a failure. */
  def as[T](implicit r: Reads[T]): ColumnReads[T] = new ColumnReads[T] {
    override def isHeaderValid(names: Seq[String]): Boolean = names.contains(name)

    override def read(line: Seq[Column]): ReadResult[T] =
      line.find(_.name == name) match {
        case Some(column) => r.read(column)
        case None         => ReadFailure(s"Column '$name' does not exist.")
      }
  }

  /** Reads the column as an optional value; a missing column or empty cell yields `None`. */
  def asOpt[T](implicit r: Reads[T]): ColumnReads[Option[T]] = new ColumnReads[Option[T]] {
    override def isHeaderValid(names: Seq[String]): Boolean = true

    override def read(line: Seq[Column]): ReadResult[Option[T]] =
      line.find(c => c.name == name && c.value.nonEmpty) match {
        case Some(column) => r.read(column).map(Some(_))
        case None         => ReadSuccess(None)
      }
  }
}
zamblauskas/scala-csv-parser
src/main/scala/zamblauskas/csv/parser/ColumnBuilder.scala
Scala
mit
845
/*
 * Lightning feature-bit registry: FeatureSupport (mandatory = even bit,
 * optional = mandatory + 1), Feature scopes (init / node-announcement /
 * invoice), and a Features set with compatibility checks — a remote feature
 * set is supported when every remote-mandatory known feature is present
 * locally and every unknown remote bit is odd (optional); an unknown even bit
 * means required-but-unknown and fails the check. Also (de)serializes the set
 * to/from the feature byte vector via scodec BitVector/ByteVector.
 * NOTE(review): bit arithmetic here is order-sensitive and this row stores the
 * file with whitespace-collapsed lines, so the code below is left
 * byte-identical with only this header added.
 */
package fr.acinq.eclair import fr.acinq.eclair.FeatureSupport.{Mandatory, Optional} import scodec.bits.{BitVector, ByteVector} sealed trait FeatureSupport object FeatureSupport { case object Mandatory extends FeatureSupport { override def toString: String = "mandatory" } case object Optional extends FeatureSupport { override def toString: String = "optional" } } sealed trait FeatureScope trait InitFeature extends FeatureScope trait NodeFeature extends FeatureScope trait InvoiceFeature extends FeatureScope trait Feature { me: FeatureScope => def supportBit(support: FeatureSupport): Int = support match { case Mandatory => mandatory case Optional => optional } def mandatory: Int def optional: Int = mandatory + 1 def rfcName: String } case class UnknownFeature(bitIndex: Int) case class Features(activated: Map[Feature, FeatureSupport], unknown: Set[UnknownFeature] = Set.empty) { def hasFeature(feature: Feature, support: Option[FeatureSupport] = None): Boolean = support match { case Some(sup) => activated.get(feature).contains(sup) case None => activated.contains(feature) } def areSupported(remoteFeatures: Features): Boolean = { val knownFeaturesOk = remoteFeatures.activated.forall { case (feature, Mandatory) => hasFeature(feature) case (_, Optional) => true } val unknownFeaturesOk = remoteFeatures.unknown.forall(1 == _.bitIndex % 2) unknownFeaturesOk && knownFeaturesOk } def initFeatures: Features = Features(activated.collect { case (feature: InitFeature, support) => (feature: Feature, support) }, unknown) def nodeAnnouncementFeatures: Features = Features(activated.collect { case (feature: NodeFeature, support) => (feature: Feature, support) }, unknown) def invoiceFeatures: Map[Feature with InvoiceFeature, FeatureSupport] = activated.collect { case (feature: InvoiceFeature, support) => (feature, support) } def toByteVector: ByteVector = { val unknownIndexes = for (feature <- unknown) yield feature.bitIndex val activatedIndexes = activated.map { case (feature, sup) => 
feature supportBit sup } val activatedBytes = toByteVectorFromIndex(activatedIndexes.toSet) val unknownBytes = toByteVectorFromIndex(unknownIndexes) val max = activatedBytes.size.max(unknownBytes.size) activatedBytes.padLeft(max) | unknownBytes.padLeft(max) } private def toByteVectorFromIndex(indexes: Set[Int] = Set.empty): ByteVector = { if (indexes.isEmpty) return ByteVector.empty var buf = BitVector.fill(indexes.max + 1)(high = false).bytes.bits indexes.foreach { index => buf = buf set index } buf.reverse.bytes } } object Features { val empty: Features = { val noFeatures = Map.empty[Feature, FeatureSupport] Features(noFeatures) } def apply(features: (Feature, FeatureSupport)*): Features = Features(features.toMap) def apply(bytes: ByteVector): Features = apply(bytes.bits) def apply(bits: BitVector): Features = { val all = bits.toIndexedSeq.reverse.zipWithIndex.collect { case (true, idx) if knownFeatures.exists(_.optional == idx) => Right((knownFeatures.find(_.optional == idx).get, Optional)) case (true, idx) if knownFeatures.exists(_.mandatory == idx) => Right((knownFeatures.find(_.mandatory == idx).get, Mandatory)) case (true, idx) => Left(UnknownFeature(idx)) } Features( activated = all.collect { case Right((feature, support)) => feature -> support }.toMap, unknown = all.collect { case Left(inf) => inf }.toSet ) } case object OptionDataLossProtect extends Feature with InitFeature with NodeFeature { val rfcName = "Data loss protect" val mandatory = 0 } case object InitialRoutingSync extends Feature with InitFeature { val rfcName = "Initial routing sync" val mandatory = 2 } case object ChannelRangeQueries extends Feature with InitFeature with NodeFeature { val rfcName = "Basic gossip queries" val mandatory = 6 } case object VariableLengthOnion extends Feature with InitFeature with NodeFeature with InvoiceFeature { val rfcName = "Advanced onion" val mandatory = 8 } case object ChannelRangeQueriesExtended extends Feature with InitFeature with NodeFeature { val 
rfcName = "Fast graph sync" val mandatory = 10 } case object StaticRemoteKey extends Feature with InitFeature with NodeFeature { val rfcName = "Direct refund" val mandatory = 12 } case object PaymentSecret extends Feature with InitFeature with NodeFeature with InvoiceFeature { val rfcName = "Payment secret" val mandatory = 14 } case object BasicMultiPartPayment extends Feature with InitFeature with NodeFeature with InvoiceFeature { val rfcName = "Multipart payments" val mandatory = 16 } case object Wumbo extends Feature with InitFeature with NodeFeature { val rfcName = "Large channels" val mandatory = 18 } case object ShutdownAnySegwit extends Feature with InitFeature with NodeFeature { val rfcName = "Any shutdown script" val mandatory = 26 } case object PaymentMetadata extends Feature with InvoiceFeature { val rfcName = "Payment invoice metadata" val mandatory = 48 } case object TrampolinePayment extends Feature with InitFeature with NodeFeature with InvoiceFeature { val rfcName = "Trampoline payments" val mandatory = 50 } case object ChainSwap extends Feature with InitFeature with NodeFeature { val rfcName = "Chain swaps" val mandatory = 32770 } case object HostedChannels extends Feature with InitFeature with NodeFeature { val rfcName = "Hosted channels" val mandatory = 32972 } case object ResizeableHostedChannels extends Feature with InitFeature with NodeFeature { val rfcName = "Resizeable Hosted channels" val mandatory = 32974 } val knownFeatures: Set[Feature] = Set(ChannelRangeQueriesExtended, OptionDataLossProtect, BasicMultiPartPayment, ChannelRangeQueries, VariableLengthOnion, InitialRoutingSync, ShutdownAnySegwit, PaymentMetadata, TrampolinePayment, StaticRemoteKey, HostedChannels, ResizeableHostedChannels, PaymentSecret, ChainSwap, Wumbo) // Returns true if both feature sets are compatible def areCompatible(ours: Features, theirs: Features): Boolean = ours.areSupported(theirs) && theirs.areSupported(ours) // returns true if both have at least optional 
support def canUseFeature(localFeatures: Features, remoteFeatures: Features, feature: Feature): Boolean = localFeatures.hasFeature(feature) && remoteFeatures.hasFeature(feature) }
btcontract/wallet
app/src/main/java/fr/acinq/eclair/Features.scala
Scala
apache-2.0
6,636
package mesosphere.marathon.integration.facades

import akka.actor.ActorSystem
import mesosphere.marathon.integration.setup.RestResult
import spray.client.pipelining._
import spray.httpx.PlayJsonSupport
import mesosphere.marathon.integration.setup.SprayHttpResponse._

import scala.concurrent.Await._
import scala.concurrent.duration._

object MesosFacade {

  /**
   * Corresponds to parts of `state.json`.
   */
  case class ITMesosState(
    version: String,
    gitTag: Option[String],
    agents: Iterable[ITAgent])

  case class ITAgent(
    id: String,
    resources: ITResources,
    usedResources: ITResources,
    offeredResources: ITResources,
    reservedResourcesByRole: Map[String, ITResources],
    unreservedResources: ITResources)

  case class ITResources(resources: Map[String, ITResourceValue]) {
    /** True when there are no entries at all, or every entry is itself empty. */
    def isEmpty: Boolean = resources.isEmpty || resources.values.forall(_.isEmpty)

    override def toString: String = {
      // Render entries sorted by resource name for stable output.
      val rendered = resources.toSeq
        .sortBy { case (resourceName, _) => resourceName }
        .map { case (resourceName, resourceValue) => s"$resourceName: $resourceValue" }
      "{" + rendered.mkString(", ") + " }"
    }
  }

  object ITResources {
    def empty: ITResources = new ITResources(Map.empty)

    /** Builds a resource map; doubles become scalar values, strings become port values. */
    def apply(vals: (String, Any)*): ITResources = {
      val typed = vals.toMap.mapValues {
        case value: Double => ITResourceScalarValue(value)
        case portsString: String => ITResourcePortValue(portsString)
      }
      ITResources(typed)
    }
  }

  sealed trait ITResourceValue {
    def isEmpty: Boolean
  }

  case class ITResourceScalarValue(value: Double) extends ITResourceValue {
    override def isEmpty: Boolean = value == 0
    override def toString: String = value.toString
  }

  case class ITResourcePortValue(portString: String) extends ITResourceValue {
    override def isEmpty: Boolean = false
    override def toString: String = '"' + portString + '"'
  }
}

class MesosFacade(url: String, waitTime: Duration = 30.seconds)(implicit val system: ActorSystem)
  extends PlayJsonSupport {

  import system.dispatcher
  import MesosFacade._
  import MesosFormats._

  /** Fetches the Mesos master's `state.json` and deserializes it, blocking up to `waitTime`. */
  def state: RestResult[ITMesosState] = {
    val fetchState = sendReceive ~> read[ITMesosState]
    result(fetchState(Get(s"$url/state.json")), waitTime)
  }
}
timcharper/marathon
src/test/scala/mesosphere/marathon/integration/facades/MesosFacade.scala
Scala
apache-2.0
2,202
/*
 * Artificial Intelligence for Humans
 * Volume 2: Nature Inspired Algorithms
 * Java Version
 * http://www.aifh.org
 * http://www.jeffheaton.com
 *
 * Code repository:
 * https://github.com/jeffheaton/aifh
 *
 * Copyright 2014 by Jeff Heaton
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * For more information on Heaton Research copyrights, licenses
 * and trademarks visit:
 * http://www.heatonresearch.com/copyright
 */
package com.heatonresearch.aifh.randomize

/**
 * Provides a foundation for most random number generation. This allows the nextDouble to generate
 * the other types.
 *
 * The range helpers below derive everything from the abstract `nextDouble()`;
 * they assume it yields values in [0, 1) — TODO confirm against GenerateRandom's contract.
 */
abstract class AbstractGenerateRandom extends GenerateRandom {

  /**
   * Random integer in [low, high).
   * Uses `.toInt` truncation instead of the former `asInstanceOf[Int]` cast —
   * same JVM conversion (d2i), but the idiomatic numeric-conversion method.
   */
  override def nextInt(low: Int, high: Int): Int = low + (nextDouble() * (high - low)).toInt

  /** Random double in [0, high). */
  override def nextDouble(high: Double): Double = nextDouble(0, high)

  /** Random double in [low, high). */
  override def nextDouble(low: Double, high: Double): Double = low + (nextDouble() * (high - low))

  /** Random integer in [0, range). */
  override def nextInt(range: Int): Int = nextInt(0, range)
}
PeterLauris/aifh
vol2/vol2-scala-examples/src/main/scala/com/heatonresearch/aifh/randomize/AbstractGenerateRandom.scala
Scala
apache-2.0
1,516
package nsmc.sql

import com.mongodb.DBObject
import nsmc._
import nsmc.conversion.types.{InternalAndSchema, StructureType}
import nsmc.conversion.{RecordConverter, SchemaAccumulator}
import nsmc.mongo._
import nsmc.rdd.partitioner.MongoRDDPartition
import nsmc.rdd.{CollectionProxy, SQLMongoRDD}
import org.apache.spark.sql.types._
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.sql.catalyst.expressions.Row
import org.apache.spark.sql.sources._

import scala.collection.Iterator
import scala.collection.immutable.HashMap

/** Converts raw MongoDB documents into Spark SQL rows, one converter per partition. */
object PartitionRecordConverter {
  // each partition gets its own converter as there's no reason for the record
  // converters to communicate with each other
  def convert(internalSchema: StructureType)(in: Iterator[DBObject]): Iterator[Row] = {
    val rc = new RecordConverter(internalSchema)
    in.map(mo => rc.getSchemaRecord(mo))
  }
}

/** Projects a full row down to the requested columns, in the requested order. */
object RowProjector {
  def projectRow(wholeRow: Row, positionalMap: Map[String, Int], requiredColumns: Array[String]): Row = {
    val projected = requiredColumns.map(colName => {
      val pos = positionalMap(colName)
      wholeRow(pos)
    })
    Row(projected:_*)
  }
}

/** Serializable wrapper so schema inference can run remotely on each partition. */
class InferenceWrapper(proxy: CollectionProxy) extends Serializable {
  // Opens a connection for the partition, infers its schema, and closes the connection.
  // NOTE(review): the connection is not closed if inference throws — see the TODO below.
  def inferType(part: MongoRDDPartition): StructureType = {
    val conn = proxy.getPartitionConnector(part)
    val st = inferType(conn)
    conn.close()
    st
  }

  // Folds every document of the partition into a SchemaAccumulator.
  private def inferType(conn: MongoConnector): StructureType = {
    val in = conn.getData
    val accum = new SchemaAccumulator()
    in.foreach(mo => accum.considerDatum(mo))
    accum.getInternal.asInstanceOf[StructureType]
  }
}

/**
 * A Spark SQL relation backed by a MongoDB collection. When no schema is supplied,
 * one is inferred by scanning every partition of the collection in parallel.
 */
case class MongoTableScan(database: String, collection: String, suppliedSchema: Option[StructType])
                         (@transient val sqlContext: SQLContext)
  extends BaseRelation with PrunedFilteredScan with InsertableRelation with Logging {

  // TODO: make sure we clean up if there's an error

  logInfo(s"Registering MongoDB collection '$collection' in database '$database'")

  // TODO: plumb indexedKeys
  val collectionConfig = new CollectionConfig(MongoConnectorConf(sqlContext.sparkContext.getConf), database, collection, Seq())

  val proxy = new CollectionProxy(collectionConfig)

  // Either infer the (Spark, internal) schema pair by scanning the collection,
  // or derive the internal schema from the user-supplied StructType.
  val (inferredSchema, internalSchema) = suppliedSchema match {
    case None => {
      val splits = proxy.getPartitions.map(s => s.asInstanceOf[MongoRDDPartition])
      // One Spark task per Mongo partition so inference parallelizes cleanly.
      val partitionRDD = sqlContext.sparkContext.parallelize(splits, splits.size)
      val inference = new InferenceWrapper(proxy)
      val partitionSchemas = partitionRDD.map(inference.inferType)
      val partialSchemas = partitionSchemas.collect()
      // Merge the per-partition schemas into one.
      val accum = new SchemaAccumulator()
      accum.accumulate(partialSchemas.iterator)
      val inferredSchema = accum.getSchema
      logDebug(s"Computed schema for collection '$collection' in database '$database'")
      val internalSchema = accum.getInternal
      (inferredSchema, internalSchema)
    }
    case Some(tupleSchema) => {
      val internalSchema = InternalAndSchema.toInternal(tupleSchema)
      (tupleSchema, internalSchema)
    }
  }

  val schema: StructType = StructType(inferredSchema)

  // Maps each column name to its position in a full (unprojected) row.
  private def makePositionalMap(fields: Seq[StructField]): Map[String, Int] = {
    HashMap[String, Int](fields.map(f => f.name).zipWithIndex:_*)
  }

  /** Executes a pruned/filtered scan, pushing filter and projection down to MongoDB. */
  def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
    // NOTE: it turns out that Spark [at least 1.2.0] sometimes calls this with an empty array of RequiredColumns, for
    // example in order to execute RDD.count() regardless of whether it has yet had a reason to access
    // the actual columns, and thus it produces potentially redundant queries. Presumably it doesn't trust the
    // size of the result set to be invariant with respect the choice of projection columns.
    // FIX: corrected log-message typo ("Scaning" -> "Scanning").
    logDebug(s"Scanning '$database'/'$collection' with columns ${requiredColumns.mkString("[",";","]")}")
    val schema = internalSchema
    val converter = PartitionRecordConverter.convert(schema.asInstanceOf[StructureType]) _
    val queryGenerator = new QueryGenerator()
    val mongoFilter = queryGenerator.makeFilter(filters)
    val mongoProjection = queryGenerator.makeProjection(requiredColumns)
    val queryData = new SQLMongoRDD(sqlContext.sparkContext, proxy, mongoFilter, mongoProjection)
    val allRows = queryData.mapPartitions(converter, preservesPartitioning = true)
    val positionalMap = makePositionalMap(inferredSchema)
    val projected = allRows.map(r => RowProjector.projectRow(r, positionalMap, requiredColumns))
    projected
  }

  /** Inserts the DataFrame into the collection, optionally overwriting its contents. */
  def insert(data: DataFrame, overwrite: Boolean): Unit = {
    logDebug(s"Inserting into '$database'/'$collection' with overwrite=$overwrite")
    proxy.insert(data, overwrite)
  }
}

// supports both inferred and user-specified schema
class MongoRelationProvider extends RelationProvider with SchemaRelationProvider {
  // Schema will be inferred by scanning the collection.
  def createRelation(sqlContext: SQLContext, parameters: Map[String, String]): BaseRelation = {
    MongoTableScan(parameters("db"), parameters("collection"), None)(sqlContext)
  }

  // Caller supplies the schema; no inference scan is performed.
  def createRelation(sqlContext: SQLContext, parameters: Map[String, String], schema: StructType): BaseRelation = {
    MongoTableScan(parameters("db"), parameters("collection"), Some(schema))(sqlContext)
  }
}
shotishu/spark-mongodb-connector
src/main/scala/nsmc/sql/MongoRelationProvider.scala
Scala
apache-2.0
5,419
/*
 * Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
 */
package play.api.libs.json.util

import scala.language.higherKinds

/** Holds a value of type `M[T]` that is only materialized on first access. */
trait LazyHelper[M[_], T] {
  /** The wrapped value; implementations memoize it lazily. */
  def lazyStuff: M[T]
}

object LazyHelper {
  /** Wraps `stuff` in a [[LazyHelper]] whose `lazyStuff` is a memoizing lazy val. */
  def apply[M[_], T](stuff: M[T]): LazyHelper[M, T] =
    new LazyHelper[M, T] {
      override lazy val lazyStuff: M[T] = stuff
    }
}
7thsense/play-json-extra
play-json-extra/js/src/main/scala/play/api/libs/json/Util.scala
Scala
apache-2.0
320
/*
 * Copyright 2014–2017 SlamData Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package quasar.fs.mount.module

import slamdata.Predef._
import quasar._
import quasar.fp.numeric._
import quasar.contrib.pathy._
import quasar.contrib.scalaz.eitherT._
import quasar.effect.{Failure, LiftedOps}
import quasar.fs._
import quasar.fs.mount._
import quasar.sql._

import matryoshka.data.Fix
import matryoshka.implicits._
import monocle._
import scalaz._, Scalaz._
import scalaz.stream.Process
import pathy.Path._

// Free-monad algebra for invoking SQL functions defined in mounted modules.
sealed abstract class Module[A]

object Module {
  // Opaque handle identifying an in-progress result stream.
  final case class ResultHandle(run: Long) extends scala.AnyVal

  object ResultHandle {
    implicit val show: Show[ResultHandle] = Show.showFromToString
    implicit val order: Order[ResultHandle] = Order.orderBy(_.run)
  }

  // Errors that can arise while resolving and invoking a module function.
  sealed trait Error
  type ErrorT[M[_], A] = EitherT[M, Error, A]
  type Failure[A] = quasar.effect.Failure[Error, A]

  object Error {
    final case class FSError(fsErr: FileSystemError) extends Error
    final case class SemErrors(semErrs: NonEmptyList[SemanticError]) extends Error
    final case class ArgumentsMissing(missing: List[CIName]) extends Error

    // Prisms for focusing on each error variant.
    val fsError =
      Prism.partial[Error, FileSystemError] { case FSError(fsErr) => fsErr } (FSError(_))
    val semErrors =
      Prism.partial[Error, NonEmptyList[SemanticError]] { case SemErrors(semErr) => semErr } (SemErrors(_))
    val argumentsMissing =
      Prism.partial[Error, List[CIName]] { case ArgumentsMissing(missing) => missing } (ArgumentsMissing(_))

    implicit val show: Show[Error] = Show.shows {
      case FSError(e) => e.shows
      case SemErrors(e) => s"Encountered the following semantic errors while attempting to invoke function: ${e.shows}"
      case ArgumentsMissing(missing) => s"The following arguments are missing: $missing"
    }
  }

  // Invoke the function at `path` with named arguments; yields either immediate
  // data or a handle to stream results from.
  final case class InvokeModuleFunction(path: AFile, args: Map[String, Fix[Sql]], offset: Natural, limit: Option[Positive])
    extends Module[Error \\/ (List[Data] \\/ ResultHandle)]
  // Read the next chunk from a handle.
  final case class More(handle: ResultHandle) extends Module[FileSystemError \\/ Vector[Data]]
  // Release a handle's resources.
  final case class Close(h: ResultHandle) extends Module[Unit]

  /** Low-level, unsafe operations. Clients are responsible for resource-safety
    * when using these.
    */
  final class Unsafe[S[_]](implicit S: Module :<: S) extends LiftedOps[Module, S] {
    type M[A] = ErrorT[FreeS, A]

    def invokeFunction(path: AFile, args: Map[String, Fix[Sql]], offset: Natural, limit: Option[Positive]): M[List[Data] \\/ ResultHandle] =
      EitherT(lift(InvokeModuleFunction(path, args, offset, limit)))

    /** Read a chunk of data from the file represented by the given handle.
      *
      * An empty `Vector` signals that all data has been read.
      */
    def more(h: ResultHandle): FileSystemErrT[FreeS, Vector[Data]] =
      EitherT(lift(More(h)))

    /** Closes the given read handle, freeing any resources it was using.
      */
    def close(h: ResultHandle): FreeS[Unit] =
      lift(Close(h))
  }

  object Unsafe {
    implicit def apply[S[_]](implicit S: Module :<: S): Unsafe[S] =
      new Unsafe[S]
  }

  // Resource-safe, high-level API built on top of Unsafe.
  final class Ops[S[_]](implicit val unsafe: Unsafe[S]) {
    type M[A] = unsafe.M[A]

    /** Returns the result of evaluating the function specified by the file path provided with the supplied
      * args
      */
    def invokeFunction(path: AFile, args: Map[String, Fix[Sql]], offset: Natural, limit: Option[Positive]): Process[M, Data] = {
      // TODO: use DataCursor.process for the appropriate cursor type

      // Only a streaming handle needs closing; immediate data requires no cleanup.
      def closeHandle(dataOrHandle: List[Data] \\/ ResultHandle): Process[M, Nothing] =
        dataOrHandle.fold(
          _ => Process.empty,
          h => Process.eval_[M, Unit](unsafe.close(h).liftM[ErrorT]))

      // Pulls chunks until an empty chunk signals end-of-stream.
      @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
      def readUntilEmpty(h: ResultHandle): Process[M, Data] =
        Process.await(unsafe.more(h).leftMap(Error.fsError(_))) { data =>
          if (data.isEmpty)
            Process.halt
          else
            Process.emitAll(data) ++ readUntilEmpty(h)
        }

      // bracket guarantees closeHandle runs even if the consumer terminates early.
      Process.bracket(unsafe.invokeFunction(path, args, offset, limit))(closeHandle) { dataOrHandle =>
        dataOrHandle.fold(
          data => Process.emitAll(data),
          handle => readUntilEmpty(handle))
      }
    }

    // As invokeFunction, but surfaces errors through the Failure effect rather than EitherT.
    def invokeFunction_(path: AFile, args: Map[String, Fix[Sql]], offset: Natural, limit: Option[Positive])(implicit
      SO: Failure :<: S
    ): Process[Free[S, ?], Data] = {
      val nat: M ~> Free[S, ?] = λ[M ~> Free[S, ?]] { x =>
        Failure.Ops[Error, S].unattempt(x.run)
      }
      invokeFunction(path, args, offset, limit).translate(nat)
    }
  }

  object Ops {
    implicit def apply[S[_]](implicit S: Module :<: S): Ops[S] =
      new Ops[S]
  }

  object impl {
    import Error._
    import FileSystemError._
    import PathError._

    // Default interpreter: resolves the function in the enclosing module mount,
    // substitutes arguments, plans the resulting SQL, and delegates streaming to QueryFile.
    def default[S[_]](implicit query: QueryFile.Unsafe[S], mount: Mounting.Ops[S]): Module ~> Free[S, ?] = λ[Module ~> Free[S, ?]] {
      case InvokeModuleFunction(file, args, offset, limit) =>
        val notFoundError = fsError(pathErr(pathNotFound(file)))
        // case insensitive args
        val iArgs = args.map{ case (key, value) => (CIName(key), value)}
        // The module is the directory containing the function's file.
        val currentDir = fileParent(file)
        (for {
          moduleConfig <- EitherT(mount.lookupModuleConfig(currentDir)
                            .leftMap(e => semErrors(SemanticError.genericError(e.shows).wrapNel))
                            .run.toRight(notFoundError).run.map(_.join))
          name         =  fileName(file).value
          funcDec      <- EitherT(moduleConfig.declarations.find(_.name.value ≟ name)
                            .toRightDisjunction(notFoundError).point[Free[S, ?]])
          // All declared parameters must be supplied; report every missing one at once.
          maybeAllArgs =  funcDec.args.map(arg => iArgs.get(arg)).sequence
          missingArgs  =  funcDec.args.filter(arg => !iArgs.contains(arg))
          allArgs      <- EitherT(maybeAllArgs.toRightDisjunction(argumentsMissing(missingArgs)).point[Free[S, ?]])
          scopedExpr   =  ScopedExpr(invokeFunction[Fix[Sql]](CIName(name), allArgs).embed, moduleConfig.statements)
          sql          <- EitherT(resolveImports_(scopedExpr, currentDir).leftMap(e => semErrors(e.wrapNel)).run.leftMap(fsError(_))).flattenLeft
          dataOrLP     <- EitherT(quasar.queryPlan(sql, Variables.empty, basePath = currentDir, offset, limit)
                            .run.value.leftMap(semErrors(_)).point[Free[S, ?]])
          // Constant queries yield data directly; otherwise evaluate the plan for a handle.
          dataOrHandle <- dataOrLP.traverse(lp => EitherT(query.eval(lp).run.value).leftMap(fsError(_)))
        } yield dataOrHandle.map(h => ResultHandle(h.run))).run
      case More(handle) =>
        query.more(QueryFile.ResultHandle(handle.run)).run
      case Close(handle) =>
        query.close(QueryFile.ResultHandle(handle.run))
    }
  }
}
drostron/quasar
core/src/main/scala/quasar/fs/mount/module/Module.scala
Scala
apache-2.0
7,421
/**
 * For copyright information see the LICENSE document.
 */

package entice.server.controllers

import entice.server._
import entice.server.test._
import entice.server.utils._
import entice.server.database._
import entice.protocol._
import akka.actor._
import akka.testkit._
import com.typesafe.config.ConfigFactory
import org.scalatest._
import org.scalatest.matchers._

// Integration-style spec for the Login controller actor.
// Uses akka-testkit; log level is raised to WARNING to keep test output quiet.
class LoginSpec extends TestKit(ActorSystem(
    "login-spec",
    config = ConfigFactory.parseString("""
    akka {
      loglevel = WARNING
    }
    """)))
    with WordSpec
    with MustMatchers
    with BeforeAndAfterAll
    with OneInstancePerTest
    with ImplicitSender {

    // actor under test
    val login = TestActorRef[Login]

    // given: the registry tracking connected clients, plus fixture accounts
    val clients = ClientRegistryExtension(system)
    val acc1 = Account(email = "loginspec1@entice.org", password = "test")
    val acc2 = Account(email = "loginspec2@entice.org", password = "test")
    val noacc = Account(email = "nonexisting@entice.org", password = "test")

    // Seeds the database: acc1/acc2 exist; noacc is created then deleted so the
    // email is guaranteed absent.
    override def beforeAll {
        // given an existing acc
        Account.create(acc1)
        Account.create(acc2)
        // given a nonexisting acc
        Account.create(noacc)
        Account.delete(noacc)
    }

    // Cleans up fixture accounts and shuts the actor system down.
    override def afterAll {
        Account.delete(acc1)
        Account.delete(acc2)
        TestKit.shutdownActorSystem(system)
    }

    "A login controller" must {

        "accept clients with a valid login request, and reply with a login success" in {
            val probe = TestProbe()
            fakePub(login, probe.ref, LoginRequest("loginspec1@entice.org", "test"))
            probe.expectMsgClass(classOf[LoginSuccess])
            probe.expectNoMsg
        }

        "detect multi-account logins, and reply with an error" in {
            val probe = TestProbe()
            // Pre-register a client for acc2 so the second login is a duplicate.
            clients.add(Client(self, Account(email = "loginspec2@entice.org", password = "test"), null, null))
            fakePub(login, probe.ref, LoginRequest("loginspec2@entice.org", "test"))
            probe.expectMsgClass(classOf[Failure])
            probe.expectNoMsg
        }

        "reply to any invalid login requests with an error" in {
            val probe = TestProbe()
            fakePub(login, probe.ref, LoginRequest("nonexisting@entice.org", "test"))
            probe.expectMsgClass(classOf[Failure])
            probe.expectNoMsg
        }
    }
}
entice/old-server
src/test/scala/entice/server/controllers/LoginSpec.scala
Scala
bsd-3-clause
2,418
package ch.hevs.medred

import org.apache.jena.datatypes.xsd.XSDDatatype
import org.joda.time.DateTime

// A research study: identifying metadata plus the instruments it contains.
case class Study(id:String,name:String,notes:String,description:String,protocolName:String,
    version:String,created:DateTime,modified:DateTime,instruments:Seq[Instrument]) {
  // Replaces only the instruments, keeping all other fields.
  // NOTE(review): this overloads the case-class-generated `copy`; presumably
  // intentional, but `copy(instruments = ...)` at call sites would be clearer.
  def copy(newInstruments:Seq[Instrument])=Study(id,name,notes,description,protocolName,
      version,created,modified,newInstruments)
}

// A data-collection instrument (e.g. a questionnaire) made of ordered items.
case class Instrument(name:String,items:Seq[Item])

// Common shape of everything that can appear in an instrument.
trait Item{
  def name:String
  def label:String
  def note:String
}

// One selectable answer option for a question.
case class Choice(value:Any,label:String)

// A grouping of items; `matrix` marks grid-style layout.
case class Section(name:String,label:String,note:String,items:Seq[Item],matrix:Boolean=false) extends Item

// A question with its backing field and (possibly empty) answer choices.
case class Question(name:String,label:String,note:String,field:Field,choices:Seq[Choice]) extends Item

// A computed/derived item backed by a field.
case class Operation(name:String,label:String,note:String,field:Field) extends Item

// Binds a form control to a variable, with optional validation and computation.
case class Field(fieldName:String,control:Control,variable:Variable,
    validation:Option[ValidationShape],computation:Option[String])

// Free-standing descriptive text within an instrument.
case class Note(name:String,label:String,note:String="") extends Item

// A named variable with its XSD datatype.
case class Variable(varName:String,varType:XSDDatatype)

// Constraint primitives used to validate field values (SHACL-style shapes).
trait PropertyShape
case class ValidationShape(propShapes:Seq[PropertyShape])
case class MaxInclShape(maxVal:Double) extends PropertyShape
case class MaxExclShape(maxVal:Double) extends PropertyShape
case class MinInclShape(minVal:Double) extends PropertyShape
case class MinExclShape(minVal:Double) extends PropertyShape

// One row of collected data: variable names paired positionally with values.
case class Record(recordId:String,varNames:Seq[String],fields:Seq[Any])
jpcik/medred
rdfizer/src/main/scala/ch/hevs/medred/Structures.scala
Scala
mit
1,563
package com.igorvovk.telegram.botapi

import javax.inject.{Inject, Provider, Singleton}

import play.api.Configuration

object TelegramApiClientConfiguration {

  object Token {
    /** Reads `id` and `secret` from `config`; `None` when either key is absent. */
    def fromConfig(config: Configuration): Option[Token] = {
      for {
        id <- config.getLong("id")
        secret <- config.getString("secret")
      } yield Token(id, secret)
    }
  }

  /** Bot credentials; `str` is the "id:secret" form embedded in Telegram API URLs. */
  case class Token(id: Long, secret: String) {
    lazy val str = s"$id:$secret"
  }

  /**
   * Builds the client configuration from the `telegram` configuration subtree.
   *
   * Fails fast with `NoSuchElementException` at startup when `bot.token`
   * (with valid `id`/`secret`) or `apiUrl` is missing — matching the original
   * `.get`-based behavior.
   */
  def fromConfig(config: Configuration): TelegramApiClientConfiguration = {
    val token = config.getConfig("bot.token").flatMap(Token.fromConfig).get
    // Look the base URL up once instead of repeating the config access per derived URL.
    val apiUrl = config.getString("apiUrl").get

    TelegramApiClientConfiguration(
      credentials = token,
      apiBaseUrl = apiUrl + "/bot" + token.str,
      downloadBaseUrl = apiUrl + "/file/bot" + token.str,
      receiveUpdatesTimeout = Some(90)
    )
  }
}

/** Immutable settings consumed by the Telegram API client. */
case class TelegramApiClientConfiguration(credentials: TelegramApiClientConfiguration.Token,
                                          apiBaseUrl: String,
                                          downloadBaseUrl: String,
                                          receiveUpdatesTimeout: Option[Int])

/** DI provider: lazily builds the configuration from the `telegram` config section. */
@Singleton
class TelegramApiClientConfigurationProvider @Inject()(config: Configuration) extends Provider[TelegramApiClientConfiguration] {
  lazy val get = TelegramApiClientConfiguration.fromConfig(config.getConfig("telegram").get)
}
igorynia/TelegramBotApi
src/main/scala/com/igorvovk/telegram/botapi/TelegramApiClientConfiguration.scala
Scala
mit
1,418
/***********************************************************************
 * Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/

package org.locationtech.geomesa.features

import org.locationtech.geomesa.utils.conf.GeoMesaSystemProperties.SystemProperty

package object serialization {

  // Marker bytes written ahead of nullable fields: 0 = null, 1 = present.
  val NULL_BYTE    : Byte = 0
  val NOT_NULL_BYTE: Byte = 1

  // Serialization limits for geometries; nesting defaults to "3", length has no default.
  val GeometryNestingThreshold: SystemProperty = SystemProperty("geomesa.geometry.nesting.max", "3")
  val GeometryLengthThreshold : SystemProperty = SystemProperty("geomesa.geometry.length.max")

  // Structural (duck-typed) view of any writer exposing these primitive-write methods.
  // NOTE(review): structural-type calls are dispatched via reflection in Scala 2 —
  // presumably chosen to abstract over serialization back-ends without a shared
  // trait; confirm the reflective overhead is acceptable on hot paths.
  type PrimitiveWriter = AnyRef {
    def writeInt(value: Int): Unit
    def writeLong(value: Long): Unit
    def writeFloat(value: Float): Unit
    def writeDouble(value: Double): Unit
    def writeBoolean(value: Boolean): Unit
    def writeString(value: String): Unit
  }

  // Structural counterpart for reading the same primitive types.
  type PrimitiveReader = AnyRef {
    def readInt(): Int
    def readLong(): Long
    def readFloat(): Float
    def readDouble(): Double
    def readBoolean(): Boolean
    def readString(): String
  }

  // Numeric writer variant supporting variable-length ints (`optimizePositive`).
  type NumericWriter = AnyRef {
    def writeInt(value: Int, optimizePositive: Boolean): Int
    def writeDouble(value: Double): Unit
    def writeByte(value: Byte): Unit
  }

  // Numeric reader counterpart; `optimizePositive` must match the writer's setting.
  type NumericReader = AnyRef {
    def readInt(optimizePositive: Boolean): Int
    def readDouble(): Double
    def readByte(): Byte
  }
}
locationtech/geomesa
geomesa-features/geomesa-feature-common/src/main/scala/org/locationtech/geomesa/features/serialization/package.scala
Scala
apache-2.0
1,675
// Compiler-warning test fixture (from a `neg`/warning test directory, per its path):
// this code deliberately uses constructs the compiler should warn about.
// Do NOT "fix" it — the warnings are the point.
class Foo {
  // `return` inside the closure is a NONLOCAL return from `foo` (implemented by
  // throwing a control exception), and the broad `case x` catch can intercept it.
  def foo(l: List[Int]): Int = {
    try l foreach { _ => return 5 } catch { case x => 11 }
    22
  }
  // A partial function that is never defined (guard is always false).
  val pf: PartialFunction[Throwable, Unit] = {
    case x if false => ()
  }
  // Same nonlocal-return pattern, but with a named handler and a finally block.
  def bar(l: List[Int]): Int = {
    try l foreach { _ => return 5 } catch pf
    finally println()
    22
  }
}
AlexSikia/dotty
tests/untried/neg/nonlocal-warning.scala
Scala
bsd-3-clause
315
package org.eichelberger.sfc.utils

import com.typesafe.scalalogging.Logging
import org.eichelberger.sfc.SpaceFillingCurve.{Composable, OrdinalNumber, OrdinalVector, SpaceFillingCurve}
import org.eichelberger.sfc._
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat

import scala.util.parsing.combinator.RegexParsers

/*
 * Examples:
 * - R(2,3)
 * - R(2,3,4)
 * - H(2, Z(7))
 * - Z(R(4,H(2,2)),8)
 *
 * Examples:
 * R(t(15), Z(x(10), y(5)))
 * R(Z(x(10), y(5)), t(15))
 */

// Combinator parser for the curve-composition mini-language shown above:
// R = row-major, Z = Z-order, H = compact Hilbert; leaves are dimensions
// (x/y/t/u/v/w with a precision and optional min/max bounds) or bare precisions.
object CompositionParser extends RegexParsers {
  // Timestamp format accepted by the `t(...)` dimension bounds.
  val dtf = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")

  val LPAREN = """\\(""".r
  val RPAREN = """\\)""".r
  val COMMA = ","

  val R_CURVE_NAME = """R""".r
  val Z_CURVE_NAME = """Z""".r
  val H_CURVE_NAME = """H""".r

  def intLiteral: Parser[Int] = """\\d+""".r ^^ { _.toInt }

  def doubleLiteral: Parser[Double] = """[+\\-]?[0-9]*\\.?[0-9]+""".r ^^ { _.toDouble }

  def dateLiteral: Parser[DateTime] = """\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d\\.\\d\\d\\dZ""".r ^^ { s => dtf.parseDateTime(s) }

  // x(p) or x(p, min, max): longitude with precision p and optional explicit bounds.
  def longitude: Parser[Dimension[Double]] = """x""".r ~> LPAREN ~> intLiteral ~ opt(COMMA ~ doubleLiteral ~ COMMA ~ doubleLiteral) <~ RPAREN ^^ {
    case p ~ None => DefaultDimensions.createLongitude(p.toInt)
    case p ~ Some(_ ~ min ~ _ ~ max) =>
      Dimension("x", min, isMinIncluded = true, max, isMaxIncluded = true, p.toLong)
  }

  // y(p) or y(p, min, max): latitude, analogous to longitude.
  def latitude: Parser[Dimension[Double]] = """y""".r ~> LPAREN ~> intLiteral ~ opt(COMMA ~ doubleLiteral ~ COMMA ~ doubleLiteral) <~ RPAREN ^^ {
    case p ~ None => DefaultDimensions.createLatitude(p.toInt)
    case p ~ Some(_ ~ min ~ _ ~ max) =>
      Dimension("y", min, isMinIncluded = true, max, isMaxIncluded = true, p.toLong)
  }

  // NOTE(review): appears unused in this file — candidate for removal upstream.
  case class Bounds(min: String, max: String)

  //Dimension("t", MinDate, isMinIncluded = true, MaxDate, isMaxIncluded = true, 60L)
  // t(p) or t(p, minDate, maxDate): time dimension with optional date bounds.
  def dateTime: Parser[Dimension[DateTime]] = """t""".r ~> LPAREN ~> intLiteral ~ opt(COMMA ~ dateLiteral ~ COMMA ~ dateLiteral) <~ RPAREN ^^ {
    case p ~ None => DefaultDimensions.createDateTime(p.toInt)
    case p ~ Some(_ ~ minDate ~ _ ~ maxDate) =>
      Dimension("t", minDate, isMinIncluded = true, maxDate, isMaxIncluded = true, p.toLong)
  }

  def longDimName: Parser[String] = "u".r | "v".r | "w".r

  // u/v/w(p) or u/v/w(p, min, max): generic integer dimensions; default range is [0, 2^p - 1].
  def longDim: Parser[Dimension[Long]] = longDimName ~ LPAREN ~ intLiteral ~ opt(COMMA ~ doubleLiteral ~ COMMA ~ doubleLiteral) <~ RPAREN ^^ {
    case name ~ _ ~ p ~ None =>
      Dimension[Long](name, 0L, isMinIncluded=true, (1L << p.toLong) - 1L, isMaxIncluded=true, p.toLong)
    case name ~ _ ~ p ~ Some(_ ~ min ~ _ ~ max) =>
      Dimension[Long](name, min.toLong, isMinIncluded=true, max.toLong, isMaxIncluded=true, p.toLong)
  }

  def dimension: Parser[Dimension[_]] = longitude | latitude | dateTime | longDim

  def curveName: Parser[String] = R_CURVE_NAME | Z_CURVE_NAME | H_CURVE_NAME ^^ { _.toString }

  // A bare integer is shorthand for an unnamed identity dimension of that precision.
  def precision: Parser[Dimension[Long]] = intLiteral ^^ { p => DefaultDimensions.createIdentityDimension(p) }

  // A curve's child can be a dimension, a bare precision, or a nested curve (recursive).
  def childArg: Parser[Composable] = dimension | precision | curveParser

  // Builds the curve node: total precision is the sum over children (nested curves
  // contribute the sum of their own precisions).
  def curveParser: Parser[ComposedCurve] = curveName ~ LPAREN ~ repsep(childArg, COMMA) ~ RPAREN ^^ {
    case name ~ _ ~ children ~ _ =>
      val precisions = OrdinalVector(children.map {
        case c: SpaceFillingCurve => c.precisions.sum
        case d: Dimension[_] => d.precision
      }:_*)
      val curve = name match {
        case s: String if s.matches(R_CURVE_NAME.toString()) => new RowMajorCurve(precisions)
        case s: String if s.matches(Z_CURVE_NAME.toString()) => new ZCurve(precisions)
        case s: String if s.matches(H_CURVE_NAME.toString()) => new CompactHilbertCurve(precisions)
      }
      new ComposedCurve(curve, children)
  }

  // Parses an expression into a curve.
  // NOTE(review): `.get` throws on malformed input rather than surfacing a parse
  // error — callers must be prepared for that.
  def buildWholeNumberCurve(s: String): ComposedCurve = parse(curveParser, s).get
}

case class CompositionParserException(msg: String) extends Exception(msg)
cne1x/sfseize
src/main/scala/org/eichelberger/sfc/utils/CompositionParser.scala
Scala
apache-2.0
3,909
package us.illyohs.civilmagiks.client.gui

import net.minecraft.client.gui.GuiScreen

import us.illyohs.civilmagiks.api.CivilMagicksApi

/** Screen that renders the registered mana types (work in progress). */
object ManaGui extends GuiScreen {

  val mana = CivilMagicksApi.MANA

  override def drawScreen(mouseX: Int, mouseY: Int, partialTicks: Float): Unit = {
    super.drawScreen(mouseX, mouseY, partialTicks)
    // NOTE(review): assumes at least one mana type is registered; `get(0)` throws otherwise.
    val man = mana.getValues.get(0)
    mc.fontRendererObj.drawString(man.getLocalizedName, 20, 20, man.getColor)
//    mc.fontRendererObj.drawString(man.getLocalizedName, 20, 20, man.getColor)
    // FIX: was `0 to size()`, which is inclusive and iterates one index past the
    // end — a latent out-of-bounds once the (currently commented-out) body is
    // implemented. `until` visits exactly the valid indices [0, size).
    for (manaList <- 0 until mana.getValues.size()) {
//      mc.fontRendererObj.drawString(ma,20, 20)
    }
  }
}
Illyohs/CivilMagicks
src/main/scala/us/illyohs/civilmagiks/client/gui/ManaGui.scala
Scala
bsd-2-clause
654
/*
 * Copyright (c) 2011 Belmont Technology Pty Ltd. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.sodatest.junit

import org.junit.runners.ParentRunner
import collection.JavaConversions
import org.junit.runner.Description
import org.junit.runner.notification.RunNotifier
import org.junit.internal.AssumptionViolatedException
import org.sodatest.runtime.processing.SodaTestContext
import java.util.Collections._
import org.junit.internal.runners.model.{MultipleFailureException, EachTestNotifier}
import org.junit.runners.model.{Statement, InitializationError}
import org.sodatest.runtime.processing.formatting.xhtml.XhtmlSodaTestResultWriter
import org.sodatest.runtime.processing.running.{SodaFileRunner, PathUtils}
import org.sodatest.runtime.ConsoleLog
import java.io.{IOException, FileNotFoundException, File}
import org.sodatest.runtime.data.results.{ParseErrorBlockResult, EventBlockResult, ReportBlockResult, SodaTestResult}

/**
 * A JUnit runner for executing SodaTests. You should not use this Runner directly, but should
 * instead create a subclass of {org.sodatest.junit.JUnitSodaTestLauncherTestBase}.
 *
 * Each SodaTest file found under the test class' package becomes one JUnit "child"
 * test; results are additionally written out as XHTML after all children have run.
 */
class JUnitSodaTestRunner(testClass: Class[_ <: JUnitSodaTestLauncherTestBase]) extends ParentRunner[File](testClass) {

  // Console log verbosity, controlled by -DJUnitSodaTestRunner.log.level; unknown
  // values fall back to Info with a warning rather than failing initialization.
  private val logLevel: ConsoleLog.Level.Value = System.getProperty("JUnitSodaTestRunner.log.level", "Info").toLowerCase match {
    case "error" => ConsoleLog.Level.Error
    case "info" => ConsoleLog.Level.Info
    case "debug" => ConsoleLog.Level.Debug
    case "verbose" => ConsoleLog.Level.Verbose
    case value => {
      System.err.println("WARNING: SodaTest: Unrecognised value '" + value + "' for system property JUnitSodaTestRunner.log.level. Using 'Info' instead.")
      ConsoleLog.Level.Info
    }
  }

  private val log = new ConsoleLog(logLevel)

  // Configuration read from annotations on the launcher test class.
  private val baseDirName = testClass.getAnnotation(classOf[JUnitSodaTestLauncherBaseDir]).value
  private val filePattern = testClass.getAnnotation(classOf[JUnitSodaTestLauncherFilePattern]).value
  private val outputDirName = testClass.getAnnotation(classOf[JUnitSodaTestLauncherOutputDirectory]).value
  private val fixtureRoot = testClass.getAnnotation(classOf[JUnitSodaTestLauncherFixtureRoot]) match {
    case null => {
      log.debug("No JUnitSodaTestLauncherFixtureRoot annotation found. Using the test class' package of '" + testClass.getPackage.getName + "' as the fixture root.")
      testClass.getPackage.getName
    }
    case a: JUnitSodaTestLauncherFixtureRoot => a.value
  }

  private val baseDir = new File(baseDirName)
  if (!baseDir.exists()) throw new FileNotFoundException(baseDir.getAbsolutePath)

  private val outputDir = new File(outputDirName)
  if (!outputDir.exists() && !outputDir.mkdirs()) throw new InitializationError("Failed to create output directory " + outputDir.getAbsolutePath)

  // Tests are searched for under the base dir, mirroring the test class' package path.
  private val testSearchDir: File = new File(baseDir, testClass.getPackage.getName.replaceAll("\\\\.", "/"))
  // FIX: the original constructed this FileNotFoundException but never threw it
  // (missing `throw`, unlike the parallel baseDir check above), so a missing search
  // dir was silently ignored here. Also report the search dir's path, not baseDir's.
  if (!testSearchDir.exists()) throw new FileNotFoundException(testSearchDir.getAbsolutePath)

  private val filePatternRegex = filePattern.r

  // Accumulated (file, result) pairs, written out as XHTML once all children finish.
  private var results: List[(File, SodaTestResult)] = Nil

  private implicit val context = new SodaTestContext(fixtureRoot, log)

  /** Recursively collects the SodaTest files matching the configured pattern. */
  def getChildren: java.util.List[File] = {
    if (!testSearchDir.exists) throw new FileNotFoundException("SodaTest search path does not exist: " + testSearchDir.getAbsolutePath)
    if (!testSearchDir.isDirectory) throw new IOException("SodaTest search path is not a directory: " + testSearchDir.getAbsolutePath)
    val files = PathUtils.collectFilesRecursive(testSearchDir, file => {filePatternRegex.unapplySeq(file.getName) != None})
    XhtmlSodaTestResultWriter.createOutputDirectories(baseDir, files, outputDir)
    JavaConversions.asJavaList(files)
  }

  // Lets a single Throwable be passed where java.util.List[Throwable] is expected.
  private implicit def throwable2ThrowableList(t: Throwable): java.util.List[Throwable] = singletonList(t)

  /** Describes a child test as "name (parent dir)" for JUnit reporting. */
  def describeChild(child: File): Description =
    Description.createTestDescription(testClass, child.getParent match { case null => child.getName; case parent => child.getName + " (" + parent + ")"})

  // Wraps the default invoker so the XHTML results are written after all tests run.
  override protected def childrenInvoker(notifier: RunNotifier): Statement = {
    val superInvoker: Statement = super.childrenInvoker(notifier)
    new Statement {
      def evaluate(): Unit = {
        superInvoker.evaluate()
        XhtmlSodaTestResultWriter.writeResultsFiles(results, baseDir, outputDir)
      }
    }
  }

  /** Runs a single SodaTest file, translating its result into JUnit notifications. */
  def runChild(testFile: File, notifier: RunNotifier) {
    val eachNotifier: EachTestNotifier = new EachTestNotifier(notifier, describeChild(testFile))
    eachNotifier.fireTestStarted()
    try {
      val result: SodaTestResult = runTest(testFile)
      results = results :+ (testFile, result)
      if (!result.passed)
        eachNotifier.addFailure(new MultipleFailureException(JavaConversions.asJavaList(getJUnitExceptions(result))))
    } catch {
      case e: AssumptionViolatedException => eachNotifier.addFailedAssumption(e)
      case e: Throwable => eachNotifier.addFailure(e)
    } finally {
      eachNotifier.fireTestFinished()
    }
  }

  // Translates every failed block in the result into one or more JUnit Throwables.
  private def getJUnitExceptions(result: SodaTestResult): scala.List[scala.Throwable] = {
    val errors: List[Option[List[Throwable]]] = result.blockResults.map(blockResult => {
      if (blockResult.succeeded)
        None.asInstanceOf[Option[List[Throwable]]]
      else Some(
        blockResult.blockError match {
          case Some(blockError) => List(new ExecutionErrorException(blockError.toString))
          case None => blockResult match {
            case rbr: ReportBlockResult => reportBlockErrors(rbr)
            case ebr: EventBlockResult => eventBlockErrors(ebr)
            case pebr: ParseErrorBlockResult => List(new TestInputParsingException(pebr.block.error))
            case _ => throw new IllegalStateException(
              "BUG: Only Event blocks, Report blocks and Parse Error blocks are expected to have non-block errors! " +
              "(blockResult = " + blockResult + ")")
          }
        }
      )
    })
    val junitExceptions: List[Throwable] = errors.flatten.flatten
    junitExceptions
  }

  private def runTest(testFile: File): SodaTestResult = {
    SodaFileRunner.runTest(testFile)
  }

  // One Throwable per failed execution in a Report block (execution error or mismatch).
  private def reportBlockErrors(rbr: ReportBlockResult): List[Throwable] = {
    rbr.executionResults.map(executionResult => {
      executionResult.error match {
        case Some(error) => Some(new ExecutionErrorException(error.toString))
        case None if !executionResult.matchResult.passed => Some(
          new ReportMatchFailureException("Report match failure in '" + rbr.block.name + "' at line " +
            (executionResult.execution.parameterValues match { case Some(line) => line.lineNumber; case _ => rbr.block.source.lines(0).lineNumber }))
        )
        case _ => None // A passing execution in a block where others failed
      }
    }).flatten
  }

  // One Throwable per errored execution in an Event block.
  private def eventBlockErrors(ebr: EventBlockResult): List[Throwable] =
    ebr.executionResults.map(_.error.map(e => new ExecutionErrorException(e.toString))).flatten
}

class TestClassMissingAnnotationException(message: String) extends InitializationError(message)
class ReportMatchFailureException(message: String) extends java.lang.AssertionError(message)
class ExecutionErrorException(message: String) extends java.lang.AssertionError(message)
class TestInputParsingException(message: String) extends java.lang.AssertionError(message)
GrahamLea/SodaTest
sodatest-junit/src/main/scala/org/sodatest/junit/JUnitSodaTestRunner.scala
Scala
apache-2.0
8,081
///////////////////////////////////////////////////////////////////////////////
//  package.scala
//
//  Copyright (C) 2012-2014 Ben Wing, The University of Texas at Austin
//
//  Licensed under the Apache License, Version 2.0 (the "License");
//  you may not use this file except in compliance with the License.
//  You may obtain a copy of the License at
//
//  http://www.apache.org/licenses/LICENSE-2.0
//
//  Unless required by applicable law or agreed to in writing, software
//  distributed under the License is distributed on an "AS IS" BASIS,
//  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//  See the License for the specific language governing permissions and
//  limitations under the License.
///////////////////////////////////////////////////////////////////////////////

package opennlp.textgrounder

import gridlocate._
import util.spherical._

// Empty package declaration, kept so the `geolocate` package always exists.
package geolocate {
}

/**
 * Package object specializing the generic grid-location machinery in
 * [[opennlp.textgrounder.gridlocate]] to spherical (lat/long) coordinates.
 */
package object geolocate {
  /** Implicitly wrap a generic `CorrectCellInfo[SphereCoord]` in its
   * sphere-specific enrichment class. */
  implicit def to_SphereCorrectCellInfo(
    cci: CorrectCellInfo[SphereCoord]
  ) = new SphereCorrectCellInfo(cci)

  // Convenience aliases fixing the coordinate type parameter to SphereCoord.
  type SphereDoc = GridDoc[SphereCoord]
  type SphereCell = GridCell[SphereCoord]
  type SphereGrid = Grid[SphereCoord]

  /** Return the grid's document factory downcast to the sphere-specific type.
   * Assumes the grid was constructed with a `SphereDocFactory`; throws
   * `ClassCastException` otherwise. */
  def get_sphere_docfact(grid: SphereGrid) =
    grid.docfact.asInstanceOf[SphereDocFactory]
}
utcompling/textgrounder
src/main/scala/opennlp/textgrounder/geolocate/package.scala
Scala
apache-2.0
1,278
/**
 * Copyright 2011-2017 GatlingCorp (http://gatling.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.gatling.recorder.config

import io.gatling.commons.util.ClassSimpleNameToString
import io.gatling.recorder.util.Labelled

/**
 * Strategy selecting in which order the recorder applies its whitelist and
 * blacklist filters, or whether filtering is disabled entirely.
 *
 * @param label human-readable name shown in the recorder UI
 */
sealed abstract class FilterStrategy(val label: String) extends Labelled with ClassSimpleNameToString

object FilterStrategy {

  case object WhitelistFirst extends FilterStrategy("Whitelist First")
  case object BlacklistFirst extends FilterStrategy("Blacklist First")
  case object Disabled extends FilterStrategy("Disabled")

  /** Every supported strategy, in display order. */
  val AllStrategies = List(WhitelistFirst, BlacklistFirst, Disabled)

  /**
   * Resolve a strategy from its `toString` (simple class name) form.
   *
   * @throws IllegalArgumentException if `s` names no known strategy
   */
  def apply(s: String): FilterStrategy =
    AllStrategies.find(candidate => candidate.toString == s) match {
      case Some(strategy) => strategy
      case None           => throw new IllegalArgumentException(s"$s is not a valid filter strategy")
    }
}
timve/gatling
gatling-recorder/src/main/scala/io/gatling/recorder/config/FilterStrategy.scala
Scala
apache-2.0
1,330
/*
 * Copyright (C) 2014 - 2016 Softwaremill <http://softwaremill.com>
 * Copyright (C) 2016 - 2019 Lightbend Inc. <http://www.lightbend.com>
 */

package akka.kafka.tests.javadsl

import akka.kafka.tests.CapturingAppender
import org.junit.jupiter.api.extension.{AfterTestExecutionCallback, BeforeTestExecutionCallback, ExtensionContext}

/**
 * JUnit 5 extension that buffers log output while a test runs and replays it
 * to stdout only when that test fails, keeping output of passing tests quiet.
 */
class LogCapturingExtension extends BeforeTestExecutionCallback with AfterTestExecutionCallback {

  // eager access of CapturingAppender to fail fast if misconfigured
  private val capturingAppender = CapturingAppender.get("")

  /** Discard anything captured from a previous test before this one starts. */
  override def beforeTestExecution(context: ExtensionContext): Unit =
    capturingAppender.clear()

  /** On failure, dump the captured log lines, bracketed by start/end markers. */
  override def afterTestExecution(context: ExtensionContext): Unit =
    if (context.getExecutionException.isPresent) {
      val error = context.getExecutionException.get().toString
      val testClass = context.getRequiredTestClass.getName
      val testName = context.getRequiredTestMethod.getName
      val method = s"[${Console.BLUE}$testClass: $testName${Console.RESET}]"
      System.out.println(s"--> $method Start of log messages of test that failed with $error")
      capturingAppender.flush()
      System.out.println(s"<-- $method End of log messages of test that failed with $error")
    }
}
softwaremill/reactive-kafka
tests/src/test/scala/akka/kafka/tests/javadsl/LogCapturingExtension.scala
Scala
apache-2.0
1,269
/**
 * == DeeBee: A Tiny Database ==
 *
 * [[deebee.frontends]] contains the documentation for the various
 * ways of connecting to DeeBee. If you want to use DeeBee as a
 * library in your project, check out [[deebee.frontends.Connection]]
 * for the connection-manager API. If you want to interact with DeeBee
 * from the command-line, just run the DeeBee jarfile.
 *
 * DeeBee is released under the MIT license.
 *
 * @author Hawk Weisman
 *         Created by hawk on 11/25/14.
 */
package object deebee {
  // A row of a table: a strict sequence of storage entries.
  type Row = Seq[deebee.storage.Entry[_]]
  // A column of a table: entries exposed as a lazily-evaluated Stream.
  type Column = Stream[deebee.storage.Entry[_]]
}
hawkw/deebee
src/main/scala/deebee/package.scala
Scala
mit
594
package scala.reflect.internal

import org.junit.Assert._
import org.junit.{Assert, Test}
import org.junit.runner.RunWith
import org.junit.runners.JUnit4

import scala.collection.mutable
import scala.tools.nsc.symtab.SymbolTableForUnitTesting

/** Unit tests for subtyping/equivalence corner cases in the internal type lattice. */
@RunWith(classOf[JUnit4])
class TypesTest {

  object symbolTable extends SymbolTableForUnitTesting
  import symbolTable._, definitions._

  /** Narrowed and refined types are not interned: structurally equal, distinct identity. */
  @Test
  def testRefinedTypeSI8611(): Unit = {
    def stringNarrowed = StringTpe.narrow
    assert(stringNarrowed != stringNarrowed)
    assert(!(stringNarrowed =:= stringNarrowed))

    def boolWithString = refinedType(BooleanTpe :: StringTpe :: Nil, NoSymbol)
    assert(boolWithString != boolWithString)
    assert(boolWithString =:= boolWithString)

    val boolWithString1 = boolWithString
    val boolWithString1narrow1 = boolWithString1.narrow
    val boolWithString1narrow2 = boolWithString1.narrow
    // Two narrowings of the same refinement end up =:=. This was the root
    // cause of SI-8611. See `narrowUniquely` in `Logic` for the workaround.
    assert(boolWithString1narrow1 =:= boolWithString1narrow2)
    val uniquelyNarrowed1 = refinedType(boolWithString1narrow1 :: Nil, NoSymbol)
    val uniquelyNarrowed2 = refinedType(boolWithString1narrow2 :: Nil, NoSymbol)
    assert(uniquelyNarrowed1 =:= uniquelyNarrowed2)
  }

  /** `<:<` and `=:=` must be transitive across the three spellings of a module's type. */
  @Test
  def testTransitivityWithModuleTypeRef(): Unit = {
    import rootMirror.EmptyPackageClass
    val (module, moduleClass) = EmptyPackageClass.newModuleAndClassSymbol(TermName("O"), NoPosition, 0L)
    val minfo = ClassInfoType(List(ObjectTpe), newScope, moduleClass)
    module.moduleClass setInfo minfo
    module setInfo module.moduleClass.tpe

    val tp1 = TypeRef(ThisType(EmptyPackageClass), moduleClass, Nil)
    val tp2 = SingleType(ThisType(EmptyPackageClass), module)
    val tp3 = ThisType(moduleClass)
    val tps = List(tp1, tp2, tp3)
    val results = mutable.Buffer[String]()
    // Check every ordering of the three types for an intransitivity witness.
    tps.permutations.foreach {
      case ts @ List(a, b, c) =>
        def tsShownRaw = ts.map(t => showRaw(t)).mkString(", ")
        if (a <:< b && b <:< c && !(a <:< c)) results += s"<:< intransitive: $tsShownRaw"
        if (a =:= b && b =:= c && !(a =:= c)) results += s"=:= intransitive: $tsShownRaw"
    }
    results.toList match {
      case Nil => // okay
      case xs => Assert.fail(xs.mkString("\n"))
    }
  }

  /** `contains` must look through the synthetic RefinementTypeRef in a baseTypeSeq. */
  @Test
  def testRefinementContains(): Unit = {
    val refinement = typeOf[{def foo: Int}]
    assert(refinement.isInstanceOf[RefinedType])
    assert(refinement.contains(IntClass))
    val elem0 = refinement.baseTypeSeq(0)
    assert(elem0.isInstanceOf[RefinementTypeRef])
    assert(elem0.contains(IntClass))
  }

  /** lub of refined Option types must not depend on argument order. */
  @Test
  def testRefinedLubs(): Unit = {
    // https://github.com/scala/scala-dev/issues/168
    assertEquals(typeOf[Option[AnyVal]], lub(typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean] with Option[Short]] :: Nil))
    assertEquals(typeOf[Option[AnyVal]], lub(typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean]] :: Nil))
    assertEquals(typeOf[Option[AnyVal]], lub((typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean] with Option[Short]] :: Nil).reverse))
    assertEquals(typeOf[Option[AnyVal]], lub((typeOf[Option[Int] with Option[Char]] :: typeOf[Option[Boolean]] :: Nil).reverse))
  }

  /** Base type sequences of an existential over a refinement must mirror the underlying type. */
  @Test
  def testExistentialRefinement(): Unit = {
    import rootMirror.EmptyPackageClass

    // class M[A]
    val MClass = EmptyPackageClass.newClass("M")
    val A = MClass.newTypeParameter("A").setInfo(TypeBounds.empty)
    MClass.setInfo(PolyType(A :: Nil, ClassInfoType(ObjectClass.tpeHK :: Nil, newScopeWith(), MClass)))

    // (M[Int] with M[X] { def m: Any }) forSome { type X }
    val X = NoSymbol.newExistential("X").setInfo(TypeBounds.empty)
    val T: Type = {
      val decls = newScopeWith(MClass.newMethod("m").setInfo(NullaryMethodType(AnyClass.tpeHK)))
      val refined = refinedType(appliedType(MClass, IntClass.tpeHK) :: appliedType(MClass, X.tpeHK) :: Nil, NoSymbol, decls, NoPosition)
      newExistentialType(X :: Nil, refined)
    }

    val RefinementClass = T.underlying.typeSymbol
    assertTrue(RefinementClass.isRefinementClass)
    TypeRef(NoPrefix, RefinementClass, Nil) match {
      case rtr : RefinementTypeRef =>
        // ContainsCollector needs to look inside the info of symbols of RefinementTypeRefs
        assert(rtr.contains(X))
    }

    val underlying = T.underlying
    val baseTypeSeqIndices = T.baseTypeSeq.toList.indices
    for (i <- baseTypeSeqIndices) {
      // Elements of the existential type should have the same type symbol as underlying
      assertEquals(T.baseTypeSeq.typeSymbol(i), underlying.baseTypeSeq.typeSymbol(i))
    }

    // Type symbols should be distinct
    def checkDistinctTypeSyms(bts: BaseTypeSeq): Unit = {
      // Fixed: this previously ignored `bts` and always read T.baseTypeSeq,
      // so the second call below re-checked the exact same sequence.
      val syms = baseTypeSeqIndices.map(bts.typeSymbol)
      assertEquals(syms, syms.distinct)
    }
    checkDistinctTypeSyms(T.baseTypeSeq)
    checkDistinctTypeSyms(T.underlying.baseTypeSeq)

    // This is the entry for the refinement class
    assertTrue(T.baseTypeSeq.typeSymbol(0).isRefinementClass)
    assertEquals("M[Int] with M[X]{def m: Any} forSome { type X }", T.baseTypeSeq.rawElem(0).toString)

    // This is the entry for M. The raw entry is an existential over a RefinedType which encodes a lazily computed base type
    assertEquals(T.baseTypeSeq.typeSymbol(1), MClass)
    assertEquals("M[X] with M[Int] forSome { type X }", T.baseTypeSeq.rawElem(1).toString)
    // calling `apply` merges the prefix/args of the elements ot the RefinedType and rewraps in the existential
    assertEquals("M[_1] forSome { type X; type _1 >: X with Int }", T.baseTypeSeq.apply(1).toString)
  }

  /** mergePrefixAndArgs over existentials must be order-independent and yield a supertype. */
  @Test
  def testExistentialMerge(): Unit = {
    val ts = typeOf[Set[Any]] :: typeOf[Set[X] forSome { type X <: Y; type Y <: Int}] :: Nil
    def merge(ts: List[Type]) = mergePrefixAndArgs(ts, Variance.Contravariant, lubDepth(ts))
    val merged1 = merge(ts)
    val merged2 = merge(ts.reverse)
    assert(ts.forall(_ <:< merged1)) // use to fail before fix to mergePrefixAndArgs for existentials
    assert(ts.forall(_ <:< merged2))
    assert(merged1 =:= merged2)
  }
}
felixmulder/scala
test/junit/scala/reflect/internal/TypesTest.scala
Scala
bsd-3-clause
6,184
package scala.util

import org.junit.Assert._
import org.junit.Test

/** Behavioral tests for `Either.flatten`, `withRight` and `withLeft`. */
class EitherTest {

  /** `flatten` unwraps a `Right` holding another `Either` and passes a `Left` through. */
  @Test
  def testFlatten(): Unit = {
    val outerLeft: Either[String, Either[String, Int]] = Left("pancake")
    val innerLeft: Either[String, Either[String, Int]] = Right(Left("flounder"))
    val innerRight: Either[String, Either[String, Int]] = Right(Right(7))

    val flattenedOuterLeft: Either[String, Int] = outerLeft.flatten
    val flattenedInnerLeft: Either[String, Int] = innerLeft.flatten
    val flattenedInnerRight: Either[String, Int] = innerRight.flatten

    assertEquals(Left("pancake"), flattenedOuterLeft)
    assertEquals(Left("flounder"), flattenedInnerLeft)
    assertEquals(Right(7), flattenedInnerRight)
  }

  /** `withRight` widens a bare `Left` so it can seed a right-accumulating fold. */
  @Test
  def testWithRight(): Unit = {
    def rightSumOrLeftEmpty(xs: List[Int]) = {
      val seed = Left("empty").withRight[Int]
      xs.foldLeft(seed) { (acc, n) =>
        acc match {
          case Left(_)    => Right(n)
          case Right(sum) => Right(sum + n)
        }
      }
    }

    assertEquals(rightSumOrLeftEmpty(List(1, 2, 3)), Right(6))
    assertEquals(rightSumOrLeftEmpty(Nil), Left("empty"))
  }

  /** `withLeft` widens a bare `Right` so it can seed a left-accumulating fold. */
  @Test
  def testWithLeft(): Unit = {
    def leftSumOrRightEmpty(xs: List[Int]) = {
      val seed = Right("empty").withLeft[Int]
      xs.foldLeft(seed) { (acc, n) =>
        acc match {
          case Right(_)  => Left(n)
          case Left(sum) => Left(sum + n)
        }
      }
    }

    assertEquals(leftSumOrRightEmpty(List(1, 2, 3)), Left(6))
    assertEquals(leftSumOrRightEmpty(Nil), Right("empty"))
  }
}
scala/scala
test/junit/scala/util/EitherTest.scala
Scala
apache-2.0
1,302
package jigg.nlp.ccg

/*
 Copyright 2013-2015 Hiroshi Noji

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
*/

import java.io.File

import scala.collection.mutable.HashMap

/**
 * Common entry points shared by CCG task implementations
 * (training, prediction, evaluation and model persistence).
 */
trait Problem {
  def train: Unit
  def predict: Unit
  def evaluate: Unit
  def save: Unit

  /** Use `fullPath` when it is non-empty; otherwise fall back to
   * `nameInBankDir` resolved under the configured bank directory. */
  protected def pathWithBankDirPathAsDefault(fullPath: String, nameInBankDir: String) =
    (InputOptions.bankDirPath, fullPath) match {
      case (dir, "") if dir != "" => dir + "/" + nameInBankDir
      case (_, path) => path
    }

  /** Path to the training treebank; defaults to `train.ccgbank` in the bank dir. */
  protected def trainPath = pathWithBankDirPathAsDefault(InputOptions.trainPath, "train.ccgbank")
  /** Path to the development treebank; defaults to `devel.ccgbank` in the bank dir. */
  protected def developPath = pathWithBankDirPathAsDefault(InputOptions.developPath, "devel.ccgbank")

  /** Ensure `path` exists as a directory, aborting if a regular file occupies it.
   * If `mkdirs` returns false the directory already existed (or could not be
   * created) and any existing contents will be overwritten. */
  def prepareDirectoryOutput(path: String) = new File(path) match {
    case d if d.exists && d.isFile =>
      sys.error("We cannot create a directory " + path + "; there is another file in that path!")
    case d =>
      // Boolean match replaced with if/else; fixed message typo "exits" -> "exists".
      if (!d.mkdirs)
        System.err.println("Directory " + path + " already exists; we override the contents.")
  }
}
sakabar/jigg
src/main/scala/jigg/nlp/ccg/Problem.scala
Scala
apache-2.0
1,582
package breeze.stats.distributions

/*
 Copyright 2009 David Hall, Daniel Ramage

 Licensed under the Apache License, Version 2.0 (the "License")
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

 http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
*/

import org.junit.runner.RunWith
import org.scalacheck._
import org.scalatest._
import org.scalatest.junit._
import org.scalatest.prop._

/** Property and unit tests for the univariate [[Gaussian]] distribution. */
@RunWith(classOf[JUnitRunner])
class GaussianTest extends FunSuite with Checkers with UnivariateContinuousDistrTestBase with MomentsTestBase[Double] with ExpFamTest[Gaussian,Double] with HasCdfTestBase {
  override type Distr = Gaussian
  val expFam = Gaussian

  import org.scalacheck.Arbitrary.arbitrary

  // Generator for (mean, std): mean bounded to (-10000, 10000), std to (0.1, 8.1)
  // to keep the numerical checks stable.
  def arbParameter = Arbitrary {
    for (
      mean <- arbitrary[Double].map{_ % 10000.0};
      std <- arbitrary[Double].map{x => math.abs(x) % 8.0 + .1}
    ) yield (mean, std)
  }

  // Relative closeness of two (mean, std) pairs within tolerance 1E-1.
  def paramsClose(p: (Double,Double), b: (Double,Double)) = {
    val y1 = (p._1 - b._1).abs / (p._1.abs / 2 + b._1.abs / 2 + 1) < 1E-1
    val y2 = (p._2 - b._2).abs / (p._2.abs / 2 + b._2.abs / 2 + 1) < 1E-1
    y1 && y2
  }

  test("Probability of mean") {
    check(Prop.forAll { (m: Double, s: Double) =>
      (s == 0) || {
        // The unnormalized log-pdf peaks at the mean with value 0.
        val b = new Gaussian(mu = m, sigma = s.abs)
        b.unnormalizedLogPdf(m) == 0.0
      }
    })
  }

  test("#295, cdf/inverseCdf broken") {
    val gaussian = Gaussian(0, 1)
    assert((gaussian.cdf(gaussian.inverseCdf(0.1)) - 0.1).abs <= 1E-3,
      gaussian.cdf(gaussian.inverseCdf(0.1)) + " was not close to " + 0.1)
  }

  test("Probability of N(0,1)(1) propto exp(-.5))") {
    assert(new Gaussian(0,1).unnormalizedLogPdf(1.0) === -0.5)
  }

  test ("Gaussian.probability throws an exception when evaluating 1.0 < N(0, 1) < 0.0") {
    // Fixed: the intercept result was previously bound to an unused local `thrown`.
    intercept[IllegalArgumentException] {
      new Gaussian(0, 1).probability(1.0, 0.0)
    }
  }

  override val VARIANCE_TOLERANCE: Double = 9E-2

  // Generator of Gaussian instances with the same parameter bounds as arbParameter.
  implicit def arbDistr: Arbitrary[Distr] = Arbitrary {
    for (
      mean <- arbitrary[Double].map{x => math.abs(x) % 10000.0};
      std <- arbitrary[Double].map{x => math.abs(x) % 8.0 + .1}
    ) yield new Gaussian(mean, std)
  }

  def asDouble(x: Double) = x
  def fromDouble(x: Double) = x
}
crealytics/breeze
math/src/test/scala/breeze/stats/distributions/GaussianTest.scala
Scala
apache-2.0
2,556
package com.geeksville.gmaps

import scala.collection.mutable.ListBuffer
import com.google.android.gms.maps.model._
import com.google.android.gms.maps.GoogleMap
import com.google.android.gms.maps.GoogleMap.OnMarkerDragListener
import scala.collection.mutable.HashMap
import android.graphics.Color
import com.ridemission.scandroid.AndroidLogger
import scala.collection.JavaConverters._
import com.bugsense.trace.BugSenseHandler

/**
 * A ducktype that adds a uniform API for maps drawable widgets (Polyline/Circle)
 */
object MapsTypes {
  // Structural type: anything with a remove() method qualifies (Polyline, Circle, ...).
  // NOTE(review): structural-type calls go through reflection — slower than a trait,
  // presumably acceptable because remove() is called rarely; confirm if this becomes hot.
  type Drawable = { def remove() }
}

/** Base for objects that own at most one widget currently drawn on the map. */
trait DrawableFactory {
  // The widget currently shown on the map, if render() has been called and
  // remove() has not cleared it since.
  protected var drawn: Option[MapsTypes.Drawable] = None

  /** Remove the widget from the map (if any) and forget it. */
  def remove() {
    try {
      drawn.foreach(_.remove())
      drawn = None
    } catch {
      // Maps SDK can NPE internally on remove(); report to BugSense instead of crashing.
      case ex: NullPointerException =>
        BugSenseHandler.sendExceptionMessage("drawable_bug", "maps", ex)
    }
  }

  /** Draw this object's widget onto the given map, storing it in `drawn`. */
  def render(map: GoogleMap)

  /**
   * Move any drawables as we are dragged (if we care)
   */
  def handleMarkerDrag(sm: SmartMarker) {}
}

/** DrawableFactory specialization whose widget is a Polyline. */
trait LineFactory extends DrawableFactory {
  // Cast is safe by construction: render() only ever stores a Polyline here.
  def polyline: Option[Polyline] = drawn.asInstanceOf[Option[Polyline]]

  /** Subclasses supply the polyline's options (points, color, ...). */
  protected def lineOptions: PolylineOptions

  def render(map: GoogleMap) {
    drawn = Option(map.addPolyline(lineOptions))
  }
}

/** DrawableFactory specialization whose widget is a Circle. */
class CircleFactory(val circleOptions: CircleOptions) extends DrawableFactory {
  // Cast is safe by construction: render() only ever stores a Circle here.
  def circle: Option[Circle] = drawn.asInstanceOf[Option[Circle]]

  def render(map: GoogleMap) {
    drawn = Option(map.addCircle(circleOptions))
  }
}

/**
 * A line between two SmartMarkers
 */
case class Segment(endpoints: (SmartMarker, SmartMarker), color: Int) extends LineFactory {
  // Geodesic line between the two marker positions.
  final def lineOptions = (new PolylineOptions).color(color).add(endpoints._1.latLng).add(endpoints._2.latLng).geodesic(true)

  /**
   * Move any drawables as we are dragged (if we care)
   */
  override def handleMarkerDrag(sm: SmartMarker) {
    // Only update when the dragged marker is one of our endpoints.
    if (endpoints._1 == sm || endpoints._2 == sm) {
      val points = List(endpoints._1, endpoints._2).map(_.latLng)
      polyline.foreach(_.setPoints(points.asJava))
    }
  }
}

/**
 * Just a series of poly points
 */
case class PolylineFactory(points: Iterable[LatLng], color: Int) extends LineFactory {
  final def lineOptions = (new PolylineOptions).color(color).addAll(points.asJava)
}
geeksville/arduleader
andropilot/src/main/scala/com/geeksville/gmaps/Drawables.scala
Scala
gpl-3.0
2,279
package fr.njin.playoauth.common.request

import fr.njin.playoauth.common.OAuth

import scala.language.implicitConversions

/**
 * The authorization request.
 *
 * Represents the request made by a client in order to obtain a code or a token
 *
 * @param responseType The response type of the request. See [[fr.njin.playoauth.common.OAuth.ResponseType]]
 * @param clientId The id of the client of the request
 * @param redirectUri The redirection url of the request
 * @param scopes The scopes of the request
 * @param state The state of the request
 */
case class AuthzRequest(responseType: String,
                        clientId: String,
                        redirectUri: Option[String],
                        scopes: Option[Seq[String]],
                        state: Option[String])

/**
 * Implicit conversion for [[fr.njin.playoauth.common.request.AuthzRequest]]
 */
object AuthzRequest {

  /**
   * Serialize an authorization request into query-string parameters.
   *
   * Fixed: the response type was previously hard-coded to
   * `OAuth.ResponseType.Code`, silently ignoring `authzRequest.responseType`
   * (e.g. for the implicit-grant `token` response type).
   */
  implicit def authzRequest2QueryString(authzRequest: AuthzRequest): Map[String, Seq[String]] = {
    Map(
      OAuth.OauthResponseType -> Seq(authzRequest.responseType),
      OAuth.OauthClientId -> Seq(authzRequest.clientId)
    ) ++
      authzRequest.redirectUri.map(s => OAuth.OauthRedirectUri -> Seq(s)) ++
      authzRequest.scopes.map(s => OAuth.OauthScope -> Seq(s.mkString(" ")))
    // NOTE(review): `state` is still not serialized here although RFC 6749
    // requires it to be round-tripped. Confirm whether callers append it
    // separately before adding it to this map.
  }
}
giabao/play-oauth
common/src/main/scala/fr/njin/playoauth/common/request/AuthzRequest.scala
Scala
apache-2.0
1,291
package inox
package parsing

import org.scalatest._

/** Checks that the `t"..."` type interpolator parses each surface syntax
 * into the expected `inox.trees` type. */
class TypeParserSuite extends FunSuite {

  import inox.trees._
  import interpolator._
  implicit val symbols = NoSymbols

  // Primitive type keywords map to their tree constructors.
  test("Parsing basic types") {

    assertResult(IntegerType()) {
      t"BigInt"
    }

    assertResult(BooleanType()) {
      t"Boolean"
    }

    assertResult(UnitType()) {
      t"Unit"
    }

    assertResult(CharType()) {
      t"Char"
    }

    assertResult(StringType()) {
      t"String"
    }

    assertResult(Int32Type()) {
      t"Int"
    }

    assertResult(RealType()) {
      t"Real"
    }
  }

  // Redundant parentheses are transparent at any nesting depth.
  test("Parsing with parentheses") {

    assertResult(IntegerType()) {
      t"(BigInt)"
    }

    assertResult(BooleanType()) {
      t"((Boolean))"
    }

    assertResult(UnitType()) {
      t"(((Unit)))"
    }
  }

  // `IntN` parses to a signed bitvector of width N, for arbitrary N.
  test("Parsing BitVector types") {

    assertResult(BVType(true, 32)) {
      t"Int32"
    }

    assertResult(BVType(true, 64)) {
      t"Int64"
    }

    assertResult(BVType(true, 17)) {
      t"Int17"
    }

    assertResult(BVType(true, 1273)) {
      t"Int1273"
    }

    assertResult(BVType(true, 1)) {
      t"Int1"
    }
  }

  test("Parsing Set types") {

    assertResult(SetType(IntegerType())) {
      t"Set[BigInt]"
    }

    assertResult(SetType(BooleanType())) {
      t"Set[Boolean]"
    }
  }

  test("Parsing Bag types") {

    assertResult(BagType(IntegerType())) {
      t"Bag[BigInt]"
    }

    assertResult(BagType(BooleanType())) {
      t"Bag[Boolean]"
    }
  }

  test("Parsing Map types") {

    assertResult(MapType(StringType(), IntegerType())) {
      t"Map[String, BigInt]"
    }

    assertResult(MapType(UnitType(), BooleanType())) {
      t"Map[Unit, Boolean]"
    }
  }

  test("Parsing Tuple types") {

    assertResult(TupleType(Seq(StringType(), IntegerType(), CharType()))) {
      t"(String, BigInt, Char)"
    }

    assertResult(TupleType(Seq(UnitType(), BooleanType()))) {
      t"(Unit, Boolean)"
    }
  }

  // Function arrows: zero/one/many parameters; a parenthesized tuple argument
  // is distinct from a multi-parameter list; => is right-associative.
  test("Parsing Function types") {

    assertResult(FunctionType(Seq(IntegerType()), StringType())) {
      t"BigInt => String"
    }

    assertResult(FunctionType(Seq(), StringType())) {
      t"() => String"
    }

    assertResult(FunctionType(Seq(IntegerType()), StringType())) {
      t"(BigInt) => String"
    }

    assertResult(FunctionType(Seq(StringType(), IntegerType(), CharType()), BooleanType())) {
      t"(String, BigInt, Char) => Boolean"
    }

    assertResult(FunctionType(Seq(TupleType(Seq(StringType(), IntegerType(), CharType()))), BooleanType())) {
      t"((String, BigInt, Char)) => Boolean"
    }

    assertResult(FunctionType(Seq(IntegerType()), FunctionType(Seq(UnitType()), BooleanType()))) {
      t"BigInt => Unit => Boolean"
    }
  }
}
romac/inox
src/test/scala/inox/parsing/TypeParserSuite.scala
Scala
apache-2.0
2,746
package filodb.query import filodb.core.query.{ColumnFilter, RangeParams, RvRange} import filodb.core.query.Filter.Equals //scalastyle:off number.of.types sealed trait LogicalPlan { /** * Execute failure routing * Override for Queries which should not be routed e.g time(), month() * It is false for RawSeriesLikePlan, MetadataQueryPlan, RawChunkMeta, ScalarTimeBasedPlan and ScalarFixedDoublePlan */ def isRoutable: Boolean = true /** * Whether to Time-Split queries into smaller range queries if the range exceeds configured limit. * This flag will be overridden by plans, which either do not support splitting or will not help in improving * performance. For e.g. metadata query plans. */ def isTimeSplittable: Boolean = true /** * Replace filters present in logical plan */ def replaceFilters(filters: Seq[ColumnFilter]): LogicalPlan = { this match { case n: LabelCardinality => n.copy(filters = filters) case p: PeriodicSeriesPlan => p.replacePeriodicSeriesFilters(filters) case r: RawSeriesLikePlan => r.replaceRawSeriesFilters(filters) case l: LabelValues => l.copy(filters = filters) case n: LabelNames => n.copy(filters = filters) case s: SeriesKeysByFilters => s.copy(filters = filters) case c: TsCardinalities => c // immutable & no members need to be updated } } } /** * Super class for a query that results in range vectors with raw samples (chunks), * or one simple transform from the raw data. This data is likely non-periodic or at least * not in the same time cadence as user query windowing. 
*/ sealed trait RawSeriesLikePlan extends LogicalPlan { def isRaw: Boolean = false def replaceRawSeriesFilters(newFilters: Seq[ColumnFilter]): RawSeriesLikePlan } sealed trait NonLeafLogicalPlan extends LogicalPlan { def children: Seq[LogicalPlan] } /** * Super class for a query that results in range vectors with samples * in regular steps */ sealed trait PeriodicSeriesPlan extends LogicalPlan { /** * Periodic Query start time in millis */ def startMs: Long /** * Periodic Query end time in millis */ def stepMs: Long /** * Periodic Query step time in millis */ def endMs: Long def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan } sealed trait MetadataQueryPlan extends LogicalPlan { override def isTimeSplittable: Boolean = false val filters: Seq[ColumnFilter] val startMs: Long val endMs: Long } /** * A selector is needed in the RawSeries logical plan to specify * a row key range to extract from each partition. */ sealed trait RangeSelector extends java.io.Serializable case object AllChunksSelector extends RangeSelector case object WriteBufferSelector extends RangeSelector case object InMemoryChunksSelector extends RangeSelector case object EncodedChunksSelector extends RangeSelector case class IntervalSelector(from: Long, to: Long) extends RangeSelector /** * Concrete logical plan to query for raw data in a given range * @param columns the columns to read from raw chunks. Note that it is not necessary to include * the timestamp column, that will be automatically added. * If no columns are included, the default value column will be used. 
*/ case class RawSeries(rangeSelector: RangeSelector, filters: Seq[ColumnFilter], columns: Seq[String], lookbackMs: Option[Long] = None, offsetMs: Option[Long] = None) extends RawSeriesLikePlan { override def isRaw: Boolean = true override def replaceRawSeriesFilters(newFilters: Seq[ColumnFilter]): RawSeriesLikePlan = { val filterColumns = newFilters.map(_.column) val updatedFilters = this.filters.filterNot(f => filterColumns.contains(f.column)) ++ newFilters this.copy(filters = updatedFilters) } } case class LabelValues(labelNames: Seq[String], filters: Seq[ColumnFilter], startMs: Long, endMs: Long) extends MetadataQueryPlan case class LabelCardinality( filters: Seq[ColumnFilter], startMs: Long, endMs: Long, clusterType: String = "raw") extends MetadataQueryPlan case class LabelNames(filters: Seq[ColumnFilter], startMs: Long, endMs: Long) extends MetadataQueryPlan case class SeriesKeysByFilters(filters: Seq[ColumnFilter], fetchFirstLastSampleTimes: Boolean, startMs: Long, endMs: Long) extends MetadataQueryPlan object TsCardinalities { val SHARD_KEY_LABELS = Seq("_ws_", "_ns_", "__name__") } /** * Plan to answer queries of the abstract form: * * Find (active, total) cardinality pairs for all time series with <shard-key-prefix>, * then group them by { key[:1], key[:2], key[:3], ... }. * * Examples: * * { prefix=[], numGroupByFields=2 } -> { * prefix=["ws_a", "ns_a"] -> (4, 6), * prefix=["ws_a", "ns_b"] -> (2, 4), * prefix=["ws_b", "ns_c"] -> (3, 5) } * * { prefix=["ws_a", "ns_a"], numGroupByFields=3 } -> { * prefix=["ws_a", "ns_a", "met_a"] -> (4, 6), * prefix=["ws_a", "ns_a", "met_b"] -> (3, 5) } * * { prefix=["ws_a"], numGroupByFields=1 } -> { * prefix=["ws_a"] -> (3, 5) } * * @param numGroupByFields: indicates "hierarchical depth" at which to group cardinalities. * For example: * 1 -> workspace * 2 -> namespace * 3 -> metric * Must indicate a depth: * (1) at least as deep as shardKeyPrefix. 
* (2) less than '3' when the prefix does not contain values for all lesser depths. * Example (if shard keys specify a ws, ns, and metric): * shardKeyPrefix numGroupByFields * [] { 1, 2 } * [ws] { 1, 2 } * [ws, ns] { 2, 3 } * [ws, ns, metric] { 3 } */ case class TsCardinalities(shardKeyPrefix: Seq[String], numGroupByFields: Int) extends LogicalPlan { import TsCardinalities._ require(numGroupByFields >= 1 && numGroupByFields <= 3, "numGroupByFields must lie on [1, 3]") require(numGroupByFields >= shardKeyPrefix.size, "numGroupByFields indicate a depth at least as deep as shardKeyPrefix") require(numGroupByFields < 3 || shardKeyPrefix.size >= 2, "cannot group at the metric level when prefix does not contain ws and ns") // TODO: this should eventually be "true" to enable HAP/LTRP routing override def isRoutable: Boolean = false def filters(): Seq[ColumnFilter] = SHARD_KEY_LABELS.zip(shardKeyPrefix).map{ case (label, value) => ColumnFilter(label, Equals(value))} } /** * Concrete logical plan to query for chunk metadata from raw time series in a given range * @param column the column name from which to extract chunk information like chunk size and encoding type */ case class RawChunkMeta(rangeSelector: RangeSelector, filters: Seq[ColumnFilter], column: String) extends PeriodicSeriesPlan { override def isRoutable: Boolean = false // FIXME - TechDebt - This class should not be a PeriodicSeriesPlan override def startMs: Long = ??? override def stepMs: Long = ??? override def endMs: Long = ??? override def replacePeriodicSeriesFilters(newFilters: Seq[ColumnFilter]): PeriodicSeriesPlan = { val filterColumns = newFilters.map(_.column) val updatedFilters = this.filters.filterNot(f => filterColumns.contains(f.column)) ++ newFilters this.copy(filters = updatedFilters) } } /** * Concrete logical plan to query for data in a given range * with results in a regular time interval. 
* * Issue with specifying start/end/step here in the selector * is that plans involving multiple series can come with different * ranges and steps. * * This should be taken care outside this layer, or we need to have * proper validation. */ case class PeriodicSeries(rawSeries: RawSeriesLikePlan, startMs: Long, stepMs: Long, endMs: Long, offsetMs: Option[Long] = None) extends PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(rawSeries) override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(rawSeries = rawSeries.replaceRawSeriesFilters(filters)) } /** * Subquery represents a series of N points conceptually generated * by running N homogeneous queries where * SW = subquery window * SS = subquery step * N = SW / SS + 1 * For example, foo[5m:1m], would generate a series of 6 points 1 minute apart. * * query_range API is an equivalent of subquery though the concept of subquery * is wider. query_range can be called with start/end/step parameters only on the * top most expression in the query while subqueries can be: * (1) top level expression (complete equivalent to query_range) * (2) arguments to time range functions, potentially nested several levels deep * * So, there are two major cases for a subquery expression: * A) any_instant_expression[W:S] as a top level expression. In this case, subquery * would issue "any_instant_expression" with its own start, end, and step * parameters. If such an expression is called with query_range API, where start != end * and step is not zero, an exception is thrown. No special subquery * node in logical plan is generated as all of the PeriodicSeries logical plans can * handle range_query API's start, step, and end parameters. * B) RangeFunction(any_instant_expression[W:S]) at any node/level of abstract syntax tree. * In this case, we ALWAYS have a range function involved and SubqueryWithWindowing * logical plan node is generated. 
The plan is almost identical in the functionality to * PeriodicSeriesWithWindowing. The difference between the two is that currently * PeriodicSeriesWithWindowing expects a RawSeriesLikePlan while subquery is NOT * necessarily an instant selector. PeriodicSeriesWithWindowing needs to be refactored, * so, it could handle any PeriodicSeries; however, at this point we are going to * replicate the functionality in the specialized SubqueryWithWindowing logical plan. * We do so, that we can integrage subquery code without affecting existing existing * code paths, stabilize the codebase, and later merge the two. * Below is an example of B) case: * sum_over_time(<someExpression>[5m:1m]) called with query_range API parameters: * start=S, end=E, step=ST * Here query range API start,end, and step correspond to startMs, stepMs, and endMs, * however, it's not necessarily the case for nested subqueries because start, step, and en * will depend on the parent expression of the subquery * subqueryStepMs is used exclusively for debugging/logging, as the actual subquery step is * already baked into innerPeriodicSeries which is constructed with the subquery step before * passing to the constructor of SubqueryWithWindowing */ case class SubqueryWithWindowing( innerPeriodicSeries: PeriodicSeriesPlan, // someExpression startMs: Long, // S stepMs: Long, // ST endMs: Long, // E functionId: RangeFunctionId, // sum_over_time functionArgs: Seq[FunctionArgsPlan] = Nil, subqueryWindowMs: Long, // 5m subqueryStepMs: Long, //1m offsetMs: Option[Long] ) extends PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(innerPeriodicSeries) override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = { val updatedInnerPeriodicSeries = innerPeriodicSeries.replacePeriodicSeriesFilters(filters) this.copy(innerPeriodicSeries = updatedInnerPeriodicSeries) } } /** * Please, refer to documentation of SubqueryWithWindowing, this class * 
corresponds to case A), for example, foo[5m:1m] * Overall, TopLevelSubquery is just a wrapper on top of the actual underlying PeriodicSeriesPlan, and is used * primarily to record the existence of the actual subquery construct in the original query. * When we parse top level subquery to create a logical plan, start and end parameters passed from query_range API * are supposed to be the same. However, TopLevelSubquery logical plan as a PeriodicSeriesPlan * will have its own startMs set to (original_start - subquery lookback), 5m in case of the above example. * endMs will stay the same as the original end. * The original start time needs to be modified in order for the TopLevelSubquery to be splittable to support * long range subqueries. If logical plan has its start and end equal, this plan is impossible to split. * Subqueries, however, should be splittable. Generally, the parameters: * startMs, stepMs, endMs are not used by the further planners, * they are needed only for the logic to modify the logical plan to enable spanning long range queries across * several clusters. 
* original_start, original_end are the start and end parameters of TimeRangeParam passed to toSeriesPlan() * invoked to generate TopLevelSubquery * @param startMs (original_start - subquery_lookback) * @param stepMs is the value of the subquery step, not used by the planners but for debugging/documentation * @param endMs should always be the same as original_start */ case class TopLevelSubquery( innerPeriodicSeries: PeriodicSeriesPlan, // someExpression startMs: Long, //original start - 5m stepMs: Long, endMs: Long, orginalLookbackMs: Long, originalOffsetMs: Option[Long] ) extends PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(innerPeriodicSeries) override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = { val updatedInnerPeriodicSeries = innerPeriodicSeries.replacePeriodicSeriesFilters(filters) this.copy(innerPeriodicSeries = updatedInnerPeriodicSeries) } } /** * Concrete logical plan to query for data in a given range * with results in a regular time interval. * * Applies a range function on raw windowed data (perhaps with instant function applied) before * sampling data at regular intervals. 
* @param stepMultipleNotationUsed is true if promQL lookback used a step multiple notation * like foo{..}[3i] */ case class PeriodicSeriesWithWindowing(series: RawSeriesLikePlan, startMs: Long, stepMs: Long, endMs: Long, window: Long, function: RangeFunctionId, stepMultipleNotationUsed: Boolean = false, functionArgs: Seq[FunctionArgsPlan] = Nil, offsetMs: Option[Long] = None, columnFilters: Seq[ColumnFilter] = Nil) extends PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(series) override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(columnFilters = LogicalPlan.overrideColumnFilters(columnFilters, filters), series = series.replaceRawSeriesFilters(filters)) } /** * Aggregate data across partitions (not in the time dimension). * Aggregation can be done only on range vectors with consistent * sampling interval. * @param by columns to group by * @param without columns to leave out while grouping */ case class Aggregate(operator: AggregationOperator, vectors: PeriodicSeriesPlan, params: Seq[Any] = Nil, by: Seq[String] = Nil, without: Seq[String] = Nil) extends PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(vectors) override def startMs: Long = vectors.startMs override def stepMs: Long = vectors.stepMs override def endMs: Long = vectors.endMs override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(vectors = vectors.replacePeriodicSeriesFilters(filters)) } /** * Binary join between collections of RangeVectors. * One-To-One, Many-To-One and One-To-Many are supported. * * If data resolves to a Many-To-Many relationship, error will be returned. 
* * @param on columns to join on * @param ignoring columns to ignore while joining * @param include labels specified in group_left/group_right to be included from one side */ case class BinaryJoin(lhs: PeriodicSeriesPlan, operator: BinaryOperator, cardinality: Cardinality, rhs: PeriodicSeriesPlan, on: Seq[String] = Nil, ignoring: Seq[String] = Nil, include: Seq[String] = Nil) extends PeriodicSeriesPlan with NonLeafLogicalPlan { require(lhs.startMs == rhs.startMs) require(lhs.endMs == rhs.endMs) require(lhs.stepMs == rhs.stepMs) override def children: Seq[LogicalPlan] = Seq(lhs, rhs) override def startMs: Long = lhs.startMs override def stepMs: Long = lhs.stepMs override def endMs: Long = lhs.endMs override def isRoutable: Boolean = lhs.isRoutable && rhs.isRoutable override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(lhs = lhs.replacePeriodicSeriesFilters(filters), rhs = rhs.replacePeriodicSeriesFilters(filters)) } /** * Apply Scalar Binary operation to a collection of RangeVectors */ case class ScalarVectorBinaryOperation(operator: BinaryOperator, scalarArg: ScalarPlan, vector: PeriodicSeriesPlan, scalarIsLhs: Boolean) extends PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(vector) override def startMs: Long = vector.startMs override def stepMs: Long = vector.stepMs override def endMs: Long = vector.endMs override def isRoutable: Boolean = vector.isRoutable override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(vector = vector.replacePeriodicSeriesFilters(filters), scalarArg = scalarArg.replacePeriodicSeriesFilters(filters).asInstanceOf[ScalarPlan]) } /** * Apply Instant Vector Function to a collection of periodic RangeVectors, * returning another set of periodic vectors */ case class ApplyInstantFunction(vectors: PeriodicSeriesPlan, function: InstantFunctionId, functionArgs: Seq[FunctionArgsPlan] = Nil) extends 
PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(vectors) override def startMs: Long = vectors.startMs override def stepMs: Long = vectors.stepMs override def endMs: Long = vectors.endMs override def isRoutable: Boolean = vectors.isRoutable override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(vectors = vectors.replacePeriodicSeriesFilters(filters)) } /** * Apply Instant Vector Function to a collection of raw RangeVectors, * returning another set of non-periodic vectors */ case class ApplyInstantFunctionRaw(vectors: RawSeries, function: InstantFunctionId, functionArgs: Seq[FunctionArgsPlan] = Nil) extends RawSeriesLikePlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(vectors) override def replaceRawSeriesFilters(newFilters: Seq[ColumnFilter]): RawSeriesLikePlan = this.copy(vectors = vectors.replaceRawSeriesFilters(newFilters).asInstanceOf[RawSeries]) } /** * Apply Miscellaneous Function to a collection of RangeVectors */ case class ApplyMiscellaneousFunction(vectors: PeriodicSeriesPlan, function: MiscellaneousFunctionId, stringArgs: Seq[String] = Nil) extends PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(vectors) override def startMs: Long = vectors.startMs override def stepMs: Long = vectors.stepMs override def endMs: Long = vectors.endMs override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(vectors = vectors.replacePeriodicSeriesFilters(filters)) } /** * Apply Sort Function to a collection of RangeVectors */ case class ApplySortFunction(vectors: PeriodicSeriesPlan, function: SortFunctionId) extends PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(vectors) override def startMs: Long = vectors.startMs override def stepMs: Long = vectors.stepMs override def endMs: Long = vectors.endMs override def 
replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(vectors = vectors.replacePeriodicSeriesFilters(filters)) } /** * Nested logical plan for argument of function * Example: clamp_max(node_info{job = "app"},scalar(http_requests_total{job = "app"})) */ sealed trait FunctionArgsPlan extends LogicalPlan with PeriodicSeriesPlan /** * Generate scalar * Example: scalar(http_requests_total), time(), hour() */ sealed trait ScalarPlan extends FunctionArgsPlan /** * Generate scalar from vector * Example: scalar(http_requests_total) */ final case class ScalarVaryingDoublePlan(vectors: PeriodicSeriesPlan, function: ScalarFunctionId, functionArgs: Seq[FunctionArgsPlan] = Nil) extends ScalarPlan with PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(vectors) override def startMs: Long = vectors.startMs override def stepMs: Long = vectors.stepMs override def endMs: Long = vectors.endMs override def isRoutable: Boolean = vectors.isRoutable override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(vectors = vectors.replacePeriodicSeriesFilters(filters)) } /** * Scalar generated by time functions which do not have metric as input * Example: time(), hour() */ final case class ScalarTimeBasedPlan(function: ScalarFunctionId, rangeParams: RangeParams) extends ScalarPlan { override def isRoutable: Boolean = false override def startMs: Long = rangeParams.startSecs * 1000 override def stepMs: Long = rangeParams.stepSecs * 1000 override def endMs: Long = rangeParams.endSecs * 1000 override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this // No Filter } /** * Logical plan for numeric values. 
Used in queries like foo + 5 * Example: 3, 4.2 */ final case class ScalarFixedDoublePlan(scalar: Double, timeStepParams: RangeParams) extends ScalarPlan with FunctionArgsPlan { override def isRoutable: Boolean = false override def startMs: Long = timeStepParams.startSecs * 1000 override def stepMs: Long = timeStepParams.stepSecs * 1000 override def endMs: Long = timeStepParams.endSecs * 1000 override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this } //scalastyle:off number.of.types /** * Generates vector from scalars * Example: vector(3), vector(scalar(http_requests_total) */ final case class VectorPlan(scalars: ScalarPlan) extends PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(scalars) override def startMs: Long = scalars.startMs override def stepMs: Long = scalars.stepMs override def endMs: Long = scalars.endMs override def isRoutable: Boolean = scalars.isRoutable override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(scalars = scalars.replacePeriodicSeriesFilters(filters).asInstanceOf[ScalarPlan]) } /** * Apply Binary operation between two fixed scalars */ case class ScalarBinaryOperation(operator: BinaryOperator, lhs: Either[Double, ScalarBinaryOperation], rhs: Either[Double, ScalarBinaryOperation], rangeParams: RangeParams) extends ScalarPlan { override def startMs: Long = rangeParams.startSecs * 1000 override def stepMs: Long = rangeParams.stepSecs * 1000 override def endMs: Long = rangeParams.endSecs * 1000 override def isRoutable: Boolean = false override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = { val updatedLhs = if (lhs.isRight) Right(lhs.right.get.replacePeriodicSeriesFilters(filters). asInstanceOf[ScalarBinaryOperation]) else Left(lhs.left.get) val updatedRhs = if (rhs.isRight) Right(rhs.right.get.replacePeriodicSeriesFilters(filters). 
asInstanceOf[ScalarBinaryOperation]) else Left(rhs.left.get) this.copy(lhs = updatedLhs, rhs = updatedRhs) } } /** * Apply Absent Function to a collection of RangeVectors */ case class ApplyAbsentFunction(vectors: PeriodicSeriesPlan, columnFilters: Seq[ColumnFilter], rangeParams: RangeParams, functionArgs: Seq[Any] = Nil) extends PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(vectors) override def startMs: Long = vectors.startMs override def stepMs: Long = vectors.stepMs override def endMs: Long = vectors.endMs override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(columnFilters = LogicalPlan.overrideColumnFilters(columnFilters, filters), vectors = vectors.replacePeriodicSeriesFilters(filters)) } /** * Apply Limit Function to a collection of RangeVectors */ case class ApplyLimitFunction(vectors: PeriodicSeriesPlan, columnFilters: Seq[ColumnFilter], rangeParams: RangeParams, limit: Int) extends PeriodicSeriesPlan with NonLeafLogicalPlan { override def children: Seq[LogicalPlan] = Seq(vectors) override def startMs: Long = vectors.startMs override def stepMs: Long = vectors.stepMs override def endMs: Long = vectors.endMs override def replacePeriodicSeriesFilters(filters: Seq[ColumnFilter]): PeriodicSeriesPlan = this.copy(columnFilters = LogicalPlan.overrideColumnFilters(columnFilters, filters), vectors = vectors.replacePeriodicSeriesFilters(filters)) } object LogicalPlan { /** * Get leaf Logical Plans */ def findLeafLogicalPlans (logicalPlan: LogicalPlan) : Seq[LogicalPlan] = { logicalPlan match { // scalarArg can have vector like scalar(http_requests_total) case lp: ScalarVectorBinaryOperation => findLeafLogicalPlans(lp.vector) ++ findLeafLogicalPlans(lp.scalarArg) // Find leaf logical plans for all children and concatenate results case lp: NonLeafLogicalPlan => lp.children.flatMap(findLeafLogicalPlans) case lp: MetadataQueryPlan => Seq(lp) case lp: TsCardinalities => Seq(lp) 
case lp: ScalarBinaryOperation => val lhsLeafs = if (lp.lhs.isRight) findLeafLogicalPlans(lp.lhs.right.get) else Nil val rhsLeafs = if (lp.rhs.isRight) findLeafLogicalPlans(lp.rhs.right.get) else Nil lhsLeafs ++ rhsLeafs case lp: ScalarFixedDoublePlan => Seq(lp) case lp: ScalarTimeBasedPlan => Seq(lp) case lp: RawSeries => Seq(lp) case lp: RawChunkMeta => Seq(lp) } } /** * Returns true if there is a subquery with windowing in the logical plan */ def hasSubqueryWithWindowing(logicalPlan: LogicalPlan) : Boolean = { logicalPlan match { case sqww: SubqueryWithWindowing => true case lp: NonLeafLogicalPlan => lp.children.foldLeft(false)( (acc: Boolean, lp: LogicalPlan) => acc || hasSubqueryWithWindowing(lp) ) case _ => false } } def getColumnValues(logicalPlan: LogicalPlan, labelName: String): Set[String] = { getColumnValues(getColumnFilterGroup(logicalPlan), labelName) } def getColumnValues(columnFilterGroup: Seq[Set[ColumnFilter]], labelName: String): Set[String] = { columnFilterGroup.flatMap (columnFilters => getColumnValues(columnFilters, labelName)) match { case columnValues: Iterable[String] => if (columnValues.isEmpty) Set.empty else columnValues.toSet case _ => Set.empty } } def getColumnValues(columnFilters: Set[ColumnFilter], labelName: String): Set[String] = { columnFilters.flatMap(cFilter => { if (cFilter.column == labelName) { cFilter.filter.valuesStrings.map(_.toString) } else { Seq.empty } }) } /** * Given a LogicalPlan, the function finds a Seq of all Child nodes, and returns a Set of ColumnFilters for * each of the Leaf node * * @param logicalPlan the root LogicalPlan * @return Seq of Set of Column filters, Seq has size same as the number of leaf nodes */ def getColumnFilterGroup(logicalPlan: LogicalPlan): Seq[Set[ColumnFilter]] = { LogicalPlan.findLeafLogicalPlans(logicalPlan) map { lp => lp match { case lp: LabelValues => lp.filters toSet case lp: LabelNames => lp.filters toSet case lp: RawSeries => lp.filters toSet case lp: RawChunkMeta => lp.filters 
toSet case lp: SeriesKeysByFilters => lp.filters toSet case lp: LabelCardinality => lp.filters.toSet case lp: TsCardinalities => lp.filters.toSet case _: ScalarTimeBasedPlan => Set.empty[ColumnFilter] // Plan does not have labels case _: ScalarFixedDoublePlan => Set.empty[ColumnFilter] case _: ScalarBinaryOperation => Set.empty[ColumnFilter] case _ => throw new BadQueryException(s"Invalid logical plan $logicalPlan") } } match { case groupSeq: Seq[Set[ColumnFilter]] => if (groupSeq.isEmpty || groupSeq.forall(_.isEmpty)) Seq.empty else groupSeq case _ => Seq.empty } } def getRawSeriesFilters(logicalPlan: LogicalPlan): Seq[Seq[ColumnFilter]] = { LogicalPlan.findLeafLogicalPlans(logicalPlan).map { l => l match { case lp: RawSeries => lp.filters case lp: LabelValues => lp.filters case lp: LabelNames => lp.filters case _ => Seq.empty } } } /** * Returns all nonMetricShardKey column filters */ def getNonMetricShardKeyFilters(logicalPlan: LogicalPlan, nonMetricShardColumns: Seq[String]): Seq[Seq[ColumnFilter]] = getRawSeriesFilters(logicalPlan).map { s => s.filter(f => nonMetricShardColumns.contains(f.column))} /** * Returns true when all shard key filters have Equals */ def hasShardKeyEqualsOnly(logicalPlan: LogicalPlan, nonMetricShardColumns: Seq[String]): Boolean = getNonMetricShardKeyFilters(logicalPlan: LogicalPlan, nonMetricShardColumns: Seq[String]). forall(_.forall(f => f.filter.isInstanceOf[filodb.core.query.Filter.Equals])) /** * Given a Logical Plan, the method returns a RVRange, there are two cases possible * 1. The given plan is a PeriodicPlan in which case the start, end and step are retrieved from the plan instance * 2. 
If plan is not a PeriodicPlan then start step and end are irrelevant for the plan and None is returned * * @param plan: The Logical Plan instance * @param qContext: The query context * @return Option of the RVRange instance */ def rvRangeFromPlan(plan: LogicalPlan): Option[RvRange] = { plan match { case p: PeriodicSeriesPlan => Some(RvRange( startMs = p.startMs, endMs = p.endMs, stepMs = Math.max(p.stepMs, 1))) case _ => None } } /** * @param base the column filters to override * @param overrides the overriding column filters * @return union of base and override filters except where column names intersect; * when names intersect, only the overriding filter is included. * Example: * base = [(name=a, filter=1), (name=b, filter=2)] * overrides = [(name=c, filter=3), (name=a, filter=4)] * result = [(name=a, filter=4), (name=b, filter=2), (name=c, filter=3)] */ def overrideColumnFilters(base: Seq[ColumnFilter], overrides: Seq[ColumnFilter]): Seq[ColumnFilter] = { val overrideColumns = overrides.map(_.column) base.filterNot(f => overrideColumns.contains(f.column)) ++ overrides } } //scalastyle:on number.of.types
filodb/FiloDB
query/src/main/scala/filodb/query/LogicalPlan.scala
Scala
apache-2.0
33,841
package ca.dubey.music.melodic import ca.dubey.music.midi.ChannelInfo import ca.dubey.music.midi.Patch import ca.dubey.music.theory.Chord import ca.dubey.music.theory.ChordBuilder import javax.sound.midi.MidiMessage import javax.sound.midi.MidiEvent import javax.sound.midi.MidiChannel import javax.sound.midi.Sequence import javax.sound.midi.ShortMessage import javax.sound.midi.Track import scala.collection.mutable.ArrayBuffer /** V1 */ class ConsumeSequence(sequence : Sequence) { var tick : Long = 0 var chordBuilder = ChordBuilder() var song = ArrayBuffer.empty[Chord] var trackCollector = new TrackCollector val channelInfos = (0 until 16).map((i) => ChannelInfo(i)) def output = { for (chord <- song) { chord.output } } private def updateChord(event : MidiEvent) : Unit = { event.getMessage match { case e:ShortMessage => e.getCommand match { case ShortMessage.PROGRAM_CHANGE => channelInfos(e.getChannel).patch = Patch(e.getData1) case _ => chordBuilder += (e, channelInfos(e.getChannel)) } case _ => // Don't know how to deal with Sysex yet. () } } private def updateChord(events : collection.mutable.Set[MidiEvent]) : Unit = { for(event <- events) { updateChord(event) } // printf("Current chord: %s\\n", chord.toString) song += chordBuilder.result } private def gatherEventsAndUpdateChord : collection.mutable.Set[MidiEvent] = { return null; } /** Consume all tracks together. 
*/ def consume = { var eventsLeft = true while (eventsLeft) { val events = gatherEventsAndUpdateChord if (events.size == 0) { eventsLeft = false } } } def consumeTracksIndependently = { for (track <- sequence.getTracks) { chordBuilder = ChordBuilder() var time = track.get(0).getTick for (i <- 0 until track.size) { val event = track.get(i) updateChord(event) // printf("%d -> %d : %s\\n", time, event.getTick, chordBuilder.toString) if (!chordBuilder.isEmpty && event.getTick > time + 1) { // Allow +- 1 song += chordBuilder.result time = event.getTick } } } } class TrackEventIterator(val trackNum : Int, val track : Track) { var index = 0 def currentEvent : Option[MidiEvent] = { if (index < track.size) { Some(track.get(index)) } else { None } } def advance = index += 1 } class TrackCollector { val trackIterators = sequence.getTracks.zipWithIndex.map { case (track, i) => new TrackEventIterator(i, track) } def nextEvents : collection.mutable.Set[MidiEvent] = { var events = collection.mutable.Set.empty[MidiEvent] var minTick : Long = -1 for (trackIterator <- trackIterators) { var anyEventsAdded = false // printf("\\tOn track %d\\n", trackNum) do { anyEventsAdded = false for (event <- trackIterator.currentEvent) { if (event.getTick < tick) { // Bad thing. printf("Warning: track %d event %d lower than expected tick %d vs %d\\n", trackIterator.trackNum, trackIterator.index, event.getTick, tick) } else if (event.getTick <= tick + 1) { // Allow +1 tick events through. 
printf("\\t\\tAdding event at: %d\\n", event.getTick) events += event // advanceTrackEvent(trackNum) anyEventsAdded = true } else { // printf("\\t\\tLeaving event at: %d\\n", event.getTick) if (minTick == -1 || event.getTick < minTick) { minTick = event.getTick } } } } while (anyEventsAdded) } // printf("Gathering events at %d\\n", tick) if (events.size == 0 && minTick == tick && tick > 0) { // printf("Warning: no events added at tick: %d\\n", tick) System.exit(1) } /* else { printf("Tick: %d, got %d events\\n", tick, events.size) } */ tick = minTick return events } } }
adubey/music
src/main/scala/melodic/ConsumeSequence.scala
Scala
gpl-2.0
3,963
package org.tejo.iza.rules /** Class holding request urls of which responses * are later deserialized to concerning `Fact` objects. */ class TrelloRequests { }
tomaszym/izabela
rules/src/main/scala/org/tejo/iza/rules/TrelloRequests.scala
Scala
gpl-2.0
166
import java.math.BigInteger import java.util.Scanner object Solution extends App { val scanner = new Scanner(System.in) val players = scanner.next.toInt val topics = scanner.next.toInt val teamTopics = new Array[BigInteger](players) var i = 0 while ( { i < players }) { val input = scanner.next val binary = new BigInteger(input, 2) teamTopics(i) = binary { i += 1; i - 1 } } val knownTopicsPerTeam = for { i <- 0 until players j <- i until players } yield { val commonTopics = teamTopics(i).or(teamTopics(j)) knownTopics(commonTopics, topics) } val maxTopics = knownTopicsPerTeam.toList.max val teams = knownTopicsPerTeam.count(_ == maxTopics) println(maxTopics) println(teams) private def knownTopics(binary: BigInteger, topics: Int): Int = { (0 until topics).map(binary.testBit).count(_ == true) } }
PaulNoth/hackerrank
practice/algorithms/implementation/acm_icpc_team/AcmIcpcTeam.scala
Scala
mit
893
package cromwell.engine import org.scalatest.{FlatSpec, Matchers} class EngineFunctionsSpec extends FlatSpec with Matchers { // TODO WOM: move to wdl4s ? // trait WdlStandardLibraryImpl extends ReadLikeFunctions with WriteFunctions with PureStandardLibraryFunctionsLike { // private def fail(name: String) = Failure(new NotImplementedError(s"$name() not implemented yet")) // // override def stdout(params: Seq[Try[WomValue]]): Try[WdlFile] = fail("stdout") // override def stderr(params: Seq[Try[WomValue]]): Try[WdlFile] = fail("stderr") // } // // def expectFailure(value: Try[WomValue]) = value match { // case Success(s) => fail(s"$s: Expected this function invocation to fail") // case Failure(_) => // expected // } // "EngineFunctions" should "all initially be undefined" in { // val stdFunctions = Seq( // "stdout", "stderr", "read_lines", "read_tsv", "read_map", "read_object", "read_objects", // "read_json", "read_int", "read_string", "read_float", "read_boolean", "write_lines", // "write_tsv", "write_map", "write_object", "write_objects", "write_json", "size", "sub" // ) // stdFunctions.foreach {func => // expectFailure(NoFunctions.getFunction(func)(Seq.empty[Try[WomValue]])) // } // } // // "sub" should "replace a string according to a pattern" in { // class TestEngineFn extends WdlStandardLibraryImpl { // override def glob(path: String, pattern: String): Seq[String] = throw new NotImplementedError // override def pathBuilders: List[PathBuilder] = List(DefaultPathBuilder) // override def writeDirectory: Path = throw new NotImplementedError // } // // val engineFn = new TestEngineFn // // val table = Table( // ("str", "pattern", "replace", "result"), // ("somefilename.bam", ".bam$", ".txt", "somefilename.txt"), // ("somefilename.bam", "\\\\.bam$", "", "somefilename"), // ("somefilename.bam.bam", ".bam$", ".txt", "somefilename.bam.txt"), // ("somefilenamewith.baminside.ext", ".bam$", ".txt", "somefilenamewith.baminside.ext"), // 
("gs://some/gcs/path/to/my_bame_file.bam", "gs://.*/", "", "my_bame_file.bam"), // ("somefilename", "^some", "other", "otherfilename") // ) // // forAll(table) { (str, pattern, replace, result) => // val stringSubstitution: Try[WdlString] = engineFn.sub(List(Success(WdlString(str)), Success(WdlString(pattern)), Success(WdlString(replace)))) // stringSubstitution.isSuccess shouldBe true // stringSubstitution.get.valueString shouldBe result // // val fileSubstitution: Try[WdlString] = engineFn.sub(List(Success(WdlFile(str)), Success(WdlString(pattern)), Success(WdlString(replace)))) // fileSubstitution.isSuccess shouldBe true // fileSubstitution.get.valueString shouldBe result // } // // val nonCompliantValues = List( // Seq(Success(WdlFile("input")), Success(WdlInteger(1)), Success(WdlString("replaces"))), // Seq(Failure(new Exception), Success(WdlString("pattern")), Success(WdlString("replaces"))) // ) // // nonCompliantValues foreach { nonCompliantValue => // val sub: Try[WdlString] = engineFn.sub(nonCompliantValue) // sub.isFailure shouldBe true // val failure = sub.failed.get // failure.isInstanceOf[IllegalArgumentException] shouldBe true // failure.getMessage should include("Invalid parameters for engine function sub") // } // // val failedSub = engineFn.sub(nonCompliantValues.head :+ Success(WdlString("extra value"))) // failedSub.isFailure shouldBe true // val failure = failedSub.failed.get // failure.isInstanceOf[IllegalArgumentException] shouldBe true // failure.getMessage shouldBe "Invalid number of parameters for engine function sub: 4. sub takes exactly 3 parameters." // } }
ohsu-comp-bio/cromwell
engine/src/test/scala/cromwell/engine/EngineFunctionsSpec.scala
Scala
bsd-3-clause
3,793
/*********************************************************************** * Copyright (c) 2013-2022 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.bigtable import org.geotools.data._ import org.geotools.data.collection.ListFeatureCollection import org.geotools.util.factory.Hints import org.geotools.filter.text.ecql.ECQL import org.junit.runner.RunWith import org.locationtech.geomesa.bigtable.data.BigtableDataStoreFactory import org.locationtech.geomesa.features.ScalaSimpleFeature import org.locationtech.geomesa.hbase.data.HBaseDataStore import org.locationtech.geomesa.hbase.data.HBaseDataStoreParams._ import org.locationtech.geomesa.utils.collection.SelfClosingIterator import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType} import org.specs2.mutable.Specification import org.specs2.runner.JUnitRunner import scala.collection.JavaConversions._ @RunWith(classOf[JUnitRunner]) class BigTableIntegrationTest extends Specification { // note: make sure you update src/test/resources/hbase-site.xml to point to your bigtable instance sequential "HBaseDataStore" should { "work with points" >> { val typeName = "testpoints" val params = Map(BigtableDataStoreFactory.BigtableCatalogParam.getName -> "integration_test") lazy val ds = DataStoreFinder.getDataStore(params).asInstanceOf[HBaseDataStore] def createFeatures(sft: SimpleFeatureType) = (0 until 10).map { i => val sf = new ScalaSimpleFeature(sft, i.toString) sf.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE) sf.setAttribute(0, s"name $i") sf.setAttribute(1, s"2014-01-01T0$i:00:01.000Z") 
sf.setAttribute(2, s"POINT(4$i 5$i)") sf } "create schema" >> { skipped("integration") ds.getSchema(typeName) must beNull ds.createSchema(SimpleFeatureTypes.createType(typeName, "name:String,dtg:Date,*geom:Point:srid=4326")) val sft = ds.getSchema(typeName) println(SimpleFeatureTypes.encodeType(sft, includeUserData = true)) sft must not(beNull) } "insert" >> { skipped("integration") val sft = ds.getSchema(typeName) println(SimpleFeatureTypes.encodeType(sft, includeUserData = true)) sft must not(beNull) val features = createFeatures(sft) val fs = ds.getFeatureSource(typeName) val ids = fs.addFeatures(new ListFeatureCollection(sft, features)) ids.map(_.getID) must containTheSameElementsAs((0 until 10).map(_.toString)) } "query" >> { skipped("integration") val sft = ds.getSchema(typeName) println(SimpleFeatureTypes.encodeType(sft, includeUserData = true)) sft must not(beNull) val features = createFeatures(sft) testQuery(ds, typeName, "INCLUDE", null, features) testQuery(ds, typeName, "IN('0', '2')", null, Seq(features(0), features(2))) testQuery(ds, typeName, "bbox(geom,38,48,52,62) and dtg DURING 2014-01-01T00:00:00.000Z/2014-01-01T07:59:59.000Z", null, features.dropRight(2)) testQuery(ds, typeName, "bbox(geom,42,48,52,62)", null, features.drop(2)) } } "work with points" >> { val typeName = "testpolys" val params = Map(HBaseCatalogParam.getName -> "integration_test") lazy val ds = DataStoreFinder.getDataStore(params).asInstanceOf[HBaseDataStore] def createFeatures(sft: SimpleFeatureType) = (0 until 10).map { i => val sf = new ScalaSimpleFeature(sft, i.toString) sf.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE) sf.setAttribute(0, s"name $i") sf.setAttribute(1, s"2014-01-01T0$i:00:01.000Z") sf.setAttribute(2, s"POLYGON((-120 4$i, -120 50, -125 50, -125 4$i, -120 4$i))") sf } "create schema" >> { skipped("integration") ds.getSchema(typeName) must beNull ds.createSchema(SimpleFeatureTypes.createType(typeName, 
"name:String,dtg:Date,*geom:Polygon:srid=4326")) val sft = ds.getSchema(typeName) println(SimpleFeatureTypes.encodeType(sft, includeUserData = true)) sft must not(beNull) } "insert" >> { skipped("integration") val sft = ds.getSchema(typeName) println(SimpleFeatureTypes.encodeType(sft, includeUserData = true)) sft must not(beNull) val features = createFeatures(sft) val fs = ds.getFeatureSource(typeName) val ids = fs.addFeatures(new ListFeatureCollection(sft, features)) ids.map(_.getID) must containTheSameElementsAs((0 until 10).map(_.toString)) } "query" >> { skipped("integration") val sft = ds.getSchema(typeName) println(SimpleFeatureTypes.encodeType(sft, includeUserData = true)) sft must not(beNull) val features = createFeatures(sft) testQuery(ds, typeName, "INCLUDE", null, features) testQuery(ds, typeName, "IN('0', '2')", null, Seq(features(0), features(2))) testQuery(ds, typeName, "bbox(geom,-126,38,-119,52) and dtg DURING 2014-01-01T00:00:00.000Z/2014-01-01T07:59:59.000Z", null, features.dropRight(2)) testQuery(ds, typeName, "bbox(geom,-126,42,-119,45)", null, features.dropRight(4)) } } } def testQuery(ds: DataStore, typeName: String, filter: String, transforms: Array[String], results: Seq[SimpleFeature]) = { val fr = ds.getFeatureReader(new Query(typeName, ECQL.toFilter(filter), transforms), Transaction.AUTO_COMMIT) val features = SelfClosingIterator(fr).toList features must containTheSameElementsAs(results) } }
locationtech/geomesa
geomesa-bigtable/geomesa-bigtable-datastore/src/test/scala/org/locationtech/geomesa/bigtable/BigTableIntegrationTest.scala
Scala
apache-2.0
6,057
package io.fintrospect.formats import com.twitter.finagle.Service import com.twitter.finagle.http.Method.Get import com.twitter.finagle.http.path.Root import com.twitter.finagle.http.{Request, Response, Status} import com.twitter.io.Buf import com.twitter.util.Await.result import com.twitter.util.{Await, Future} import io.fintrospect.{RouteModule, RouteSpec} import io.fintrospect.parameters.{Body, BodySpec} import org.scalatest.{FunSpec, Matchers} case class StreetAddress(address: String) case class Letter(to: StreetAddress, from: StreetAddress, message: String) case class LetterOpt(to: StreetAddress, from: StreetAddress, message: Option[String]) abstract class AutoSpec[J](val f: Auto[J]) extends FunSpec with Matchers { val aLetter = Letter(StreetAddress("my house"), StreetAddress("your house"), "hi there") private def request = { val request = Request() request.content = toBuf(aLetter) request } def toBuf(l: Letter): Buf def transform(): (Letter => J) def fromBuf(s: Buf): Letter def bodySpec: BodySpec[Letter] describe("Auto filters") { describe("In") { val svc = f.In(Service.mk { in: Letter => { val r = Response() r.content = toBuf(in) Future(r) } })(Body.of(bodySpec, "")) it("takes the object from the request") { fromBuf(result(svc(request)).content) shouldBe aLetter } it("rejects illegal content with a BadRequest") { val request = Request() request.contentString = "invalid" val response = Await.result(svc(request)) response.status shouldBe Status.BadRequest response.contentString should include ("Failed to unmarshal body [body:Invalid]") } it("works when no body specified in route") { val mSvc = RouteModule(Root).withRoute(RouteSpec().at(Get) bindTo svc).toService fromBuf(result(mSvc(request)).content) shouldBe aLetter } } describe("Out") { it("takes the object from the request") { val svc = f.Out(Service.mk { in: Request => Future(aLetter) }, Status.Created)(transform()) val response = result(svc(Request())) response.status shouldBe Status.Created 
fromBuf(response.content) shouldBe aLetter } } describe("InOut") { it("returns Ok") { val svc = f.InOut(Service.mk { in: Letter => Future(in) }, Status.Created)(Body.of(bodySpec), transform()) val response = result(svc(request)) response.status shouldBe Status.Created fromBuf(response.content) shouldBe aLetter } } describe("InOptionalOut") { it("returns Ok when present") { val svc = f.InOptionalOut(Service.mk[Letter, Option[Letter]] { in => Future(Option(in)) })(Body.of(bodySpec), transform()) val response = result(svc(request)) response.status shouldBe Status.Ok fromBuf(response.content) shouldBe aLetter } it("returns NotFound when missing present") { val svc = f.InOptionalOut(Service.mk[Letter, Option[Letter]] { in => Future(None) })(Body.of(bodySpec), transform()) result(svc(request)).status shouldBe Status.NotFound } } describe("OptionalOut") { it("returns Ok when present") { val svc = f.OptionalOut(Service.mk[Request, Option[Letter]] { in => Future(Option(aLetter)) }, Status.Created)(transform()) val response = result(svc(Request())) response.status shouldBe Status.Created fromBuf(response.content) shouldBe aLetter } it("returns NotFound when missing present") { val svc = f.OptionalOut(Service.mk[Request, Option[Letter]] { _ => Future(None) }, Status.Created)(transform()) val response = result(svc(request)) response.status shouldBe Status.NotFound response.contentString should include ("No object available to unmarshal") } } } }
daviddenton/fintrospect
core/src/test/scala/io/fintrospect/formats/AutoSpec.scala
Scala
apache-2.0
3,929
package chandu0101.scalajs.offline.util import chandu0101.pouchdb.{ChangesEventEmitter, ChangesOptions, ReplicateOptions} import chandu0101.scalajs.offline.services.BaseService import japgolly.scalajs.react.extra.OnUnmount import org.scalajs.dom import scala.concurrent.ExecutionContext.Implicits.global import scala.scalajs.js import scala.scalajs.js.JSON /** * Created by chandrasekharkode on 2/26/15. */ abstract class PouchDBChangesListener[T](service: BaseService[T]) extends OnUnmount { var changes: ChangesEventEmitter = null var synChanges: ChangesEventEmitter = null /** * implement this method to update react component * @param data */ def updateNewData(data: List[T]) def startListening = { changes = service.store.changes(ChangesOptions.since("now").live(true).result).onChange((resp: js.Dynamic) => { getNewData }).onError((err: js.Dynamic) => println(s"Error occurred while performing db operations on ${service.store.name} : ${JSON.stringify(err)}")) getNewData // call explicitly for the first time if (service.retrySync) retrySync else if (service.sync) sync } def sync = { synChanges = service.store.sync(service.remoteStore, ReplicateOptions.live(true).result) .onError((err: js.Dynamic) => // oops network error println(s"Error occurred while syncing db's : $err}")) } /** * Useful when user have on/off internet connectivity (example : mobile internet) */ def retrySync: Unit = { var timeout = 10000 // 10secs var increment = 2 synChanges = service.store.sync(service.remoteStore, ReplicateOptions.live(true).result) .onChange((resp: js.Dynamic) => timeout = 10000) // reset retry timer when user came back on .onError((err: js.Dynamic) => dom.setTimeout(() => { timeout *= increment retrySync }, timeout) ) } def getNewData = { service.getAll.onSuccess { case (data: List[T]) => { updateNewData(data) } } } /** * clean up */ onUnmount(() => { if (changes != null) changes.cancel() if (synChanges != null) synChanges.cancel() }) }
chandu0101/scalajs-offline
src/main/scala/chandu0101/scalajs/offline/util/PouchDBChangesListener.scala
Scala
apache-2.0
2,159
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.optimizer import scala.collection.immutable.HashSet import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.dsl.plans.DslLogicalPlan import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.IntegralLiteralTestUtils._ import org.apache.spark.sql.catalyst.optimizer.UnwrapCastInBinaryComparison._ import org.apache.spark.sql.catalyst.plans.PlanTest import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.catalyst.rules.RuleExecutor import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ class UnwrapCastInBinaryComparisonSuite extends PlanTest with ExpressionEvalHelper { object Optimize extends RuleExecutor[LogicalPlan] { val batches: List[Batch] = Batch("Unwrap casts in binary comparison", FixedPoint(10), NullPropagation, UnwrapCastInBinaryComparison) :: Nil } val testRelation: LocalRelation = LocalRelation('a.short, 'b.float, 'c.decimal(5, 2), 'd.boolean) val f: BoundReference = 'a.short.canBeNull.at(0) val f2: BoundReference = 'b.float.canBeNull.at(1) val f3: BoundReference = 'c.decimal(5, 2).canBeNull.at(2) val f4: BoundReference = 
'd.boolean.canBeNull.at(3) test("unwrap casts when literal == max") { val v = Short.MaxValue assertEquivalent(castInt(f) > v.toInt, falseIfNotNull(f)) assertEquivalent(castInt(f) >= v.toInt, f === v) assertEquivalent(castInt(f) === v.toInt, f === v) assertEquivalent(castInt(f) <=> v.toInt, f <=> v) assertEquivalent(castInt(f) <= v.toInt, trueIfNotNull(f)) assertEquivalent(castInt(f) < v.toInt, f =!= v) val d = Float.NaN assertEquivalent(castDouble(f2) > d.toDouble, falseIfNotNull(f2)) assertEquivalent(castDouble(f2) >= d.toDouble, f2 === d) assertEquivalent(castDouble(f2) === d.toDouble, f2 === d) assertEquivalent(castDouble(f2) <=> d.toDouble, f2 <=> d) assertEquivalent(castDouble(f2) <= d.toDouble, trueIfNotNull(f2)) assertEquivalent(castDouble(f2) < d.toDouble, f2 =!= d) } test("unwrap casts when literal > max") { val v: Int = positiveInt assertEquivalent(castInt(f) > v, falseIfNotNull(f)) assertEquivalent(castInt(f) >= v, falseIfNotNull(f)) assertEquivalent(castInt(f) === v, falseIfNotNull(f)) assertEquivalent(castInt(f) <=> v, false) assertEquivalent(castInt(f) <= v, trueIfNotNull(f)) assertEquivalent(castInt(f) < v, trueIfNotNull(f)) } test("unwrap casts when literal == min") { val v = Short.MinValue assertEquivalent(castInt(f) > v.toInt, f =!= v) assertEquivalent(castInt(f) >= v.toInt, trueIfNotNull(f)) assertEquivalent(castInt(f) === v.toInt, f === v) assertEquivalent(castInt(f) <=> v.toInt, f <=> v) assertEquivalent(castInt(f) <= v.toInt, f === v) assertEquivalent(castInt(f) < v.toInt, falseIfNotNull(f)) val d = Float.NegativeInfinity assertEquivalent(castDouble(f2) > d.toDouble, f2 =!= d) assertEquivalent(castDouble(f2) >= d.toDouble, trueIfNotNull(f2)) assertEquivalent(castDouble(f2) === d.toDouble, f2 === d) assertEquivalent(castDouble(f2) <=> d.toDouble, f2 <=> d) assertEquivalent(castDouble(f2) <= d.toDouble, f2 === d) assertEquivalent(castDouble(f2) < d.toDouble, falseIfNotNull(f2)) // Double.NegativeInfinity == Float.NegativeInfinity val d2 = 
Double.NegativeInfinity assertEquivalent(castDouble(f2) > d2, f2 =!= d) assertEquivalent(castDouble(f2) >= d2, trueIfNotNull(f2)) assertEquivalent(castDouble(f2) === d2, f2 === d) assertEquivalent(castDouble(f2) <=> d2, f2 <=> d) assertEquivalent(castDouble(f2) <= d2, f2 === d) assertEquivalent(castDouble(f2) < d2, falseIfNotNull(f2)) } test("unwrap casts when literal < min") { val v: Int = negativeInt assertEquivalent(castInt(f) > v, trueIfNotNull(f)) assertEquivalent(castInt(f) >= v, trueIfNotNull(f)) assertEquivalent(castInt(f) === v, falseIfNotNull(f)) assertEquivalent(castInt(f) <=> v, false) assertEquivalent(castInt(f) <= v, falseIfNotNull(f)) assertEquivalent(castInt(f) < v, falseIfNotNull(f)) } test("unwrap casts when literal is within range (min, max) or fromType has no range") { Seq(300, 500, 32766, -6000, -32767).foreach(v => { assertEquivalent(castInt(f) > v, f > v.toShort) assertEquivalent(castInt(f) >= v, f >= v.toShort) assertEquivalent(castInt(f) === v, f === v.toShort) assertEquivalent(castInt(f) <=> v, f <=> v.toShort) assertEquivalent(castInt(f) <= v, f <= v.toShort) assertEquivalent(castInt(f) < v, f < v.toShort) }) Seq(3.14.toFloat.toDouble, -1000.0.toFloat.toDouble, 20.0.toFloat.toDouble, -2.414.toFloat.toDouble, Float.MinValue.toDouble, Float.MaxValue.toDouble, Float.PositiveInfinity.toDouble ).foreach(v => { assertEquivalent(castDouble(f2) > v, f2 > v.toFloat) assertEquivalent(castDouble(f2) >= v, f2 >= v.toFloat) assertEquivalent(castDouble(f2) === v, f2 === v.toFloat) assertEquivalent(castDouble(f2) <=> v, f2 <=> v.toFloat) assertEquivalent(castDouble(f2) <= v, f2 <= v.toFloat) assertEquivalent(castDouble(f2) < v, f2 < v.toFloat) }) Seq(decimal2(100.20), decimal2(-200.50)).foreach(v => { assertEquivalent(castDecimal2(f3) > v, f3 > decimal(v)) assertEquivalent(castDecimal2(f3) >= v, f3 >= decimal(v)) assertEquivalent(castDecimal2(f3) === v, f3 === decimal(v)) assertEquivalent(castDecimal2(f3) <=> v, f3 <=> decimal(v)) 
assertEquivalent(castDecimal2(f3) <= v, f3 <= decimal(v)) assertEquivalent(castDecimal2(f3) < v, f3 < decimal(v)) }) } test("unwrap cast when literal is within range (min, max) AND has round up or down") { // Cases for rounding down var doubleValue = 100.6 assertEquivalent(castDouble(f) > doubleValue, f > doubleValue.toShort) assertEquivalent(castDouble(f) >= doubleValue, f > doubleValue.toShort) assertEquivalent(castDouble(f) === doubleValue, falseIfNotNull(f)) assertEquivalent(castDouble(f) <=> doubleValue, false) assertEquivalent(castDouble(f) <= doubleValue, f <= doubleValue.toShort) assertEquivalent(castDouble(f) < doubleValue, f <= doubleValue.toShort) // Cases for rounding up: 3.14 will be rounded to 3.14000010... after casting to float doubleValue = 3.14 assertEquivalent(castDouble(f2) > doubleValue, f2 >= doubleValue.toFloat) assertEquivalent(castDouble(f2) >= doubleValue, f2 >= doubleValue.toFloat) assertEquivalent(castDouble(f2) === doubleValue, falseIfNotNull(f2)) assertEquivalent(castDouble(f2) <=> doubleValue, false) assertEquivalent(castDouble(f2) <= doubleValue, f2 < doubleValue.toFloat) assertEquivalent(castDouble(f2) < doubleValue, f2 < doubleValue.toFloat) // Another case: 400.5678 is rounded up to 400.57 val decimalValue = decimal2(400.5678) assertEquivalent(castDecimal2(f3) > decimalValue, f3 >= decimal(decimalValue)) assertEquivalent(castDecimal2(f3) >= decimalValue, f3 >= decimal(decimalValue)) assertEquivalent(castDecimal2(f3) === decimalValue, falseIfNotNull(f3)) assertEquivalent(castDecimal2(f3) <=> decimalValue, false) assertEquivalent(castDecimal2(f3) <= decimalValue, f3 < decimal(decimalValue)) assertEquivalent(castDecimal2(f3) < decimalValue, f3 < decimal(decimalValue)) } test("unwrap casts when cast is on rhs") { val v = Short.MaxValue assertEquivalent(Literal(v.toInt) < castInt(f), falseIfNotNull(f)) assertEquivalent(Literal(v.toInt) <= castInt(f), Literal(v) === f) assertEquivalent(Literal(v.toInt) === castInt(f), Literal(v) === f) 
assertEquivalent(Literal(v.toInt) <=> castInt(f), Literal(v) <=> f) assertEquivalent(Literal(v.toInt) >= castInt(f), trueIfNotNull(f)) assertEquivalent(Literal(v.toInt) > castInt(f), f =!= v) assertEquivalent(Literal(30) <= castInt(f), Literal(30.toShort, ShortType) <= f) } test("unwrap cast should skip when expression is non-deterministic or foldable") { Seq(positiveLong, negativeLong).foreach (v => { val e = Cast(Rand(0), LongType) <=> v assertEquivalent(e, e, evaluate = false) val e2 = Cast(Literal(30), LongType) >= v assertEquivalent(e2, e2, evaluate = false) }) } test("unwrap casts when literal is null") { val intLit = Literal.create(null, IntegerType) val nullLit = Literal.create(null, BooleanType) assertEquivalent(castInt(f) > intLit, nullLit) assertEquivalent(castInt(f) >= intLit, nullLit) assertEquivalent(castInt(f) === intLit, nullLit) assertEquivalent(castInt(f) <=> intLit, IsNull(castInt(f))) assertEquivalent(castInt(f) <= intLit, nullLit) assertEquivalent(castInt(f) < intLit, nullLit) } test("unwrap casts should skip if downcast failed") { Seq("true", "false").foreach { ansiEnabled => withSQLConf(SQLConf.ANSI_ENABLED.key -> ansiEnabled) { val decimalValue = decimal2(123456.1234) assertEquivalent(castDecimal2(f3) === decimalValue, castDecimal2(f3) === decimalValue) } } } test("unwrap cast should skip if cannot coerce type") { assertEquivalent(Cast(f, ByteType) > 100.toByte, Cast(f, ByteType) > 100.toByte) } test("test getRange()") { assert(Some((Byte.MinValue, Byte.MaxValue)) === getRange(ByteType)) assert(Some((Short.MinValue, Short.MaxValue)) === getRange(ShortType)) assert(Some((Int.MinValue, Int.MaxValue)) === getRange(IntegerType)) assert(Some((Long.MinValue, Long.MaxValue)) === getRange(LongType)) val floatRange = getRange(FloatType) assert(floatRange.isDefined) val (floatMin, floatMax) = floatRange.get assert(floatMin.isInstanceOf[Float]) assert(floatMin.asInstanceOf[Float].isNegInfinity) assert(floatMax.isInstanceOf[Float]) 
assert(floatMax.asInstanceOf[Float].isNaN) val doubleRange = getRange(DoubleType) assert(doubleRange.isDefined) val (doubleMin, doubleMax) = doubleRange.get assert(doubleMin.isInstanceOf[Double]) assert(doubleMin.asInstanceOf[Double].isNegInfinity) assert(doubleMax.isInstanceOf[Double]) assert(doubleMax.asInstanceOf[Double].isNaN) assert(getRange(DecimalType(5, 2)).isEmpty) } test("SPARK-35316: unwrap should support In/InSet predicate.") { val longLit = Literal.create(null, LongType) val intLit = Literal.create(null, IntegerType) val shortLit = Literal.create(null, ShortType) def checkInAndInSet(in: In, expected: Expression): Unit = { assertEquivalent(in, expected) val toInSet = (in: In) => InSet(in.value, HashSet() ++ in.list.map(_.eval())) val expectedInSet = expected match { case expectedIn: In => toInSet(expectedIn) case Or(falseIfNotNull: And, expectedIn: In) => Or(falseIfNotNull, toInSet(expectedIn)) } assertEquivalent(toInSet(in), expectedInSet) } checkInAndInSet( In(Cast(f, LongType), Seq(1.toLong, 2.toLong, 3.toLong)), f.in(1.toShort, 2.toShort, 3.toShort)) // in.list contains the value which out of `fromType` range checkInAndInSet( In(Cast(f, LongType), Seq(1.toLong, Int.MaxValue.toLong, Long.MaxValue)), Or(falseIfNotNull(f), f.in(1.toShort))) // in.list only contains the value which out of `fromType` range checkInAndInSet( In(Cast(f, LongType), Seq(Int.MaxValue.toLong, Long.MaxValue)), Or(falseIfNotNull(f), f.in())) // in.list is empty checkInAndInSet( In(Cast(f, IntegerType), Seq.empty), Cast(f, IntegerType).in()) // in.list contains null value checkInAndInSet( In(Cast(f, IntegerType), Seq(intLit)), In(Cast(f, IntegerType), Seq(intLit))) checkInAndInSet( In(Cast(f, IntegerType), Seq(intLit, intLit)), In(Cast(f, IntegerType), Seq(intLit, intLit))) checkInAndInSet( In(Cast(f, IntegerType), Seq(intLit, 1)), f.in(shortLit, 1.toShort)) checkInAndInSet( In(Cast(f, LongType), Seq(longLit, 1.toLong, Long.MaxValue)), Or(falseIfNotNull(f), f.in(shortLit, 
1.toShort)) ) } test("SPARK-36130: unwrap In should skip when in.list contains an expression that " + "is not literal") { val add = Cast(f2, DoubleType) + 1.0d val doubleLit = Literal.create(null, DoubleType) assertEquivalent(In(Cast(f2, DoubleType), Seq(add)), In(Cast(f2, DoubleType), Seq(add))) assertEquivalent( In(Cast(f2, DoubleType), Seq(doubleLit, add)), In(Cast(f2, DoubleType), Seq(doubleLit, add))) assertEquivalent( In(Cast(f2, DoubleType), Seq(doubleLit, 1.0d, add)), In(Cast(f2, DoubleType), Seq(doubleLit, 1.0d, add))) assertEquivalent( In(Cast(f2, DoubleType), Seq(1.0d, add)), In(Cast(f2, DoubleType), Seq(1.0d, add))) assertEquivalent( In(Cast(f2, DoubleType), Seq(0.0d, 1.0d, add)), In(Cast(f2, DoubleType), Seq(0.0d, 1.0d, add))) } test("SPARK-36607: Support BooleanType in UnwrapCastInBinaryComparison") { assert(Some((false, true)) === getRange(BooleanType)) val n = -1 assertEquivalent(castInt(f4) > n, trueIfNotNull(f4)) assertEquivalent(castInt(f4) >= n, trueIfNotNull(f4)) assertEquivalent(castInt(f4) === n, falseIfNotNull(f4)) assertEquivalent(castInt(f4) <=> n, false) assertEquivalent(castInt(f4) <= n, falseIfNotNull(f4)) assertEquivalent(castInt(f4) < n, falseIfNotNull(f4)) val z = 0 assertEquivalent(castInt(f4) > z, f4 =!= false) assertEquivalent(castInt(f4) >= z, trueIfNotNull(f4)) assertEquivalent(castInt(f4) === z, f4 === false) assertEquivalent(castInt(f4) <=> z, f4 <=> false) assertEquivalent(castInt(f4) <= z, f4 === false) assertEquivalent(castInt(f4) < z, falseIfNotNull(f4)) val o = 1 assertEquivalent(castInt(f4) > o, falseIfNotNull(f4)) assertEquivalent(castInt(f4) >= o, f4 === true) assertEquivalent(castInt(f4) === o, f4 === true) assertEquivalent(castInt(f4) <=> o, f4 <=> true) assertEquivalent(castInt(f4) <= o, trueIfNotNull(f4)) assertEquivalent(castInt(f4) < o, f4 =!= true) val t = 2 assertEquivalent(castInt(f4) > t, falseIfNotNull(f4)) assertEquivalent(castInt(f4) >= t, falseIfNotNull(f4)) assertEquivalent(castInt(f4) === t, 
falseIfNotNull(f4)) assertEquivalent(castInt(f4) <=> t, false) assertEquivalent(castInt(f4) <= t, trueIfNotNull(f4)) assertEquivalent(castInt(f4) < t, trueIfNotNull(f4)) } private def castInt(e: Expression): Expression = Cast(e, IntegerType) private def castDouble(e: Expression): Expression = Cast(e, DoubleType) private def castDecimal2(e: Expression): Expression = Cast(e, DecimalType(10, 4)) private def decimal(v: Decimal): Decimal = Decimal(v.toJavaBigDecimal, 5, 2) private def decimal2(v: BigDecimal): Decimal = Decimal(v, 10, 4) private def assertEquivalent(e1: Expression, e2: Expression, evaluate: Boolean = true): Unit = { val plan = testRelation.where(e1).analyze val actual = Optimize.execute(plan) val expected = testRelation.where(e2).analyze comparePlans(actual, expected) if (evaluate) { Seq( (100.toShort, 3.14.toFloat, decimal2(100), true), (-300.toShort, 3.1415927.toFloat, decimal2(-3000.50), false), (null, Float.NaN, decimal2(12345.6789), null), (null, null, null, null), (Short.MaxValue, Float.PositiveInfinity, decimal2(Short.MaxValue), true), (Short.MinValue, Float.NegativeInfinity, decimal2(Short.MinValue), false), (0.toShort, Float.MaxValue, decimal2(0), null), (0.toShort, Float.MinValue, decimal2(0.01), null) ).foreach(v => { val row = create_row(v._1, v._2, v._3, v._4) checkEvaluation(e1, e2.eval(row), row) }) } } }
shaneknapp/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnwrapCastInBinaryComparisonSuite.scala
Scala
apache-2.0
16,626
/*
 * Copyright 2017 PayPal
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.squbs.metrics

import java.lang.management.ManagementFactory
import javax.management.ObjectName

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Keep, Sink}
import akka.stream.testkit.scaladsl.TestSource
import akka.testkit.TestKit
import org.scalatest.OptionValues._
import org.scalatest.{AsyncFlatSpecLike, Matchers}

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}

/**
 * Verifies that `MaterializationMetricsCollector` publishes correct JMX counters
 * (`*-active-count`, `*-creation-count`, `*-termination-count`) as streams are
 * materialized and terminate — whether the termination comes from upstream
 * completion, upstream failure, or a failure further downstream.
 *
 * Each test materializes the same blueprint several times and asserts the
 * counter values via `jmxValue` after each lifecycle event.
 */
class MaterializationMetricsCollectorSpec extends TestKit(ActorSystem("MaterializationMetricsCollectorSpec"))
  with AsyncFlatSpecLike with Matchers {

  val awaitMax = 60 seconds
  implicit val materializer = ActorMaterializer()

  it should "update metrics when upstream finishes" in {
    val stream = TestSource.probe[Int]
      .via(MaterializationMetricsCollector[Int]("upstream-finishes"))
      .toMat(Sink.ignore)(Keep.both)

    // Two materializations of the same blueprint -> two active, two created.
    val (probe1, future1) = stream.run()
    val (probe2, future2) = stream.run()
    jmxValue("upstream-finishes-active-count", "Count").value shouldBe 2
    jmxValue("upstream-finishes-creation-count", "Count").value shouldBe 2

    probe1.sendComplete()
    Await.ready(future1, awaitMax)
    // It should decrement the active counter when the stream finishes normally
    jmxValue("upstream-finishes-active-count", "Count").value shouldBe 1
    jmxValue("upstream-finishes-creation-count", "Count").value shouldBe 2
    jmxValue("upstream-finishes-termination-count", "Count").value shouldBe 1

    val (probe3, future3) = stream.run()
    // It should increment the counter with a new materialization
    jmxValue("upstream-finishes-active-count", "Count").value shouldBe 2
    jmxValue("upstream-finishes-creation-count", "Count").value shouldBe 3
    jmxValue("upstream-finishes-termination-count", "Count").value shouldBe 1

    probe2.sendComplete()
    probe3.sendComplete()
    // Once both remaining streams complete, all materializations are terminated.
    Future.sequence(future2 :: future3:: Nil) map { _ =>
      jmxValue("upstream-finishes-active-count", "Count").value shouldBe 0
      jmxValue("upstream-finishes-creation-count", "Count").value shouldBe 3
      jmxValue("upstream-finishes-termination-count", "Count").value shouldBe 3
    }
  }

  it should "update metrics when upstream fails" in {
    val stream = TestSource.probe[Int]
      .via(MaterializationMetricsCollector[Int]("upstream-fails"))
      .toMat(Sink.head[Int])(Keep.both)

    val (probe1, future1) = stream.run()
    val (probe2, future2) = stream.run()
    jmxValue("upstream-fails-active-count", "Count").value shouldBe 2
    jmxValue("upstream-fails-creation-count", "Count").value shouldBe 2

    probe1.sendError(new Exception("boom"))
    Await.ready(future1, awaitMax)
    // It should decrement the counter when stream fails
    jmxValue("upstream-fails-active-count", "Count").value shouldBe 1
    jmxValue("upstream-fails-creation-count", "Count").value shouldBe 2
    jmxValue("upstream-fails-termination-count", "Count").value shouldBe 1

    val (probe3, future3) = stream.run()
    // It should increment the counter with a new materialization
    jmxValue("upstream-fails-active-count", "Count").value shouldBe 2
    jmxValue("upstream-fails-creation-count", "Count").value shouldBe 3
    jmxValue("upstream-fails-termination-count", "Count").value shouldBe 1

    probe2.sendError(new Exception("boom"))
    probe3.sendError(new Exception("boom"))
    // Failed materializations still count as terminated.
    recoverToSucceededIf[Exception](Future.sequence(future2 :: future3:: Nil)) map { _ =>
      jmxValue("upstream-fails-active-count", "Count").value shouldBe 0
      jmxValue("upstream-fails-creation-count", "Count").value shouldBe 3
      jmxValue("upstream-fails-termination-count", "Count").value shouldBe 3
    }
  }

  it should "update metrics when downstream terminates" in {
    // The map stage below the collector throws on element 3, so termination is
    // triggered downstream of the metrics stage.
    val stream = TestSource.probe[Int]
      .via(MaterializationMetricsCollector[Int]("downstream-finishes"))
      .map { elem => if(elem == 3) throw new Exception("boom") else elem }
      .toMat(Sink.ignore)(Keep.both)

    val (probe1, future1) = stream.run()
    val (probe2, future2) = stream.run()
    jmxValue("downstream-finishes-active-count", "Count").value shouldBe 2
    jmxValue("downstream-finishes-creation-count", "Count").value shouldBe 2

    probe1.sendNext(1)
    probe1.sendNext(2)
    probe1.sendNext(3)
    Await.ready(future1, awaitMax)
    // It should decrement the counter when downstream fails
    jmxValue("downstream-finishes-active-count", "Count").value shouldBe 1
    jmxValue("downstream-finishes-creation-count", "Count").value shouldBe 2
    jmxValue("downstream-finishes-termination-count", "Count").value shouldBe 1

    val (probe3, future3) = stream.run()
    // It should increment the counter with a new materialization
    jmxValue("downstream-finishes-active-count", "Count").value shouldBe 2
    jmxValue("downstream-finishes-creation-count", "Count").value shouldBe 3
    jmxValue("downstream-finishes-termination-count", "Count").value shouldBe 1

    probe2.sendNext(1)
    probe2.sendNext(2)
    probe2.sendNext(3)
    probe3.sendNext(1)
    probe3.sendNext(2)
    probe3.sendNext(3)

    recoverToSucceededIf[Exception](Future.sequence(future2 :: future3:: Nil)) map { _ =>
      jmxValue("downstream-finishes-active-count", "Count").value shouldBe 0
      jmxValue("downstream-finishes-creation-count", "Count").value shouldBe 3
      jmxValue("downstream-finishes-termination-count", "Count").value shouldBe 3
    }
  }

  /**
   * Reads attribute `key` of the MBean named
   * `<domain>:name=<domain>.<beanName>` from the platform MBean server.
   * Returns `None` if the attribute value is null.
   */
  def jmxValue(beanName: String, key: String) = {
    val oName = ObjectName.getInstance(s"${MetricsExtension(system).Domain}:name=${MetricsExtension(system).Domain}.$beanName")
    Option(ManagementFactory.getPlatformMBeanServer.getAttribute(oName, key))
  }
}
az-qbradley/squbs
squbs-ext/src/test/scala/org/squbs/metrics/MaterializationMetricsCollectorSpec.scala
Scala
apache-2.0
6,292
package test class ScalaClass { /* double-checking that we can still do this */ def hmm = JavaClass.STATIC_PROTECTED_FIELD }
lrytz/scala
test/files/neg/t6934/ScalaClass.scala
Scala
apache-2.0
130
/*
 *  ____    ____    _____    ____    ___     ____
 * |  _ \  |  _ \  | ____|  / ___|  / _/    / ___|        Precog (R)
 * |  |_) | |  |_) | |  _|  | |     | |  /| | |  _         Advanced Analytics Engine for NoSQL Data
 * |  __/  |  _ <  | |___  | |___  |/ _| | | |_| |        Copyright (C) 2010 - 2013 SlamData, Inc.
 * |_|     |_| \_\ |_____|  \____|   /__/   \____|         All Rights Reserved.
 *
 * This program is free software: you can redistribute it and/or modify it under the terms of the
 * GNU Affero General Public License as published by the Free Software Foundation, either version
 * 3 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
 * the GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License along with this
 * program. If not, see <http://www.gnu.org/licenses/>.
 *
 */
package com.precog.common
package accounts

import com.precog.common.Path
import com.precog.common.client._
import com.precog.common.security._
import com.precog.util._

import akka.dispatch.{ ExecutionContext, Future, Promise }

import blueeyes.bkka._
import blueeyes.core.data.DefaultBijections._
import blueeyes.core.data.ByteChunk
import blueeyes.core.http._
import blueeyes.core.http.MimeTypes._
import blueeyes.core.http.HttpStatusCodes._
import blueeyes.core.service._
import blueeyes.core.service.engines.HttpClientXLightWeb
import blueeyes.json._
import blueeyes.json.serialization.{ Extractor, Decomposer }
import blueeyes.json.serialization.DefaultSerialization.{ DateTimeDecomposer => _, DateTimeExtractor => _, _ }
import blueeyes.json.serialization.Extractor._

import org.apache.commons.codec.binary.Base64
import org.joda.time.DateTime
import org.streum.configrity.Configuration
import com.weiglewilczek.slf4s.Logging

import scalaz._
import scalaz.NonEmptyList._
import scalaz.Validation._
import scalaz.syntax.bifunctor._
import scalaz.syntax.monad._
import scalaz.syntax.validation._
import scalaz.syntax.std.option._

object WebAccountFinder extends Logging {
  /**
   * Builds an `AccountFinder` from configuration.
   *
   * If `service.hardcoded_account` is present a `StaticAccountFinder` is
   * returned; otherwise the `protocol`/`host`/`port`/`path`/`user`/`password`
   * properties are all required and a `WebAccountFinder` backed by the remote
   * accounts service is built. Missing properties are accumulated into a
   * `NonEmptyList[String]` failure.
   */
  def apply(config: Configuration)(implicit executor: ExecutionContext): Validation[NonEmptyList[String], AccountFinder[Response]] = {
    val serviceConfig = config.detach("service")
    serviceConfig.get[String]("hardcoded_account") map { accountId =>
      implicit val M = ResponseMonad(new FutureMonad(executor))
      success(new StaticAccountFinder[Response](accountId, serviceConfig[String]("hardcoded_rootKey", ""), serviceConfig.get[String]("hardcoded_rootPath")))
    } getOrElse {
      (serviceConfig.get[String]("protocol").toSuccess(nels("Configuration property service.protocol is required")) |@|
       serviceConfig.get[String]("host").toSuccess(nels("Configuration property service.host is required")) |@|
       serviceConfig.get[Int]("port").toSuccess(nels("Configuration property service.port is required")) |@|
       serviceConfig.get[String]("path").toSuccess(nels("Configuration property service.path is required")) |@|
       serviceConfig.get[String]("user").toSuccess(nels("Configuration property service.user is required")) |@|
       serviceConfig.get[String]("password").toSuccess(nels("Configuration property service.password is required"))) {
        (protocol, host, port, path, user, password) =>
          // SECURITY: do not log the service password in cleartext.
          logger.info("Creating new WebAccountFinder with properties %s://%s:%s/%s %s:%s".format(protocol, host, port.toString, path, user, "****"))
          new WebAccountFinder(protocol, host, port, path, user, password)
      }
    }
  }
}

/**
 * `AccountFinder` implementation that queries a remote accounts service over
 * HTTP with Basic authentication.
 */
class WebAccountFinder(protocol: String, host: String, port: Int, path: String, user: String, password: String)(implicit executor: ExecutionContext) extends WebClient(protocol, host, port, path) with AccountFinder[Response] with Logging {
  import scalaz.syntax.monad._
  import EitherT.{ left => leftT, right => rightT, _ }
  import \/.{ left, right }
  import blueeyes.core.data.DefaultBijections._
  import blueeyes.json.serialization.DefaultSerialization._

  implicit val M: Monad[Future] = new FutureMonad(executor)

  /**
   * Looks up the account id owning the given API key via
   * `GET /accounts/?apiKey=...`. Returns `None` for an OK response with no
   * body; any other response or client failure becomes a left (error string).
   */
  def findAccountByAPIKey(apiKey: APIKey) : Response[Option[AccountId]] = {
    // SECURITY: password is masked here; it must never appear in logs.
    logger.debug("Finding account for API key " + apiKey + " with " + (protocol, host, port, path, user, "****").toString)
    invoke { client =>
      logger.info("Querying accounts service for API key %s".format(apiKey))
      eitherT(client.query("apiKey", apiKey).get[JValue]("/accounts/") map {
        case HttpResponse(HttpStatus(OK, _), _, Some(jaccountId), _) =>
          logger.info("Got response for apiKey " + apiKey)
          // Map extraction errors to their message, successes to Some(accountId).
          (((_:Extractor.Error).message) <-: jaccountId.validated[WrappedAccountId] :-> { wid => Some(wid.accountId) }).disjunction
        case HttpResponse(HttpStatus(OK, _), _, None, _) =>
          logger.warn("No account found for apiKey: " + apiKey)
          right(None)
        case res =>
          logger.error("Unexpected response from accounts service for findAccountByAPIKey: " + res)
          left("Unexpected response from accounts service; unable to proceed: " + res)
      } recoverWith {
        case ex =>
          logger.error("findAccountByAPIKey for " + apiKey + " failed.", ex)
          Promise.successful(left("Client error accessing accounts service; unable to proceed: " + ex.getMessage))
      })
    }
  }

  /**
   * Fetches the details of the account with the given id via
   * `GET /accounts/<id>`. Non-OK responses and client failures become lefts.
   */
  def findAccountDetailsById(accountId: AccountId): Response[Option[AccountDetails]] = {
    logger.debug("Finding account for id: " + accountId)
    invoke { client =>
      eitherT(client.get[JValue]("/accounts/" + accountId) map {
        case HttpResponse(HttpStatus(OK, _), _, Some(jaccount), _) =>
          logger.info("Got response for AccountId " + accountId)
          (((_:Extractor.Error).message) <-: jaccount.validated[Option[AccountDetails]]).disjunction
        case res =>
          logger.error("Unexpected response from accounts service on findAccountDetailsById: " + res)
          left("Unexpected response from accounts service; unable to proceed: " + res)
      } recoverWith {
        case ex =>
          logger.error("findAccountDetailsById for " + accountId + " failed.", ex)
          Promise.successful(left("Client error accessing accounts service; unable to proceed: " + ex.getMessage))
      })
    }
  }

  /** Runs `f` against a JSON HTTP client carrying the Basic-auth header. */
  def invoke[A](f: HttpClient[ByteChunk] => A): A = {
    val auth = HttpHeaders.Authorization("Basic "+new String(Base64.encodeBase64((user+":"+password).getBytes("UTF-8")), "UTF-8"))
    withJsonClient { client =>
      f(client.header(auth))
    }
  }
}
precog/platform
common/src/main/scala/com/precog/common/accounts/WebAccountFinder.scala
Scala
agpl-3.0
6,721
package fpgatidbits.dma

import Chisel._
import fpgatidbits.ocm._
import fpgatidbits.streams._

// a burst-oriented read order cache that outputs the entire burst content at
// once. useful for filling cachelines. design is almost identical to
// ReadOrderCacheBRAM, just with wide (instead of deep) BRAMs for the storage.

class WideReadOrderCache(p: ReadOrderCacheParams) extends Module {
  // total bits/bytes carried by one full burst (maxBurst beats of dataWidth)
  val burstBits = p.mrp.dataWidth * p.maxBurst
  val burstBytes = burstBits / 8
  val io = new Bundle {
    // interface towards in-order processing elements
    // note the difference from the regular ReadOrderCache req/rsp interface;
    // all metadata is stripped
    val reqOrdered = Decoupled(UInt(width = p.mrp.addrWidth)).flip
    val rspOrdered = Decoupled(UInt(width = burstBits))

    // unordered interface towards out-of-order memory system
    val reqMem = Decoupled(new GenericMemoryRequest(p.mrp))
    val rspMem = Decoupled(new GenericMemoryResponse(p.mrp)).flip
  }
  // template/constant values used for cloneType construction below
  val beat = UInt(0, width = p.mrp.dataWidth)
  val fullBurst = UInt(width = burstBits)
  val rid = UInt(0, width = p.mrp.idWidth)
  val mreq = new GenericMemoryRequest(p.mrp)
  // build a clonetype for the wide memory rsps
  val modMRP = new MemReqParams(p.mrp.addrWidth, burstBits, p.mrp.idWidth,
    p.mrp.metaDataWidth, p.mrp.sameIDInOrder)
  val mrsp = new GenericMemoryResponse(modMRP)

  // queue with pool of available request IDs
  val freeReqID = Module(new ReqIDQueue(
    p.mrp.idWidth, p.outstandingReqs, 0)).io

  // queue with issued requests
  val busyReqs = Module(new FPGAQueue(mreq, p.outstandingReqs)).io

  // headRsps is used for handshaking-over-latency for reading rsps from BRAM
  // capacity = 1 (BRAM latency) + 2 (needed for full throughput)
  val headRsps = Module(new FPGAQueue(mrsp, 3)).io

  // issue new requests: sync freeReqID and incoming reqs
  // (a request only fires when both a free ID and an ordered request are ready)
  val readyReqs = StreamJoin(
    inA = freeReqID.idOut, inB = io.reqOrdered,
    genO = mreq, join = {(freeID: UInt, r: UInt) => GenericMemoryRequest(
      p = p.mrp, addr = r, write = Bool(false), id = freeID,
      numBytes = UInt(burstBytes)
    )}
  )

  // issued requests go to both mem req channel and busyReqs queue
  val reqIssueFork = Module(new StreamFork(
    genIn = mreq, genA = mreq, genB = mreq,
    forkA = {x: GenericMemoryRequest => x},
    forkB = {x: GenericMemoryRequest => x}
  )).io

  readyReqs <> reqIssueFork.in
  reqIssueFork.outA <> io.reqMem
  reqIssueFork.outB <> busyReqs.enq
  // external channel IDs are offset by chanIDBase; internal IDs stay 0-based
  io.reqMem.bits.channelID := UInt(p.chanIDBase) + reqIssueFork.outA.bits.channelID

  //==========================================================================

  val ctrBits = log2Up(p.maxBurst)
  val reqIDBits = log2Up(p.outstandingReqs)
  // since burst responses can be interleaved, each in-flight burst can have
  // a number of elements it has already received. we use the following BRAM
  // as a counter to keep track of the number of elements received for each
  // in-flight burst. we do a read-modify-write through this BRAM to do this.
  val rspCounters = Module(new DualPortBRAM(reqIDBits, ctrBits)).io
  val ctrRd = rspCounters.ports(0)
  val ctrWr = rspCounters.ports(1)

  // an issued request always means its storage space is ready, so we can always
  // accept memory responses.
  io.rspMem.ready := Bool(true)

  // subtract chanIDBase to get index of counter to read & use as read addr
  val ctrRdInd = io.rspMem.bits.channelID - UInt(p.chanIDBase)
  ctrRd.req.addr := ctrRdInd
  ctrRd.req.writeEn := Bool(false)

  // pipeline registers matching the 1-cycle BRAM read latency
  val regCtrInd = Reg(next = ctrRdInd)
  val regCtrValid = Reg(next = io.rspMem.valid)
  val regCtrData = Reg(next = io.rspMem.bits.readData)
  val regCtrLast = Reg(next = io.rspMem.bits.isLast)

  // bypass logic to compensate for BRAM latency
  // (forward the just-written counter value on back-to-back same-ID responses)
  val regDoBypass = Reg(next = ctrWr.req.writeEn & (ctrRd.req.addr === ctrWr.req.addr))
  val regNewVal = Reg(init = UInt(0, width = ctrBits))
  val ctrOldVal = Mux(regDoBypass, regNewVal, ctrRd.rsp.readData)
  // use regCtrLast to clear counter at end of burst
  val ctrNewVal = Mux(regCtrLast, UInt(0), ctrOldVal + UInt(1))
  regNewVal := ctrNewVal
  ctrWr.req.addr := regCtrInd
  ctrWr.req.writeEn := regCtrValid
  ctrWr.req.writeData := ctrNewVal

  /* TODO IMPROVE
  potentially long comb path, can hurt frequency --
  can add registers prior to data store BRAM to improve
  */
  // store received data in bank-writable BRAM, each bank is as wide as the
  // mem data bus
  val storage = Module(new DualPortMaskedBRAM(
    addrBits = log2Up(p.outstandingReqs), dataBits = burstBits,
    unit = p.mrp.dataWidth
  )).io
  val dataRd = storage.ports(0)
  val dataWr = storage.ports(1)
  dataRd.req.writeEn := Bool(false)
  // compute where the newly arrived data goes
  dataWr.req.addr := regCtrInd
  // store data when available
  dataWr.req.writeEn := regCtrValid
  // need to align write data into correct position
  // variable leftshift according to ctrOldVal
  dataWr.req.writeData := regCtrData << (ctrOldVal * UInt(p.mrp.dataWidth))
  // compute a one-hot write mask with the correct word
  for(i <- 0 until p.maxBurst) {
    dataWr.req.writeMask(i) := (ctrOldVal === UInt(i))
  }

  // bitfield to keep track of burst finished status
  val regBurstFinished = Reg(init = UInt(0, width = p.outstandingReqs))
  val burstFinishedSet = UInt(width = p.outstandingReqs)
  burstFinishedSet := UInt(0, p.outstandingReqs)
  val burstFinishedClr = UInt(width = p.outstandingReqs)
  burstFinishedClr := UInt(0, p.outstandingReqs)
  regBurstFinished := (regBurstFinished & ~burstFinishedClr) | burstFinishedSet

  // set finished flag on last beat received
  when(regCtrValid & regCtrLast) {
    burstFinishedSet := UIntToOH(regCtrInd, p.outstandingReqs)
  }

  // =========================================================================
  // pop response when appropriate
  val headReq = busyReqs.deq.bits
  val headReqID = headReq.channelID
  val headReqValid = busyReqs.deq.valid
  val headReqBurstFinished = regBurstFinished(headReqID)

  // handshaking-over-latency to read out results
  // (only pop while headRsps has room for the in-flight BRAM read)
  val canPopRsp = headRsps.count < UInt(2)
  val isRspAvailable = headReqValid & headReqBurstFinished
  val doPopRsp = canPopRsp & isRspAvailable

  dataRd.req.addr := headReqID
  headRsps.enq.valid := Reg(next = doPopRsp)
  headRsps.enq.bits.readData := dataRd.rsp.readData
  headRsps.enq.bits.channelID := Reg(next = headReqID) // internal ID

  freeReqID.idIn.bits := headReqID
  busyReqs.deq.ready := Bool(false)
  freeReqID.idIn.valid := Bool(false)

  when(doPopRsp) {
    // pop from busyReqs, recycle the ID and reset the counter
    burstFinishedClr := UIntToOH(headReqID, p.outstandingReqs)
    freeReqID.idIn.valid := Bool(true)
    busyReqs.deq.ready := Bool(true)
  }

  headRsps.deq <> io.rspOrdered
  io.rspOrdered.bits := headRsps.deq.bits.readData

  // =========================================================================
  // debug
  //StreamMonitor(io.reqOrdered, Bool(true), "reqOrdered", true)
  //StreamMonitor(io.rspOrdered, Bool(true), "rspOrdered", true)
  //PrintableBundleStreamMonitor(io.reqMem, Bool(true), "memRdReq", true)
  //PrintableBundleStreamMonitor(io.rspMem, Bool(true), "memRdRsp", true)
}

// an alternative to WideReadOrderCache: upsize an incoming burst using a
// shift register (StreamUpsizer)
class BurstUpsizer(mIn: MemReqParams, wOut: Int) extends Module {
  // output-side params: identical to the input except for the wider data bus
  val mOut = new MemReqParams(
    mIn.addrWidth, wOut, mIn.dataWidth, mIn.metaDataWidth, mIn.sameIDInOrder
  )
  val io = new Bundle {
    val in = Decoupled(new GenericMemoryResponse(mIn)).flip
    val out = Decoupled(new GenericMemoryResponse(mOut))
  }
  val wIn = mIn.dataWidth
  if(wOut % wIn != 0)
    throw new Exception("Cannot upsize from unaligned size")
  // copy all fields by default
  io.out.bits := io.in.bits
  // upsize the read data if needed (pass-through when widths already match)
  var upsized = if(wOut > mIn.dataWidth)StreamUpsizer(ReadRespFilter(io.in), wOut)
                else ReadRespFilter(io.in)
  // use the upsized read data stream to drive output readData and handshake
  io.out.valid := upsized.valid
  upsized.ready := io.out.ready
  io.out.bits.readData := upsized.bits
}
maltanar/fpga-tidbits
src/main/scala/fpgatidbits/dma/WideReadOrderCache.scala
Scala
bsd-2-clause
8,102
package se.lu.nateko.cp.meta.services.sparql.magic import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt import org.eclipse.rdf4j.model.Statement import org.eclipse.rdf4j.sail.Sail import akka.actor.Scheduler import se.lu.nateko.cp.meta.instanceserver.RdfUpdate import se.lu.nateko.cp.meta.utils.async.throttle import org.eclipse.rdf4j.sail.SailConnectionListener import org.eclipse.rdf4j.sail.memory.MemoryStore import akka.event.NoLogging import akka.event.LoggingAdapter trait IndexProvider extends SailConnectionListener{ def index: CpIndex } class IndexHandler(fromSail: Sail, scheduler: Scheduler, log: LoggingAdapter)(implicit ctxt: ExecutionContext) extends IndexProvider { val index = new CpIndex(fromSail)(log) index.flush() private val flushIndex: () => Unit = throttle(() => index.flush(), 1.second, scheduler) def statementAdded(s: Statement): Unit = { index.put(RdfUpdate(s, true)) flushIndex() } def statementRemoved(s: Statement): Unit = { index.put(RdfUpdate(s, false)) flushIndex() } } class DummyIndexProvider extends IndexProvider{ val index = { val sail = new MemoryStore sail.initialize() new CpIndex(sail)(NoLogging) } def statementAdded(s: Statement): Unit = {} def statementRemoved(s: Statement): Unit = {} }
ICOS-Carbon-Portal/meta
src/main/scala/se/lu/nateko/cp/meta/services/sparql/magic/IndexHandler.scala
Scala
gpl-3.0
1,303
/*
 * Artificial Intelligence for Humans
 * Volume 2: Nature Inspired Algorithms
 * Java Version
 * http://www.aifh.org
 * http://www.jeffheaton.com
 *
 * Code repository:
 * https://github.com/jeffheaton/aifh
 *
 * Copyright 2014 by Jeff Heaton
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * For more information on Heaton Research copyrights, licenses
 * and trademarks visit:
 * http://www.heatonresearch.com/copyright
 */
package com.heatonresearch.aifh.genetic.species

import com.heatonresearch.aifh.AIFHError
import com.heatonresearch.aifh.evolutionary.genome.Genome
import com.heatonresearch.aifh.evolutionary.species.ThresholdSpeciation
import com.heatonresearch.aifh.genetic.genome.DoubleArrayGenome
import com.heatonresearch.aifh.genetic.genome.IntegerArrayGenome

/**
 * Threshold-based speciation for array-encoded genomes. The compatibility
 * score between two genomes is the Euclidean distance between their encoded
 * arrays; both integer and double array genomes are supported.
 */
class ArraySpeciation extends ThresholdSpeciation {

  /**
   * Euclidean distance between the two genomes' data arrays. Dispatches on
   * the runtime type of `genome1`; an unsupported genome type raises
   * [[AIFHError]].
   */
  def getCompatibilityScore(genome1: Genome, genome2: Genome): Double = {
    genome1 match {
      case _: DoubleArrayGenome => doubleDistance(genome1, genome2)
      case _: IntegerArrayGenome => intDistance(genome1, genome2)
      case _ =>
        throw new AIFHError("This speciation does not support: " + genome1.getClass.getName)
    }
  }

  /** Euclidean distance for integer-array genomes. */
  private def intDistance(genome1: Genome, genome2: Genome): Double = {
    val first = genome1.asInstanceOf[IntegerArrayGenome]
    val second = genome2.asInstanceOf[IntegerArrayGenome]
    // Accumulate the squared per-element differences, then take the root.
    val sumOfSquares = (0 until first.size).foldLeft(0.0) { (acc, i) =>
      val delta: Double = first.getData(i) - second.getData(i)
      acc + delta * delta
    }
    Math.sqrt(sumOfSquares)
  }

  /** Euclidean distance for double-array genomes. */
  private def doubleDistance(genome1: Genome, genome2: Genome): Double = {
    val first = genome1.asInstanceOf[DoubleArrayGenome]
    val second = genome2.asInstanceOf[DoubleArrayGenome]
    val sumOfSquares = (0 until first.size).foldLeft(0.0) { (acc, i) =>
      val delta: Double = first.getData(i) - second.getData(i)
      acc + delta * delta
    }
    Math.sqrt(sumOfSquares)
  }
}
PeterLauris/aifh
vol2/vol2-scala-examples/src/main/scala/com/heatonresearch/aifh/genetic/species/ArraySpeciation.scala
Scala
apache-2.0
2,564
package lower_tir import exceptions.ICE import byteR._ import tir._ object LowerStoreIdent { /* This class only deals with 'normal' identifiers. All special cases are * expected to be handled in LowerExp. * * The expception is if the frontend has been changed to handle 'op'. * * In that case, this should be changed to generate method refs * for the appropriate functions. * * This is for generating sequences of instructions to store the top * of stack into an identifier. */ def apply(exp: TIdent, env: TTypeEnv): List[JVMInstruction] = exp match { case ident: TNamedIdent => ident match { case TIdentVar(_, _) => throw new ICE("""Unexpected TIdent var during |lower_tir: %s""".stripMargin.format(ident)) case TMutableIdent(_, _) => throw new ICE("""Unexpected TMutableIdent |var during lower_tir: %s""".stripMargin.format(ident)) case TIdentLongVar(names, _) => throw new ICE("""Unexpected store to a long |ident""".stripMargin) case TInternalIdentVar(_) => throw new ICE("Can't store to an internal ident") // Add 1 since 0 is reserved for the self reference. case TNumberedIdentVar(funName, number) => List(JVMLocalAStore(number + 1)) case TArgumentNode(funName, number) => throw new ICE("""Store to a TArgument node. There is no particular |reason that this has to be disallowed. However, it should be done |with careful assesment on the performance impact of accessing |heap variables repeatedly. See the implmentation in LowerLoadIdent |to implement this""".stripMargin) case TTopLevelIdent(name, identClass) => { // Since we are storing to the indentifier, ensure that it is something // that we can put directly into a variable assert(identClass.isRegisterClass) List(JVMPutStaticField(JVMMainClassRef(), LowerName(name), LowerType(env.getOrFail(ident), env))) } } case other => throw new ICE("Cannot store to identifier " + other.prettyPrint) } }
j-c-w/mlc
src/main/scala/lower_tir/LowerStoreIdent.scala
Scala
gpl-3.0
2,124
package ristretto.main object FreshId { var next = 0 def freshId(prefix: String) = { next = next + 1 s"$prefix$next" } }
usi-pl/ristretto
src/main/scala/ristretto/main/FreshId.scala
Scala
bsd-3-clause
136
/*
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.flaminem.flamy.parsing.model

import com.flaminem.flamy.model.Table
import com.flaminem.flamy.model.exceptions.{FlamyException, UnexpectedBehaviorException}
import com.flaminem.flamy.utils.logging.Logging

/**
 * Validates a [[TableDependency]] (the columns/partitions an INSERT writes)
 * against the corresponding table definition. May mutate the wrapped
 * `TableDependency`'s `columns` and `partitions` when dynamic partitioning
 * is in play (see `propagatePartitionColumnValues`).
 *
 * Created by fpin on 9/12/16.
 */
class TableDependencyChecker(that: TableDependency) extends Logging {

  /**
   * Check that the number of columns correspond between the TableDependency and the table definition.
   *
   * Throws [[FlamyException]] when the inserted column/partition counts do not
   * match the table definition. With dynamic partitions, the inserted column
   * count must equal (definition columns + definition partitions), and the
   * trailing columns are folded into the partitions.
   *
   * @param tableDef
   */
  def checkColumnAndPartitions(tableDef: Table): Unit = {
    /* We need to force the evaluation of lazy val hasDynamicPartition now, before the partitions are modified */
    val hasDynamicPartitions = that.hasDynamicPartitions
    // tdColNum is a def (not a val): propagatePartitionColumnValues shrinks
    // that.columns, and the later checks must see the updated size.
    def tdColNum: Int = that.columns.size
    val tdPartNum: Int = that.partitions.size
    val tableDefPartNum: Int = tableDef.partitions.size
    val tableDefColNum: Int = tableDef.columns.size
    logger.debug(
      s"""
         |checkColumnAndPartitions: ${that.fullName}
         |tdColNum: $tdColNum
         |tdPartNum: $tdPartNum
         |tableDefPartNum: $tableDefPartNum
         |tableDefColNum: $tableDefColNum
       """.stripMargin
    )
    if (tdPartNum != tableDefPartNum && tdColNum + tdPartNum > 0) {
      throw new FlamyException(s"Table ${that.fullName} is defined with $tableDefPartNum partitions but $tdPartNum are inserted.")
    }
    if (hasDynamicPartitions && !that.hasExternalTableDeps && tdColNum > 0) {
      if(tdColNum == tableDefColNum + tableDefPartNum) {
        propagatePartitionColumnValues()
      } else {
        throw new FlamyException(
          s"Table ${that.fullName} has $tableDefColNum columns and $tableDefPartNum dynamic partitions, " +
          s"but the number of inserted columns is $tdColNum.\n" +
          s"When using dynamic partitioning, it is expected to be equal to the number of columns plus the number of partitions (${tableDefColNum + tableDefPartNum})."
        )
      }
    }
    if (tdColNum != tableDefColNum && !that.hasExternalTableDeps && tdColNum > 0) {
      throw new FlamyException(s"Table ${that.fullName} is defined with $tableDefColNum columns but $tdColNum are inserted.")
    }
  }

  /**
   * When dynamic partitioning is used,
   * we check if the columns corresponding to the partition have a value,
   * and if they have, we add it to the partition.
   *
   * The trailing `partitions.size` columns are removed from `that.columns`
   * and their values (when defined) are copied onto the matching partitions.
   * A partition that already holds a conflicting value is a programming error.
   */
  private def propagatePartitionColumnValues(): Unit = {
    val partNum = that.partitions.size
    val cols = that.columns.takeRight(partNum)
    val newPartitions =
      that.partitions.zip(cols).map{
        case (part, col) if col.value.isDefined =>
          if(part.value.isDefined && part.value != col.value) {
            throw new UnexpectedBehaviorException()
          }
          else{
            part.copy(value = col.value)
          }
        case (part, _) => part
      }
    that.columns = that.columns.dropRight(partNum)
    that.partitions = newPartitions
  }

  override def toString: String = {
    that.toString
  }

}
flaminem/flamy
src/main/scala/com/flaminem/flamy/parsing/model/TableDependencyChecker.scala
Scala
apache-2.0
3,579
/*
 * Copyright 2020 Precog Data
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package quasar.api.datasource

import quasar.Condition

import scala.Exception

import scalaz.{\/, ISet}

/** Management operations over the set of configured datasources.
  *
  * @tparam F effects
  * @tparam G multiple results
  * @tparam I identity
  * @tparam C configuration
  */
trait Datasources[F[_], G[_], I, C] {
  import DatasourceError._

  /** Adds the datasource described by the given `DatasourceRef` to the
    * set of datasources, returning its identifier or an error if it could
    * not be added. */
  def addDatasource(ref: DatasourceRef[C]): F[CreateError[C] \/ I]

  /** Metadata for all datasources. */
  def allDatasourceMetadata: F[G[(I, DatasourceMeta)]]

  /** Returns the reference to the specified datasource, or an error if
    * it doesn't exist. */
  def datasourceRef(datasourceId: I): F[ExistentialError[I] \/ DatasourceRef[C]]

  /** Returns the status of the specified datasource or an error if it doesn't
    * exist. */
  def datasourceStatus(datasourceId: I): F[ExistentialError[I] \/ Condition[Exception]]

  /** Removes the specified datasource, making its resources unavailable. */
  def removeDatasource(datasourceId: I): F[Condition[ExistentialError[I]]]

  /** Replaces the reference to the specified datasource. */
  def replaceDatasource(datasourceId: I, ref: DatasourceRef[C])
      : F[Condition[DatasourceError[I, C]]]

  /** Replaces the reference to the specified datasource, applying the patch
    * to the existing configuration. */
  def reconfigureDatasource(datasourceId: I, patch: C)
      : F[Condition[DatasourceError[I, C]]]

  /** Renames the reference to the specified datasource. */
  def renameDatasource(datasourceId: I, name: DatasourceName)
      : F[Condition[DatasourceError[I, C]]]

  /** Creates a temporary copy of the datasource specified by id, deriving the
    * copy's name from the original via `modifyName`. */
  def copyDatasource(datasourceId: I, modifyName: DatasourceName => DatasourceName): F[DatasourceError[I, C] \/ I]

  /** The set of supported datasource types. */
  def supportedDatasourceTypes: F[ISet[DatasourceType]]
}
quasar-analytics/quasar
api/src/main/scala/quasar/api/datasource/Datasources.scala
Scala
apache-2.0
2,592
package controllers

import javax.inject.Inject

import dao.TransactionDAO
import models._
import pdi.jwt.JwtSession._
import play.api.libs.functional.syntax._
import play.api.libs.json._
import play.api.mvc._
import utils.Const

import scala.concurrent.{ExecutionContext, Future}

/** REST endpoints for creating, reading, updating and deleting transactions.
  *
  * Every action is wrapped in `Authenticated` (from `Secured`); the current
  * user's identifier is read from the JWT session under
  * `Const.ValueStoredJWT` and passed to the DAO, which scopes all queries to
  * that user.
  *
  * NOTE(review): each action calls `.get` on the Option returned by
  * `jwtSession.getAs` — this assumes `Authenticated` guarantees the value is
  * present; confirm, otherwise a missing claim throws instead of returning 401.
  */
class TransactionController @Inject()(transactionDAO: TransactionDAO)(implicit executionContext: ExecutionContext)
  extends Controller with Secured {

  // Serializer for the minimal budget view embedded inside a transaction.
  implicit val transactionBudgetGETDTOWrites: Writes[TransactionBudgetGETDTO] = (
    (JsPath \\ "id").write[Int] and
      (JsPath \\ "name").write[String]
    ) (unlift(TransactionBudgetGETDTO.unapply))

  // Deserializer for transaction creation; `name` must be non-empty.
  implicit val transactionPOSTDTOReads: Reads[TransactionPOSTDTO] = (
    (JsPath \\ "name").read[String](notEqual(Const.errorMessageEmptyStringJSON, "")) and
      (JsPath \\ "date").readNullable[DateDTO] and
      (JsPath \\ "budgetId").read[Int] and
      (JsPath \\ "amount").read[Double]
    ) (TransactionPOSTDTO.apply _)

  // Serializer for a single transaction (no id — the caller already has it).
  implicit val transactionGETDTOWrites: Writes[TransactionGETDTO] = (
    (JsPath \\ "name").write[String] and
      (JsPath \\ "date").write[DateDTO] and
      (JsPath \\ "budget").write[TransactionBudgetGETDTO] and
      (JsPath \\ "amount").write[Double]
    ) (unlift(TransactionGETDTO.unapply))

  // Serializer for transaction listings (includes the id).
  implicit val transactionAllGETDTOWrites: Writes[TransactionAllGETDTO] = (
    (JsPath \\ "id").write[Int] and
      (JsPath \\ "name").write[String] and
      (JsPath \\ "date").write[DateDTO] and
      (JsPath \\ "budget").write[TransactionBudgetGETDTO] and
      (JsPath \\ "amount").write[Double]
    ) (unlift(TransactionAllGETDTO.unapply))

  // Deserializer for partial updates; every field is optional.
  implicit val transactionPATCHDTOReads: Reads[TransactionPATCHDTO] = (
    (JsPath \\ "name").readNullable[String] and
      (JsPath \\ "date").readNullable[DateDTO] and
      (JsPath \\ "budgetId").readNullable[Int] and
      (JsPath \\ "amount").readNullable[Double]
    ) (TransactionPATCHDTO.apply _)

  /** POST — creates a transaction for the authenticated user.
    * 201 on success, 404 if the referenced budget does not exist,
    * 400 on validation or insertion failure.
    */
  def create(): Action[JsValue] = Authenticated.async(BodyParsers.parse.json) { implicit request =>
    val result = request.body.validate[TransactionPOSTDTO]

    result.fold(
      errors => Future.successful {
        BadRequest(Json.obj("status" -> "ERROR", "message" -> JsError.toJson(errors)))
      }
      ,
      transaction => {
        // we look for the user email in the JWT
        transactionDAO.insert(request.jwtSession.getAs[String](Const.ValueStoredJWT).get, transaction).map { _ =>
          Created(Json.obj("status" -> "OK", "message" -> "transaction '%s' created".format(transaction.name)))
        }.recover {
          // the specified budgetId was not found (for this user)
          case _: NoSuchElementException => NotFound(Json.obj("status" -> "ERROR", "message" -> "budget with id '%s' not found".format(transaction.budgetId)))
          // any other problem with the insertion
          case e: Exception => BadRequest(Json.obj("status" -> "ERROR", "message" -> e.getMessage))
        }
      }
    )
  }

  /** GET (with a JSON body carrying a date range) — lists the authenticated
    * user's transactions between the given dates.
    */
  def readAll: Action[JsValue] = Authenticated.async(BodyParsers.parse.json) { implicit request =>
    val result = request.body.validate[FromToDatesDTO]

    result.fold(
      errors => Future.successful {
        BadRequest(Json.obj("status" -> "ERROR", "message" -> JsError.toJson(errors)))
      },
      dates => {
        // we look for the user email in the JWT
        transactionDAO.findAll(request.jwtSession.getAs[String](Const.ValueStoredJWT).get, dates).map { transactions =>
          Ok(Json.toJson(transactions))
        }
      }
    )
  }

  /** GET — returns a single transaction owned by the authenticated user,
    * or 404 if it does not exist / belongs to someone else.
    */
  def read(id: Int): Action[AnyContent] = Authenticated.async { implicit request =>
    // we look for the user email in the JWT
    transactionDAO.find(request.jwtSession.getAs[String](Const.ValueStoredJWT).get, id).map { transaction =>
      Ok(Json.toJson(transaction))
    }.recover {
      // transaction not found (or it doesn't belong to this user)
      case _: NoSuchElementException => NotFound(Json.obj("status" -> "ERROR", "message" -> "transaction with id '%s' not found".format(id)))
    }
  }

  /** PATCH — partially updates a transaction owned by the authenticated user.
    * 404 if it does not exist, 400 on validation or update failure.
    */
  def update(id: Int): Action[JsValue] = Authenticated.async(BodyParsers.parse.json) { implicit request =>
    val result = request.body.validate[TransactionPATCHDTO]

    result.fold(
      errors => Future.successful {
        BadRequest(Json.obj("status" -> "ERROR", "message" -> JsError.toJson(errors)))
      },
      transaction => {
        transactionDAO.update(request.jwtSession.getAs[String](Const.ValueStoredJWT).get, id, transaction).map { _ =>
          Ok(Json.obj("status" -> "OK", "message" -> "transaction updated"))
        }.recover {
          // transaction not found (or it doesn't belong to this user)
          case _: NoSuchElementException => NotFound(Json.obj("status" -> "ERROR", "message" -> "transaction with id '%s' not found".format(id)))
          // any other problem with the update (e.g. the new budget)
          case e: Exception => BadRequest(Json.obj("status" -> "ERROR", "message" -> e.getMessage))
        }
      }
    )
  }

  /** DELETE — removes a transaction owned by the authenticated user,
    * or 404 if it does not exist / belongs to someone else.
    */
  def delete(id: Int): Action[AnyContent] = Authenticated.async { implicit request =>
    // we look for the user email in the JWT
    transactionDAO.delete(request.jwtSession.getAs[String](Const.ValueStoredJWT).get, id).map { _ =>
      Ok(Json.obj("status" -> "OK", "message" -> "transaction deleted"))
    }.recover {
      // transaction not found (or it doesn't belong to this user)
      case _: NoSuchElementException => NotFound(Json.obj("status" -> "ERROR", "message" -> "transaction with id '%s' not found".format(id)))
    }
  }
}
MathieuUrstein/HEIG.SCALA.Projet
server/app/controllers/TransactionController.scala
Scala
apache-2.0
5,686
package com.datascience.education.common.lecture5

/** A lazily-evaluated stream in the style of "Functional Programming in Scala".
  * Head and tail of a `Cons` cell are thunks (`() => _`), so elements are only
  * forced on demand; streams may be infinite.
  */
trait Stream[+A] {
  import Stream._

  /** Right fold with a by-name accumulator.
    * The arrow `=>` in front of the argument type `B` means that the function
    * `f` takes its second argument by name and may choose not to evaluate it —
    * which is what allows folds over infinite streams to terminate early.
    */
  def foldRight[B](z: => B)(f: (A, => B) => B): B =
    this match {
      case Cons(h,t) => f(h(), t().foldRight(z)(f)) // If `f` doesn't evaluate its second argument, the recursion never occurs.
      case _ => z
    }

  /** Appends `s` after this stream; `s` is evaluated lazily. */
  def append[B>:A](s: => Stream[B]): Stream[B] =
    foldRight(s)((h,t) => cons(h,t))

  /** Maps each element to a stream and concatenates the results. */
  def flatMap[B](f: A => Stream[B]): Stream[B] = {
    def g(a: A, sb: => Stream[B]) = f(a).append(sb)
    foldRight(empty[B])(g)
  }

  /** Transforms each element with `f`, lazily. */
  def map[B](f: A => B): Stream[B] = {
    def g(a: A, sb: => Stream[B]): Stream[B] = cons(f(a), sb)
    foldRight(empty[B])(g)
  }

  /** Drops the first element.
    *
    * NOTE(review): `cons(h, t)` is an extractor pattern (via `cons.unapply`),
    * but the lowercase `empty` in the second case is a *variable* pattern —
    * it matches anything and shadows `Stream.empty`. It behaves correctly here
    * only because it is the last case and returning the matched value is the
    * desired result for `Empty`.
    */
  def tail: Stream[A] = this match {
    case cons(h, t) => t
    case empty => empty
  }

  /** Pairs elements of this stream with elements of `streamB`, stopping at the
    * shorter of the two.
    *
    * NOTE(review): the lowercase `empty` in the last two cases is a variable
    * pattern (matches any stream); the result is still correct because the
    * cons/cons case is tried first.
    */
  def zip[B](streamB: Stream[B]): Stream[(A, B)] = {
    val initialState: (Stream[A], Stream[B]) = (this, streamB)
    def f(s: (Stream[A], Stream[B])):
        Option[((A, B), (Stream[A], Stream[B]))] = s match {
      case (cons(a, tailA), cons(b, tailB)) =>
        Some(((a, b), (tailA, tailB)))
      case (empty, _) => None
      case (_, empty) => None
    }

    unfold(initialState)(f): Stream[(A, B)]
  }

  /** Like `zip`, but continues until *both* streams are exhausted, padding the
    * shorter side with `None`.
    * (Same lowercase-`empty` variable-pattern caveat as `zip`; ordering of the
    * cases keeps the behavior correct.)
    */
  def zipAll[B](s2: Stream[B]): Stream[(Option[A], Option[B])] = {
    val s = (this, s2)
    def f(tuple: (Stream[A], Stream[B])):
        Option[((Option[A], Option[B]), (Stream[A], Stream[B]))] = tuple match {
      case (cons(a, ta), cons(b, tb)) => Some((Some(a), Some(b)), (ta, tb))
      case (cons(a, ta), empty) => Some((Some(a), None), (ta, empty))
      case (empty, cons(b, tb)) => Some((None, Some(b)), (empty, tb))
      case _ => None
    }
    unfold(s)(f)
  }

  /** Combines elements pairwise with `f`, stopping at the shorter stream. */
  def zipWith[B,C](s2: Stream[B])(f: (A,B) => C): Stream[C] =
    unfold((this, s2)) {
      case (Cons(h1,t1), Cons(h2,t2)) =>
        Some((f(h1(), h2()), (t1(), t2())))
      case _ => None
    }

  /** Interleaves this stream with `s2` (a, b, a, b, ...), continuing with the
    * remainder of the longer stream once one is exhausted.
    *
    * NOTE(review): the `Some(a: A)` / `Some(b: B)` type ascriptions are erased
    * at runtime (unchecked); they do not actually test the element types.
    */
  def merge[B >: A](s2: Stream[B]): Stream[B] = {
    val zipped: Stream[(Option[A], Option[B])] = this.zipAll(s2)
    zipped.flatMap { (tuple: (Option[A], Option[B])) =>
      //println("flatten this tuple: "+tuple)
      tuple match {
        case (Some(a: A), Some(b: B)) => cons(a, unit(b))
        case (Some(a: A), None) => unit(a)
        case (None, Some(b: B)) => unit(b)
        case (None, None) => empty
      }
    }
  }

  /** Pairs each element with its 0-based index. */
  def enumerate: Stream[(Int, A)] =
    from(0).zip(this)

  /** True if any element satisfies `p`; short-circuits thanks to the by-name
    * accumulator of `foldRight`. */
  def exists(p: A => Boolean): Boolean =
    foldRight(false)((a, b) => p(a) || b)

  /** First element satisfying `p`, if any; short-circuits like `exists`. */
  def find(p: A => Boolean): Option[A] = {
    def f(a: A, op: => Option[A]) =
      if(p(a)) Some(a)
      else op
    foldRight(None: Option[A])(f)
  }

  /** Keeps only elements satisfying `f`, lazily. */
  def filter(f: A => Boolean): Stream[A] =
    foldRight(empty[A])((h,t) =>
      if (f(h)) cons(h, t)
      else t)

  /** Prints up to `upTo` elements, one per line.
    * NOTE(review): for `upTo <= 0` on a non-empty stream the first guard fails
    * and the second (`upTo <= 1`) still prints one element — confirm that is
    * the intended edge behavior.
    */
  def printRecursively(upTo: Int): Unit = this match {
    case cons(h, lazyTail) if upTo > 1 =>
      println(h)
      lazyTail.printRecursively(upTo - 1)
    case cons(h, lazyTail) if upTo <= 1 =>
      println(h)
    case Empty => println("reached end of stream")
  }

  /** First `n` elements (fewer if the stream is shorter). */
  def take(n: Int): Stream[A] = this match {
    case Cons(h, t) if n > 1 => cons(h(), t().take(n - 1))
    case Cons(h, _) if n == 1 => cons(h(), empty)
    case _ => empty
  }

  /** Alternative `take`.
    * NOTE(review): the recursive call delegates to `take`, not
    * `takeRecursive`, so only the first step goes through this method; the
    * result is the same. For negative `n` none of the guards on a non-empty
    * stream match, which would throw a MatchError — confirm callers never pass
    * negative values.
    */
  def takeRecursive(n: Int): Stream[A] = this match {
    case cons(h, t) if n > 0 => cons(h, t.take(n - 1))
    case _ if n == 0 => Stream.empty
    case Empty => Stream.empty
  }

  /** `take` implemented via `unfold`. */
  def takeViaUnfold(n: Int): Stream[A] =
    unfold((this,n)) {
      case (Cons(h,t), 1) => Some((h(), (empty, 0)))
      case (Cons(h,t), n) if n > 1 => Some((h(), (t(), n-1)))
      case _ => None
    }

  /** Prints up to `n` elements on a single line, space-separated. */
  def print(n: Int): Unit = {
    def f(a: A, remaining: => Int): Int = {
      Predef.print(a + " ")
      remaining - 1
    }

    this.take(n).foldRight(n)(f)

    println()
  }

  /*
   Not possible to implement print with `unfold`
   as it would give Stream[Unit]
   */

  /** First element, if any, without forcing the tail. */
  def headOption: Option[A] = {
    def f(a: A, op: => Option[A]) = Some(a)
    this.foldRight(None: Option[A])(f)
  }

  /*
   from FP in Scala answers

   The function can't be implemented using `unfold`, since `unfold` generates elements of the `Stream` from left to right. It can be implemented using `foldRight` though.

   The implementation is just a `foldRight` that keeps the accumulated value and the stream of intermediate results, which we `cons` onto during each iteration. When writing folds, it's common to have more state in the fold than is needed to compute the result. Here, we simply extract the accumulated list once finished.
   */
  /** Stream of successive right-fold results (like `scanRight` on List). */
  def scanRight[B](z: B)(f: (A, => B) => B): Stream[B] =
    foldRight((z, Stream.unit(z)))((a, p0) => {
      // p0 is passed by-name and used in by-name args in f and cons. So use lazy val to ensure only one evaluation...
      lazy val p1 = p0
      val b2 = f(a, p1._1)
      (b2, cons(b2, p1._2))
    })._2

  /** Forces at most `n` elements into a strict List. */
  def toListFinite(n: Int): List[A] = {
    def f(a: A, la: => List[A]) = a::la
    this.take(n).foldRight(List[A]())(f)
  }
}

/** The empty stream. */
case object Empty extends Stream[Nothing]

/** A non-empty stream; head and tail are unevaluated thunks. */
case class Cons[+C](h: () => C, t: () => Stream[C]) extends Stream[C]

object Stream {
  def empty[C]: Stream[C] = Empty

  /** Smart constructor: memoizes head and tail via `lazy val` so each is
    * evaluated at most once; also provides an extractor with forced values.
    */
  object cons {
    def apply[C](hd: => C, tl: => Stream[C]): Stream[C] = {
      // println("cons apply")
      lazy val head = hd
      lazy val tail = tl
      Cons(() => head, () => tail)
    }
    // NOTE(review): unapply forces both head and tail, unlike the Cons case
    // class pattern which exposes the raw thunks.
    def unapply[C](cs: Cons[C]): Option[(C, Stream[C])] =
      Some((cs.h(), cs.t()))
  }

  /** Single-element stream; the element is evaluated lazily. */
  def unit[A](a: => A): Stream[A] =
    cons(a, empty)

  def apply[A](as: A*): Stream[A] =
    if (as.isEmpty) empty
    else cons(as.head, apply(as.tail: _*))

  /** Converts a strict List into a Stream. */
  def listToStream[A](la: List[A]): Stream[A] =
    unfold(la){(listA: List[A]) => listA match {
      case h::t => Some((h, t))
      case Nil => None
    }
    }

  /** Corecursive generator: produces elements until `f` returns None. */
  def unfold[A, S](z: S)(f: S => Option[(A, S)]): Stream[A] =
    f(z) match {
      case Some((h,s)) => cons(h, unfold(s)(f))
      case None => empty
    }

  /** Infinite ascending integers starting at `i`. */
  def from(i: Int): Stream[Int] = cons(i, from(i + 1))
}

/** Demonstration script exercising the combinators above. */
object StreamExamples extends App {
  import Stream._

  def countFrom(n: Int): Stream[Int] = unfold(n)((n0: Int) => Some(n0, n0+1))

  def countFromTo(lowerInclusive: Int, upperExclusive: Int): Stream[Int] =
    unfold(lowerInclusive){(n0: Int) =>
      if (n0 < upperExclusive) Some(n0, n0+1)
      else None
    }

  println("count from 5, print 6 elements")
  countFrom(5).print(6)

  println("count from 5 until 8, print 6 elements")
  countFromTo(5,8).print(6)

  println("------------------------------")

  def sawtooth(upperBoundExclusive: Int): Stream[Int] =
    Stream.unfold(0){(i: Int) => Some((i, (i + 1) % upperBoundExclusive))}

  val sawtooth7: Stream[Int] = sawtooth(7)
  sawtooth7.print(14)

  println("find 4 in sawtooth function")
  println(sawtooth7.find((i: Int) => i==4))

  println("------------------------------")
  println("foldRight examples")

  val ints = from(0)

  def limitInts(i: Int, u: => Unit): Unit =
    if(i<=20) {
      println(i)
      u
    }

  ints.foldRight(())(limitInts)

  println("------------------------------")
  println("Append one infinite streams to another infinite stream")

  val sawtooth12To19: Stream[Int] = sawtooth7.map { i => i+12 }

  val appended = sawtooth7.append(sawtooth12To19)
  appended.print(32)

  val appended2 = sawtooth7.take(15).append(sawtooth12To19)
  println("first Stream limited by `take`")
  appended2.print(32)

  println("------------------------------")
  println("Merging/interspersing Streams")

  /*
   Broken when `take` not used.  Endless cycle of appends
   */
  val merged: Stream[Int] = sawtooth7.take(64).merge(sawtooth12To19.take(15))
  merged.print(64)

  println("------------------------------")
  println("head option")

  val emptyOp = Stream.empty[Int].headOption
  println(emptyOp)

  val sawtoothHeadop = sawtooth12To19.headOption
  println(sawtoothHeadop)

  println("------------------------------")
  println("Fibonacci")

  def fibonacciHelper(a: Int, b: Int): Stream[Int] =
    Stream.cons(a, fibonacciHelper(b, a+b))

  val fibonacci: Stream[Int] = fibonacciHelper(0, 1)
  fibonacci.print(22)

  println("---------------------------------")

  val zeroes: Stream[Int] = Stream.cons(0, zeroes)
  val oneZero = Stream.cons(1, zeroes)
  oneZero.take(16).print(12)

  println("----------------------------------")
  println("scanRight")
  def foo(i: Int, acc: => Double) = (0.5*i) + acc
  oneZero.take(6).scanRight(0.0)(foo).print(12)

  println("----------------------------------")
  println("decay")
  println(oneZero.take(6).foldRight(0.0)(foo))

  println("----------------------------------")
  println("scanRight")
  def bar(i: Int, acc: => Double) = i + acc
  oneZero.take(6).scanRight(0.0)(bar).print(12)

  println("----------------------------------")
  println("scanRight")
  def baz(i: Int, acc: => Int) = i + acc
  Stream.from(0).take(6).scanRight(0)(baz).print(12)
}
DS12/scala-class
common/src/main/scala/com/datascience/education/common/lecture5/Stream.scala
Scala
cc0-1.0
9,046
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.table.planner.delegation

import org.apache.flink.api.common.RuntimeExecutionMode
import org.apache.flink.api.dag.Transformation
import org.apache.flink.configuration.ExecutionOptions
import org.apache.flink.table.api.config.OptimizerConfigOptions
import org.apache.flink.table.api.{ExplainDetail, PlanReference, TableConfig, TableException}
import org.apache.flink.table.catalog.{CatalogManager, FunctionCatalog}
import org.apache.flink.table.delegation.{Executor, InternalPlan}
import org.apache.flink.table.module.ModuleManager
import org.apache.flink.table.operations.{ModifyOperation, Operation}
import org.apache.flink.table.planner.plan.`trait`.FlinkRelDistributionTraitDef
import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeGraph
import org.apache.flink.table.planner.plan.nodes.exec.batch.BatchExecNode
import org.apache.flink.table.planner.plan.nodes.exec.processor.{DeadlockBreakupProcessor, ExecNodeGraphProcessor, ForwardHashExchangeProcessor, MultipleInputNodeCreationProcessor}
import org.apache.flink.table.planner.plan.nodes.exec.utils.ExecNodePlanDumper
import org.apache.flink.table.planner.plan.optimize.{BatchCommonSubGraphBasedOptimizer, Optimizer}
import org.apache.flink.table.planner.plan.utils.FlinkRelOptUtil
import org.apache.flink.table.planner.utils.DummyStreamExecutionEnvironment

import org.apache.calcite.plan.{ConventionTraitDef, RelTrait, RelTraitDef}
import org.apache.calcite.rel.RelCollationTraitDef
import org.apache.calcite.sql.SqlExplainLevel

import java.util

// NOTE(review): JavaConversions (implicit conversions) is deprecated in favor
// of JavaConverters/.asScala; left unchanged to avoid touching behavior here.
import scala.collection.JavaConversions._

/** [[PlannerBase]] specialization for batch (bounded) execution
  * (`isStreamingMode = false`). Compiled-plan features are explicitly
  * unsupported in batch mode.
  */
class BatchPlanner(
    executor: Executor,
    tableConfig: TableConfig,
    moduleManager: ModuleManager,
    functionCatalog: FunctionCatalog,
    catalogManager: CatalogManager)
  extends PlannerBase(executor, tableConfig, moduleManager, functionCatalog, catalogManager,
    isStreamingMode = false) {

  /** Calcite trait definitions tracked during batch planning: calling
    * convention, Flink distribution, and collation (sortedness).
    */
  override protected def getTraitDefs: Array[RelTraitDef[_ <: RelTrait]] = {
    Array(
      ConventionTraitDef.INSTANCE,
      FlinkRelDistributionTraitDef.INSTANCE,
      RelCollationTraitDef.INSTANCE)
  }

  override protected def getOptimizer: Optimizer = new BatchCommonSubGraphBasedOptimizer(this)

  /** Post-optimization processors applied to the ExecNode graph, in order. */
  override protected def getExecNodeGraphProcessors: Seq[ExecNodeGraphProcessor] = {
    val processors = new util.ArrayList[ExecNodeGraphProcessor]()
    // deadlock breakup
    processors.add(new DeadlockBreakupProcessor())
    // multiple input creation (only when enabled via configuration)
    if (getTableConfig.getConfiguration.getBoolean(
      OptimizerConfigOptions.TABLE_OPTIMIZER_MULTIPLE_INPUT_ENABLED)) {
      processors.add(new MultipleInputNodeCreationProcessor(false))
    }
    processors.add(new ForwardHashExchangeProcessor)
    processors
  }

  /** Translates the ExecNode graph into runtime [[Transformation]]s.
    * Every root node must be a [[BatchExecNode]]; anything else indicates a
    * planner bug.
    */
  override protected def translateToPlan(execGraph: ExecNodeGraph): util.List[Transformation[_]] = {
    beforeTranslation()
    val planner = createDummyPlanner()
    val transformations = execGraph.getRootNodes.map {
      case node: BatchExecNode[_] => node.translateToPlan(planner)
      case _ =>
        throw new TableException("Cannot generate BoundedStream due to an invalid logical plan. " +
          "This is a bug and should not happen. Please file an issue.")
    }
    afterTranslation()
    transformations
  }

  /** Renders a multi-section explain string: AST, optimized physical plan,
    * optimized execution plan, and (optionally) the JSON physical plan.
    */
  override def explain(operations: util.List[Operation], extraDetails: ExplainDetail*): String = {
    val (sinkRelNodes, optimizedRelNodes, execGraph, streamGraph) = getExplainGraphs(operations)

    val sb = new StringBuilder
    sb.append("== Abstract Syntax Tree ==")
    sb.append(System.lineSeparator)
    sinkRelNodes.foreach { sink =>
      // use EXPPLAN_ATTRIBUTES to make the ast result more readable
      // and to keep the previous behavior
      sb.append(FlinkRelOptUtil.toString(sink, SqlExplainLevel.EXPPLAN_ATTRIBUTES))
      sb.append(System.lineSeparator)
    }

    sb.append("== Optimized Physical Plan ==")
    sb.append(System.lineSeparator)
    // include cost estimates only when explicitly requested
    val explainLevel = if (extraDetails.contains(ExplainDetail.ESTIMATED_COST)) {
      SqlExplainLevel.ALL_ATTRIBUTES
    } else {
      SqlExplainLevel.EXPPLAN_ATTRIBUTES
    }
    optimizedRelNodes.foreach { rel =>
      sb.append(FlinkRelOptUtil.toString(rel, explainLevel))
      sb.append(System.lineSeparator)
    }

    sb.append("== Optimized Execution Plan ==")
    sb.append(System.lineSeparator)
    sb.append(ExecNodePlanDumper.dagToString(execGraph))

    if (extraDetails.contains(ExplainDetail.JSON_EXECUTION_PLAN)) {
      sb.append(System.lineSeparator)
      sb.append("== Physical Execution Plan ==")
      sb.append(System.lineSeparator)
      sb.append(streamGraph.getStreamingPlanAsJSON)
    }

    sb.toString()
  }

  /** Builds a throwaway planner bound to a dummy execution environment, used
    * during translation so nodes don't mutate the real environment.
    */
  private def createDummyPlanner(): BatchPlanner = {
    val dummyExecEnv = new DummyStreamExecutionEnvironment(getExecEnv)
    val executor = new DefaultExecutor(dummyExecEnv)
    new BatchPlanner(executor, tableConfig, moduleManager, functionCatalog, catalogManager)
  }

  // Compiled-plan APIs are stream-only; all four below reject batch usage.
  override def loadPlan(planReference: PlanReference): InternalPlan = {
    throw new UnsupportedOperationException(
      "The compiled plan feature is not supported in batch mode.")
  }

  override def compilePlan(
      modifyOperations: util.List[ModifyOperation]): InternalPlan =
    throw new UnsupportedOperationException(
      "The compiled plan feature is not supported in batch mode.")

  override def translatePlan(plan: InternalPlan): util.List[Transformation[_]] =
    throw new UnsupportedOperationException(
      "The compiled plan feature is not supported in batch mode.")

  override def explainPlan(plan: InternalPlan, extraDetails: ExplainDetail*): String =
    throw new UnsupportedOperationException(
      "The compiled plan feature is not supported in batch mode.")

  /** Validates that the configured runtime mode is still BATCH; the mode must
    * not change after the table environment has been instantiated.
    */
  override def beforeTranslation(): Unit = {
    super.beforeTranslation()
    val runtimeMode = getConfiguration.get(ExecutionOptions.RUNTIME_MODE)
    if (runtimeMode != RuntimeExecutionMode.BATCH) {
      throw new IllegalArgumentException(
        "Mismatch between configured runtime mode and actual runtime mode. " +
          "Currently, the 'execution.runtime-mode' can only be set when instantiating the " +
          "table environment. Subsequent changes are not supported. " +
          "Please instantiate a new TableEnvironment if necessary.")
    }
  }
}
apache/flink
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/delegation/BatchPlanner.scala
Scala
apache-2.0
7,105
package org.reactivecouchbase.client

import play.api.libs.json.{Json, JsObject}
import net.spy.memcached.ops.OperationStatus
import java.lang.RuntimeException

/**
 *
 * Thrown when a JSON document does not conform to the expected format.
 *
 * @param message human-readable context for the failure; combined with the
 *                stringified errors in the exception message
 * @param errors  the validation errors, as a Play JSON object
 */
class JsonValidationException(message: String, errors: JsObject)
  extends ReactiveCouchbaseException("Json Validation failed", message + " : " + Json.stringify(errors))

/**
 *
 * Thrown when a Couchbase operation fails.
 *
 * @param status the failed operation's status; its message becomes the
 *               exception message
 */
class OperationFailedException(status: OperationStatus)
  extends ReactiveCouchbaseException("Operation failed", status.getMessage)

/**
 *
 * Base exception for ReactiveCouchbase; formats its message as "title : message".
 *
 * @param title   short category of the failure
 * @param message detailed description
 */
class ReactiveCouchbaseException(title: String, message: String)
  extends RuntimeException(title + " : " + message)
wsargent/ReactiveCouchbase-core
driver/src/main/scala/org/reactivecouchbase/client/Exceptions.scala
Scala
apache-2.0
814
package me.ivanyu.luscinia.entities

/**
 * Timeout to resend RPCs by node.
 * @param timeout resend timeout value (unit not stated here — presumably
 *                milliseconds; TODO confirm at the call sites)
 */
case class RPCResendTimeout(timeout: Int)
ivanyu/luscinia
node/src/main/scala/me/ivanyu/luscinia/entities/RPCResendTimeout.scala
Scala
unlicense
147
package services.neo4j

import javax.inject.{Inject, Named}

import play.api.libs.json.Json
import play.api.libs.ws.{WSAuthScheme, WSClient}

/**
 * Applies the application's uniqueness constraints to the Neo4j schema.
 *
 * Instantiating this class immediately issues a single transactional-commit
 * request against Neo4j's HTTP API containing every constraint statement.
 *
 * Copyright (c) 2017 A. Roberto Fischer
 *
 * @author A. Roberto Fischer <a.robertofischer@gmail.com> on 4/24/2017
 */
class ConstraintSetter @Inject()(wsClient: WSClient,
                                 @Named("Neo4jURI") neo4jURI: String,
                                 @Named("Neo4jUserName") neo4jUserName: String,
                                 @Named("Neo4jPassword") neo4jPassword: String) {

  // Uniqueness constraints enforced on the graph, one Cypher statement each.
  val constraints = Vector(
    s"CREATE CONSTRAINT ON (status:${Schema.NodeLabels.Status}) ASSERT status.id IS UNIQUE",
    s"CREATE CONSTRAINT ON (user:${Schema.NodeLabels.User}) ASSERT user.id IS UNIQUE",
    s"CREATE CONSTRAINT ON (user:${Schema.NodeLabels.User}) ASSERT user.screenName IS UNIQUE",
    s"CREATE CONSTRAINT ON (url:${Schema.NodeLabels.URL}) ASSERT url.name IS UNIQUE",
    s"CREATE CONSTRAINT ON (domain:${Schema.NodeLabels.Domain}) ASSERT domain.name IS UNIQUE",
    s"CREATE CONSTRAINT ON (tag:${Schema.NodeLabels.HashTag}) ASSERT tag.name IS UNIQUE",
    s"CREATE CONSTRAINT ON (symbol:${Schema.NodeLabels.Symbol}) ASSERT symbol.name IS UNIQUE"
  )

  // Payload shape required by Neo4j's transactional endpoint:
  // {"statements": [{"statement": "..."}, ...]}
  private val requestBody = Json.obj(
    "statements" -> Json.toJson(constraints.map(statement => Json.obj("statement" -> statement)))
  )

  // Authenticated JSON request against the transactional-commit endpoint.
  private val commitRequest = wsClient
    .url(neo4jURI + "/db/data/transaction/commit")
    .withAuth(neo4jUserName, neo4jPassword, WSAuthScheme.BASIC)
    .withHeaders(
      "Accept" -> "application/json; charset=UTF-8",
      "Content-Type" -> "application/json"
    )

  // Fire-and-forget, exactly as before: the resulting Future is not awaited
  // and failures are not observed.
  commitRequest.post(requestBody)
}
Queendimimi/twitter_extractor
app/services/neo4j/ConstraintSetter.scala
Scala
apache-2.0
1,599
/**
 * Copyright (C) 2017 Pants project contributors (see CONTRIBUTORS.md).
 * Licensed under the Apache License, Version 2.0 (see LICENSE).
 */

package org.pantsbuild.zinc.options

import java.io.File

/** A parseable set of command-line options for a configuration type `T`.
  * Implementors supply `empty` and `options`; parsing folds matched options
  * into a `T` and treats anything starting with '-' that no option claims as
  * an "Unknown option" error.
  */
trait OptionSet[T] {
  /** An empty set of options. */
  def empty: T

  /** Apply any residual entries to an instance of T and return a new T.
    * The default implementation rejects any residual arguments; override to
    * accept positional arguments.
    */
  def applyResidual(t: T, residualArgs: Seq[String]): T =
    if (residualArgs.nonEmpty) {
      throw new RuntimeException(
        s"Unexpected residual arguments: ${residualArgs.mkString("[", ", ", "]")}"
      )
    } else {
      t
    }

  /** All available command-line options. */
  def options: Seq[OptionDef[T]]

  private def allOptions: Set[OptionDef[T]] = options.toSet

  /**
   * Print out the usage message.
   */
  def printUsage(cmdName: String, residualArgs: String = ""): Unit = {
    // align descriptions two columns past the longest option
    val column = options.map(_.length).max + 2
    println(s"Usage: ${cmdName} <options> ${residualArgs}")
    options foreach { opt => if (opt.extraline) println(); println(opt.usage(column)) }
    println()
  }

  /**
   * Anything starting with '-' is considered an option, not a source file.
   */
  private def isOpt(s: String) = s startsWith "-"

  /**
   * Parse all args into a T.
   * Residual args are either unknown options or applied.
   */
  def parse(args: Seq[String]): Parsed[T] = {
    val Parsed(instance, remaining, errors) = Options.parse(empty, allOptions, args, stopOnError = false)
    // split what Options.parse couldn't consume into unknown flags vs residuals
    val (unknown, residual) = remaining partition isOpt
    val unknownErrors = unknown map ("Unknown option: " + _)
    Parsed(applyResidual(instance, residual), Seq.empty, errors ++ unknownErrors)
  }

  // helpers for creating options

  def boolean(opt: String, desc: String, action: T => T) =
    new BooleanOption[T](Seq(opt), desc, action)

  def boolean(opts: (String, String), desc: String, action: T => T) =
    new BooleanOption[T](Seq(opts._1, opts._2), desc, action)

  def string(opt: String, arg: String, desc: String, action: (T, String) => T) =
    new StringOption[T](Seq(opt), arg, desc, action)

  def int(opt: String, arg: String, desc: String, action: (T, Int) => T) =
    new IntOption[T](Seq(opt), arg, desc, action)

  def long(opt: String, arg: String, desc: String, action: (T, Long) => T) =
    new LongOption[T](Seq(opt), arg, desc, action)

  def double(opt: String, arg: String, desc: String, action: (T, Double) => T) =
    new DoubleOption[T](Seq(opt), arg, desc, action)

  def fraction(opt: String, arg: String, desc: String, action: (T, Double) => T) =
    new FractionOption[T](Seq(opt), arg, desc, action)

  def file(opt: String, arg: String, desc: String, action: (T, File) => T) =
    new FileOption[T](Seq(opt), arg, desc, action)

  def path(opt: String, arg: String, desc: String, action: (T, Seq[File]) => T) =
    new PathOption[T](Seq(opt), arg, desc, action)

  def path(opts: (String, String), arg: String, desc: String, action: (T, Seq[File]) => T) =
    new PathOption[T](Seq(opts._1, opts._2), arg, desc, action)

  def prefix(pre: String, arg: String, desc: String, action: (T, String) => T) =
    new PrefixOption[T](pre, arg, desc, action)

  def filePair(opt: String, arg: String, desc: String, action: (T, (File, File)) => T) =
    new FilePairOption[T](Seq(opt), arg, desc, action)

  def fileMap(opt: String, desc: String, action: (T, Map[File, File]) => T) =
    new FileMapOption[T](Seq(opt), desc, action)

  def fileSeqMap(opt: String, desc: String, action: (T, Map[Seq[File], File]) => T) =
    new FileSeqMapOption[T](Seq(opt), desc, action)

  def header(label: String) =
    new HeaderOption[T](label)

  def dummy(opt: String, desc: String) =
    new DummyOption[T](opt, desc)
}
foursquare/pants
src/scala/org/pantsbuild/zinc/options/OptionSet.scala
Scala
apache-2.0
3,625
package org.http4s
package server
package jetty

import cats.effect._
import java.net.InetSocketAddress
import java.util
import javax.net.ssl.SSLContext
import javax.servlet.{DispatcherType, Filter}
import javax.servlet.http.HttpServlet
import org.eclipse.jetty.server.{ServerConnector, Server => JServer, _}
import org.eclipse.jetty.servlet.{FilterHolder, ServletContextHandler, ServletHolder}
import org.eclipse.jetty.util.component.AbstractLifeCycle.AbstractLifeCycleListener
import org.eclipse.jetty.util.component.LifeCycle
import org.eclipse.jetty.util.ssl.SslContextFactory
import org.eclipse.jetty.util.thread.QueuedThreadPool
import org.http4s.server.SSLKeyStoreSupport.StoreInfo
import org.http4s.servlet.{Http4sServlet, ServletContainer, ServletIo}
import org.log4s.getLogger
import scala.concurrent.ExecutionContext
import scala.collection.immutable
import scala.concurrent.duration._

/**
 * Immutable builder for an embedded Jetty server hosting http4s services,
 * servlets and filters. Every `with*`/`mount*`/`bind*` method returns a new
 * builder via the private `copy`; nothing happens until `start` is called.
 */
sealed class JettyBuilder[F[_]: Effect] private (
    socketAddress: InetSocketAddress,
    private val executionContext: ExecutionContext,
    private val idleTimeout: Duration,
    private val asyncTimeout: Duration,
    private val servletIo: ServletIo[F],
    sslBits: Option[SSLConfig],
    mounts: Vector[Mount[F]],
    private val serviceErrorHandler: ServiceErrorHandler[F],
    banner: immutable.Seq[String]
) extends ServletContainer[F]
    with ServerBuilder[F]
    with IdleTimeoutSupport[F]
    with SSLKeyStoreSupport[F]
    with SSLContextSupport[F] {

  private val F = Effect[F]

  type Self = JettyBuilder[F]

  private[this] val logger = getLogger

  // Single point of construction used by all the wither methods below.
  private def copy(
      socketAddress: InetSocketAddress = socketAddress,
      executionContext: ExecutionContext = executionContext,
      idleTimeout: Duration = idleTimeout,
      asyncTimeout: Duration = asyncTimeout,
      servletIo: ServletIo[F] = servletIo,
      sslBits: Option[SSLConfig] = sslBits,
      mounts: Vector[Mount[F]] = mounts,
      serviceErrorHandler: ServiceErrorHandler[F] = serviceErrorHandler,
      banner: immutable.Seq[String] = banner
  ): Self =
    new JettyBuilder(
      socketAddress,
      executionContext,
      idleTimeout,
      asyncTimeout,
      servletIo,
      sslBits,
      mounts,
      serviceErrorHandler,
      banner)

  /** Configures TLS from a key store (and optional trust store). */
  override def withSSL(
      keyStore: StoreInfo,
      keyManagerPassword: String,
      protocol: String,
      trustStore: Option[StoreInfo],
      clientAuth: Boolean
  ): Self =
    copy(
      sslBits = Some(KeyStoreBits(keyStore, keyManagerPassword, protocol, trustStore, clientAuth)))

  /** Configures TLS from an already-built SSLContext. */
  override def withSSLContext(sslContext: SSLContext, clientAuth: Boolean): Self =
    copy(sslBits = Some(SSLContextBits(sslContext, clientAuth)))

  override def bindSocketAddress(socketAddress: InetSocketAddress): Self =
    copy(socketAddress = socketAddress)

  override def withExecutionContext(executionContext: ExecutionContext): Self =
    copy(executionContext = executionContext)

  /**
   * Queues a raw servlet for registration at `start` time.
   * The mount index is used to synthesize a name when none is given.
   */
  override def mountServlet(
      servlet: HttpServlet,
      urlMapping: String,
      name: Option[String] = None): Self =
    copy(mounts = mounts :+ Mount[F] { (context, index, _) =>
      val servletName = name.getOrElse(s"servlet-$index")
      context.addServlet(new ServletHolder(servletName, servlet), urlMapping)
    })

  /** Queues a servlet filter for registration at `start` time. */
  override def mountFilter(
      filter: Filter,
      urlMapping: String,
      name: Option[String],
      dispatches: util.EnumSet[DispatcherType]
  ): Self =
    copy(mounts = mounts :+ Mount[F] { (context, index, _) =>
      val filterName = name.getOrElse(s"filter-$index")
      val filterHolder = new FilterHolder(filter)
      filterHolder.setName(filterName)
      context.addFilter(filterHolder, urlMapping, dispatches)
    })

  /**
   * Queues an http4s service under `prefix`. The Http4sServlet is built from
   * the `builder` handed to the mount closure at `start` time (see `start`,
   * which passes `this`), so settings applied after `mountService` still take
   * effect.
   */
  override def mountService(service: HttpService[F], prefix: String): Self =
    copy(mounts = mounts :+ Mount[F] { (context, index, builder) =>
      val servlet = new Http4sServlet(
        service = service,
        asyncTimeout = builder.asyncTimeout,
        servletIo = builder.servletIo,
        executionContext = builder.executionContext,
        serviceErrorHandler = builder.serviceErrorHandler
      )
      val servletName = s"servlet-$index"
      val urlMapping = ServletContainer.prefixMapping(prefix)
      context.addServlet(new ServletHolder(servletName, servlet), urlMapping)
    })

  override def withIdleTimeout(idleTimeout: Duration): Self = copy(idleTimeout = idleTimeout)

  override def withAsyncTimeout(asyncTimeout: Duration): Self = copy(asyncTimeout = asyncTimeout)

  override def withServletIo(servletIo: ServletIo[F]): Self = copy(servletIo = servletIo)

  def withServiceErrorHandler(serviceErrorHandler: ServiceErrorHandler[F]): Self =
    copy(serviceErrorHandler = serviceErrorHandler)

  def withBanner(banner: immutable.Seq[String]): Self = copy(banner = banner)

  /**
   * Builds the server connector: a TLS connector when any SSL configuration
   * is present, otherwise a plain HTTP connector.
   */
  private def getConnector(jetty: JServer): ServerConnector = {
    // Shared tail: wraps the given factory in an HTTPS-aware connector.
    def serverConnector(sslContextFactory: SslContextFactory) = {
      // SSL HTTP Configuration
      val https_config = new HttpConfiguration()
      https_config.setSecureScheme("https")
      https_config.setSecurePort(socketAddress.getPort)
      https_config.addCustomizer(new SecureRequestCustomizer())
      val connectionFactory = new HttpConnectionFactory(https_config)
      new ServerConnector(
        jetty,
        new SslConnectionFactory(
          sslContextFactory,
          org.eclipse.jetty.http.HttpVersion.HTTP_1_1.asString()),
        connectionFactory)
    }

    sslBits match {
      case Some(KeyStoreBits(keyStore, keyManagerPassword, protocol, trustStore, clientAuth)) =>
        // SSL Context Factory
        val sslContextFactory = new SslContextFactory()
        sslContextFactory.setKeyStorePath(keyStore.path)
        sslContextFactory.setKeyStorePassword(keyStore.password)
        sslContextFactory.setKeyManagerPassword(keyManagerPassword)
        sslContextFactory.setNeedClientAuth(clientAuth)
        sslContextFactory.setProtocol(protocol)
        trustStore.foreach { trustManagerBits =>
          sslContextFactory.setTrustStorePath(trustManagerBits.path)
          sslContextFactory.setTrustStorePassword(trustManagerBits.password)
        }
        serverConnector(sslContextFactory)
      case Some(SSLContextBits(sslContext, clientAuth)) =>
        val sslContextFactory = new SslContextFactory()
        sslContextFactory.setSslContext(sslContext)
        sslContextFactory.setNeedClientAuth(clientAuth)
        serverConnector(sslContextFactory)
      case None =>
        val connectionFactory = new HttpConnectionFactory
        new ServerConnector(jetty, connectionFactory)
    }
  }

  /**
   * Assembles and starts the Jetty server: one root ServletContextHandler,
   * one connector, then each queued mount in registration order. Returns a
   * handle exposing shutdown, shutdown hooks, and the bound address.
   */
  def start: F[Server[F]] = F.delay {
    val threadPool = new QueuedThreadPool
    val jetty = new JServer(threadPool)

    val context = new ServletContextHandler()
    context.setContextPath("/")
    jetty.setHandler(context)

    val connector = getConnector(jetty)
    connector.setHost(socketAddress.getHostString)
    connector.setPort(socketAddress.getPort)
    // Jetty treats a non-positive idle timeout as "no timeout".
    connector.setIdleTimeout(if (idleTimeout.isFinite()) idleTimeout.toMillis else -1)
    jetty.addConnector(connector)

    // Mounts receive the final builder (`this`), so they see the settings
    // in effect at start time, not at mount time.
    for ((mount, i) <- mounts.zipWithIndex)
      mount.f(context, i, this)

    jetty.start()

    val server = new Server[F] {
      override def shutdown: F[Unit] = F.delay(jetty.stop())

      override def onShutdown(f: => Unit): this.type = {
        jetty.addLifeCycleListener {
          new AbstractLifeCycleListener {
            override def lifeCycleStopped(event: LifeCycle): Unit = f
          }
        }
        this
      }

      // Resolved from the live connector so an ephemeral port (0) reports
      // the actual bound port.
      lazy val address: InetSocketAddress = {
        val host = socketAddress.getHostString
        val port = jetty.getConnectors()(0).asInstanceOf[ServerConnector].getLocalPort
        new InetSocketAddress(host, port)
      }

      lazy val isSecure: Boolean = sslBits.isDefined
    }

    banner.foreach(logger.info(_))
    logger.info(
      s"http4s v${BuildInfo.version} on Jetty v${JServer.getVersion} started at ${server.baseUri}")

    server
  }
}

object JettyBuilder {
  /** Builder pre-populated with the library-wide defaults; no TLS, no mounts. */
  def apply[F[_]: Effect] =
    new JettyBuilder[F](
      socketAddress = ServerBuilder.DefaultSocketAddress,
      executionContext = ExecutionContext.global,
      idleTimeout = IdleTimeoutSupport.DefaultIdleTimeout,
      asyncTimeout = AsyncTimeoutSupport.DefaultAsyncTimeout,
      servletIo = ServletContainer.DefaultServletIo,
      sslBits = None,
      mounts = Vector.empty,
      serviceErrorHandler = DefaultServiceErrorHandler,
      banner = ServerBuilder.DefaultBanner
    )
}

/** Deferred registration action, run once per mount during `start`. */
private final case class Mount[F[_]](f: (ServletContextHandler, Int, JettyBuilder[F]) => Unit)
reactormonk/http4s
jetty/src/main/scala/org/http4s/server/jetty/JettyBuilder.scala
Scala
apache-2.0
8,620
package org.elastic.elasticsearch.scala.driver.v2_3

import org.elastic.elasticsearch.scala.driver.common._

/**
  * The full API model in v2.3
  *
  * Each trait extends the corresponding version-independent model from
  * `driver.common` and serves as the extension point for resources that are
  * specific to Elasticsearch 2.3.
  */
object ApiModel_v2_3 {

  /** v2.3 view of the common API resources. */
  trait Common extends ApiModelCommon {
    //2.3 specific common resources
  }

  /** v2.3 view of the cluster API resources. */
  trait Cluster extends ApiModelCluster {
    //2.3 specific cluster resources
  }

  /** v2.3 view of the indices API resources. */
  trait Indices extends ApiModelIndices {
    //2.3 specific indices resources
  }

  /** v2.3 view of the search API resources. */
  trait Search extends ApiModelSearch {
    //2.3 specific search resources
  }

  /** v2.3 view of the Xpack API resources. */
  trait Xpack extends ApiModelXpack {
    //2.3 specific Xpack resources
  }
}
Alex-At-Home/elasticsearch_scala_driver
elasticsearch_scala_core/shared/src/main/scala/org/elastic/elasticsearch/scala/driver/v2_3/ApiModel_v2_3.scala
Scala
apache-2.0
601
package com.github.sstone.amqp

import org.scalatest.matchers.ShouldMatchers
import org.scalatest.WordSpecLike
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import akka.actor.{ActorRef, ActorSystem}
import akka.pattern.gracefulStop
import akka.util.Timeout
import concurrent.duration._
import concurrent.Await
import com.rabbitmq.client.{ConnectionFactory, Address, Channel}
import Amqp._
import ConnectionOwner.{Connected, CreateChannel, Disconnected}
import java.util.concurrent.TimeUnit

/**
 * Integration tests for [[ConnectionOwner]].
 *
 * Requires a reachable RabbitMQ broker; its URI is read from the
 * "amqp-client-test.rabbitmq.uri" entry of the actor system's configuration.
 */
@RunWith(classOf[JUnitRunner])
class ConnectionOwnerSpec extends TestKit(ActorSystem("TestSystem")) with WordSpecLike with ShouldMatchers with ImplicitSender {
  implicit val timeout = Timeout(5 seconds)

  /** Builds a ConnectionFactory configured from the test configuration URI. */
  private def newConnectionFactory: ConnectionFactory = {
    val connFactory = new ConnectionFactory()
    val uri = system.settings.config.getString("amqp-client-test.rabbitmq.uri")
    connFactory.setUri(uri)
    connFactory
  }

  /**
   * Asks `conn` for `count` channels, each from a fresh probe, and asserts
   * that every request is answered with a Channel within 2 seconds.
   * (Previously this loop and the factory setup were duplicated in each test.)
   */
  private def expectChannels(conn: ActorRef, count: Int): Unit = {
    for (i <- 0 until count) {
      val p = TestProbe()
      p.send(conn, CreateChannel)
      p.expectMsgClass(2.second, classOf[Channel])
    }
  }

  "ConnectionOwner" should {
    "provide channels for many child actors" in {
      val conn = system.actorOf(ConnectionOwner.props(newConnectionFactory))
      Amqp.waitForConnection(system, conn).await(2, TimeUnit.SECONDS)
      expectChannels(conn, 100)
      Await.result(gracefulStop(conn, 5 seconds), 6 seconds)
    }

    "connect even if the default host is unavailable" in {
      val connFactory = newConnectionFactory
      // Break the factory's default host; the owner must fall back to the
      // address list, whose second entry is the real broker.
      val goodHost = connFactory.getHost
      connFactory.setHost("fake-host")
      val conn = system.actorOf(ConnectionOwner.props(connFactory, addresses = Some(Array(
        new Address("another.fake.host"),
        new Address(goodHost)
      ))))
      Amqp.waitForConnection(system, conn).await(50, TimeUnit.SECONDS)
      expectChannels(conn, 100)
      Await.result(gracefulStop(conn, 5 seconds), 6 seconds)
    }

    "send Connected/Disconnected status messages" in {
      val probe = TestProbe()
      val conn = system.actorOf(ConnectionOwner.props(newConnectionFactory))
      conn ! AddStatusListener(probe.ref)
      probe.expectMsg(2 seconds, Connected)
      conn ! Abort()
      probe.expectMsg(2 seconds, Disconnected)
    }
  }
}
sstone/amqp-client
src/test/scala/com/github/sstone/amqp/ConnectionOwnerSpec.scala
Scala
mit
2,670
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3

package com.google.protobuf.source_context

/**
 * Generated file-level descriptor object for `google/protobuf/source_context.proto`.
 * Holds the serialized descriptor and the companions of the messages it defines.
 */
object SourceContextProto extends _root_.scalapb.GeneratedFileObject {
  // source_context.proto imports no other .proto files.
  lazy val dependencies: Seq[_root_.scalapb.GeneratedFileObject] = Seq.empty
  // Companions for every top-level message declared in the file.
  lazy val messagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] =
    Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]](
      com.google.protobuf.source_context.SourceContext
    )
  // Serialized FileDescriptorProto for this .proto file, base64-encoded.
  private lazy val ProtoBytes: _root_.scala.Array[Byte] =
    scalapb.Encoding.fromBase64(scala.collection.immutable.Seq(
      """CiRnb29nbGUvcHJvdG9idWYvc291cmNlX2NvbnRleHQucHJvdG8SD2dvb2dsZS5wcm90b2J1ZiI7Cg1Tb3VyY2VDb250ZXh0E
ioKCWZpbGVfbmFtZRgBIAEoCUIN4j8KEghmaWxlTmFtZVIIZmlsZU5hbWUClQEKE2NvbS5nb29nbGUucHJvdG9idWZCElNvdXJjZ
UNvbnRleHRQcm90b1ABWkFnb29nbGUuZ29sYW5nLm9yZy9nZW5wcm90by9wcm90b2J1Zi9zb3VyY2VfY29udGV4dDtzb3VyY2VfY
29udGV4dKICA0dQQqoCHkdvb2dsZS5Qcm90b2J1Zi5XZWxsS25vd25UeXBlc2IGcHJvdG8z"""
    ).mkString)
  // Scala-side descriptor, parsed lazily from the serialized bytes above.
  lazy val scalaDescriptor: _root_.scalapb.descriptors.FileDescriptor = {
    val scalaProto = com.google.protobuf.descriptor.FileDescriptorProto.parseFrom(ProtoBytes)
    _root_.scalapb.descriptors.FileDescriptor.buildFrom(scalaProto, dependencies.map(_.scalaDescriptor))
  }
  // Java-side descriptor, delegated to protobuf-java's generated class.
  lazy val javaDescriptor: com.google.protobuf.Descriptors.FileDescriptor =
    com.google.protobuf.SourceContextProto.getDescriptor()
  @deprecated("Use javaDescriptor instead. In a future version this will refer to scalaDescriptor.", "ScalaPB 0.5.47")
  def descriptor: com.google.protobuf.Descriptors.FileDescriptor = javaDescriptor
}
scalapb/ScalaPB
scalapb-runtime/src/main/scalajvm/com/google/protobuf/source_context/SourceContextProto.scala
Scala
apache-2.0
1,716
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.streaming

import java.{util => ju}
import java.lang.{Long => JLong}
import java.util.UUID

import scala.collection.JavaConverters._
import scala.util.control.NonFatal

import org.json4s._
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

import org.apache.spark.annotation.Evolving
import org.apache.spark.sql.streaming.SinkProgress.DEFAULT_NUM_OUTPUT_ROWS

/**
 * Information about updates made to stateful operators in a [[StreamingQuery]] during a trigger.
 */
@Evolving
class StateOperatorProgress private[sql](
    val numRowsTotal: Long,
    val numRowsUpdated: Long,
    val memoryUsedBytes: Long,
    val customMetrics: ju.Map[String, JLong] = new ju.HashMap()
  ) extends Serializable {

  /** The compact JSON representation of this progress. */
  def json: String = compact(render(jsonValue))

  /** The pretty (i.e. indented) JSON representation of this progress. */
  def prettyJson: String = pretty(render(jsonValue))

  // Copy with only numRowsUpdated replaced; all other fields are carried over.
  private[sql] def copy(newNumRowsUpdated: Long): StateOperatorProgress =
    new StateOperatorProgress(numRowsTotal, newNumRowsUpdated, memoryUsedBytes, customMetrics)

  private[sql] def jsonValue: JValue = {
    ("numRowsTotal" -> JInt(numRowsTotal)) ~
    ("numRowsUpdated" -> JInt(numRowsUpdated)) ~
    ("memoryUsedBytes" -> JInt(memoryUsedBytes)) ~
    ("customMetrics" -> {
      if (!customMetrics.isEmpty) {
        // Keys are sorted so the JSON output is deterministic.
        val keys = customMetrics.keySet.asScala.toSeq.sorted
        keys.map { k => k -> JInt(customMetrics.get(k).toLong) : JObject }.reduce(_ ~ _)
      } else {
        // Omit the field entirely rather than emitting an empty object.
        JNothing
      }
    })
  }

  override def toString: String = prettyJson
}

/**
 * Information about progress made in the execution of a [[StreamingQuery]] during
 * a trigger. Each event relates to processing done for a single trigger of the streaming
 * query. Events are emitted even when no new data is available to be processed.
 *
 * @param id An unique query id that persists across restarts. See `StreamingQuery.id()`.
 * @param runId A query id that is unique for every start/restart. See `StreamingQuery.runId()`.
 * @param name User-specified name of the query, null if not specified.
 * @param timestamp Beginning time of the trigger in ISO8601 format, i.e. UTC timestamps.
 * @param batchId A unique id for the current batch of data being processed.  Note that in the
 *                case of retries after a failure a given batchId may be executed more than once.
 *                Similarly, when there is no data to be processed, the batchId will not be
 *                incremented.
 * @param durationMs The amount of time taken to perform various operations in milliseconds.
 * @param eventTime Statistics of event time seen in this batch. It may contain the following
 *                  keys:
 *                 {{{
 *                   "max" -> "2016-12-05T20:54:20.827Z"  // maximum event time seen in this trigger
 *                   "min" -> "2016-12-05T20:54:20.827Z"  // minimum event time seen in this trigger
 *                   "avg" -> "2016-12-05T20:54:20.827Z"  // average event time seen in this trigger
 *                   "watermark" -> "2016-12-05T20:54:20.827Z"  // watermark used in this trigger
 *                 }}}
 *                 All timestamps are in ISO8601 format, i.e. UTC timestamps.
 * @param stateOperators Information about operators in the query that store state.
 * @param sources detailed statistics on data being read from each of the streaming sources.
 * @since 2.1.0
 */
@Evolving
class StreamingQueryProgress private[sql](
  val id: UUID,
  val runId: UUID,
  val name: String,
  val timestamp: String,
  val batchId: Long,
  val durationMs: ju.Map[String, JLong],
  val eventTime: ju.Map[String, String],
  val stateOperators: Array[StateOperatorProgress],
  val sources: Array[SourceProgress],
  val sink: SinkProgress) extends Serializable {

  /** The aggregate (across all sources) number of records processed in a trigger. */
  def numInputRows: Long = sources.map(_.numInputRows).sum

  /** The aggregate (across all sources) rate of data arriving. */
  def inputRowsPerSecond: Double = sources.map(_.inputRowsPerSecond).sum

  /** The aggregate (across all sources) rate at which Spark is processing data. */
  def processedRowsPerSecond: Double = sources.map(_.processedRowsPerSecond).sum

  /** The compact JSON representation of this progress. */
  def json: String = compact(render(jsonValue))

  /** The pretty (i.e. indented) JSON representation of this progress. */
  def prettyJson: String = pretty(render(jsonValue))

  override def toString: String = prettyJson

  private[sql] def jsonValue: JValue = {
    // NaN/Infinity are not representable in JSON, so those fields are dropped.
    def safeDoubleToJValue(value: Double): JValue = {
      if (value.isNaN || value.isInfinity) JNothing else JDouble(value)
    }

    /** Convert map to JValue while handling empty maps. Also, this sorts the keys. */
    def safeMapToJValue[T](map: ju.Map[String, T], valueToJValue: T => JValue): JValue = {
      if (map.isEmpty) return JNothing
      val keys = map.asScala.keySet.toSeq.sorted
      keys.map { k => k -> valueToJValue(map.get(k)) : JObject }.reduce(_ ~ _)
    }

    ("id" -> JString(id.toString)) ~
    ("runId" -> JString(runId.toString)) ~
    ("name" -> JString(name)) ~
    ("timestamp" -> JString(timestamp)) ~
    ("batchId" -> JInt(batchId)) ~
    ("numInputRows" -> JInt(numInputRows)) ~
    ("inputRowsPerSecond" -> safeDoubleToJValue(inputRowsPerSecond)) ~
    ("processedRowsPerSecond" -> safeDoubleToJValue(processedRowsPerSecond)) ~
    ("durationMs" -> safeMapToJValue[JLong](durationMs, v => JInt(v.toLong))) ~
    ("eventTime" -> safeMapToJValue[String](eventTime, s => JString(s))) ~
    ("stateOperators" -> JArray(stateOperators.map(_.jsonValue).toList)) ~
    ("sources" -> JArray(sources.map(_.jsonValue).toList)) ~
    ("sink" -> sink.jsonValue)
  }
}

/**
 * Information about progress made for a source in the execution of a [[StreamingQuery]]
 * during a trigger. See [[StreamingQueryProgress]] for more information.
 *
 * @param description Description of the source.
 * @param startOffset The starting offset for data being read.
 * @param endOffset The ending offset for data being read.
 * @param numInputRows The number of records read from this source.
 * @param inputRowsPerSecond The rate at which data is arriving from this source.
 * @param processedRowsPerSecond The rate at which data from this source is being processed by
 *                               Spark.
 * @since 2.1.0
 */
@Evolving
class SourceProgress protected[sql](
  val description: String,
  val startOffset: String,
  val endOffset: String,
  val numInputRows: Long,
  val inputRowsPerSecond: Double,
  val processedRowsPerSecond: Double) extends Serializable {

  /** The compact JSON representation of this progress. */
  def json: String = compact(render(jsonValue))

  /** The pretty (i.e. indented) JSON representation of this progress. */
  def prettyJson: String = pretty(render(jsonValue))

  override def toString: String = prettyJson

  private[sql] def jsonValue: JValue = {
    // NaN/Infinity are not representable in JSON, so those fields are dropped.
    def safeDoubleToJValue(value: Double): JValue = {
      if (value.isNaN || value.isInfinity) JNothing else JDouble(value)
    }

    ("description" -> JString(description)) ~
      ("startOffset" -> tryParse(startOffset)) ~
      ("endOffset" -> tryParse(endOffset)) ~
      ("numInputRows" -> JInt(numInputRows)) ~
      ("inputRowsPerSecond" -> safeDoubleToJValue(inputRowsPerSecond)) ~
      ("processedRowsPerSecond" -> safeDoubleToJValue(processedRowsPerSecond))
  }

  // Offsets are source-specific strings; embed them as JSON when they parse,
  // otherwise fall back to a plain JSON string.
  private def tryParse(json: String) = try {
    parse(json)
  } catch {
    case NonFatal(e) => JString(json)
  }
}

/**
 * Information about progress made for a sink in the execution of a [[StreamingQuery]]
 * during a trigger. See [[StreamingQueryProgress]] for more information.
 *
 * @param description Description of the source corresponding to this status.
 * @param numOutputRows Number of rows written to the sink or -1 for Continuous Mode (temporarily)
 *                      or Sink V1 (until decommissioned).
 * @since 2.1.0
 */
@Evolving
class SinkProgress protected[sql](
    val description: String,
    val numOutputRows: Long) extends Serializable {

  /** SinkProgress without custom metrics. */
  protected[sql] def this(description: String) {
    this(description, DEFAULT_NUM_OUTPUT_ROWS)
  }

  /** The compact JSON representation of this progress. */
  def json: String = compact(render(jsonValue))

  /** The pretty (i.e. indented) JSON representation of this progress. */
  def prettyJson: String = pretty(render(jsonValue))

  override def toString: String = prettyJson

  private[sql] def jsonValue: JValue = {
    ("description" -> JString(description)) ~
      ("numOutputRows" -> JInt(numOutputRows))
  }
}

private[sql] object SinkProgress {
  // Sentinel meaning "row count not available" (Continuous Mode / Sink V1).
  val DEFAULT_NUM_OUTPUT_ROWS: Long = -1L

  def apply(description: String, numOutputRows: Option[Long]): SinkProgress =
    new SinkProgress(description, numOutputRows.getOrElse(DEFAULT_NUM_OUTPUT_ROWS))
}
pgandhi999/spark
sql/core/src/main/scala/org/apache/spark/sql/streaming/progress.scala
Scala
apache-2.0
9,878
package com.elsevier.soda

import com.elsevier.soda.messages._
import org.junit.{Assert, FixMethodOrder, Test}
import org.junit.runners.MethodSorters

import scala.io.Source

/**
 * Integration tests for SodaClient against a SODA server expected at
 * http://localhost:8080. Tests run in name order (FixMethodOrder
 * NAME_ASCENDING) because they form a pipeline: load a lexicon, query it,
 * then delete it.
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
class SodaClientTest {

  val sodaClient = new SodaClient("http://localhost:8080")
  val lexiconName = "test_countries-2"
  val lookupId = "http://test-countries-2.com/ABW"
  // Matching modes exercised by the annotation/coverage tests.
  val matchings = List("exact", "lower", "stop", "stem1", "stem2", "stem3")
  val text = "Institute of Clean Coal Technology, East China University of Science and Technology, Shanghai 200237, China"
  val phrase = "Emirates"
  // Reverse lookup additionally supports sorted-phrase matchings.
  val phraseMatchings = matchings ++ List("lsort", "s3sort")

  @Test
  def test_001_index(): Unit = {
    val indexResponse = sodaClient.index()
    Assert.assertEquals("ok", indexResponse.status)
  }

  @Test
  def test_002_add(): Unit = {
    // FIX: the Source was previously never closed (leaked file handle);
    // close it even if an assertion fails mid-file.
    val source = Source.fromFile("src/main/resources/test-countries.tsv")
    try {
      var numLoaded = 0
      source.getLines()
        .foreach(line => {
          val Array(id, syns) = line.split("\\t")
          val idModified = id.replace("test-countries", "test-countries-2")
          val names = syns.split("\\\\|").toArray
          // Commit every 100 entries (including the very first).
          val commit = (numLoaded % 100 == 0)
          val addResponse = sodaClient.add(lexiconName, idModified, names, commit)
          Assert.assertEquals("ok", addResponse.status)
          numLoaded += 1
        })
    } finally {
      source.close()
    }
    // Final call with commit = true and null id/names — presumably a
    // commit-only flush of pending entries (confirm against SodaClient.add).
    val finalResponse = sodaClient.add(lexiconName, null, null, true)
    Assert.assertEquals("ok", finalResponse.status)
  }

  @Test
  def test_003_dicts(): Unit = {
    val dictResponse = sodaClient.dicts()
    Assert.assertEquals("ok", dictResponse.status)
    // The lexicon loaded in test_002 must be listed exactly once.
    Assert.assertEquals(1, dictResponse.lexicons.filter(lc => lc.lexicon.equals(lexiconName)).size)
  }

  @Test
  def test_004_annot(): Unit = {
    matchings.foreach(matching => {
      val annotResponse = sodaClient.annot(lexiconName, text, matching)
      Assert.assertEquals("ok", annotResponse.status)
      Assert.assertTrue(annotResponse.annotations.size > 0)
    })
  }

  @Test
  def test_005_coverage(): Unit = {
    matchings.foreach(matching => {
      val coverageResponse = sodaClient.coverage(text, matching)
      Assert.assertEquals("ok", coverageResponse.status)
      Assert.assertEquals(1, coverageResponse.lexicons.filter(lc => lc.lexicon.equals(lexiconName)).size)
    })
  }

  @Test
  def test_006_lookup(): Unit = {
    val lookupResponse = sodaClient.lookup(lexiconName, lookupId)
    Assert.assertEquals("ok", lookupResponse.status)
    Assert.assertEquals(1, lookupResponse.entries.size)
  }

  @Test
  def test_007_reverseLookup(): Unit = {
    phraseMatchings.foreach(matching => {
      val reverseLookupResponse = sodaClient.rlookup(lexiconName, phrase, matching)
      Assert.assertEquals("ok", reverseLookupResponse.status)
      Assert.assertEquals(1, reverseLookupResponse.entries.size)
    })
  }

  @Test
  def test_008_delete(): Unit = {
    // Delete a single entry first, then wipe the whole lexicon.
    val deleteResponseOne = sodaClient.delete(lexiconName, lookupId)
    Assert.assertEquals("ok", deleteResponseOne.status)
    val deleteResponse = sodaClient.delete(lexiconName, "*")
    Assert.assertEquals("ok", deleteResponse.status)
  }
}
elsevierlabs-os/soda
src/test/scala/com/elsevier/soda/SodaClientTest.scala
Scala
apache-2.0
3,428
package net.node3.freelancerdatabase.db

import android.content.Context
import android.database.sqlite.SQLiteDatabase
import android.database.sqlite.SQLiteOpenHelper
import net.node3.freelancerdatabase.db.tables.SectorTable
import net.node3.freelancerdatabase.db.tables.StarSystemTable
import net.node3.freelancerdatabase.db.tables.SolarObjectTypeTable
import net.node3.freelancerdatabase.db.tables.SolarObjectTable
import net.node3.freelancerdatabase.db.tables.SystemConnectionTable

/**
 * SQLiteOpenHelper that fans schema creation and upgrades out to the
 * individual table objects listed in `tables`.
 */
class DatabaseHelper(val context: Context)
    extends SQLiteOpenHelper(context, DatabaseInfo.databaseName, null, DatabaseInfo.databaseVersion) {

  // Every table managed by this database, in registration order.
  private val tables = List(
    SectorTable(context),
    SolarObjectTypeTable(context),
    StarSystemTable(context),
    SolarObjectTable(context),
    SystemConnectionTable(context)
  )

  override def onOpen(db: SQLiteDatabase) = {
    super.onOpen(db)
    // SQLite foreign-key enforcement is off by default and must be enabled
    // per connection; skip it for read-only connections.
    if (!db.isReadOnly()) db.execSQL("PRAGMA foreign_keys=ON;")
  }

  override def onCreate(db: SQLiteDatabase) =
    tables.foreach(_.onCreate(db))

  override def onUpgrade(db: SQLiteDatabase, oldVersion: Int, newVersion: Int) =
    tables.foreach(_.onUpgrade(db, oldVersion, newVersion))
}

/** Database name/version constants shared with the helper above. */
object DatabaseInfo {
  val databaseName = "freelancer.db"
  val databaseVersion = 1
}
nadams/freelancer-database
src/net/node3/freelancerdatabase/db/DatabaseHelper.scala
Scala
mit
1,358
package org.jetbrains.plugins.scala.lang.psi.impl.expr

import com.intellij.lang.ASTNode
import org.jetbrains.plugins.scala.ScalaBundle
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.types.result._

/**
 * PSI implementation for a typed expression (`expr: Type`).
 *
 * @author Alexander Podkhalyuzin
 * Date: 06.03.2008
 */
class ScTypedExpressionImpl(node: ASTNode) extends ScExpressionImplBase(node) with ScTypedExpression {

  // The ascription's type element wins when present; otherwise fall back to
  // the underlying expression's type, except for a bare underscore section,
  // which has no type of its own.
  protected override def innerType: TypeResult =
    typeElement match {
      case Some(te) => te.`type`()
      case None =>
        expr match {
          case _: ScUnderscoreSection =>
            Failure(ScalaBundle.message("typed.statement.is.not.complete.for.underscore.section"))
          case e => e.`type`()
        }
    }

  override def toString: String = "TypedExpression"
}
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScTypedExpressionImpl.scala
Scala
apache-2.0
771