| code (string, 5-1M chars) | repo_name (string, 5-109 chars) | path (string, 6-208 chars) | language (1 class) | license (15 classes) | size (int64, 5-1M) |
|---|---|---|---|---|---|
/**
* Copyright 2013 Robert Welin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mooo.nilewapps.bokbytarappen.server.validation
import spray.routing.Directives.validate
import com.mooo.nilewapps.bokbytarappen.server.ServiceErrors._
trait Validators {
/**
* Asserts that an email address is valid and available.
*/
def validateEmail(email: String) =
validate(EmailValidator.isValid(email), InvalidEmail) &
validate(EmailValidator.isAvailable(email), UnavailableEmail)
/**
* Asserts that a password has sufficient guessing entropy.
*/
def validatePassword(password: String) =
validate(PasswordValidator.threshold(password), BadPassword)
}
object Validators extends Validators
| nilewapp/BokBytarAppenServer | src/main/scala/com/mooo/nilewapps/bokbytarappen/server/validation/Validators.scala | Scala | apache-2.0 | 1,238 |
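Since each validator above is a spray `Directive0`, they compose with `&` and wrap an inner route directly. A minimal sketch of how a registration route might use them (the route, field names, and completion text are hypothetical):

import spray.routing.HttpService
import com.mooo.nilewapps.bokbytarappen.server.validation.Validators._

trait RegistrationService extends HttpService {
  // Hypothetical route: both directives must pass before the inner route runs;
  // otherwise spray rejects with InvalidEmail, UnavailableEmail or BadPassword.
  val registrationRoute =
    path("register") {
      formFields('email, 'password) { (email, password) =>
        (validateEmail(email) & validatePassword(password)) {
          complete(s"registered $email")
        }
      }
    }
}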
package com.treode.cps
/** The scalatest package contains tools to support testing with Scalatest 2.0. The scalatest
* Scala package lives in the scalatest Ivy package. To make the utilities available to your
* tests, add the CPS dependency in SBT as follows:
*
* '''
* libraryDependencies += "com.treode" %% "cps" % "0.2.0" % "compile;test->scalatest"
* '''
*
* The scalatest Ivy package implicitly pulls in the stub Ivy package, so the above line also
* makes the CPS stubs available to your tests.
*/
package object scalatest {}
| Treode/cps | src/scalatest/scala/com/treode/cps/scalatest/package.scala | Scala | apache-2.0 | 555 |
package frameless
import frameless.functions.CatalystExplodableCollection
import org.scalacheck.{Arbitrary, Prop}
import org.scalacheck.Prop.forAll
import org.scalacheck.Prop._
import scala.reflect.ClassTag
class ExplodeTests extends TypedDatasetSuite {
test("simple explode test") {
val ds = TypedDataset.create(Seq((1,Array(1,2))))
ds.explode('_2): TypedDataset[(Int,Int)]
}
test("explode on vectors/list/seq") {
def prop[F[X] <: Traversable[X] : CatalystExplodableCollection, A: TypedEncoder](xs: List[X1[F[A]]])(implicit arb: Arbitrary[F[A]], enc: TypedEncoder[F[A]]): Prop = {
val tds = TypedDataset.create(xs)
val framelessResults = tds.explode('a).collect().run().toVector
val scalaResults = xs.flatMap(_.a).map(Tuple1(_)).toVector
framelessResults ?= scalaResults
}
check(forAll(prop[Vector, Long] _))
check(forAll(prop[Seq, Int] _))
check(forAll(prop[Vector, Char] _))
check(forAll(prop[Vector, String] _))
check(forAll(prop[List, Long] _))
check(forAll(prop[List, Int] _))
check(forAll(prop[List, Char] _))
check(forAll(prop[List, String] _))
}
test("explode on arrays") {
def prop[A: TypedEncoder: ClassTag](xs: List[X1[Array[A]]]): Prop = {
val tds = TypedDataset.create(xs)
val framelessResults = tds.explode('a).collect().run().toVector
val scalaResults = xs.flatMap(_.a).map(Tuple1(_)).toVector
framelessResults ?= scalaResults
}
check(forAll(prop[Long] _))
check(forAll(prop[Int] _))
check(forAll(prop[String] _))
}
}
| adelbertc/frameless | dataset/src/test/scala/frameless/ExplodeTests.scala | Scala | apache-2.0 | 1,574 |
package ml.combust.mleap.runtime.transformer.feature
import ml.combust.mleap.core.feature.MathBinaryModel
import ml.combust.mleap.core.types.NodeShape
import ml.combust.mleap.runtime.function.UserDefinedFunction
import ml.combust.mleap.runtime.frame.{SimpleTransformer, Transformer}
/**
* Created by hollinwilkins on 12/27/16.
*/
case class MathBinary(override val uid: String = Transformer.uniqueName("math_binary"),
override val shape: NodeShape,
override val model: MathBinaryModel) extends SimpleTransformer {
val execAB: UserDefinedFunction = (a: Double, b: Double) => model(Some(a), Some(b))
val execA: UserDefinedFunction = (a: Double) => model(Some(a), None)
val execB: UserDefinedFunction = (b: Double) => model(None, Some(b))
val execNone: UserDefinedFunction = () => model(None, None)
override val exec: UserDefinedFunction = {
(shape.getInput("input_a"), shape.getInput("input_b")) match {
case (Some(_), Some(_)) => execAB
case (Some(_), None) => execA
case (None, Some(_)) => execB
case (None, None) => execNone
}
}
}
| combust/mleap | mleap-runtime/src/main/scala/ml/combust/mleap/runtime/transformer/feature/MathBinary.scala | Scala | apache-2.0 | 1,127 |
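Note the dispatch in `exec` above: the UDF is chosen once, when the transformer is constructed, based on which optional inputs the `NodeShape` declares, so rows pay no per-call branching. A standalone miniature of the same pattern, with hypothetical stand-in types (not the mleap API):

object ShapeDispatchSketch {
  // Hypothetical stand-in: a "shape" that may or may not declare each input.
  final case class Shape(inputs: Set[String]) {
    def getInput(name: String): Option[String] = inputs.find(_ == name)
  }

  class Adder(shape: Shape) {
    private def model(a: Option[Double], b: Option[Double]): Double =
      a.getOrElse(0.0) + b.getOrElse(0.0)

    // Selected once at construction time, mirroring MathBinary.exec above.
    val exec: (Double, Double) => Double =
      (shape.getInput("input_a"), shape.getInput("input_b")) match {
        case (Some(_), Some(_)) => (a, b) => model(Some(a), Some(b))
        case (Some(_), None)    => (a, _) => model(Some(a), None)
        case (None, Some(_))    => (_, b) => model(None, Some(b))
        case (None, None)       => (_, _) => model(None, None)
      }
  }
}

In the real transformer the four UDFs also differ in arity; the sketch flattens them to one signature to keep the dispatch visible.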
package fastgc
import java.net.Socket
import java.io.{ObjectInputStream, DataOutputStream}
import java.math.BigInteger
/**
* Refer to README for details.
* Author: Wei Xie
* Version:
*/
object TestServer {
def main(args: Array[String]) = {
// val aliceClient = new CircuitQuery()
// aliceClient.run(3491, Array("1"))
println("To connect")
val sock = new Socket("localhost", 3491)
println("Connected")
val outStream = new DataOutputStream(sock.getOutputStream)
val inStream = new ObjectInputStream(sock.getInputStream)
for (i <- 0 to 5) {
println("to write data..")
outStream.writeInt(1 + i)
println("finish writing. now get results..")
println(inStream.readObject().asInstanceOf[BigInteger])
println(inStream.readObject().asInstanceOf[BigInteger])
}
}
}
| XieConnect/SecureMA | src/test/scala/fastgc/TestServer.scala | Scala | mit | 853 |
package org.jetbrains.plugins.scala.lang.completion
package generated
class CompletionSmartInfixTest extends CompletionTestBase {
//This class was generated by build script, please don't change this
override def folderPath: String = super.folderPath + "smart/infix/"
def testSmartLeftExprInfixLeftAssoc = doTest
def testSmartRightExprInfix = doTest
def testSmartRightExprNotValTypeInfix = doTest
}
| LPTK/intellij-scala | test/org/jetbrains/plugins/scala/lang/completion/generated/CompletionSmartInfixTest.scala | Scala | apache-2.0 | 411 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
class DefaultSource extends SimpleScanSource
class SimpleScanSource extends RelationProvider {
override def createRelation(
sqlContext: SQLContext,
parameters: Map[String, String]): BaseRelation = {
SimpleScan(parameters("from").toInt, parameters("TO").toInt)(sqlContext.sparkSession)
}
}
case class SimpleScan(from: Int, to: Int)(@transient val sparkSession: SparkSession)
extends BaseRelation with TableScan {
override def sqlContext: SQLContext = sparkSession.sqlContext
override def schema: StructType =
StructType(StructField("i", IntegerType, nullable = false) :: Nil)
override def buildScan(): RDD[Row] = {
sparkSession.sparkContext.parallelize(from to to).map(Row(_))
}
}
class AllDataTypesScanSource extends SchemaRelationProvider {
override def createRelation(
sqlContext: SQLContext,
parameters: Map[String, String],
schema: StructType): BaseRelation = {
// Check that weird parameters are passed correctly.
parameters("option_with_underscores")
parameters("option.with.dots")
AllDataTypesScan(
parameters("from").toInt,
parameters("TO").toInt, schema)(sqlContext.sparkSession)
}
}
case class AllDataTypesScan(
from: Int,
to: Int,
userSpecifiedSchema: StructType)(@transient val sparkSession: SparkSession)
extends BaseRelation
with TableScan {
override def sqlContext: SQLContext = sparkSession.sqlContext
override def schema: StructType = userSpecifiedSchema
override def needConversion: Boolean = true
override def buildScan(): RDD[Row] = {
sparkSession.sparkContext.parallelize(from to to).map { i =>
Row(
s"str_$i",
s"str_$i".getBytes(StandardCharsets.UTF_8),
i % 2 == 0,
i.toByte,
i.toShort,
i,
i.toLong,
i.toFloat,
i.toDouble,
new java.math.BigDecimal(i),
new java.math.BigDecimal(i),
Date.valueOf("1970-01-01"),
new Timestamp(20000 + i),
s"varchar_$i",
s"char_$i",
Seq(i, i + 1),
Seq(Map(s"str_$i" -> Row(i.toLong))),
Map(i -> i.toString),
Map(Map(s"str_$i" -> i.toFloat) -> Row(i.toLong)),
Row(i, i.toString),
Row(Seq(s"str_$i", s"str_${i + 1}"),
Row(Seq(Date.valueOf(s"1970-01-${i + 1}")))))
}
}
}
class TableScanSuite extends DataSourceTest with SharedSQLContext {
protected override lazy val sql = spark.sql _
private lazy val tableWithSchemaExpected = (1 to 10).map { i =>
Row(
s"str_$i",
s"str_$i",
i % 2 == 0,
i.toByte,
i.toShort,
i,
i.toLong,
i.toFloat,
i.toDouble,
new java.math.BigDecimal(i),
new java.math.BigDecimal(i),
Date.valueOf("1970-01-01"),
new Timestamp(20000 + i),
s"varchar_$i",
s"char_$i",
Seq(i, i + 1),
Seq(Map(s"str_$i" -> Row(i.toLong))),
Map(i -> i.toString),
Map(Map(s"str_$i" -> i.toFloat) -> Row(i.toLong)),
Row(i, i.toString),
Row(Seq(s"str_$i", s"str_${i + 1}"), Row(Seq(Date.valueOf(s"1970-01-${i + 1}")))))
}.toSeq
override def beforeAll(): Unit = {
super.beforeAll()
sql(
"""
|CREATE TEMPORARY VIEW oneToTen
|USING org.apache.spark.sql.sources.SimpleScanSource
|OPTIONS (
| From '1',
| To '10',
| option_with_underscores 'someval',
| option.with.dots 'someval'
|)
""".stripMargin)
sql(
"""
|CREATE TEMPORARY VIEW tableWithSchema (
|`string$%Field` stRIng,
|binaryField binary,
|`booleanField` boolean,
|ByteField tinyint,
|shortField smaLlint,
|int_Field iNt,
|`longField_:,<>=+/~^` Bigint,
|floatField flOat,
|doubleField doubLE,
|decimalField1 decimal,
|decimalField2 decimal(9,2),
|dateField dAte,
|timestampField tiMestamp,
|varcharField varchaR(12),
|charField ChaR(18),
|arrayFieldSimple Array<inT>,
|arrayFieldComplex Array<Map<String, Struct<key:bigInt>>>,
|mapFieldSimple MAP<iNt, StRing>,
|mapFieldComplex Map<Map<stRING, fLOAT>, Struct<key:bigInt>>,
|structFieldSimple StRuct<key:INt, Value:STrINg>,
|structFieldComplex StRuct<key:Array<String>, Value:struct<`value_(2)`:Array<date>>>
|)
|USING org.apache.spark.sql.sources.AllDataTypesScanSource
|OPTIONS (
| From '1',
| To '10',
| option_with_underscores 'someval',
| option.with.dots 'someval'
|)
""".stripMargin)
}
sqlTest(
"SELECT * FROM oneToTen",
(1 to 10).map(Row(_)).toSeq)
sqlTest(
"SELECT i FROM oneToTen",
(1 to 10).map(Row(_)).toSeq)
sqlTest(
"SELECT i FROM oneToTen WHERE i < 5",
(1 to 4).map(Row(_)).toSeq)
sqlTest(
"SELECT i * 2 FROM oneToTen",
(1 to 10).map(i => Row(i * 2)).toSeq)
sqlTest(
"SELECT a.i, b.i FROM oneToTen a JOIN oneToTen b ON a.i = b.i + 1",
(2 to 10).map(i => Row(i, i - 1)).toSeq)
test("Schema and all fields") {
val expectedSchema = StructType(
StructField("string$%Field", StringType, true) ::
StructField("binaryField", BinaryType, true) ::
StructField("booleanField", BooleanType, true) ::
StructField("ByteField", ByteType, true) ::
StructField("shortField", ShortType, true) ::
StructField("int_Field", IntegerType, true) ::
StructField("longField_:,<>=+/~^", LongType, true) ::
StructField("floatField", FloatType, true) ::
StructField("doubleField", DoubleType, true) ::
StructField("decimalField1", DecimalType.USER_DEFAULT, true) ::
StructField("decimalField2", DecimalType(9, 2), true) ::
StructField("dateField", DateType, true) ::
StructField("timestampField", TimestampType, true) ::
StructField("varcharField", StringType, true) ::
StructField("charField", StringType, true) ::
StructField("arrayFieldSimple", ArrayType(IntegerType), true) ::
StructField("arrayFieldComplex",
ArrayType(
MapType(StringType, StructType(StructField("key", LongType, true) :: Nil))), true) ::
StructField("mapFieldSimple", MapType(IntegerType, StringType), true) ::
StructField("mapFieldComplex",
MapType(
MapType(StringType, FloatType),
StructType(StructField("key", LongType, true) :: Nil)), true) ::
StructField("structFieldSimple",
StructType(
StructField("key", IntegerType, true) ::
StructField("Value", StringType, true) :: Nil), true) ::
StructField("structFieldComplex",
StructType(
StructField("key", ArrayType(StringType), true) ::
StructField("Value",
StructType(
StructField("value_(2)", ArrayType(DateType), true) :: Nil), true) :: Nil), true) ::
Nil
)
assert(expectedSchema == spark.table("tableWithSchema").schema)
checkAnswer(
sql(
"""SELECT
| `string$%Field`,
| cast(binaryField as string),
| booleanField,
| byteField,
| shortField,
| int_Field,
| `longField_:,<>=+/~^`,
| floatField,
| doubleField,
| decimalField1,
| decimalField2,
| dateField,
| timestampField,
| varcharField,
| charField,
| arrayFieldSimple,
| arrayFieldComplex,
| mapFieldSimple,
| mapFieldComplex,
| structFieldSimple,
| structFieldComplex FROM tableWithSchema""".stripMargin),
tableWithSchemaExpected
)
}
sqlTest(
"SELECT count(*) FROM tableWithSchema",
Seq(Row(10)))
sqlTest(
"SELECT `string$%Field` FROM tableWithSchema",
(1 to 10).map(i => Row(s"str_$i")).toSeq)
sqlTest(
"SELECT int_Field FROM tableWithSchema WHERE int_Field < 5",
(1 to 4).map(Row(_)).toSeq)
sqlTest(
"SELECT `longField_:,<>=+/~^` * 2 FROM tableWithSchema",
(1 to 10).map(i => Row(i * 2.toLong)).toSeq)
sqlTest(
"SELECT structFieldSimple.key, arrayFieldSimple[1] FROM tableWithSchema a where int_Field=1",
Seq(Row(1, 2)))
sqlTest(
"SELECT structFieldComplex.Value.`value_(2)` FROM tableWithSchema",
(1 to 10).map(i => Row(Seq(Date.valueOf(s"1970-01-${i + 1}")))).toSeq)
test("Caching") {
// Cached Query Execution
spark.catalog.cacheTable("oneToTen")
assertCached(sql("SELECT * FROM oneToTen"))
checkAnswer(
sql("SELECT * FROM oneToTen"),
(1 to 10).map(Row(_)).toSeq)
assertCached(sql("SELECT i FROM oneToTen"))
checkAnswer(
sql("SELECT i FROM oneToTen"),
(1 to 10).map(Row(_)).toSeq)
assertCached(sql("SELECT i FROM oneToTen WHERE i < 5"))
checkAnswer(
sql("SELECT i FROM oneToTen WHERE i < 5"),
(1 to 4).map(Row(_)).toSeq)
assertCached(sql("SELECT i * 2 FROM oneToTen"))
checkAnswer(
sql("SELECT i * 2 FROM oneToTen"),
(1 to 10).map(i => Row(i * 2)).toSeq)
assertCached(sql(
"SELECT a.i, b.i FROM oneToTen a JOIN oneToTen b ON a.i = b.i + 1"), 2)
checkAnswer(sql(
"SELECT a.i, b.i FROM oneToTen a JOIN oneToTen b ON a.i = b.i + 1"),
(2 to 10).map(i => Row(i, i - 1)).toSeq)
// Verify uncaching
spark.catalog.uncacheTable("oneToTen")
assertCached(sql("SELECT * FROM oneToTen"), 0)
}
test("defaultSource") {
sql(
"""
|CREATE TEMPORARY VIEW oneToTenDef
|USING org.apache.spark.sql.sources
|OPTIONS (
| from '1',
| to '10'
|)
""".stripMargin)
checkAnswer(
sql("SELECT * FROM oneToTenDef"),
(1 to 10).map(Row(_)).toSeq)
}
test("exceptions") {
// Make sure we do throw correct exception when users use a relation provider that
// only implements the RelationProvider or the SchemaRelationProvider.
val schemaNotAllowed = intercept[Exception] {
sql(
"""
|CREATE TEMPORARY VIEW relationProvierWithSchema (i int)
|USING org.apache.spark.sql.sources.SimpleScanSource
|OPTIONS (
| From '1',
| To '10'
|)
""".stripMargin)
}
assert(schemaNotAllowed.getMessage.contains("does not allow user-specified schemas"))
val schemaNeeded = intercept[Exception] {
sql(
"""
|CREATE TEMPORARY VIEW schemaRelationProvierWithoutSchema
|USING org.apache.spark.sql.sources.AllDataTypesScanSource
|OPTIONS (
| From '1',
| To '10'
|)
""".stripMargin)
}
assert(schemaNeeded.getMessage.contains("A schema needs to be specified when using"))
}
test("SPARK-5196 schema field with comment") {
sql(
"""
|CREATE TEMPORARY VIEW student(name string comment "SN", age int comment "SA", grade int)
|USING org.apache.spark.sql.sources.AllDataTypesScanSource
|OPTIONS (
| from '1',
| to '10',
| option_with_underscores 'someval',
| option.with.dots 'someval'
|)
""".stripMargin)
val planned = sql("SELECT * FROM student").queryExecution.executedPlan
val comments = planned.schema.fields.map { field =>
if (field.metadata.contains("comment")) field.metadata.getString("comment")
else "NO_COMMENT"
}.mkString(",")
assert(comments === "SN,SA,NO_COMMENT")
}
}
| gioenn/xSpark | sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala | Scala | apache-2.0 | 12,588 |
package dispatch.as
import dispatch._
import org.asynchttpclient
import org.asynchttpclient.handler.resumable._
import java.io._
import java.nio.charset.Charset
object Response {
def apply[T](f: asynchttpclient.Response => T) = f
}
object String extends (asynchttpclient.Response => String) {
/** @return response body as a string, decoded with either the charset provided by the
* Content-Type header of the response or ISO-8859-1 */
def apply(r: asynchttpclient.Response) = r.getResponseBody
/** @return a function that will return response body decoded in the provided charset */
case class charset(set: Charset) extends (asynchttpclient.Response => String) {
def apply(r: asynchttpclient.Response) = r.getResponseBody(set)
}
/** @return a function that will return response body as a utf8 decoded string */
object utf8 extends charset(Charset.forName("utf8"))
}
object Bytes extends (asynchttpclient.Response => Array[Byte]) {
def apply(r: asynchttpclient.Response) = r.getResponseBodyAsBytes
}
object File {
def apply(file: java.io.File) = {
val fileHandler = new RandomAccessFile(file, "rw")
val resumableHandler = new ResumableAsyncHandler
with OkHandler[asynchttpclient.Response]
with CloseResourcesOnThrowableHandler[asynchttpclient.Response] {
override lazy val closeable = Seq(fileHandler)
}
resumableHandler
.setResumableListener(new ResumableRandomAccessFileListener(fileHandler))
}
}
| dispatch/reboot | core/src/main/scala/as/core.scala | Scala | lgpl-3.0 | 1,483 |
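In application code these objects are applied to a request as response transformers via dispatch's `OK` combinator. A small usage sketch, assuming a dispatch version that provides `Http.default` and the usual `url`/`OK` builders (the example.com URL is a placeholder):

import dispatch._, Defaults._
import scala.concurrent.Future

object StringExample {
  val svc = url("http://example.com")
  // as.String decodes using the response's Content-Type charset, falling back
  // to ISO-8859-1; as.String.utf8 forces UTF-8; as.Bytes returns the raw body.
  val body: Future[String] = Http.default(svc OK as.String)
  val utf8Body: Future[String] = Http.default(svc OK as.String.utf8)
  val raw: Future[Array[Byte]] = Http.default(svc OK as.Bytes)
}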
package korolev.http
import korolev.data.BytesLike
import korolev.effect.AsyncResourcePool.Borrow
import korolev.effect.io.{DataSocket, RawDataSocket, SecureDataSocket}
import korolev.effect.syntax._
import korolev.effect.{AsyncResourcePool, Decoder, Effect, Reporter, Scheduler, Stream}
import korolev.http.protocol.WebSocketProtocol.Frame
import korolev.http.protocol.{Http11, WebSocketProtocol}
import korolev.web.{Headers, PathAndQuery, Request, Response}
import java.net.{InetSocketAddress, URI}
import java.nio.ByteBuffer
import java.nio.channels.AsynchronousChannelGroup
import java.util.concurrent.{Executor, Executors}
import javax.net.ssl.SSLContext
import scala.collection.concurrent.TrieMap
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.util.Random
class HttpClient[F[_] : Effect, B: BytesLike] private (name: String,
maxIdleTime: FiniteDuration,
maxConnectionsPerAddress: Int,
blockingExecutor: Executor,
group: AsynchronousChannelGroup,
bufferSize: Int,
sslContext: SSLContext,
cleanupTicks: Stream[F, Unit])
(implicit executor: ExecutionContext,
reporter: Reporter) {
def apply(method: Request.Method,
uri: URI,
headers: Seq[(String, String)],
contentLength: Option[Long],
body: Stream[F, B]): F[Response[Stream[F, B]]] = {
val updatedHeaders =
if (uri.getUserInfo != null) Headers.basicAuthorization(uri.getUserInfo) +: headers
else headers
val request = Request(method, pqFromUri(uri), updatedHeaders, contentLength, body)
uri.getScheme match {
case "http" if uri.getPort == -1 => http(new InetSocketAddress(uri.getHost, 80), request)
case "https" if uri.getPort == -1 => https(new InetSocketAddress(uri.getHost, 443), request)
case "http" => http(new InetSocketAddress(uri.getHost, uri.getPort), request)
case "https" => https(new InetSocketAddress(uri.getHost, uri.getPort), request)
case "ws" | "wss" => Effect[F].fail(new IllegalArgumentException(s"Use HttpClient.webSocket() of HttpClient()"))
case scheme => Effect[F].fail(new IllegalArgumentException(s"$scheme is not supported"))
}
}
def https(address: InetSocketAddress, request: Request[Stream[F, B]]): F[Response[Stream[F, B]]] =
for {
borrow <- takeSecureConnection(address)
response <- http(borrow, address.getHostName, request)
} yield response
def http(address: InetSocketAddress, request: Request[Stream[F, B]]): F[Response[Stream[F, B]]] =
for {
borrow <- takeRawConnection(address)
response <- http(borrow, address.getHostName, request)
} yield response
private def http(borrow: Borrow[F, DataSocket[F, B]], host: String, request: Request[Stream[F, B]]): F[Response[Stream[F, B]]] =
for {
requestStream <- http11.renderRequest(request.withHeader(Headers.Host, host))
socket = borrow.value
writeBytesFiber <- requestStream.foreach(socket.write).start // Write response asynchronously
maybeResponse <- http11.decodeResponse(Decoder(socket.stream)).pull()
response <-
maybeResponse match {
case Some(response) =>
val (consumed, consumableBody) = response.body.handleConsumed
consumed
.after(writeBytesFiber.join())
// Give back connection when body was consumed
.after(borrow.give())
.start
.as(response.copy(body = consumableBody))
case None =>
Effect[F].fail[Response[Stream[F, B]]](
new IllegalStateException("Peer has closed connection before sending response."))
}
} yield {
response
}
def secureWebSocket(address: InetSocketAddress,
path: PathAndQuery,
outgoingFrames: Stream[F, WebSocketProtocol.Frame[B]],
cookie: Map[String, String],
headers: Map[String, String]): F[Response[Stream[F, Frame.Merged[B]]]] =
for {
borrow <- takeRawConnection(address)
frames <- webSocket(borrow, address.getHostName, path, outgoingFrames, cookie, headers)
} yield frames
def webSocket(uri: URI,
outgoingFrames: Stream[F, WebSocketProtocol.Frame[B]],
cookie: Map[String, String],
headers: Map[String, String]): F[Response[Stream[F, Frame.Merged[B]]]] = {
val pq = pqFromUri(uri)
val updatedHeaders =
if (uri.getUserInfo != null) headers + Headers.basicAuthorization(uri.getUserInfo)
else headers
uri.getScheme match {
case "ws" if uri.getPort == -1 => webSocket(new InetSocketAddress(uri.getHost, 80), pq, outgoingFrames, cookie, updatedHeaders)
case "wss" if uri.getPort == -1 => secureWebSocket(new InetSocketAddress(uri.getHost, 443), pq, outgoingFrames, cookie, updatedHeaders)
case "ws" => webSocket(new InetSocketAddress(uri.getHost, uri.getPort), pq, outgoingFrames, cookie, updatedHeaders)
case "wss" => secureWebSocket(new InetSocketAddress(uri.getHost, uri.getPort), pq, outgoingFrames, cookie, updatedHeaders)
case "http" | "https" => Effect[F].fail(new IllegalArgumentException(s"Use HttpClient.http() of HttpClient.webSocket()"))
case scheme => Effect[F].fail(new IllegalArgumentException(s"$scheme is not supported"))
}
}
def webSocket(address: InetSocketAddress,
path: PathAndQuery,
outgoingFrames: Stream[F, WebSocketProtocol.Frame[B]],
cookie: Map[String, String],
headers: Map[String, String]): F[Response[Stream[F, Frame.Merged[B]]]] =
for {
borrow <- takeRawConnection(address)
frames <- webSocket(borrow, address.getHostName, path, outgoingFrames, cookie, headers)
} yield frames
private def webSocket(borrow: Borrow[F, DataSocket[F, B]],
host: String,
path: PathAndQuery,
outgoingFrames: Stream[F, WebSocketProtocol.Frame[B]],
cookie: Map[String, String],
headers: Map[String, String]): F[Response[Stream[F, WebSocketProtocol.Frame.Merged[B]]]] =
for {
intention <- WebSocketProtocol.Intention.random
encodedOutgoingFrames = outgoingFrames.map(frame => webSocketProtocol.encodeFrame(frame, Some(123)))
requestRaw = Request(Request.Method.Get, path, headers.toSeq, None, encodedOutgoingFrames)
requestWithCookie = cookie.foldLeft(requestRaw) { case (acc, (k, v)) => acc.withCookie(k, v) }
requestWithIntention = webSocketProtocol.addIntention(requestWithCookie, intention)
rawResponse <- http(borrow, host, requestWithIntention)
frameDecoder = Decoder(rawResponse.body)
frames = webSocketProtocol.mergeFrames(webSocketProtocol.decodeFrames(frameDecoder))
} yield {
rawResponse.copy(body = frames)
}
private def pqFromUri(uri: URI) = {
val path = if (uri.getPath == null) "" else uri.getPath
val query = if (uri.getQuery == null) "" else uri.getQuery
PathAndQuery.fromString(s"$path$query")
}
private def takeRawConnection(address: InetSocketAddress) = {
def factory = RawDataSocket.connect(
address,
readBuffer = ByteBuffer.allocate(bufferSize),
writeBuffer = ByteBuffer.allocate(bufferSize),
group)
val pool = rawConnectionsPools.getOrElseUpdate(address,
new AsyncResourcePool(
s"$name-raw-socket-pool", factory,
() => Effect[F].delay(System.nanoTime()),
maxConnectionsPerAddress, maxIdleTime
)
)
pool.borrow()
}
private def takeSecureConnection(address: InetSocketAddress) = {
def factory =
for {
rawSocket <- RawDataSocket.connect(
address,
readBuffer = ByteBuffer.allocate(bufferSize),
writeBuffer = ByteBuffer.allocate(bufferSize),
group)
_ = reporter.debug("%s - Connection established", name)
engine = sslContext.createSSLEngine(address.getHostName, address.getPort)
socket <- SecureDataSocket.forClientMode(rawSocket, engine, blockingExecutor)
_ = reporter.debug("%s - TLS handshake finished", name)
} yield socket
val pool = secureConnectionsPools.getOrElseUpdate(address,
new AsyncResourcePool(
s"$name-tls-socket-pool", factory,
() => Effect[F].delay(System.nanoTime()),
maxConnectionsPerAddress, maxIdleTime
)
)
pool.borrow()
}
cleanupTicks
.foreach { _ =>
(rawConnectionsPools.values ++ secureConnectionsPools.values)
.toList
.map(_.cleanup())
.sequence
.map { results =>
val sum = results.sum
if (sum > 0) {
reporter.debug("HttpClient(%s) closes %d idle connection after timeout", name, sum)
}
}
}
.runAsyncForget
private val http11 = new Http11[B]()
private val webSocketProtocol = new WebSocketProtocol[B]()
private val rawConnectionsPools = TrieMap.empty[InetSocketAddress, AsyncResourcePool[F, RawDataSocket[F, B]]]
private val secureConnectionsPools = TrieMap.empty[InetSocketAddress, AsyncResourcePool[F, SecureDataSocket[F, B]]]
}
object HttpClient {
private lazy val defaultBlockingExecutor = Executors.newCachedThreadPool()
def create[F[_] : Effect : Scheduler, B: BytesLike](name: String = null,
maxIdleTime: FiniteDuration = 10.seconds,
maxConnectionsPerAddress: Int = 8,
poolCleanupInterval: FiniteDuration = 11.seconds,
blockingExecutor: Executor = null,
group: AsynchronousChannelGroup = null,
incomingBufferSize: Int = 8096,
sslContext: SSLContext = SSLContext.getDefault)
(implicit executor: ExecutionContext, reporter: Reporter): F[HttpClient[F, B]] =
for {
ticks <- Scheduler[F].schedule(poolCleanupInterval)
} yield {
val safeName = Option(name).getOrElse(s"${Random.alphanumeric.take(5).mkString}-http-client")
val updatedExecutor =
if (blockingExecutor != null) blockingExecutor
else defaultBlockingExecutor
new HttpClient(safeName, maxIdleTime, maxConnectionsPerAddress,
updatedExecutor, group, incomingBufferSize, sslContext, ticks)
}
}
| fomkin/korolev | modules/http/src/main/scala/korolev/http/HttpClient.scala | Scala | apache-2.0 | 11,140 |
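Only the client's `apply` signature is needed to issue a plain request. A sketch kept abstract in `F`, since choosing a concrete effect, reporter, and empty body stream is outside this file (only the `apply` signature comes from the code above; the URL is a placeholder):

import java.net.URI
import korolev.effect.Stream
import korolev.http.HttpClient
import korolev.web.{Request, Response}

object HttpClientUsage {
  // Issues a GET through an already-constructed client. The empty body stream
  // is supplied by the caller, because constructing one depends on the chosen
  // effect type F (an assumption of this sketch).
  def getExample[F[_], B](client: HttpClient[F, B],
                          emptyBody: Stream[F, B]): F[Response[Stream[F, B]]] =
    client(Request.Method.Get, URI.create("https://example.com/"),
      headers = Nil, contentLength = None, body = emptyBody)
}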
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.tools
import org.scalatest._
/**
* Trait implemented by RunnerJFrame, which can be passed to the RunnerGUI State objects,
* allowing them to call back into RunnerJFrame.
*
* @author Bill Venners
*/
private[scalatest] trait RunnerGUI {
def prepUIForReady(): Unit
def prepUIForRunning(): Unit
def prepUIForRerunning(): Unit
def prepUIWhileRunning(): Unit
def prepUIWhileRerunning(): Unit
def prepUIForStopping(): Unit
def prepUIForReStopping(): Unit
def showErrorDialog(title: String, msg: String): Unit
def getSelectedRerunner(): Option[Rerunner]
def runFromGUI(): Unit
def rerunFromGUI(rerunnable: Rerunner): Unit
def requestStop(): Unit
}
| hubertp/scalatest | src/main/scala/org/scalatest/tools/RunnerGUI.scala | Scala | apache-2.0 | 1,295 |
/*
* Copyright 2009-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package json
/** Functions to convert between JSON and XML.
*/
object Xml {
import scala.xml._
/** Convert given XML to JSON.
* <p>
* The following rules are used in the conversion.
* <ul>
* <li>XML leaf element is converted to JSON string</li>
* <li>XML parent element is converted to JSON object and its children to JSON fields</li>
* <li>XML elements with same name at same level are converted to JSON array</li>
* <li>XML attributes are converted to JSON fields</li>
* </ul>
* <p>
* Example:<pre>
* scala> val xml =
* <users>
* <user>
* <id>1</id>
* <name>Harry</name>
* </user>
* <user>
* <id>2</id>
* <name>David</name>
* </user>
* </users>
*
* scala> val json = toJson(xml)
* scala> pretty(render(json))
*
* {
* "users":{
* "user":[{
* "id":"1",
* "name":"Harry"
* },{
* "id":"2",
* "name":"David"
* }]
* }
* }
* </pre>
*
* Now, the above example has two problems. First, the id is converted to a String while
* we might want it as an Int. This is easy to fix by mapping JString(s) to JInt(s.toInt).
* The second problem is more subtle. The conversion function decides to use a JSON array
* because there's more than one user-element in the XML. Therefore a structurally equivalent
* XML document which happens to have just one user-element will generate a JSON document
* without a JSON array. This is rarely a desired outcome. Both problems can be fixed
* with the following map function.
* <pre>
* json map {
* case JField("id", JString(s)) => JField("id", JInt(s.toInt))
* case JField("user", x: JObject) => JField("user", JArray(x :: Nil))
* case x => x
* }
* </pre>
*/
def toJson(xml: NodeSeq): JValue = {
def empty_?(node: Node) = node.child.isEmpty
/* Checks if the given node is a leaf element. For instance, these are considered leaves:
* <foo>bar</foo>, <foo>{ doSomething() }</foo>, etc.
*/
def leaf_?(node: Node) = {
def descendant(n: Node): List[Node] = n match {
case g: Group => g.nodes.toList.flatMap(x => x :: descendant(x))
case _ => n.child.toList.flatMap { x => x :: descendant(x) }
}
!descendant(node).find(_.isInstanceOf[Elem]).isDefined
}
def array_?(nodeNames: Seq[String]) = nodeNames.size != 1 && nodeNames.toList.distinct.size == 1
def directChildren(n: Node): NodeSeq = n.child.filter(c => c.isInstanceOf[Elem])
def nameOf(n: Node) = (if (n.prefix ne null) n.prefix + ":" else "") + n.label
def buildAttrs(n: Node) = n.attributes.map((a: MetaData) => (a.key, XValue(a.value.text))).toList
sealed trait XElem
case class XValue(value: String) extends XElem
case class XLeaf(value: (String, XElem), attrs: List[(String, XValue)]) extends XElem
case class XNode(fields: List[(String, XElem)]) extends XElem
case class XArray(elems: List[XElem]) extends XElem
def toJValue(x: XElem): JValue = x match {
case XValue(s) => JString(s)
case XLeaf((name, value), attrs) => (value, attrs) match {
case (_, Nil) => toJValue(value)
case (XValue(""), xs) => JObject(mkFields(xs))
case (_, xs) => JObject(JField(name, toJValue(value)) :: mkFields(xs))
}
case XNode(xs) => JObject(mkFields(xs))
case XArray(elems) => JArray(elems.map(toJValue))
}
def mkFields(xs: List[(String, XElem)]) =
xs.flatMap { case (name, value) => (value, toJValue(value)) match {
// This special case is needed to flatten nested objects which resulted from
// XML attributes. Flattening keeps the transformation more predictable.
// <a><foo id="1">x</foo></a> -> {"a":{"foo":{"foo":"x","id":"1"}}} vs
// <a><foo id="1">x</foo></a> -> {"a":{"foo":"x","id":"1"}}
case (XLeaf(v, x :: xs), o: JObject) => o.obj
case (_, json) => JField(name, json) :: Nil }}
def buildNodes(xml: NodeSeq): List[XElem] = xml match {
case n: Node =>
if (empty_?(n)) XLeaf((nameOf(n), XValue("")), buildAttrs(n)) :: Nil
else if (leaf_?(n)) XLeaf((nameOf(n), XValue(n.text)), buildAttrs(n)) :: Nil
else {
val children = directChildren(n)
XNode(buildAttrs(n) ::: children.map(nameOf).toList.zip(buildNodes(children))) :: Nil
}
case nodes: NodeSeq =>
val allLabels = nodes.map(_.label)
if (array_?(allLabels)) {
val arr = XArray(nodes.toList.flatMap { n =>
if (leaf_?(n) && n.attributes.length == 0) XValue(n.text) :: Nil
else buildNodes(n)
})
XLeaf((allLabels(0), arr), Nil) :: Nil
} else nodes.toList.flatMap(buildNodes)
}
buildNodes(xml) match {
case List(x @ XLeaf(_, _ :: _)) => toJValue(x)
case List(x) => JObject(JField(nameOf(xml.head), toJValue(x)) :: Nil)
case x => JArray(x.map(toJValue))
}
}
/** Convert given JSON to XML.
* <p>
* The following rules are used in the conversion.
* <ul>
* <li>JSON primitives are converted to XML leaf elements</li>
* <li>JSON objects are converted to XML elements</li>
* <li>JSON arrays are recursively converted to XML elements</li>
* </ul>
* <p>
* Use the <code>map</code> function to preprocess the JSON before conversion to adjust
* the end result. For instance, a common conversion is to encode arrays as comma-separated
* Strings since XML does not have an array type.
* <p><pre>
* toXml(json map {
* case JField("nums",JArray(ns)) => JField("nums",JString(ns.map(_.values).mkString(",")))
* case x => x
* })
* </pre>
*/
def toXml(json: JValue): NodeSeq = {
def toXml(name: String, json: JValue): NodeSeq = json match {
case JObject(fields) => new XmlNode(name, fields flatMap { f => toXml(f.name, f.value) })
case JArray(xs) => xs flatMap { v => toXml(name, v) }
case JField(n, v) => new XmlNode(name, toXml(n, v))
case JInt(x) => new XmlElem(name, x.toString)
case JDouble(x) => new XmlElem(name, x.toString)
case JString(x) => new XmlElem(name, x)
case JBool(x) => new XmlElem(name, x.toString)
case JNull => new XmlElem(name, "null")
case JNothing => Text("")
}
json match {
case JField(n, v) => toXml(n, v)
case JObject(fields) => fields flatMap { f => toXml(f.name, f.value) }
case x => toXml("root", x)
}
}
private[json] class XmlNode(name: String, children: Seq[Node]) extends Elem(null, name, xml.Null, TopScope, children :_*)
private[json] class XmlElem(name: String, value: String) extends Elem(null, name, xml.Null, TopScope, Text(value))
}
| wsaccaco/lift | framework/lift-base/lift-json/src/main/scala/net/liftweb/json/Xml.scala | Scala | apache-2.0 | 7,513 |
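A short round trip using the two converters above, following the doc comment's own example (assumes the lift-json `render`/`pretty` helpers of this era; the sample XML is a placeholder):

import net.liftweb.json.JsonAST._
import net.liftweb.json.Printer._
import net.liftweb.json.Xml.{toJson, toXml}

object XmlRoundTrip {
  val xml = <users><user><id>1</id><name>Harry</name></user></users>

  // Apply the doc comment's fix so id comes out as a JInt rather than a JString.
  val json = toJson(xml) map {
    case JField("id", JString(s)) => JField("id", JInt(s.toInt))
    case x => x
  }

  def main(args: Array[String]): Unit = {
    println(pretty(render(json)))   // {"users":{"user":{"id":1,"name":"Harry"}}}
    println(toXml(json))            // NodeSeq rooted at <users>
  }
}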
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.audit.http
import org.joda.time.DateTime
import org.scalatest.concurrent.Eventually
import org.scalatest.time.{Seconds, Span}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import play.api.Play
import play.api.test.FakeApplication
import uk.gov.hmrc.play.audit.EventKeys._
import uk.gov.hmrc.play.audit.EventTypes._
import uk.gov.hmrc.play.audit.http.connector.{AuditConnector, MockAuditConnector}
import uk.gov.hmrc.play.audit.model.MergedDataEvent
import uk.gov.hmrc.play.http.HeaderNames._
import uk.gov.hmrc.play.http.test.logging.LogCapturing
import uk.gov.hmrc.play.http.{DummyHttpResponse, HttpResponse}
import uk.gov.hmrc.play.test.Concurrent.await
import uk.gov.hmrc.play.test.Concurrent.liftFuture
import scala.concurrent.{ExecutionContext, Future}
class HttpAuditingSpec extends WordSpecLike with Matchers with Eventually with LogCapturing with BeforeAndAfterAll {
implicit def mockDatastreamConnector(ds: AuditConnector) : MockAuditConnector = ds.asInstanceOf[MockAuditConnector]
val requestDateTime = new DateTime()
val responseDateTime = requestDateTime.plusSeconds(5)
lazy val fakeApplication = FakeApplication()
override def beforeAll() {
super.beforeAll()
Play.start(fakeApplication)
}
override def afterAll() {
super.afterAll()
Play.stop()
}
class HttpWithAuditing extends HttpAuditing {
override lazy val appName: String = "httpWithAuditSpec"
override lazy val auditConnector: AuditConnector = new MockAuditConnector
override def auditRequestWithResponseF(url: String, verb: String, requestBody: Option[_], response: Future[HttpResponse])(implicit hc: HeaderCarrier): Unit =
super.auditRequestWithResponseF(url, verb, requestBody, response)(hc)
var now_call_count = 0
override def now = {
now_call_count=now_call_count+1
if(now_call_count == 1) requestDateTime
else responseDateTime
}
def buildRequest(url: String, verb: String, body: Option[_]) = {
now_call_count = 1
HttpRequest(url, verb, body, requestDateTime)
}
}
sealed class HttpAuditingWithAuditException extends HttpWithAuditing {
override lazy val auditConnector: AuditConnector = new MockAuditConnector {
override def sendMergedEvent(event: MergedDataEvent)(implicit hc: HeaderCarrier, ec : ExecutionContext) = {
throw new IllegalArgumentException("any exception")
}
}
}
"When asked to auditRequestWithResponseF the code" should {
val serviceUri = "/service/path"
"handle the happy path with a valid audit event passing through" in {
val httpWithAudit = new HttpWithAuditing
val requestBody = None
val getVerb = "GET"
val responseBody = "the response body"
val statusCode = 200
val response = Future.successful(new DummyHttpResponse(responseBody, statusCode))
implicit val hcWithHeaders = HeaderCarrier().withExtraHeaders("Surrogate" -> "true")
await(httpWithAudit.auditRequestWithResponseF(serviceUri, getVerb, requestBody, response))
eventually(timeout(Span(1, Seconds))) {
httpWithAudit.auditConnector.recordedMergedEvent shouldBe defined
val dataEvent = httpWithAudit.auditConnector.recordedMergedEvent.get
dataEvent.auditSource shouldBe httpWithAudit.appName
dataEvent.auditType shouldBe OutboundCall
dataEvent.request.tags shouldBe Map(xSessionId -> "-", xRequestId -> "-", TransactionName -> serviceUri, Path -> serviceUri, "clientIP" -> "-", "clientPort" -> "-")
dataEvent.request.detail shouldBe Map("ipAddress" -> "-", authorisation -> "-", token -> "-", Path -> serviceUri, Method -> getVerb, "surrogate" -> "true")
dataEvent.request.generatedAt shouldBe requestDateTime
dataEvent.response.tags shouldBe empty
dataEvent.response.detail shouldBe Map(ResponseMessage -> responseBody, StatusCode -> statusCode.toString)
dataEvent.response.generatedAt shouldBe responseDateTime
}
}
"handle the case of an exception being raised inside the future and still send an audit message" in {
implicit val hc = HeaderCarrier()
val httpWithAudit = new HttpWithAuditing
val requestBody = "the infamous request body"
val postVerb = "POST"
val errorMessage = "FOO bar"
val response = Future.failed(new Exception(errorMessage))
await(httpWithAudit.auditRequestWithResponseF(serviceUri, postVerb, Some(requestBody), response))
eventually(timeout(Span(1, Seconds))) {
httpWithAudit.auditConnector.recordedMergedEvent shouldBe defined
val dataEvent = httpWithAudit.auditConnector.recordedMergedEvent.get
dataEvent.auditSource shouldBe httpWithAudit.appName
dataEvent.auditType shouldBe OutboundCall
dataEvent.request.tags shouldBe Map(xSessionId -> "-", xRequestId -> "-", TransactionName -> serviceUri, Path -> serviceUri, "clientIP" -> "-", "clientPort" -> "-")
dataEvent.request.detail shouldBe Map("ipAddress" -> "-", authorisation -> "-", token -> "-", Path -> serviceUri, Method -> postVerb, RequestBody -> requestBody)
dataEvent.request.generatedAt shouldBe requestDateTime
dataEvent.response.tags shouldBe empty
dataEvent.response.detail should contain(FailedRequestMessage -> errorMessage)
dataEvent.response.generatedAt shouldBe responseDateTime
}
}
"not do anything if the datastream service is throwing an error as in this specific case datastream is logging the event" in {
implicit val hc = HeaderCarrier()
val httpWithAudit = new HttpAuditingWithAuditException
val requestBody = "the infamous request body"
val postVerb = "POST"
val errorMessage = "FOO bar"
val response = Future.failed(new Exception(errorMessage))
await(httpWithAudit.auditRequestWithResponseF(serviceUri, postVerb, Some(requestBody), response))
eventually(timeout(Span(1, Seconds))) {
httpWithAudit.auditConnector.recordedMergedEvent shouldBe None
}
}
}
"Calling audit" should {
val serviceUri = "/service/path"
implicit val hc = HeaderCarrier()
"send unique event of type OutboundCall" in {
val httpWithAudit = new HttpWithAuditing
val requestBody = None
val getVerb = "GET"
val request = httpWithAudit.buildRequest(serviceUri, getVerb, requestBody)
val response = new DummyHttpResponse("the response body", 200)
implicit val hc = HeaderCarrier(trueClientIp = Some("192.168.1.2"), trueClientPort = Some("12000")).withExtraHeaders("Surrogate" -> "true")
httpWithAudit.audit(request, response)
httpWithAudit.auditConnector.recordedMergedEvent shouldBe defined
val dataEvent = httpWithAudit.auditConnector.recordedMergedEvent.get
dataEvent.auditSource shouldBe httpWithAudit.appName
dataEvent.auditType shouldBe OutboundCall
dataEvent.request.tags shouldBe Map(xSessionId -> "-", xRequestId -> "-", TransactionName -> serviceUri, Path -> serviceUri, "clientIP" -> "192.168.1.2", "clientPort" -> "12000")
dataEvent.request.detail shouldBe Map("ipAddress" -> "-", authorisation -> "-", token -> "-", Path -> serviceUri, Method -> getVerb, "surrogate" -> "true")
dataEvent.request.generatedAt shouldBe requestDateTime
dataEvent.response.tags shouldBe empty
dataEvent.response.detail shouldBe Map(ResponseMessage -> response.body, StatusCode -> response.status.toString)
dataEvent.response.generatedAt shouldBe responseDateTime
}
"send unique event of type OutboundCall including the requestbody" in {
val httpWithAudit = new HttpWithAuditing
val postVerb = "POST"
val requestBody = Some("The request body gets added to the audit details")
val response = new DummyHttpResponse("the response body", 200)
val request = httpWithAudit.buildRequest(serviceUri, postVerb, requestBody)
httpWithAudit.audit(request, response)
httpWithAudit.auditConnector.recordedMergedEvent shouldBe defined
val dataEvent = httpWithAudit.auditConnector.recordedMergedEvent.get
dataEvent.auditSource shouldBe httpWithAudit.appName
dataEvent.auditType shouldBe OutboundCall
dataEvent.request.tags shouldBe Map(xSessionId -> "-", xRequestId -> "-", TransactionName -> serviceUri, Path -> serviceUri, "clientIP" -> "-", "clientPort" -> "-")
dataEvent.request.detail shouldBe Map("ipAddress" -> "-", authorisation -> "-", token -> "-", Path -> serviceUri, Method -> postVerb, RequestBody -> requestBody.get)
dataEvent.request.generatedAt shouldBe requestDateTime
dataEvent.response.tags shouldBe empty
dataEvent.response.detail shouldBe Map(ResponseMessage -> response.body, StatusCode -> response.status.toString)
dataEvent.response.generatedAt shouldBe responseDateTime
}
}
"Calling an internal microservice" should {
val AuditUri = "http://auth.service:80/auth/authority"
val getVerb = "GET"
implicit val hc = HeaderCarrier()
"not generate an audit event" in {
val httpWithAudit = new HttpWithAuditing
val requestBody = None
val response = new DummyHttpResponse("the response body", 200)
val request = httpWithAudit.buildRequest(AuditUri, getVerb, requestBody)
httpWithAudit.audit(request, response)
httpWithAudit.auditConnector.recordedMergedEvent shouldBe None
}
"not generate an audit event when an exception has been thrown" in {
val httpWithAudit = new HttpWithAuditing
val requestBody = None
val request = httpWithAudit.buildRequest(AuditUri, getVerb, requestBody)
httpWithAudit.auditRequestWithException(request, "An exception occurred when calling sendevent datastream")
httpWithAudit.auditConnector.recordedMergedEvent shouldBe None
}
}
"Auditing the url /write/audit" should {
val AuditUri = "/write/audit"
val getVerb = "GET"
implicit val hc = HeaderCarrier()
"not generate an audit event" in {
val httpWithAudit = new HttpWithAuditing
val requestBody = None
val response = new DummyHttpResponse("the response body", 200)
val request = httpWithAudit.buildRequest(AuditUri, getVerb, requestBody)
httpWithAudit.audit(request, response)
httpWithAudit.auditConnector.recordedMergedEvent shouldBe None
}
"not generate an audit event when an exception has been thrown" in {
val httpWithAudit = new HttpWithAuditing
val requestBody = None
val request = httpWithAudit.buildRequest(AuditUri, getVerb, requestBody)
httpWithAudit.auditRequestWithException(request, "An exception occurred when calling sendevent datastream")
httpWithAudit.auditConnector.recordedMergedEvent shouldBe None
}
}
"Auditing the url /write/audit/merged" should {
val AuditUri = "/write/audit/merged"
val getVerb = "GET"
implicit val hc = HeaderCarrier()
"not generate an audit event" in {
val httpWithAudit = new HttpWithAuditing
val requestBody = None
val response = new DummyHttpResponse("the response body", 200)
val request = httpWithAudit.buildRequest(AuditUri, getVerb, requestBody)
httpWithAudit.audit(request, response)
httpWithAudit.auditConnector.recordedMergedEvent shouldBe None
}
"not generate an audit event when an exception has been thrown" in {
val httpWithAudit = new HttpWithAuditing
val requestBody = None
val request = httpWithAudit.buildRequest(AuditUri, getVerb, requestBody)
httpWithAudit.auditRequestWithException(request, "An exception occurred when calling sendevent datastream")
httpWithAudit.auditConnector.recordedMergedEvent shouldBe None
}
}
}
| liquidarmour/http-verbs | src/test/scala/uk/gov/hmrc/play/audit/http/HttpAuditingSpec.scala | Scala | apache-2.0 | 12,445 |
package com.alopatindev.videoacid.ui
import android.support.v4.app.Fragment
import android.content.Context
import com.alopatindev.videoacid.R
class GalleryFragment extends Fragment with FragmentUtils {
import android.os.Bundle
import android.view.{LayoutInflater, View, ViewGroup}
import com.alopatindev.videoacid.Logs.logd
override def onCreateView(inflater: LayoutInflater, container: ViewGroup, savedInstanceState: Bundle): View = {
logd("GalleryFragment.onCreateView")
inflater.inflate(R.layout.gallery, container, false)
}
override def onPause(): Unit = {
logd("GalleryFragment.onPause")
super.onPause()
}
override def onResume(): Unit = {
logd("GalleryFragment.onResume")
super.onResume()
}
}
object GalleryFragment {
val titleStringId: Int = R.string.gallery
}
| alopatindev/videoacid | src/main/scala/com/alopatindev/videoacid/ui/GalleryFragment.scala | Scala | bsd-2-clause | 824 |
package models.daos
import com.mohiva.play.silhouette.api.LoginInfo
import models.User
import scala.concurrent.Future
/**
* Give access to the user object.
*/
trait UserDAO {
/**
* Finds a user by its login info.
*
* @param loginInfo The login info of the user to find.
* @return The found user or None if no user for the given login info could be found.
*/
def find(loginInfo: LoginInfo): Future[Option[User]]
/**
* Finds a user by its user string ID.
*
* @param userID The ID of the user to find.
* @return The found user or None if no user for the given ID could be found.
*/
def find(userID: String): Future[Option[User]]
/**
* Saves a user.
*
* @param user The user to save.
* @return The saved user.
*/
def save(user: User): Future[User]
}
| workingDog/trackersys | app/models/daos/UserDAO.scala | Scala | apache-2.0 | 813 |
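For tests or prototyping, the contract above can be satisfied in memory. A sketch (hypothetical, not part of this repo) that assumes `User` exposes `userID: String` and `loginInfo: LoginInfo` fields:

package models.daos

import com.mohiva.play.silhouette.api.LoginInfo
import models.User
import scala.collection.concurrent.TrieMap
import scala.concurrent.Future

// Assumed shape of User: a userID string and a silhouette LoginInfo.
class InMemoryUserDAO extends UserDAO {
  private val users = TrieMap.empty[String, User]

  def find(loginInfo: LoginInfo): Future[Option[User]] =
    Future.successful(users.values.find(_.loginInfo == loginInfo))

  def find(userID: String): Future[Option[User]] =
    Future.successful(users.get(userID))

  def save(user: User): Future[User] = {
    users.put(user.userID, user)
    Future.successful(user)
  }
}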
package cromwell
package object webservice {
case class QueryParameter(key: String, value: String)
type QueryParameters = Seq[QueryParameter]
object Patterns {
val WorkflowName = """
(?x) # Turn on comments and whitespace insensitivity.
( # Begin capture.
[a-zA-Z][a-zA-Z0-9_]* # WDL identifier naming pattern of an initial alpha character followed by zero
# or more alphanumeric or underscore characters.
) # End capture.
""".trim.r
val CallFullyQualifiedName = """
(?x) # Turn on comments and whitespace insensitivity.
( # Begin outer capturing group for FQN.
(?:[a-zA-Z][a-zA-Z0-9_]*) # Inner noncapturing group for top-level workflow name. This is the WDL
# identifier naming pattern of an initial alpha character followed by zero
# or more alphanumeric or underscore characters.
(?:\.[a-zA-Z][a-zA-Z0-9_]*){1} # Inner noncapturing group for call name, a literal dot followed by a WDL
# identifier. Currently this is quantified to {1} since the call name is
# mandatory and nested workflows are not supported. This could be changed
# to + or a different quantifier if these assumptions change.
) # End outer capturing group for FQN.
(?: # Begin outer noncapturing group for shard.
\. # Literal dot.
(\d+) # Captured shard digits.
)? # End outer optional noncapturing group for shard.
""".trim.r // The trim is necessary as (?x) must be at the beginning of the regex.
}
}
| dgtester/cromwell | src/main/scala/cromwell/webservice/package.scala | Scala | bsd-3-clause | 2,169 |
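Since both patterns are compiled `Regex` values, they work directly as match extractors; a quick demonstration with hypothetical workflow and call names:

import cromwell.webservice.Patterns.{CallFullyQualifiedName, WorkflowName}

object PatternsDemo extends App {
  "myWorkflow" match {
    case WorkflowName(name) => println(name)            // myWorkflow
    case _                  => println("not a workflow name")
  }
  // Two capture groups: the workflow.call FQN and the optional shard index
  // (null when the shard suffix is absent).
  "myWorkflow.myCall.3" match {
    case CallFullyQualifiedName(fqn, shard) => println(s"$fqn / $shard")  // myWorkflow.myCall / 3
    case _                                  => println("not a call FQN")
  }
}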
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic.enablers
import org.scalactic.Equality
import org.scalactic.ArrayWrapper
import scala.collection.GenTraversable
import scala.annotation.tailrec
import scala.collection.JavaConverters._
/**
* Supertrait for typeclasses that enable the <code>be</code> <code>sorted</code> matcher syntax.
*
* <p>
* A <code>Sortable[S]</code> provides access to the "sortable nature" of type <code>S</code> in such
* a way that <code>be</code> <code>sorted</code> matcher syntax can be used with type <code>S</code>. An <code>S</code>
* can be any type for which the concept of being sorted makes sense, such as sequences. ScalaTest provides
* implicit implementations for several types. You can enable the <code>be</code> <code>sorted</code> matcher syntax on your own
* type <code>U</code> by defining a <code>Sortable[U]</code> for the type and making it available implicitly.
*
* <p>
* ScalaTest provides an implicit <code>Sortable</code> instance for types out of the box
* in the <a href="Sortable$.html"><code>Sortable</code> companion object</a>:
* </p>
*
* <ul>
* <li><code>scala.collection.GenSeq</code></li>
* <li><code>Array</code></li>
* <li><code>java.util.List</code></li>
* </ul>
*
*/
trait Sortable[-S] {
/**
* Determines whether the passed sequence is sorted, <em>i.e.</em>, the elements of the passed sequence are in sorted order.
*
* @param sequence the sequence to check whether it is sorted
* @return <code>true</code> if passed <code>sequence</code> is sorted, <code>false</code> otherwise.
*/
def isSorted(sequence: S): Boolean
}
/**
* Companion object for <code>Sortable</code> that provides implicit implementations for the following types:
*
* <ul>
* <li><code>scala.collection.GenSeq</code></li>
* <li><code>Array</code></li>
* <li><code>java.util.List</code></li>
* </ul>
*/
object Sortable {
import scala.language.higherKinds
// Sliding doesn't exist on GenSeq, and this is inherently sequential, so make them say .seq if they have a parallel Seq
// Actually on second thought, I think just do a .seq on it.
/**
* Enable <code>Sortable</code> implementation for <code>scala.collection.GenSeq</code>
*
* @param ordering <code>scala.math.Ordering</code> of type <code>E</code>
* @tparam E type of elements in the <code>scala.collection.GenSeq</code>
* @tparam SEQ any subtype of <code>scala.collection.GenSeq</code>
* @return <code>Sortable[SEQ[E]]</code> that supports <code>scala.collection.GenSeq</code> in <code>be</code> <code>sortable</code> syntax
*/
implicit def sortableNatureOfSeq[E, SEQ[e] <: scala.collection.GenSeq[e]](implicit ordering: Ordering[E]): Sortable[SEQ[E]] =
new Sortable[SEQ[E]] {
def isSorted(o: SEQ[E]): Boolean =
if (o.size > 1)
o.seq.sliding(2).forall { duo => ordering.lteq(duo(0), duo(1)) }
else
true
}
/**
* Enable <code>Sortable</code> implementation for <code>Array</code>
*
* @param ordering <code>scala.math.Ordering</code> of type <code>E</code>
* @tparam E type of elements in the <code>Array</code>
* @return <code>Sortable[Array[E]]</code> that supports <code>Array</code> in <code>be</code> <code>sortable</code> syntax
*/
implicit def sortableNatureOfArray[E](implicit ordering: Ordering[E]): Sortable[Array[E]] =
new Sortable[Array[E]] {
def isSorted(o: Array[E]): Boolean =
if (o.length > 1)
o.sliding(2).forall { duo => ordering.lteq(duo(0), duo(1)) }
else
true
}
/**
* Enable <code>Sortable</code> implementation for <code>String</code>
*
* @param ordering <code>scala.math.Ordering</code> of type <code>Char</code>
* @return <code>Sortable[String]</code> that supports <code>String</code> in <code>be</code> <code>sortable</code> syntax
*/
implicit def sortableNatureOfString(implicit ordering: Ordering[Char]): Sortable[String] =
new Sortable[String] {
def isSorted(o: String): Boolean =
if (o.length > 1)
o.sliding(2).forall { duo => ordering.lteq(duo(0), duo(1)) }
else
true
}
/**
* Enable <code>Sortable</code> implementation for <code>java.util.List</code>
*
* @param ordering <code>scala.math.Ordering</code> of type <code>E</code>
* @tparam E type of elements in the <code>java.util.List</code>
* @tparam JLIST any subtype of <code>java.util.List</code>
* @return <code>Sortable[JLIST[E]]</code> that supports <code>java.util.List</code> in <code>be</code> <code>sortable</code> syntax
*/
implicit def sortableNatureOfJavaList[E, JLIST[e] <: java.util.List[e]](implicit ordering: Ordering[E]): Sortable[JLIST[E]] =
new Sortable[JLIST[E]] {
def isSorted(o: JLIST[E]): Boolean =
if (o.size > 1)
o.asScala.sliding(2).forall { duo => ordering.lteq(duo(0), duo(1)) }
else
true
}
}
| cheeseng/scalatest | scalactic/src/main/scala/org/scalactic/enablers/Sortable.scala | Scala | apache-2.0 | 5,533 |
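Per the scaladoc above, `be sorted` extends to a custom type `U` by supplying an implicit `Sortable[U]`. A minimal sketch for a hypothetical wrapper type, using the same guard-and-sliding pattern as the built-in instances:

import org.scalactic.enablers.Sortable

// Hypothetical domain type: an ordered list of version numbers.
final case class Versions(values: Vector[Int])

object VersionsSupport {
  implicit val sortableVersions: Sortable[Versions] =
    new Sortable[Versions] {
      def isSorted(v: Versions): Boolean =
        if (v.values.size > 1)
          v.values.sliding(2).forall { duo => duo(0) <= duo(1) }
        else
          true
    }
}

// With VersionsSupport._ imported, matcher syntax such as
//   Versions(Vector(1, 2, 3)) should be (sorted)
// resolves through Sortable[Versions].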
package org.broadinstitute.clio.client.metadata
import java.net.URI
import org.broadinstitute.clio.client.dispatch.MoveExecutor.{CopyOp, MoveOp}
import org.broadinstitute.clio.transfer.model.arrays.{ArraysExtensions, ArraysMetadata}
import org.scalatest.{FlatSpec, Matchers}
class ArrayDelivererSpec extends FlatSpec with Matchers {
behavior of "ArrayDeliverer"
private val vcfName = s"the-vcf${ArraysExtensions.VcfGzExtension}"
private val vcfIndexName = s"$vcfName${ArraysExtensions.VcfGzTbiExtension}"
private val gtcName = s"the-gtc${ArraysExtensions.GtcExtension}"
private val grnName = s"the-grn${ArraysExtensions.GrnIdatExtension}"
private val redName = s"the-red${ArraysExtensions.RedIdatExtension}"
private val vcfPath = URI.create(s"gs://bucket/$vcfName")
private val vcfIndexPath = URI.create(s"gs://bucket/$vcfIndexName")
private val gtcPath = URI.create(s"gs://bucket/$gtcName")
private val grnPath = URI.create(s"gs://bucket/$grnName")
private val redPath = URI.create(s"gs://bucket/$redName")
private val metadata = ArraysMetadata(
vcfPath = Some(vcfPath),
vcfIndexPath = Some(vcfIndexPath),
gtcPath = Some(gtcPath),
grnIdatPath = Some(grnPath),
redIdatPath = Some(redPath)
)
private val destination = URI.create("gs://the-destination/")
private val deliverer = new ArrayDeliverer
it should "generate ops to move the vcf, index, and gtc + copy the idats" in {
val (delivered, ops) = deliverer.moveInto(metadata, destination)
val idatDestination = destination.resolve(deliverer.idatsDir)
delivered.vcfPath should be(Some(destination.resolve(vcfName)))
delivered.vcfIndexPath should be(Some(destination.resolve(vcfIndexName)))
delivered.gtcPath should be(Some(destination.resolve(gtcName)))
delivered.grnIdatPath should be(Some(idatDestination.resolve(grnName)))
delivered.redIdatPath should be(Some(idatDestination.resolve(redName)))
ops should contain theSameElementsAs Seq(
MoveOp(vcfPath, destination.resolve(vcfName)),
MoveOp(vcfIndexPath, destination.resolve(vcfIndexName)),
MoveOp(gtcPath, destination.resolve(gtcName)),
CopyOp(grnPath, idatDestination.resolve(grnName)),
CopyOp(redPath, idatDestination.resolve(redName))
)
}
it should "move the idats when delivering from an existing workspace" in {
val deliveredMetadata = metadata.copy(workspaceName = Some("firecloud-workspace"))
val (delivered, ops) = deliverer.moveInto(deliveredMetadata, destination)
val idatDestination = destination.resolve(deliverer.idatsDir)
delivered.vcfPath should be(Some(destination.resolve(vcfName)))
delivered.vcfIndexPath should be(Some(destination.resolve(vcfIndexName)))
delivered.gtcPath should be(Some(destination.resolve(gtcName)))
delivered.grnIdatPath should be(Some(idatDestination.resolve(grnName)))
delivered.redIdatPath should be(Some(idatDestination.resolve(redName)))
ops should contain theSameElementsAs Seq(
MoveOp(vcfPath, destination.resolve(vcfName)),
MoveOp(vcfIndexPath, destination.resolve(vcfIndexName)),
MoveOp(gtcPath, destination.resolve(gtcName)),
MoveOp(grnPath, idatDestination.resolve(grnName)),
MoveOp(redPath, idatDestination.resolve(redName))
)
}
it should "copy the idats if the workspace name is an empty string" in {
val deliveredMetadata = metadata.copy(workspaceName = Some(""))
val (delivered, ops) = deliverer.moveInto(deliveredMetadata, destination)
val idatDestination = destination.resolve(deliverer.idatsDir)
delivered.vcfPath should be(Some(destination.resolve(vcfName)))
delivered.vcfIndexPath should be(Some(destination.resolve(vcfIndexName)))
delivered.gtcPath should be(Some(destination.resolve(gtcName)))
delivered.grnIdatPath should be(Some(idatDestination.resolve(grnName)))
delivered.redIdatPath should be(Some(idatDestination.resolve(redName)))
ops should contain theSameElementsAs Seq(
MoveOp(vcfPath, destination.resolve(vcfName)),
MoveOp(vcfIndexPath, destination.resolve(vcfIndexName)),
MoveOp(gtcPath, destination.resolve(gtcName)),
CopyOp(grnPath, idatDestination.resolve(grnName)),
CopyOp(redPath, idatDestination.resolve(redName))
)
}
}
|
broadinstitute/clio
|
clio-client/src/test/scala/org/broadinstitute/clio/client/metadata/ArrayDelivererSpec.scala
|
Scala
|
bsd-3-clause
| 4,290
|
import com.oysterbooks.scavro.plugin.AvroCodegenPlugin
import sbt._
import sbt.Keys._
import AvroCodegenPlugin.autoImport._
object DemoBuild extends Build {
lazy val demoSettings = Defaults.defaultSettings ++ baseAvroCodegenSettings ++ Seq(
// General settings
organization := "oyster",
scalaVersion := "2.10.4",
version := "0.0.2",
libraryDependencies ++= Seq(
"com.oysterbooks" % "scavro_2.10_0.13" % "0.9.0" from
"https://oss.sonatype.org/content/repositories/releases/com/oysterbooks/scavro_2.10_0.13/0.9.0/scavro-0.9.0.jar",
"org.apache.avro" % "avro" % "1.7.7",
"org.apache.avro" % "avro-tools" % "1.7.7",
"org.scalatest" %% "scalatest" % "2.2.4" % "test"
),
resolvers ++= Seq(
// "Local Maven" at Path.userHome.asFile.toURI.toURL + ".ivy2/local",
Resolver.sonatypeRepo("releases"),
Resolver.sonatypeRepo("snapshots")
),
// scavro plugin settings
avroSchemaFiles := Seq(
(resourceDirectory in Compile).value / "item.avsc"
),
mainClass in (Compile, run) := Some("com.oysterbooks.scavrodemo.ReadWriteDemo")
)
lazy val root = Project(id = "demo", base = file("."))
.settings(demoSettings: _*)
.settings(excludeFilter in unmanagedResources := "*.avsc")
}
|
deathbymochi/scavro
|
demo/project/Build.scala
|
Scala
|
apache-2.0
| 1,280
|
package so.paws.db.slick
import so.paws.db.DbPlugin
import play.api.Application
class SlickDbPlugin(application: Application) extends DbPlugin[Any] {
override def db: Any = {}
// def getEntities(application: Application, ref: String): Set[Entity] = {
// val runtimeMirror = universe.runtimeMirror(application.classloader)
//
// val module = runtimeMirror.staticModule(ref)
//
// val obj = runtimeMirror.reflectModule(module)
//
// obj.instance match {
// case sormEntities: SormEntities => sormEntities.get
// case _ => throw new ClassCastException
// }
// }
}
|
dessens/paws
|
modules/db/src/main/scala/so/paws/db/slick/SlickDbPlugin.scala
|
Scala
|
mit
| 593
|
package org.biancama.algorithms.trampoline
import org.scalatest.{FlatSpec, Matchers}
class TrampolineTest extends FlatSpec with Matchers {
"Fibonacci of n " should "return correct calculation" in {
Trampoline.fib(6) shouldBe 8
Trampoline.fib(7) shouldBe 13
Trampoline.fib(40) shouldBe 102334155
}
"Fibonacci naive of n " should "throw stack overflow" in {
Trampoline.fibNaive(40) shouldBe 102334155
}
}
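// A minimal trampoline sketch using the standard library's TailCalls, shown
// only to illustrate the technique the test above exercises; it is not the
// repo's Trampoline implementation.
object TrampolineSketch {
  import scala.util.control.TailCalls._

  def fib(n: Int): TailRec[Int] =
    if (n < 2) done(n)
    else for {
      a <- tailcall(fib(n - 1))
      b <- tailcall(fib(n - 2))
    } yield a + b

  val check: Int = fib(6).result // 8, matching the expectation above
}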
|
biancama/data-structures-scala
|
src/test/scala/org/biancama/algorithms/trampoline/TrampolineTest.scala
|
Scala
|
gpl-3.0
| 430
|
package davidkellis.seven.domain
import java.time.{LocalTime, ZonedDateTime}
import davidkellis.seven.domain.CoreTypes.FillPriceFn
case class Trial(
startTime: ZonedDateTime,
endTime: ZonedDateTime,
dailyTradingTimes: Seq[LocalTime],
fillPriceFn: FillPriceFn
)
|
davidkellis/seven
|
src/main/scala/davidkellis/seven/domain/Trial.scala
|
Scala
|
mit
| 354
|
package com.twitter.finagle.thrift
import com.twitter.finagle._
import com.twitter.finagle.netty3.Conversions._
import com.twitter.finagle.netty3.{Ok, Error, Cancelled}
import com.twitter.finagle.tracing.{Trace, Annotation}
import com.twitter.finagle.util.ByteArrays
import com.twitter.io.Buf
import java.util.ArrayList
import java.util.logging.{Logger, Level}
import org.apache.thrift.protocol.{TBinaryProtocol, TMessage, TMessageType, TProtocolFactory}
import org.apache.thrift.transport.TMemoryInputTransport
import org.jboss.netty.buffer.ChannelBuffers
import org.jboss.netty.channel.{
ChannelHandlerContext, ChannelPipelineFactory, Channels, MessageEvent,
SimpleChannelDownstreamHandler}
/**
* ThriftClientFramedCodec implements a framed thrift transport that
* supports upgrading in order to provide TraceContexts across
* requests.
*/
object ThriftClientFramedCodec {
/**
* Create a [[com.twitter.finagle.thrift.ThriftClientFramedCodecFactory]].
* Passing a ClientId will propagate that information to the server iff the server is a finagle
* server.
*/
def apply(clientId: Option[ClientId] = None) = new ThriftClientFramedCodecFactory(clientId)
def get() = apply()
}
class ThriftClientFramedCodecFactory(
clientId: Option[ClientId],
_useCallerSeqIds: Boolean,
_protocolFactory: TProtocolFactory)
extends CodecFactory[ThriftClientRequest, Array[Byte]]#Client
{
def this(clientId: Option[ClientId]) = this(clientId, false, Protocols.binaryFactory())
def this(clientId: ClientId) = this(Some(clientId))
// Fix this after the API/ABI freeze (use case class builder)
def useCallerSeqIds(x: Boolean): ThriftClientFramedCodecFactory =
new ThriftClientFramedCodecFactory(clientId, x, _protocolFactory)
/**
   * Use the given protocolFactory instead of the default `TBinaryProtocol.Factory`
*/
def protocolFactory(pf: TProtocolFactory) =
new ThriftClientFramedCodecFactory(clientId, _useCallerSeqIds, pf)
/**
* Create a [[com.twitter.finagle.thrift.ThriftClientFramedCodec]]
* with a default TBinaryProtocol.
*/
def apply(config: ClientCodecConfig) =
new ThriftClientFramedCodec(_protocolFactory, config, clientId, _useCallerSeqIds)
}
class ThriftClientFramedCodec(
protocolFactory: TProtocolFactory,
config: ClientCodecConfig,
clientId: Option[ClientId] = None,
useCallerSeqIds: Boolean = false
) extends Codec[ThriftClientRequest, Array[Byte]] {
private[this] val preparer = ThriftClientPreparer(
protocolFactory, config.serviceName,
clientId, useCallerSeqIds)
def pipelineFactory: ChannelPipelineFactory =
ThriftFramedTransportPipelineFactory
override def prepareConnFactory(
underlying: ServiceFactory[ThriftClientRequest, Array[Byte]]
) = preparer.prepare(underlying)
}
private case class ThriftClientPreparer(
protocolFactory: TProtocolFactory,
serviceName: String = "unknown",
clientId: Option[ClientId] = None,
useCallerSeqIds: Boolean = false) {
def prepare(
underlying: ServiceFactory[ThriftClientRequest, Array[Byte]]
) = underlying flatMap { service =>
// Attempt to upgrade the protocol the first time around by
// sending a magic method invocation.
val buffer = new OutputBuffer(protocolFactory)
buffer().writeMessageBegin(
new TMessage(ThriftTracing.CanTraceMethodName, TMessageType.CALL, 0))
val options = new thrift.ConnectionOptions
options.write(buffer())
buffer().writeMessageEnd()
service(new ThriftClientRequest(buffer.toArray, false)) map { bytes =>
val memoryTransport = new TMemoryInputTransport(bytes)
val iprot = protocolFactory.getProtocol(memoryTransport)
val reply = iprot.readMessageBegin()
val ttwitter = new TTwitterFilter(
serviceName,
reply.`type` != TMessageType.EXCEPTION,
clientId, protocolFactory)
val seqIdFilter =
if (protocolFactory.isInstanceOf[TBinaryProtocol.Factory] && !useCallerSeqIds)
new SeqIdFilter
else
Filter.identity[ThriftClientRequest, Array[Byte]]
val filtered = seqIdFilter andThen ttwitter andThen service
new ValidateThriftService(filtered, protocolFactory)
}
}
}
/**
* ThriftClientChannelBufferEncoder translates ThriftClientRequests to
* bytes on the wire. It satisfies the request immediately if it is a
* "oneway" request.
*/
private[thrift] class ThriftClientChannelBufferEncoder
extends SimpleChannelDownstreamHandler
{
override def writeRequested(ctx: ChannelHandlerContext, e: MessageEvent) =
e.getMessage match {
case request: ThriftClientRequest =>
Channels.write(ctx, e.getFuture, ChannelBuffers.wrappedBuffer(request.message))
if (request.oneway) {
// oneway RPCs are satisfied when the write is complete.
e.getFuture() {
case Ok(_) =>
Channels.fireMessageReceived(ctx, ChannelBuffers.EMPTY_BUFFER)
case Error(e) =>
Channels.fireExceptionCaught(ctx, e)
case Cancelled =>
Channels.fireExceptionCaught(ctx, new CancelledRequestException)
}
}
case _ =>
throw new IllegalArgumentException("No ThriftClientRequest on the wire")
}
}
/**
 * TTwitterFilter implements the up-negotiated TTwitter transport, which
* has some additional features beyond TFramed:
*
* - Dapper-style RPC tracing
* - Passing client IDs
* - Request contexts
* - Name delegation
*
* @param isUpgraded Whether this connection is with a server that
* has been upgraded to TTwitter
*/
private[thrift] class TTwitterFilter(
serviceName: String,
isUpgraded: Boolean,
clientId: Option[ClientId],
protocolFactory: TProtocolFactory)
extends SimpleFilter[ThriftClientRequest, Array[Byte]]
{
private[this] val clientIdBuf = clientId map { id => Buf.Utf8(id.name) }
/**
* Produces an upgraded TTwitter ThriftClientRequest based on Trace,
* ClientId, and Dtab state.
*/
private[this] def mkTTwitterRequest(
baseRequest: ThriftClientRequest
): ThriftClientRequest = {
val header = new thrift.RequestHeader
clientId match {
case Some(clientId) =>
header.setClient_id(clientId.toThrift)
case None =>
}
header.setSpan_id(Trace.id.spanId.toLong)
Trace.id._parentId foreach { id => header.setParent_span_id(id.toLong) }
header.setTrace_id(Trace.id.traceId.toLong)
header.setFlags(Trace.id.flags.toLong)
Trace.id.sampled match {
case Some(s) => header.setSampled(s)
case None => header.unsetSampled()
}
val contexts = Context.emit().iterator
if (contexts.hasNext) {
val ctxs = new ArrayList[thrift.RequestContext]()
var i = 0
while (contexts.hasNext) {
val (k, buf) = contexts.next()
// Note: we need to skip the caller-provided client id here,
// since the existing value is derived from whatever code
// calls into here. This should never happen in practice;
// however if the ClientIdContext handler failed to load for
// some reason, a pass-through context would be used instead.
        if (k != ClientIdContext.Key) {
val c = new thrift.RequestContext(
Buf.toByteBuffer(k), Buf.toByteBuffer(buf))
ctxs.add(i, c)
i += 1
}
}
clientIdBuf match {
case Some(buf) =>
val ctx = new thrift.RequestContext(
Buf.toByteBuffer(ClientIdContext.Key),
Buf.toByteBuffer(buf))
ctxs.add(i, ctx)
case None => // skip
}
header.setContexts(ctxs)
}
val dtab = Dtab.local
if (dtab.nonEmpty) {
val delegations = new ArrayList[thrift.Delegation](dtab.size)
for (Dentry(src, dst) <- dtab)
delegations.add(new thrift.Delegation(src.show, dst.show))
header.setDelegations(delegations)
}
new ThriftClientRequest(
ByteArrays.concat(
OutputBuffer.messageToArray(header, protocolFactory),
baseRequest.message
),
baseRequest.oneway
)
}
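  // The upgraded request built by mkTTwitterRequest is simply the serialized
  // RequestHeader prepended to the original framed message, which is why the
  // TTwitter upgrade is negotiated first (see ThriftClientPreparer): a server
  // that has not upgraded would not expect the extra header bytes.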
def apply(
request: ThriftClientRequest,
service: Service[ThriftClientRequest, Array[Byte]]
) = {
// Create a new span identifier for this request.
val msg = new InputBuffer(request.message, protocolFactory)().readMessageBegin()
Trace.recordServiceName(serviceName)
Trace.recordRpc(msg.name)
val thriftRequest =
if (isUpgraded)
mkTTwitterRequest(request)
else
request
Trace.record(Annotation.ClientSend())
val reply = service(thriftRequest)
if (thriftRequest.oneway) {
// Oneway requests don't contain replies, so they can't be traced.
reply
} else {
reply map { response =>
Trace.record(Annotation.ClientRecv())
if (isUpgraded) {
// Peel off the ResponseHeader.
InputBuffer.peelMessage(response, new thrift.ResponseHeader, protocolFactory)
} else
response
}
}
}
}
|
JustinTulloss/finagle
|
finagle-thrift/src/main/scala/com/twitter/finagle/thrift/ThriftClientFramedCodec.scala
|
Scala
|
apache-2.0
| 9,049
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.batch.table.stringexpr
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.api.java.{DataSet => JDataSet}
import org.apache.flink.api.scala._
import org.apache.flink.table.api.Types
import org.apache.flink.table.api.scala._
import org.apache.flink.table.utils.{PojoTableFunc, TableFunc2, _}
import org.apache.flink.types.Row
import org.junit.Test
import org.mockito.Mockito.{mock, when}
class CorrelateStringExpressionTest extends TableTestBase {
@Test
def testCorrelateJoinsWithJoinLateral(): Unit = {
val util = batchTestUtil()
val typeInfo = new RowTypeInfo(Seq(Types.INT, Types.LONG, Types.STRING): _*)
val jDs = mock(classOf[JDataSet[Row]])
when(jDs.getType).thenReturn(typeInfo)
val sDs = mock(classOf[DataSet[Row]])
when(sDs.javaSet).thenReturn(jDs)
val jTab = util.javaTableEnv.fromDataSet(jDs, "a, b, c")
val sTab = util.tableEnv.fromDataSet(sDs, 'a, 'b, 'c)
// test cross join
val func1 = new TableFunc1
util.javaTableEnv.registerFunction("func1", func1)
var scalaTable = sTab.joinLateral(func1('c) as 's).select('c, 's)
var javaTable = jTab.joinLateral("func1(c).as(s)").select("c, s")
verifyTableEquals(scalaTable, javaTable)
// test left outer join
scalaTable = sTab.leftOuterJoinLateral(func1('c) as 's).select('c, 's)
javaTable = jTab.leftOuterJoinLateral("as(func1(c), s)").select("c, s")
verifyTableEquals(scalaTable, javaTable)
// test overloading
scalaTable = sTab.joinLateral(func1('c, "$") as 's).select('c, 's)
javaTable = jTab.joinLateral("func1(c, '$') as (s)").select("c, s")
verifyTableEquals(scalaTable, javaTable)
// test custom result type
val func2 = new TableFunc2
util.javaTableEnv.registerFunction("func2", func2)
scalaTable = sTab.joinLateral(func2('c) as('name, 'len)).select('c, 'name, 'len)
javaTable = jTab.joinLateral(
"func2(c).as(name, len)").select("c, name, len")
verifyTableEquals(scalaTable, javaTable)
// test hierarchy generic type
val hierarchy = new HierarchyTableFunction
util.javaTableEnv.registerFunction("hierarchy", hierarchy)
scalaTable = sTab.joinLateral(
hierarchy('c) as('name, 'adult, 'len)).select('c, 'name, 'len, 'adult)
javaTable = jTab.joinLateral("AS(hierarchy(c), name, adult, len)")
.select("c, name, len, adult")
verifyTableEquals(scalaTable, javaTable)
// test pojo type
val pojo = new PojoTableFunc
util.javaTableEnv.registerFunction("pojo", pojo)
scalaTable = sTab.joinLateral(pojo('c)).select('c, 'name, 'age)
javaTable = jTab.joinLateral("pojo(c)").select("c, name, age")
verifyTableEquals(scalaTable, javaTable)
// test with filter
scalaTable = sTab.joinLateral(
func2('c) as('name, 'len)).select('c, 'name, 'len).filter('len > 2)
javaTable = jTab.joinLateral("func2(c) as (name, len)")
.select("c, name, len").filter("len > 2")
verifyTableEquals(scalaTable, javaTable)
// test with scalar function
scalaTable = sTab.joinLateral(func1('c.substring(2)) as 's).select('a, 'c, 's)
javaTable = jTab.joinLateral(
"func1(substring(c, 2)) as (s)").select("a, c, s")
verifyTableEquals(scalaTable, javaTable)
}
}
|
fhueske/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/CorrelateStringExpressionTest.scala
|
Scala
|
apache-2.0
| 4,096
|
package skinny.engine.implicits
import scala.language.implicitConversions
import skinny.engine.context.SkinnyEngineContext
import skinny.engine.cookie.{ Cookie, CookieOptions, SweetCookies }
/**
* Implicit conversion for Cookie values.
*/
trait CookiesImplicits extends ServletApiImplicits {
implicit def cookieOptions(implicit ctx: SkinnyEngineContext): CookieOptions = {
ctx.servletContext.get(Cookie.CookieOptionsKey).orNull.asInstanceOf[CookieOptions]
}
def cookies(implicit ctx: SkinnyEngineContext): SweetCookies = {
ctx.request.get(Cookie.SweetCookiesKey).orNull.asInstanceOf[SweetCookies]
}
}
|
holycattle/skinny-framework
|
engine/src/main/scala/skinny/engine/implicits/CookiesImplicits.scala
|
Scala
|
mit
| 625
|
package org.jetbrains.plugins.scala.codeInsight.template.impl
import com.intellij.codeInsight.template.TemplateContextType
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.psi.{PsiComment, PsiFile, PsiWhiteSpace}
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
/**
* @author Alefas
* @since 18/12/14.
*/
class ScalaCommentContextType extends TemplateContextType("SCALA_COMMENT", "Comment", classOf[ScalaLiveTemplateContextType]) {
override def isInContext(file: PsiFile, offset: Int): Boolean =
ScalaCommentContextType.isInContext(file, offset)
}
object ScalaCommentContextType {
def isInContext(file: PsiFile, offset: Int): Boolean = {
if (!file.isInstanceOf[ScalaFile]) return false
val element = file.findElementAt(offset) match {
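      // When the caret sits on whitespace, inspect the element just before it,
      // so an offset immediately following a comment still resolves to it.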
      case _: PsiWhiteSpace if offset > 0 => file.findElementAt(offset - 1)
case elem => elem
}
val comment = PsiTreeUtil.getParentOfType(element, classOf[PsiComment], false)
comment != null
}
}
|
triggerNZ/intellij-scala
|
src/org/jetbrains/plugins/scala/codeInsight/template/impl/ScalaCommentContextType.scala
|
Scala
|
apache-2.0
| 995
|
package domala.internal.macros.meta.generator
import domala.Insert
import domala.internal.macros.meta.QueryDefDecl
import domala.internal.macros.meta.args.DaoMethodCommonArgs
import scala.collection.immutable.Seq
import scala.meta._
object InsertGenerator extends DaoMethodGenerator {
override def annotationClass: Class[Insert] = classOf[Insert]
override def generate(trtName: Type.Name, _def: Decl.Def, internalMethodName: Term.Name, args: Seq[Term.Arg]): Defn.Def = {
val defDecl = QueryDefDecl.of(trtName, _def)
val commonArgs = DaoMethodCommonArgs.of(args, trtName.syntax, _def.name.syntax)
val excludeNull = args.collectFirst { case arg"excludeNull = $x" => x }.getOrElse(q"false")
val include = args.collectFirst { case arg"include = $x" => Some(x) }.flatten
val exclude = args.collectFirst { case arg"exclude = $x" => Some(x) }.flatten
if (commonArgs.hasSqlAnnotation || commonArgs.sqlFile) {
val query: (Term, Option[Term]) => Term.New =
if(commonArgs.hasSqlAnnotation) (entityAndEntityDesc, _) =>
q"new domala.jdbc.query.SqlAnnotationInsertQuery(${commonArgs.sql})($entityAndEntityDesc)"
else (entityAndEntityDesc, path) =>
q"new domala.jdbc.query.SqlFileInsertQuery(${path.get})($entityAndEntityDesc)"
val otherQuerySettings = Seq[Stat]()
val command = q"getCommandImplementors.createInsertCommand($internalMethodName, __query)"
SqlModifyQueryGenerator.generate(defDecl, commonArgs, internalMethodName, query, otherQuerySettings, command, q"false")
} else {
val includedPropertyNames = include match {
case Some(x: Term.Apply) => x.args
case _ => Nil
}
val excludedPropertyNames = exclude match {
case Some(x: Term.Apply) => x.args
case _ => Nil
}
val (paramName, paramTpe) = AutoModifyQueryGenerator.extractParameter(defDecl)
val query = q"getQueryImplementors.createAutoInsertQuery($internalMethodName, __desc)"
val command = q"getCommandImplementors.createInsertCommand($internalMethodName, __query)"
val validateEntityPropertyNames = DaoMethodGeneratorHelper.validateEntityPropertyNames(defDecl, paramTpe, includedPropertyNames, excludedPropertyNames)
val otherQueryArgs = validateEntityPropertyNames ++ Seq[Stat](
q"__query.setNullExcluded($excludeNull)",
q"__query.setIncludedPropertyNames(..$includedPropertyNames)",
q"__query.setExcludedPropertyNames(..$excludedPropertyNames)"
)
AutoModifyQueryGenerator.generate(defDecl, commonArgs, paramName, paramTpe, internalMethodName, query, otherQueryArgs, command)
}
}
}
|
bakenezumi/domala
|
meta/src/main/scala/domala/internal/macros/meta/generator/InsertGenerator.scala
|
Scala
|
apache-2.0
| 2,658
|
package com.evojam.mongodb.evolutions.command
import play.api.libs.json.Json
import com.evojam.mongodb.evolutions.config.ConfigurationComponent
import com.evojam.mongodb.evolutions.model.command._
import com.evojam.mongodb.evolutions.model.evolution.{Script, Evolution}
import com.evojam.mongodb.evolutions.model.journal.Entry
trait CommandsComponent {
this: ConfigurationComponent =>
def commands: Commands
class CommandsImpl extends Commands {
override lazy val acquireLock =
SingleResultCommand(
"lock/acquireLock.js.template",
config.lockCollection)
override lazy val releaseLock =
SingleResultCommand(
"lock/releaseLock.js.template",
config.lockCollection)
override lazy val getLock =
SingleResultCommand(
"lock/getLock.js.template",
config.lockCollection)
override def getEvolution(revision: Int) =
SingleResultCommand(
"command/findById.js.template",
config.evolutionsCollection,
revision.toString)
override lazy val getAllEvolutions =
QueryCommand(
config.evolutionsCollection,
"query/all.js.template")
override def insertEvolution(evolution: Evolution) =
RawCommand(
"command/insert.js.template",
config.evolutionsCollection,
Json.stringify(Json.toJson(evolution)))
override def saveEvolution(evolution: Evolution) =
RawCommand(
"command/save.js.template",
config.evolutionsCollection,
Json.stringify(Json.toJson(evolution)))
override def removeEvolution(revision: Int) =
RemoveCommand(
config.evolutionsCollection,
"query/revision.js.template",
revision.toString)
override lazy val removeAllEvolutions =
RemoveCommand(
config.evolutionsCollection,
"query/all.js.template")
override def applyScript(script: Script) =
ScriptCommand(script)
override def addToJournal(entry: Entry) =
RawCommand(
"command/insert.js.template",
config.journalCollection,
Json.stringify(Json.toJson(entry)))
}
}
|
evojam/mongodb-evolutions-scala
|
src/main/scala/com/evojam/mongodb/evolutions/command/CommandsComponent.scala
|
Scala
|
apache-2.0
| 2,131
|
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.features.kryo
import java.util.{Collection => jCollection, List => jList, Map => jMap}
import com.esotericsoftware.kryo.io.Input
import com.vividsolutions.jts.geom.Geometry
import org.geotools.filter.identity.FeatureIdImpl
import org.geotools.geometry.jts.ReferencedEnvelope
import org.geotools.process.vector.TransformProcess
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.features.SerializationOption._
import org.locationtech.geomesa.features.serialization.ObjectType
import org.locationtech.geomesa.utils.geotools.ImmutableFeatureId
import org.opengis.feature.`type`.Name
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.feature.{GeometryAttribute, Property}
import org.opengis.filter.expression.PropertyName
import org.opengis.filter.identity.FeatureId
import org.opengis.geometry.BoundingBox
import scala.collection.JavaConversions._
object LazySimpleFeature {
  val NULL_BYTE: Byte = 0.toByte
}
class KryoBufferSimpleFeature(sft: SimpleFeatureType,
readers: Array[(Input) => AnyRef],
readUserData: (Input) => jMap[AnyRef, AnyRef],
options: Set[SerializationOption]) extends SimpleFeature {
private var offset: Int = _
private var length: Int = _
private val input = new Input
private val offsets = Array.ofDim[Int](sft.getAttributeCount)
private var startOfOffsets: Int = -1
private var missingAttributes: Boolean = false
private lazy val geomIndex = sft.indexOf(sft.getGeometryDescriptor.getLocalName)
private var userData: jMap[AnyRef, AnyRef] = _
private var userDataOffset: Int = -1
private var id: String = ""
private var transforms: String = _
private var transformSchema: SimpleFeatureType = _
private var binaryTransform: () => Array[Byte] = input.getBuffer
private var reserializeTransform: () => Array[Byte] = input.getBuffer
/**
* Creates a new feature for later use - does not copy attribute bytes
*
* @return
*/
def copy(): KryoBufferSimpleFeature = {
val sf = new KryoBufferSimpleFeature(sft, readers, readUserData, options)
if (transforms != null) {
sf.setTransforms(transforms, transformSchema)
}
sf
}
/**
* Transform the feature into a serialized byte array
*
* @return
*/
def transform(): Array[Byte] =
// if attributes have been added to the sft, we have to reserialize to get the null serialized values
if (missingAttributes) { reserializeTransform() } else { binaryTransform() }
/**
* Set the serialized bytes to use for reading attributes
*
* @param bytes serialized byte array
*/
def setBuffer(bytes: Array[Byte]): Unit = setBuffer(bytes, 0, bytes.length)
/**
* Set the serialized bytes to use for reading attributes
*
* @param bytes serialized byte array
* @param offset offset into the byte array of valid bytes
* @param length number of valid bytes to read from the byte array
*/
def setBuffer(bytes: Array[Byte], offset: Int, length: Int): Unit = {
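    // Buffer layout, as parsed below and in getID: a version byte, a 4-byte
    // pointer to the offset table, the feature id, the attribute values, the
    // offset table itself, and finally the user data. Only the offset table is
    // decoded eagerly here; attribute values are read lazily on access.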
this.offset = offset
this.length = length
input.setBuffer(bytes, offset, offset + length)
// reset our offsets
input.setPosition(offset + 1) // skip version
startOfOffsets = offset + input.readInt()
input.setPosition(startOfOffsets) // set to offsets start
var i = 0
while (i < offsets.length && input.position < input.limit) {
offsets(i) = offset + input.readInt(true)
i += 1
}
if (i < offsets.length) {
// attributes have been added to the sft since this feature was serialized
missingAttributes = true
do { offsets(i) = -1; i += 1 } while (i < offsets.length)
} else {
missingAttributes = false
}
userData = null
userDataOffset = input.position()
}
def setId(id: String): Unit = this.id = id
def setTransforms(transforms: String, transformSchema: SimpleFeatureType): Unit = {
this.transforms = transforms
this.transformSchema = transformSchema
val tdefs = TransformProcess.toDefinition(transforms)
// transforms by evaluating the transform expressions and then serializing the resulting feature
// we use this for transform expressions and for data that was written using an old schema
reserializeTransform = {
val serializer = new KryoFeatureSerializer(transformSchema, options)
val sf = new ScalaSimpleFeature("", transformSchema)
() => {
sf.setId(getID)
var i = 0
while (i < tdefs.size) {
sf.setAttribute(i, tdefs(i).expression.evaluate(this))
i += 1
}
serializer.serialize(sf)
}
}
val indices = tdefs.map { t =>
t.expression match {
case p: PropertyName => sft.indexOf(p.getPropertyName)
case _ => -1
}
}
val shouldReserialize = indices.contains(-1)
// if we are just returning a subset of attributes, we can copy the bytes directly and avoid creating
// new objects, reserializing, etc
binaryTransform = if (!shouldReserialize) {
val mutableOffsetsAndLength = Array.ofDim[(Int,Int)](indices.length)
() => {
// NOTE: the input buffer is the raw buffer. we need to ensure that we use the
// offset into the raw buffer rather than the raw buffer directly
val buf = input.getBuffer
var length = offsets(0) - this.offset // space for version, offset block and ID
var idx = 0
while(idx < mutableOffsetsAndLength.length) {
val i = indices(idx)
val l = (if (i < offsets.length - 1) offsets(i + 1) else startOfOffsets) - offsets(i)
length += l
mutableOffsetsAndLength(idx) = (offsets(i), l)
idx += 1
}
val dst = Array.ofDim[Byte](length)
// copy the version, offset block and id
var dstPos = offsets(0) - this.offset
System.arraycopy(buf, this.offset, dst, 0, dstPos)
mutableOffsetsAndLength.foreach { case (o, l) =>
System.arraycopy(buf, o, dst, dstPos, l)
dstPos += l
}
// note that the offset block is incorrect - we couldn't use this in another lazy feature
// but the normal serializer doesn't care
dst
}
} else {
reserializeTransform
}
}
def getDateAsLong(index: Int): Long = {
val offset = offsets(index)
if (offset == -1) {
0L
} else {
input.setPosition(offset)
KryoBufferSimpleFeature.longReader(input).asInstanceOf[Long]
}
}
override def getAttribute(index: Int): AnyRef = {
val offset = offsets(index)
if (offset == -1) {
null
} else {
input.setPosition(offset)
readers(index)(input)
}
}
def getInput(index: Int): Input = {
val offset = offsets(index)
if (offset == -1) {
null
} else {
input.setPosition(offset)
input
}
}
override def getType: SimpleFeatureType = sft
override def getFeatureType: SimpleFeatureType = sft
override def getName: Name = sft.getName
override def getIdentifier: FeatureId = new ImmutableFeatureId(getID)
override def getID: String = {
if (options.withoutId) { id } else {
input.setPosition(5)
input.readString()
}
}
override def getAttribute(name: Name): AnyRef = getAttribute(name.getLocalPart)
override def getAttribute(name: String): Object = {
val index = sft.indexOf(name)
if (index == -1) null else getAttribute(index)
}
override def getDefaultGeometry: AnyRef = getAttribute(geomIndex)
override def getAttributeCount: Int = sft.getAttributeCount
override def getBounds: BoundingBox = getDefaultGeometry match {
case g: Geometry => new ReferencedEnvelope(g.getEnvelopeInternal, sft.getCoordinateReferenceSystem)
case _ => new ReferencedEnvelope(sft.getCoordinateReferenceSystem)
}
override def getAttributes: jList[AnyRef] = {
val attributes = new java.util.ArrayList[AnyRef](offsets.length)
var i = 0
while (i < offsets.length) {
attributes.add(getAttribute(i))
i += 1
}
attributes
}
override def getUserData: jMap[AnyRef, AnyRef] = {
if (userData == null) {
input.setPosition(userDataOffset)
userData = readUserData(input)
}
userData
}
override def getDefaultGeometryProperty = throw new NotImplementedError
override def getProperties: jCollection[Property] = throw new NotImplementedError
override def getProperties(name: Name) = throw new NotImplementedError
override def getProperties(name: String) = throw new NotImplementedError
override def getProperty(name: Name) = throw new NotImplementedError
override def getProperty(name: String) = throw new NotImplementedError
override def getValue = throw new NotImplementedError
override def getDescriptor = throw new NotImplementedError
override def setAttribute(name: Name, value: Object) = throw new NotImplementedError
override def setAttribute(name: String, value: Object) = throw new NotImplementedError
override def setAttribute(index: Int, value: Object) = throw new NotImplementedError
override def setAttributes(vals: jList[Object]) = throw new NotImplementedError
override def setAttributes(vals: Array[Object]) = throw new NotImplementedError
override def setDefaultGeometry(geo: Object) = throw new NotImplementedError
override def setDefaultGeometryProperty(geoAttr: GeometryAttribute) = throw new NotImplementedError
override def setValue(newValue: Object) = throw new NotImplementedError
override def setValue(values: jCollection[Property]) = throw new NotImplementedError
override def isNillable = true
override def validate() = throw new NotImplementedError
override def toString = s"KryoBufferSimpleFeature:$getID"
}
object KryoBufferSimpleFeature {
val longReader = KryoFeatureSerializer.matchReader(ObjectType.LONG)
}
|
spandanagrawal/geomesa
|
geomesa-features/geomesa-feature-kryo/src/main/scala/org/locationtech/geomesa/features/kryo/KryoBufferSimpleFeature.scala
|
Scala
|
apache-2.0
| 10,467
|
package com.typesafe.slick.examples.test
class PlainSQLTest extends RecordedDoctest {
def run = com.typesafe.slick.examples.jdbc.PlainSQL.main(null)
}
|
dvinokurov/slick
|
slick-testkit/src/doctest/scala/PlainSQLTest.scala
|
Scala
|
bsd-2-clause
| 154
|
package pulsar.client
import akka.util.ByteString
import rx.lang.scala.Observable
/**
* @author Alexander De Leon <me@alexdeleon.name>
*/
trait PulsarClient {
def register(taskType: String): Observable[ByteString]
}
|
devialab/pulsar
|
pulsar-client/src/main/scala/pulsar/client/PulsarClient.scala
|
Scala
|
mit
| 224
|
package org.jetbrains.plugins.scala.lang.completion.handlers
import com.intellij.codeInsight.CodeInsightUtilCore
import com.intellij.codeInsight.completion.{InsertHandler, InsertionContext}
import com.intellij.codeInsight.lookup.LookupElement
import com.intellij.codeInsight.template._
import com.intellij.codeInsight.template.impl.ConstantNode
import com.intellij.psi.PsiDocumentManager
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.lang.completion.ScalaCompletionUtil
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.ScalaRecursiveElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScCaseClause, ScTypedPattern}
import org.jetbrains.plugins.scala.lang.psi.api.base.types._
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScFunctionExpr
import org.jetbrains.plugins.scala.lang.psi.types.api.{Any, Nothing, ScTypePresentation}
import org.jetbrains.plugins.scala.lang.psi.types.{ScAbstractType, ScType}
import scala.collection.mutable
/**
* @author Alexander Podkhalyuzin
*/
class ScalaGenerateAnonymousFunctionInsertHandler(params: Seq[ScType], braceArgs: Boolean) extends InsertHandler[LookupElement] {
def handleInsert(context: InsertionContext, item: LookupElement) {
def collectAbstracts(`type`: ScType): Seq[ScAbstractType] = {
val set: mutable.HashSet[ScAbstractType] = new mutable.HashSet[ScAbstractType]
`type`.recursiveUpdate(tp => {
tp match {
case a: ScAbstractType => set += a
case _ =>
}
(false, tp)
})
set.toSeq
}
val abstracts = new mutable.HashSet[ScAbstractType]
for (param <- params) abstracts ++= collectAbstracts(param)
val editor = context.getEditor
val document = editor.getDocument
context.setAddCompletionChar(false)
val text = ScalaCompletionUtil.generateAnonymousFunctionText(braceArgs, params, canonical = true,
arrowText = ScalaPsiUtil.functionArrow(editor.getProject))
document.insertString(editor.getCaretModel.getOffset, text)
val documentManager = PsiDocumentManager.getInstance(context.getProject)
documentManager.commitDocument(document)
val file = documentManager.getPsiFile(document)
val startOffset = context.getStartOffset
val endOffset = startOffset + text.length()
val commonParent = PsiTreeUtil.findCommonParent(file.findElementAt(startOffset),
file.findElementAt(endOffset - 1))
if (commonParent.getTextRange.getStartOffset != startOffset ||
commonParent.getTextRange.getEndOffset != endOffset) {
document.insertString(endOffset, " ")
editor.getCaretModel.moveToOffset(endOffset + 1)
return
}
ScalaPsiUtil.adjustTypes(commonParent)
val builder: TemplateBuilderImpl = TemplateBuilderFactory.getInstance().
createTemplateBuilder(commonParent).asInstanceOf[TemplateBuilderImpl]
val abstractNames = abstracts.map(at => at.parameterType.name + ScTypePresentation.ABSTRACT_TYPE_POSTFIX)
def seekAbstracts(te: ScTypeElement) {
val visitor = new ScalaRecursiveElementVisitor {
override def visitSimpleTypeElement(simple: ScSimpleTypeElement) {
simple.reference match {
case Some(ref) =>
val refName = ref.refName
if (abstractNames.contains(refName)) {
val postfixLength = ScTypePresentation.ABSTRACT_TYPE_POSTFIX.length
val node = abstracts.find(a => a.parameterType.name + ScTypePresentation.ABSTRACT_TYPE_POSTFIX == refName) match {
case Some(abstr) =>
abstr.simplifyType match {
case Any | Nothing =>
new ConstantNode(refName.substring(0, refName.length - postfixLength))
case tp =>
new ConstantNode(tp.presentableText)
}
case None =>
new ConstantNode(refName.substring(0, refName.length - postfixLength))
}
builder.replaceElement(simple, refName, node, false)
}
case None =>
}
}
}
te.accept(visitor)
}
commonParent match {
case f: ScFunctionExpr =>
for (parameter <- f.parameters) {
parameter.typeElement match {
case Some(te) =>
seekAbstracts(te)
case _ =>
}
builder.replaceElement(parameter.nameId, parameter.name)
}
case c: ScCaseClause => c.pattern match {
case Some(pattern) =>
for (binding <- pattern.bindings) {
binding match {
case tp: ScTypedPattern => tp.typePattern match {
case Some(tpe) =>
seekAbstracts(tpe.typeElement)
case _ =>
}
case _ =>
}
builder.replaceElement(binding.nameId, binding.name)
}
case _ =>
}
}
CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(commonParent)
val template = builder.buildTemplate()
for (name <- abstractNames) {
val actualName: String = name.substring(0, name.length - ScTypePresentation.ABSTRACT_TYPE_POSTFIX.length)
template.addVariable(name, actualName, actualName, false)
}
document.deleteString(commonParent.getTextRange.getStartOffset, commonParent.getTextRange.getEndOffset)
TemplateManager.getInstance(context.getProject).startTemplate(editor, template, new TemplateEditingAdapter {
override def templateFinished(template: Template, brokenOff: Boolean) {
if (!brokenOff) {
val offset = editor.getCaretModel.getOffset
document.insertString(offset, " ")
editor.getCaretModel.moveToOffset(offset + 1)
}
}
})
}
}
|
whorbowicz/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/completion/handlers/ScalaGenerateAnonymousFunctionInsertHandler.scala
|
Scala
|
apache-2.0
| 5,894
|
// ticket #3444
object Test {
def main(args: Array[String]): Unit = {
val it = (1 to 12).toSeq.iterator
assert(it.next() == 1)
assert(it.take(2).toList == List(2, 3))
val jt = (4 to 12).toSeq.iterator
assert(jt.next() == 4)
assert(jt.drop(5).toList == List(10, 11, 12))
val kt = (1 until 10).toSeq.iterator
assert(kt.drop(50).toList == Nil)
val mt = (1 until 5).toSeq.iterator
assert(mt.take(50).toList == List(1, 2, 3, 4))
}
}
|
som-snytt/dotty
|
tests/run/iterator3444.scala
|
Scala
|
apache-2.0
| 480
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.batch.table
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.table.api._
import org.apache.flink.table.functions.ScalarFunction
import org.apache.flink.table.planner.plan.batch.table.CalcTest.{MyHashCode, TestCaseClass, WC, giveMeCaseClass}
import org.apache.flink.table.planner.utils.TableTestBase
import org.junit.Test
class CalcTest extends TableTestBase {
@Test
def testMultipleFlatteningsTable(): Unit = {
val util = batchTestUtil()
val table = util.addTableSource[((Int, Long), (String, Boolean), String)]("MyTable", 'a, 'b, 'c)
val result = table.select('a.flatten(), 'c, 'b.flatten())
util.verifyExecPlan(result)
}
@Test
def testNestedFlattening(): Unit = {
val util = batchTestUtil()
val table = util
.addTableSource[((((String, TestCaseClass), Boolean), String), String)]("MyTable", 'a, 'b)
val result = table.select('a.flatten(), 'b.flatten())
util.verifyExecPlan(result)
}
@Test
def testScalarFunctionAccess(): Unit = {
val util = batchTestUtil()
val table = util
.addTableSource[(String, Int)]("MyTable", 'a, 'b)
val result = table.select(
giveMeCaseClass().get("my"),
giveMeCaseClass().get("clazz"),
giveMeCaseClass().flatten())
util.verifyExecPlan(result)
}
// ----------------------------------------------------------------------------------------------
// Tests for all the situations when we can do fields projection. Like selecting few fields
// from a large field count source.
// ----------------------------------------------------------------------------------------------
@Test
def testSimpleSelect(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.select('a, 'b)
util.verifyExecPlan(resultTable)
}
@Test
def testSelectAllFields(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable1 = sourceTable.select('*)
val resultTable2 = sourceTable.select('a, 'b, 'c, 'd)
verifyTableEquals(resultTable1, resultTable2)
}
@Test
def testSelectAggregation(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.select('a.sum, 'b.max)
util.verifyExecPlan(resultTable)
}
@Test
def testSelectFunction(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
util.tableEnv.registerFunction("hashCode", MyHashCode)
val resultTable = sourceTable.select(call("hashCode", $"c"), $"b")
util.verifyExecPlan(resultTable)
}
@Test
def testSelectFromGroupedTable(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.groupBy('a, 'c).select('a)
util.verifyExecPlan(resultTable)
}
@Test
def testSelectAllFieldsFromGroupedTable(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.groupBy('a, 'c).select('a, 'c)
util.verifyExecPlan(resultTable)
}
@Test
def testSelectAggregationFromGroupedTable(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.groupBy('c).select('a.sum)
util.verifyExecPlan(resultTable)
}
@Test
def testSelectFromGroupedTableWithNonTrivialKey(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.groupBy('c.upperCase() as 'k).select('a.sum)
util.verifyExecPlan(resultTable)
}
@Test
def testSelectFromGroupedTableWithFunctionKey(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.groupBy(MyHashCode('c) as 'k).select('a.sum)
util.verifyExecPlan(resultTable)
}
@Test
def testSelectFromAggregatedPojoTable(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTableSource[WC]("MyTable", 'word, 'frequency)
val resultTable = sourceTable
.groupBy('word)
.select('word, 'frequency.sum as 'frequency)
.filter('frequency === 2)
util.verifyExecPlan(resultTable)
}
@Test
def testMultiFilter(): Unit = {
val util = batchTestUtil()
val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.select('a, 'b)
.filter('a > 0)
.filter('b < 2)
.filter(('a % 2) === 1)
util.verifyExecPlan(resultTable)
}
}
object CalcTest {
case class TestCaseClass(my: String, clazz: Int)
@SerialVersionUID(1L)
object giveMeCaseClass extends ScalarFunction {
def eval(): TestCaseClass = {
TestCaseClass("hello", 42)
}
override def getResultType(argTypes: Array[Class[_]]): TypeInformation[TestCaseClass] = {
createTypeInformation[TestCaseClass]
}
}
@SerialVersionUID(1L)
object MyHashCode extends ScalarFunction {
def eval(s: String): Int = s.hashCode()
}
case class WC(word: String, frequency: Long)
}
|
tillrohrmann/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/batch/table/CalcTest.scala
|
Scala
|
apache-2.0
| 6,526
|
package com.github.ilyamurzinov.scala.rest.example.domain
import java.util.Date
/**
* Customers search parameters.
*
* @param firstName first name
* @param lastName last name
* @param birthday date of birth
*/
case class CustomerSearchParameters(firstName: Option[String] = None,
lastName: Option[String] = None,
birthday: Option[Date] = None)
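// Usage sketch (illustrative): every field defaults to None, so a search by
// last name alone is just:
//   CustomerSearchParameters(lastName = Some("Smith"))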
|
ilya-murzinov/scala-rest-example
|
src/main/scala/com/github/ilyamurzinov/scala/rest/example/domain/CustomerSearchParameters.scala
|
Scala
|
unlicense
| 425
|
package de.zalando.model
import de.zalando.apifirst.Application._
import de.zalando.apifirst.Domain._
import de.zalando.apifirst.ParameterPlace
import de.zalando.apifirst.naming._
import de.zalando.apifirst.Hypermedia._
import de.zalando.apifirst.Http._
import de.zalando.apifirst.Security
import java.net.URL
import Security._
//noinspection ScalaStyle
object nested_objects_yaml extends WithModel {
def types = Map[Reference, Type](
Reference("⌿definitions⌿NestedObjects") →
TypeDef(Reference("⌿definitions⌿NestedObjects"),
Seq(
Field(Reference("⌿definitions⌿NestedObjects⌿plain"), Opt(TypeDef(Reference("⌿definitions⌿NestedObjects⌿plain"),
Seq(
Field(Reference("⌿definitions⌿NestedObjects⌿plain⌿simple"), Str(None, TypeMeta(None, List("""pattern("""+"""""""""+"""the pattern"""+"""""""""+""".r)"""))))
), TypeMeta(Some("Named types: 1"), List())), TypeMeta(None, List()))),
Field(Reference("⌿definitions⌿NestedObjects⌿nested"), Opt(TypeDef(Reference("⌿definitions⌿NestedObjects⌿nested"),
Seq(
Field(Reference("⌿definitions⌿NestedObjects⌿nested⌿nested2"), TypeDef(Reference("⌿definitions⌿NestedObjects⌿nested⌿nested2"),
Seq(
Field(Reference("⌿definitions⌿NestedObjects⌿nested⌿nested2⌿nested3"), Opt(TypeDef(Reference("⌿definitions⌿NestedObjects⌿nested⌿nested2⌿nested3"),
Seq(
Field(Reference("⌿definitions⌿NestedObjects⌿nested⌿nested2⌿nested3⌿bottom"), Opt(Str(None, TypeMeta(None, List("maxLength(30)", "minLength(3)"))), TypeMeta(None, List())))
), TypeMeta(Some("Named types: 1"), List())), TypeMeta(None, List())))
), TypeMeta(Some("Named types: 1"), List())))
), TypeMeta(Some("Named types: 1"), List())), TypeMeta(None, List())))
), TypeMeta(Some("Named types: 2"), List()))
)
def parameters = Map[ParameterRef, Parameter](
)
def basePath: String = "/api"
def discriminators: DiscriminatorLookupTable = Map[Reference, Reference](
)
def securityDefinitions: SecurityDefinitionsTable = Map[String, Security.Definition](
)
def stateTransitions: StateTransitionsTable = Map[State, Map[State, TransitionProperties]]()
def calls: Seq[ApiCall] = Seq()
def packageName: Option[String] = None
def model = new StrictModel(calls, types, parameters, discriminators, basePath, packageName, stateTransitions, securityDefinitions)
}
|
zalando/play-swagger
|
api-first-core/src/test/scala/model/resources.nested_objects_yaml.scala
|
Scala
|
mit
| 2,416
|
package mesosphere.marathon
package core.task.termination
/**
* Enumeration for reasons why a task has been killed.
*
 * This trait is deliberately not sealed, as components built on top of the
 * core may need to define additional kill reasons.
*/
trait KillReason
object KillReason {
/** The task is killed because of an incoming http request */
case object KillingTasksViaApi extends KillReason
/** The task is killed because it didn't turn running within a given time frame */
case object Overdue extends KillReason
/** The task is killed because it is unknown */
case object Unknown extends KillReason
/** The task is killed because the instance owning this task is associated with a different taskId */
case object NotInSync extends KillReason
/** The task is killed because it exceeded the maximum number of consecutive health check failures */
case object FailedHealthChecks extends KillReason
}
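// Sketch of the extension point left open by not sealing the trait: code built
// on top of the core can add reasons of its own and still match on the
// standard ones. `PreemptedByScheduler` is hypothetical, not part of Marathon.
object KillReasonSketch {
  case object PreemptedByScheduler extends KillReason

  def describe(reason: KillReason): String = reason match {
    case KillReason.Overdue   => "task did not become running in time"
    case PreemptedByScheduler => "task was preempted"
    case other                => s"killed: $other" // unsealed trait => keep a fallback
  }
}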
|
mesosphere/marathon
|
src/main/scala/mesosphere/marathon/core/task/termination/KillReason.scala
|
Scala
|
apache-2.0
| 924
|
package io.buoyant.linkerd.protocol.h2
import com.twitter.finagle.{Service, ServiceFactory, SimpleFilter, Stack, Stackable}
import com.twitter.finagle.buoyant.h2.{Request, Response}
object DupRequest {
val role = Stack.Role("DupRequest")
object filter extends SimpleFilter[Request, Response] {
def apply(req: Request, service: Service[Request, Response]) = service(req.dup())
}
val module: Stackable[ServiceFactory[Request, Response]] =
new Stack.Module0[ServiceFactory[Request, Response]] {
val role = DupRequest.role
val description = "Provides the rest of the subsequent stack with a duplicate request"
def make(next: ServiceFactory[Request, Response]) = filter.andThen(next)
}
}
|
hhtpcd/linkerd
|
router/h2/src/main/scala/io/buoyant/router/h2/DupRequest.scala
|
Scala
|
apache-2.0
| 725
|
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.effect
import slamdata.Predef._
import quasar.fp._, ski._
import org.scalacheck.Arbitrary
import scalaz._, Scalaz._
abstract class KeyValueStoreSpec[K: Arbitrary: Equal, V: Arbitrary: Equal: Show] extends quasar.Qspec {
type S[A] = KeyValueStore[K, V, A]
def eval[A](program: Free[S, A]): A
val ops = KeyValueStore.Ops[K, V, S]
"KeyValueStore" should {
"retrieve an entry" >> prop { (key: K, value: V) =>
eval(
ops.put(key, value) *> ops.get(key).run
) must_= value.some
}
"delete an entry" >> prop { (key: K, value: V) =>
eval(
ops.put(key, value) *> ops.delete(key) *> ops.get(key).run
) must_= None
}
"modify an entry" >> prop { (key: K, value: V, newValue: V) =>
eval(
ops.put(key, value) *> ops.modify(key, κ(newValue)) *> ops.get(key).run
) must_= newValue.some
}
"move a key" >> prop { (key: K, value: V, newKey: K) =>
eval(
ops.put(key, value) *> ops.move(key, newKey) *> ops.get(newKey).run
) must_= value.some
}
"overwrite a key" >> prop { (key: K, value: V, newValue: V) =>
eval(
ops.put(key, value) *> ops.put(key, newValue) *> ops.get(key).run
) must_= newValue.some
}
"retrieve all keys" >> prop { (key: K, value: V, otherKey: K, otherValue: V) =>
key ≠ otherKey ==> {
eval(
ops.put(key, value) *> ops.put(otherKey, otherValue) *> ops.keys
) must contain(key, otherKey)
}
}
}
}
object DefaultEmptyImpl extends KeyValueStoreSpec[Int, String] {
def eval[A](program: Free[S, A]) =
KeyValueStore.impl.default[Int, String].flatMap(program foldMap _).unsafePerformSync
}
|
jedesah/Quasar
|
effect/src/test/scala/quasar/effect/KeyValueStoreSpec.scala
|
Scala
|
apache-2.0
| 2,312
|
package paperdoll.scalaz
import shapeless.{ :+:, CNil, Coproduct }
import scalaz.{ Monoid, Writer }
import paperdoll.core.effect.{ Effects, Arr, GenericBind, GenericHandler }
import paperdoll.core.effect.Effects.sendU
import paperdoll.core.layer.Layers
import scalaz.syntax.monad._
import scalaz.syntax.monoid._
import scala.collection.generic.CanBuildFrom
import scalaz.MonadTell
import paperdoll.core.effect.GenericTranslator
import paperdoll.core.layer.Layer
import paperdoll.core.effect.GenericSingleTranslator
object WriterLayer {
def sendWriter[W, A](writer: Writer[W, A]): Effects.One[Writer_[W], A] =
sendU(writer)
def sendTell[W](w: W): Effects.One[Writer_[W], Unit] =
    sendWriter(Writer(w, ()))
/** Run the writer effect, producing a collection of all the written values
*/
def handleWriterCollection[W, CC <: TraversableOnce[W]](implicit cbf: CanBuildFrom[CC, W, CC]): GenericHandler[Writer_[W]] {
type O[X] = (CC, X)
} = new GenericBind[Writer_[W]] {
override type O[X] = (CC, X)
override def pure[A](a: A) = (cbf().result, a)
override def bind[V, RR <: Coproduct, RL <: Layers[RR], A](writer: Writer[W, V], arr: Arr[RR, RL, V, (CC, A)]) = {
val (log, result) = writer.run
arr(result) map { la ⇒ (cbf().+=(log).++=(la._1).result, la._2) }
}
}
/** Run the writer effect, merging all the written values.
* Notice how we can have multiple interpreters for the same effect,
* as we've decoupled the declaration of an effect from its implementation.
*/
def handleWriterMonoid[W](implicit monoid: Monoid[W]): GenericHandler[Writer_[W]] {
type O[X] = (W, X)
} = new GenericBind[Writer_[W]] {
override type O[X] = (W, X)
override def pure[A](a: A) = (monoid.zero, a)
override def bind[V, RR <: Coproduct, RL <: Layers[RR], A](writer: Writer[W, V], arr: Arr[RR, RL, V, O[A]]) = {
val (log, result) = writer.run
arr(result) map { la ⇒ (log |+| la._1, la._2) }
}
}
def translateWriter[F[_], W](implicit mt: MonadTell[F, W]): GenericTranslator[Writer_[W]] {
type OR = Layer.Aux[F] :+: CNil
type OL = Layers.One[Layer.Aux[F]]
} =
new GenericSingleTranslator[Writer_[W]] {
override type O = Layer.Aux[F]
override def handle[V](eff: Writer[W, V]) = {
val (w, v) = eff.run
sendU(mt.tell(w)) map { _ ⇒ v }
}
}
}
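// Usage sketch (illustrative, with types elided): the same written program can
// be discharged by either handler above, since declaring the Writer_ effect is
// decoupled from choosing its interpreter.
//   val prog = WriterLayer.sendTell("a").flatMap(_ => WriterLayer.sendTell("b"))
//   // handleWriterCollection[String, List[String]] would yield (List("a", "b"), ())
//   // handleWriterMonoid[String]                   would yield ("ab", ())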
|
m50d/paperdoll
|
scalaz/src/main/scala/paperdoll/scalaz/WriterLayer.scala
|
Scala
|
apache-2.0
| 2,438
|
package services
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.mutable
import scala.util.{Failure, Success, Try}
case class ProcessedWork(util: List[Double],
waits: List[Int],
residual: IndexedSeq[Double],
totalWait: Double,
excessWait: Double)
case class Cost(paxPenalty: Double, slaPenalty: Double, staffPenalty: Double, churnPenalty: Int, totalPenalty: Double)
case class OptimiserConfig(sla: Int, processors: WorkloadProcessorsLike)
object OptimiserWithFlexibleProcessors {
val log: Logger = LoggerFactory.getLogger(getClass)
val weightSla = 10
val weightChurn = 50
val weightPax = 0.05
val weightStaff = 3
val blockSize = 5
val targetWidth = 60
val rollingBuffer = 120
def crunch(workloads: Iterable[Double],
minDesks: Iterable[Int],
maxDesks: Iterable[Int],
config: OptimiserConfig): Try[OptimizerCrunchResult] = {
val indexedWork = workloads.toIndexedSeq
val indexedMinDesks = minDesks.toIndexedSeq
val bestMaxDesks = if (workloads.size >= 60) {
val fairMaxDesks = rollingFairXmax(indexedWork, indexedMinDesks, blockSize, (0.75 * config.sla).round.toInt, targetWidth, rollingBuffer, config.processors)
fairMaxDesks.zip(maxDesks).map { case (fair, orig) => List(fair, orig).min }
} else maxDesks.toIndexedSeq
if (bestMaxDesks.exists(_ < 0)) log.warn(s"Max desks contains some negative numbers")
for {
desks <- tryOptimiseWin(indexedWork, indexedMinDesks, bestMaxDesks, config.sla, weightChurn, weightPax, weightStaff, weightSla, config.processors)
processedWork <- tryProcessWork(indexedWork, desks, config.sla, IndexedSeq(), config.processors)
} yield OptimizerCrunchResult(desks.toIndexedSeq, processedWork.waits)
}
def runSimulationOfWork(workloads: Iterable[Double], desks: Iterable[Int], config: OptimiserConfig): Try[Seq[Int]] =
tryProcessWork(workloads.toIndexedSeq, desks.toIndexedSeq, config.sla, IndexedSeq(), config.processors).map(_.waits)
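  // Scales each value in `i` by the slope between the first two (x, y) sample
  // points; a crude linear-approximation helper.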
def approx(x: IndexedSeq[Int], y: IndexedSeq[Int], i: Seq[Double]): List[Double] = {
val diffX = x(1) - x.head
val diffY = y(1) - y.head
val ratio = diffY.toDouble / diffX
i.map(_ * ratio).toList
}
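  // Allocate desks block-by-block from left to right: start from a naive guess
  // (inherited backlog plus the block's work, divided by per-desk capacity),
  // step the guess down while the block would still clear its work and
  // backlog, clamp to the block's min/max bounds, and carry the residual
  // backlog into the next block.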
def leftwardDesks(work: IndexedSeq[Double],
xmin: IndexedSeq[Int],
xmax: IndexedSeq[Int],
blockSize: Int,
backlog: Double,
processors: WorkloadProcessorsLike): IndexedSeq[Int] = {
val workWithMinMaxDesks: Iterator[(IndexedSeq[Double], (IndexedSeq[Int], IndexedSeq[Int]))] = work.grouped(blockSize).zip(xmin.grouped(blockSize).zip(xmax.grouped(blockSize)))
workWithMinMaxDesks.foldLeft((List[Int](), backlog)) {
case ((desks, bl), (workBlock, (xminBlock, xmaxBlock))) =>
val capacity = capacityWithMinimumLimit(processors, 1)
var guess = List(((bl + workBlock.sum) / (blockSize * capacity)).round.toInt, xmaxBlock.head).min
while (cumulativeSum(workBlock.map(_ - processors.capacityForServers(guess))).min < 0 - bl && guess > xminBlock.head) {
guess = guess - 1
}
guess = List(guess, xminBlock.head).max
val guessCapacity = processors.capacityForServers(guess)
val newBacklog = (0 until blockSize).foldLeft(bl) {
case (accBl, i) => List(accBl + workBlock(i) - guessCapacity, 0).max
}
(desks ++ List.fill(blockSize)(guess), newBacklog)
}._1.toIndexedSeq
}
def capacityWithMinimumLimit(processors: WorkloadProcessorsLike, minimumLimit: Int): Int =
processors.capacityForServers(minimumLimit) match {
      case 0 => 1
      case c => c
}
def tryProcessWork(work: IndexedSeq[Double],
capacity: IndexedSeq[Int],
sla: Int,
qstart: IndexedSeq[Double],
processors: WorkloadProcessorsLike): Try[ProcessedWork] = {
if (capacity.length != work.length) {
Failure(new Exception(s"capacity & work don't match: ${capacity.length} vs ${work.length}"))
} else Try {
var q = qstart
var totalWait: Double = 0d
var excessWait: Double = 0d
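      // Minute-by-minute queue simulation: new work joins the head of the
      // queue, each minute's capacity drains the oldest work first, and we
      // accumulate the total waiting time plus the portion incurred beyond
      // the SLA threshold.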
val (finalWait, finalUtil) = work.indices.foldLeft((List[Int](), List[Double]())) {
case ((wait, util), minute) =>
q = work(minute) +: q
val totalResourceForMinute = processors.capacityForServers(capacity(minute))
var resource: Double = totalResourceForMinute.toDouble
var age = q.size
while (age > 0) {
val nextWorkToProcess = q(age - 1)
val surplus = resource - nextWorkToProcess
if (surplus >= 0) {
totalWait = totalWait + nextWorkToProcess * (age - 1)
if (age - 1 >= sla) excessWait = excessWait + nextWorkToProcess * (age - 1)
q = q.dropRight(1)
resource = surplus
age = age - 1
} else {
totalWait = totalWait + resource * (age - 1)
if (age - 1 >= sla) excessWait = excessWait + resource * (age - 1)
q = q.dropRight(1) :+ (nextWorkToProcess - resource)
resource = 0
age = 0
}
}
val nextUtil = if (totalResourceForMinute != 0) 1 - (resource / totalResourceForMinute) else 0
          (q.size :: wait, nextUtil :: util)
      }
val waitReversed = finalWait.reverse
val utilReversed = finalUtil.reverse
ProcessedWork(utilReversed, waitReversed, q, totalWait, excessWait)
}
}
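  // Rolling "fair" maximum: for each targetWidth-wide stretch (padded by
  // rollingBuffer on each side), find the lowest uniform desk cap that avoids
  // any excess (over-SLA) wait, carrying the queue backlog between stretches.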
def rollingFairXmax(work: IndexedSeq[Double],
xmin: IndexedSeq[Int],
blockSize: Int,
sla: Int,
targetWidth: Int,
rollingBuffer: Int,
processors: WorkloadProcessorsLike): IndexedSeq[Int] = {
val workWithOverrun = work ++ List.fill(targetWidth)(0d)
val xminWithOverrun = xmin ++ List.fill(targetWidth)(xmin.takeRight(1).head)
var backlog = 0d
val result = (workWithOverrun.indices by targetWidth).foldLeft(IndexedSeq[Int]()) { case (acc, startSlot) =>
val winStart: Int = List(startSlot - rollingBuffer, 0).max
val i = startSlot + targetWidth + rollingBuffer
val i1 = workWithOverrun.size
val winStop: Int = List(i, i1).min
val winWork = workWithOverrun.slice(winStart, winStop)
val winXmin = xminWithOverrun.slice(winStart, winStop)
if (winStart == 0) backlog = 0
val runAv = runningAverage(winWork, List(blockSize, sla).min, processors)
val guessMax: Int = runAv.max.ceil.toInt
val capacity = capacityWithMinimumLimit(processors, 1)
val lowerLimit = List(winXmin.max, (winWork.sum / (winWork.size * capacity)).ceil.toInt).max
var winXmax = guessMax
var hasExcessWait = false
var lowerLimitReached = false
if (guessMax <= lowerLimit)
winXmax = lowerLimit
else {
do {
val trialDesks = leftwardDesks(winWork, winXmin, IndexedSeq.fill(winXmin.size)(winXmax), blockSize, backlog, processors)
val trialProcessExcessWait = tryProcessWork(winWork, trialDesks, sla, IndexedSeq(0), processors) match {
case Success(pw) => pw.excessWait
case Failure(t) => throw t
}
if (trialProcessExcessWait > 0) {
winXmax = List(winXmax + 1, guessMax).min
hasExcessWait = true
}
if (winXmax <= lowerLimit) lowerLimitReached = true
if (!lowerLimitReached && !hasExcessWait) winXmax = winXmax - 1
} while (!lowerLimitReached && !hasExcessWait)
}
val newXmax = acc ++ List.fill(targetWidth)(winXmax)
0 until targetWidth foreach { j =>
backlog = List(backlog + winWork(j) - newXmax(winStart), 0).max
}
newXmax
}.take(work.size)
result
}
def runningAverage(work: Iterable[Double], windowLength: Int, processors: WorkloadProcessorsLike): Seq[Int] = {
val slidingAverages = work
.sliding(windowLength)
.map(_.sum / windowLength).toList
(List.fill(windowLength - 1)(slidingAverages.head) ::: slidingAverages).map(processors.forWorkload)
}
def cumulativeSum(values: Iterable[Double]): Iterable[Double] = values
.foldLeft(List[Double]()) {
case (Nil, element) => List(element)
case (head :: tail, element) => element + head :: head :: tail
}.reverse
def blockMean(values: Iterable[Int], blockWidth: Int): Iterable[Int] = values
.grouped(blockWidth)
.flatMap(nos => List.fill(blockWidth)(nos.sum / blockWidth))
.toIterable
def seqR(from: Int, by: Int, length: Int): IndexedSeq[Int] = 0 to length map (i => (i + from) * by)
def totalDesksOpeningFromClosed(churnStart: Int, desks: IndexedSeq[Int]): Int = {
val desksPrefixed = churnStart +: desks
(1 until desksPrefixed.length)
.foldLeft(0) {
case (acc, idx) if desksPrefixed(idx - 1) < desksPrefixed(idx) => acc + (desksPrefixed(idx) - desksPrefixed(idx - 1))
case (acc, _) => acc
}
}
def cost(work: IndexedSeq[Double],
sla: Int,
weightChurn: Double,
weightPax: Double,
weightStaff: Double,
weightSla: Double,
qStart: IndexedSeq[Double],
previousDesksOpen: Int,
processors: WorkloadProcessorsLike)
(capacity: IndexedSeq[Int]): Cost = {
var simRes = tryProcessWork(work, capacity, sla, qStart, processors) match {
case Success(pw) => pw
case Failure(t) => throw t
}
var finalCapacity = capacity.takeRight(1).head
val backlog = simRes.residual.reverse
val totalBacklog = backlog.sum
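    // Any work still queued at the end of the window is extrapolated: assume
    // it drains at the final minute's capacity, estimate each slice's mean
    // wait, and add the implied (and over-SLA) waiting time to the penalties.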
if (backlog.nonEmpty) {
finalCapacity = List(finalCapacity, 1).max
val cumBacklog = cumulativeSum(backlog)
val cumCapacity = seqR(0, finalCapacity, (totalBacklog / finalCapacity).ceil.toInt)
val overrunSlots = cumCapacity.indices
val backlogBoundaries = approx(cumCapacity, overrunSlots, cumBacklog.toList)
val startSlots = 0d :: backlogBoundaries.dropRight(1).map(_.floor)
val endSlots = backlogBoundaries.map(_.floor)
val alreadyWaited = (1 to backlog.length).reverse
val meanWaits = startSlots
.zip(endSlots)
.map { case (x, y) => (x + y) / 2 }
.zip(alreadyWaited)
.map { case (x, y) => x + y }
val excessFilter = meanWaits.map(_ > sla)
val newTotalWait = simRes.totalWait + backlog.zip(meanWaits).map { case (x, y) => x * y }.sum
val newExcessWait = simRes.excessWait + excessFilter
.zip(backlog.zip(meanWaits))
.map {
case (true, (x, y)) => x * y
case _ => 0
}.sum
simRes = simRes.copy(totalWait = newTotalWait, excessWait = newExcessWait)
}
val paxPenalty = simRes.totalWait
val slaPenalty = simRes.excessWait
val staffPenalty = simRes.util.zip(capacity).map { case (u, c) => (1 - u) * c.toDouble }.sum
val churnPenalty = totalDesksOpeningFromClosed(previousDesksOpen, capacity :+ finalCapacity)
val totalPenalty = (weightPax * paxPenalty) +
(weightStaff * staffPenalty) +
(weightChurn * churnPenalty.toDouble) +
(weightSla * slaPenalty)
Cost(paxPenalty.toInt, slaPenalty.toInt, staffPenalty, churnPenalty, totalPenalty)
}
def neighbouringPoints(x0: Int, xmin: Int, xmax: Int): IndexedSeq[Int] = (xmin to xmax)
.filterNot(_ == x0)
.sortBy(x => (x - x0).abs)
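  // Branch-and-bound style local search: sweep a cursor over the minutes,
  // trying neighbouring desk counts closest-first; when a trial exceeds the
  // incumbent's penalty by more than concavityLimit, prune the remaining
  // candidates on that side, otherwise keep the best solution found so far.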
def branchBound(startingX: IndexedSeq[Int],
cost: IndexedSeq[Int] => Cost,
xmin: IndexedSeq[Int],
xmax: IndexedSeq[Int],
concavityLimit: Int): Iterable[Int] = {
val desks = startingX.to[mutable.IndexedSeq]
var incumbent = startingX
val minutes = desks.length
var bestSoFar = cost(incumbent.toIndexedSeq).totalPenalty
val candidates = (0 until minutes)
.map(i => neighbouringPoints(startingX(i), xmin(i), xmax(i)))
.to[mutable.IndexedSeq]
var cursor = minutes - 1
while (cursor >= 0) {
while (candidates(cursor).nonEmpty) {
desks(cursor) = candidates(cursor).head
candidates(cursor) = candidates(cursor).drop(1)
val trialPenalty = cost(desks.toIndexedSeq).totalPenalty
if (trialPenalty > bestSoFar + concavityLimit) {
if (desks(cursor) > incumbent(cursor)) {
candidates(cursor) = candidates(cursor).filter(_ < desks(cursor))
} else {
candidates(cursor) = candidates(cursor).filter(_ > desks(cursor))
}
} else {
if (trialPenalty < bestSoFar) {
incumbent = desks.toIndexedSeq
bestSoFar = trialPenalty
}
if (cursor < minutes - 1) cursor = cursor + 1
}
}
candidates(cursor) = neighbouringPoints(incumbent(cursor), xmin(cursor), xmax(cursor))
desks(cursor) = incumbent(cursor)
cursor = cursor - 1
}
desks
}
def tryOptimiseWin(work: IndexedSeq[Double],
minDesks: IndexedSeq[Int],
maxDesks: IndexedSeq[Int],
sla: Int,
weightChurn: Double,
weightPax: Double,
weightStaff: Double,
weightSla: Double,
processors: WorkloadProcessorsLike): Try[IndexedSeq[Int]] = {
if (work.length != minDesks.length) {
Failure(new Exception(s"work & minDesks are not equal length: ${work.length} vs ${minDesks.length}"))
} else if (work.length != maxDesks.length) {
Failure(new Exception(s"work & maxDesks are not equal length: ${work.length} vs ${maxDesks.length}"))
} else Try {
val blockWidth = 15
val concavityLimit = 30
val winStep = 60
val smoothingWidth = blockWidth
val winWidth = List(90, work.length).min
var winStart = 0
var winStop = winWidth
var qStart = IndexedSeq(0d)
var lastDesksOpen = 0
val desks = blockMean(runningAverage(work, smoothingWidth, processors), blockWidth)
.map(_.ceil.toInt)
.zip(maxDesks)
.map {
case (d, max) => List(d, max).min
}
.zip(minDesks)
.map {
case (d, min) => List(d, min).max
}.to[mutable.IndexedSeq]
def myCost(costWork: IndexedSeq[Double], costQStart: IndexedSeq[Double], previousDesksOpen: Int)
(capacity: IndexedSeq[Int]): Cost =
cost(costWork, sla, weightChurn, weightPax, weightStaff, weightSla, costQStart, previousDesksOpen, processors)(capacity.flatMap(c => IndexedSeq.fill(blockWidth)(c)))
var shouldStop = false
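      // Optimise one winWidth-minute window at blockWidth granularity, then
      // slide forward by winStep minutes, carrying the residual queue and the
      // last desk count into the next window.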
do {
val currentWork = work.slice(winStart, winStop)
val blockGuess = desks.slice(winStart, winStop).grouped(blockWidth).map(_.head).toIndexedSeq
val xminCondensed = minDesks.slice(winStart, winStop).grouped(blockWidth).map(_.head).toIndexedSeq
val xmaxCondensed = maxDesks.slice(winStart, winStop).grouped(blockWidth).map(_.head).toIndexedSeq
val windowIndices = winStart until winStop
branchBound(blockGuess, myCost(currentWork, qStart, lastDesksOpen), xminCondensed, xmaxCondensed, concavityLimit)
.flatMap(o => List.fill(blockWidth)(o))
.zip(windowIndices)
.foreach {
case (d, i) => desks(i) = d
}
shouldStop = winStop == work.length
if (!shouldStop) {
val stop = winStart + winStep
val workToProcess = work.slice(winStart, stop)
val desksToProcess = desks.slice(winStart, stop)
qStart = tryProcessWork(workToProcess.toIndexedSeq, desksToProcess.toIndexedSeq, sla, qStart.toIndexedSeq, processors) match {
case Success(pw) => pw.residual
case Failure(t) => throw t
}
lastDesksOpen = desks(stop)
winStart = winStart + winStep
winStop = List(winStop + winStep, work.length).min
}
} while (!shouldStop)
desks
}
}
}
|
UKHomeOffice/drt-scalajs-spa-exploration
|
server/src/main/scala/services/OptimiserWithFlexibleProcessors.scala
|
Scala
|
apache-2.0
| 16,259
|
class Bar {
def foo(baz: Boolean) { }
def foo(o: Object) { }
def other {
foo(/* line: 2 */baz = true) // baz is red, code compiles
}
}
|
LPTK/intellij-scala
|
testdata/resolve2/bug3/SCL4697.scala
|
Scala
|
apache-2.0
| 147
|
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.execution.Ack.Continue
import monix.execution.exceptions.UpstreamTimeoutException
import monix.reactive.{Observable, Observer}
import monix.execution.exceptions.DummyException
import monix.reactive.subjects.PublishSubject
import scala.concurrent.TimeoutException
import scala.concurrent.duration._
object TimeoutOnSlowUpstreamSuite extends BaseOperatorSuite {
def createObservable(sourceCount: Int) = Some {
val source = Observable.now(sourceCount.toLong).delayOnComplete(1.hour)
val o = source.timeoutOnSlowUpstream(1.second).onErrorHandleWith {
case UpstreamTimeoutException(_) =>
Observable.now(20L)
}
Sample(o, 2, sourceCount + 20, Duration.Zero, 1.second)
}
def observableInError(sourceCount: Int, ex: Throwable) = {
val ex = DummyException("dummy")
val source = Observable.now(sourceCount.toLong).endWithError(ex)
val o = source.timeoutOnSlowUpstream(1.second)
Some(Sample(o, 1, 1, Duration.Zero, 1.second))
}
def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) =
None
override def cancelableObservables() = {
val o = Observable.now(1L).delayOnComplete(1.hour).timeoutOnSlowUpstream(1.second)
Seq(Sample(o, 1, 1, 0.seconds, 0.seconds))
}
test("should emit timeout after time passes") { implicit s =>
val p = PublishSubject[Int]()
var received = 0
var errorThrown: Throwable = null
p.timeoutOnSlowUpstream(10.seconds)
.subscribe(new Observer.Sync[Int] {
def onComplete() = ()
def onError(ex: Throwable) = {
errorThrown = ex
}
def onNext(elem: Int) = {
received += elem
Continue
}
})
p.onNext(1)
assertEquals(received, 1)
s.tick(9.seconds)
p.onNext(2)
assertEquals(received, 3)
s.tick(9.seconds)
assertEquals(received, 3)
assertEquals(errorThrown, null)
s.tick(1.second)
assert(
errorThrown != null && errorThrown.isInstanceOf[TimeoutException],
"errorThrown should be a TimeoutException")
}
}
|
alexandru/monifu
|
monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/TimeoutOnSlowUpstreamSuite.scala
|
Scala
|
apache-2.0
| 2,777
|
package gui
import pentaminoes._
import Game.grid
import scala.swing._
import scala.swing.event._
import java.awt.Color
import scala.swing.GridBagPanel._
import javax.swing.{ UIManager, ImageIcon }
import java.io.File
import javax.sound.sampled.AudioSystem
private object GameWindow extends SimpleSwingApplication {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName)
var Grid = Game.grid
//Variables defined
val gridSize = Grid.size
val blockSize = 50
val smallBlockSize = 25
  val gridDimension = new Dimension(gridSize * blockSize, gridSize * blockSize)
val nextGridSize = 5
val nextGridDimension = new Dimension(nextGridSize * smallBlockSize, nextGridSize * smallBlockSize)
val windowSize = new Dimension(1000, 900)
var mousePosx = 3
var mousePosy = 3
//ImageIcons as pictures
val verticalPic = new ImageIcon("Icons/flipVertical.png")
val horizontalPic = new ImageIcon("Icons/flipHorizontal.png")
val clockwisePic = new ImageIcon("Icons/rotateClockwise.png")
val counterclockwisePic = new ImageIcon("Icons/rotateCounterclockwise.png")
val backgroundPic = new ImageIcon("Icons/background.png")
val defaultFont = new Font("Castellar", 0, 30)
//Main game grid
val grid = new Display(gridSize, gridSize, Grid.colors, Grid.edges, blockSize)
//The next 2 pentaminoes displayed
val currentPentamino = new Display(nextGridSize, nextGridSize, Game.currentPentamino.toVector,
Game.currentPentamino.twoBooleanEdges, smallBlockSize)
val nextPentamino = new Display(nextGridSize, nextGridSize, Game.nextPentamino.toVector,
Game.nextPentamino.twoBooleanEdges, smallBlockSize)
//Text strings for the scoreboard in game
def scoreText = "Score: " + Game.score
def levelText = "Level: " + Game.level
def rowsText = "Rows: " + Game.rowsToNextLevel
//Update methods for game elements
def updateLabels() = {
score.text = scoreText
level.text = levelText
rows.text = rowsText
}
def updateHighscores() = {
val scores = Highscore.getHighscoreListAsString
for (i <- 0 until highscores.size) {
highscores(i).text = s"${i + 1}: ${scores(i)}"
highscores(i).foreground = Color.WHITE
}
}
def updateGrids() = {
grid.colors = Grid.colors
grid.edges = Grid.edges
currentPentamino.colors = Game.currentPentamino.toVector
currentPentamino.edges = Game.currentPentamino.twoBooleanEdges
nextPentamino.colors = Game.nextPentamino.toVector
nextPentamino.edges = Game.nextPentamino.twoBooleanEdges
}
  //Shows what the move would do if the next pentamino were placed at the current position
def showHypo() = {
def hypoGrid = Grid.hypotheticalAdd(Game.currentPentamino, mousePosx, mousePosy)
grid.colors = hypoGrid.colors
grid.edges = hypoGrid.edges
}
//Scoreboard created here
val score = new Label { text = scoreText; preferredSize = new Dimension(250, 45); font = defaultFont }
val level = new Label { text = levelText; preferredSize = new Dimension(200, 45); font = defaultFont }
val rows = new Label { text = rowsText; preferredSize = new Dimension(250, 45); font = defaultFont }
val scoreBoard = new FlowPanel {
contents += score
contents += level
contents += rows
}
  //Array to store highscores within labels
val highscores =
Array.fill[Label](Highscore.getHighscoreList.size)(new Label { font = defaultFont; foreground = Color.WHITE })
//Buttons created here
val flipHorizontally = new Button {
preferredSize = new Dimension(100, 100)
icon = horizontalPic
}
val flipVertically = new Button {
preferredSize = new Dimension(100, 100)
icon = verticalPic
}
val rotateClockwise = new Button {
preferredSize = new Dimension(100, 100)
icon = clockwisePic
}
val rotateCounterclockwise = new Button {
preferredSize = new Dimension(100, 100)
icon = counterclockwisePic
}
val playButton = new Button {
preferredSize = new Dimension(250, 50)
text = "Play"
font = defaultFont
}
val scoreButton = new Button {
preferredSize = new Dimension(250, 50)
text = "Hi-Scores"
font = defaultFont
}
val menuButton = new Button {
text = "Menu"
font = defaultFont
}
val quitButton = new Button {
preferredSize = new Dimension(250, 50)
text = "Quit"
font = defaultFont
}
val infoButton = new Button {
preferredSize = new Dimension(250,50)
text = "Help"
font = defaultFont
}
val backButton = new Button {
preferredSize = new Dimension(250,50)
text = "Back"
font = defaultFont
}
//The main game screen, lots of insets to align components
val gameScreen = new Screen {
focusable = true
c.gridx = 0
c.gridy = 0
c.gridwidth = 6
c.insets = new Insets(0, 0, 25, 0)
layout(scoreBoard) = c
c.gridx = 0
c.gridy = 1
c.gridwidth = 3
c.gridheight = 3
c.insets = new Insets(0, 0, 0, 25)
layout(grid) = c
c.gridwidth = 1
c.gridheight = 1
c.gridx = 3
c.gridy = 1
c.insets = new Insets(0, 100, 0, 0)
layout(flipHorizontally) = c
c.gridx = 5
c.gridy = 1
c.insets = new Insets(0, -100, 0, 0)
layout(flipVertically) = c
c.gridx = 3
c.gridy = 2
c.insets = new Insets(0, 0, 0, 0)
layout(rotateCounterclockwise) = c
c.gridx = 5
c.gridy = 2
c.insets = new Insets(0, 0, 0, 0)
layout(rotateClockwise) = c
c.gridx = 4
c.gridy = 2
c.insets = new Insets(25, -25, 0, 25)
layout(currentPentamino) = c
c.gridx = 4
c.gridy = 3
c.insets = new Insets(25, -50, 0, 0)
layout(nextPentamino) = c
}
//The main menu screen
val menuScreen = new Screen {
c.insets = new Insets(13, 0, 13, 0)
layout(playButton) = c
c.gridy = 2
layout(scoreButton) = c
c.gridy = 4
layout(infoButton) = c
c.gridy = 6
layout(quitButton) = c
}
//Highscore screen, labels under each other
val highscoreScreen = new Screen {
val scoreInfo = new Label { text = "Name, Score, Level, Rows"; font = defaultFont; foreground = Color.WHITE }
c.gridx = 0
c.gridy = 0
c.ipady = 25
layout(scoreInfo) = c
c.gridy += 1
for (score <- highscores) {
layout(score) = c
c.gridy += 1
}
layout(menuButton) = c
}
  //Instruction screen: instructions as text and a demonstration of creating a row
val infoScreen = new Screen {
val instructions = new TextArea(5,30) {
      text = "-The goal of the game is to create as many rows of at least 4 of the same color as possible.\\n" +
        "-Longer rows give more points, but are harder to make.\\n" +
        "-A row clears all pentaminoes that have blocks in it.\\n" +
        "-You can rotate and flip the pentaminoes with the buttons on screen, the mouse wheel or the WASD keys.\\n" +
        "-You can move the pentaminoes with your mouse or the arrow keys.\\n" +
        "-You can set music on/off with 'm' and sound effects with 'n'."
wordWrap = true
lineWrap = true
font = defaultFont
editable = false
opaque = false
foreground = Color.WHITE
}
val infoGridData = new Grid
infoGridData.add(Pentamino('f',2,3,3,1,1).flipVertical(), 2, 2)
infoGridData.add(Pentamino('p',1,1,2,2,1).flipVertical().rotateClockwise(), 4, 3)
val infoGrid0 = new Display(7,7,infoGridData.colors,infoGridData.edges,30)
infoGridData.add(Pentamino('l',3,3,2,2,3).rotateCounterClockwise(), 3,5)
val infoGrid1 = new Display(7,7,infoGridData.colors,infoGridData.edges,30)
infoGridData.remove(2,2)
infoGridData.remove(3,5)
val infoGrid2 = new Display(7,7,infoGridData.colors, infoGridData.edges, 30)
c.gridwidth = 5
c.gridx = 0
c.gridy = 0
layout(instructions) = c
c.gridwidth = 1
c.gridy += 1
c.gridx = 1
c.insets = new Insets(30,60,0,0)
layout(infoGrid0) = c
c.gridx = 2
layout(infoGrid1) = c
c.gridx = 3
layout(infoGrid2) = c
c.gridy += 1
c.gridx = 2
layout(backButton) = c
}
  //Resets the game and display, then shows the gameScreen
def newGame = {
Game.newGame
endGame.enabled = true
mousePosx = 3
mousePosy = 3
updateLabels()
updateGrids()
frame.contents = gameScreen
frame.repaint()
}
//Menu items to start a game and end the current game
val startGame = new MenuItem(Action("New game")(newGame))
val endGame = new MenuItem(Action("End game")(gameOver)) { enabled = false }
  //Called when the game ends: checks whether the score is high enough for the highscore list and, if so, asks for a name.
def gameOver: Unit = {
if (Highscore.isScoreEnough(Game.score, Game.level, Game.rows)) {
val popup = Dialog.showInput(gameScreen, "Your score is eligible for the Highscore list!", "Highscore!", Dialog.Message.Info, initial = "Insert name")
val name = popup.getOrElse("Anonymous").replace(' ', '_')
val newRank = Highscore.setNewScore(name, Game.score, Game.level, Game.rows)
frame.contents = highscoreScreen
updateHighscores()
highscores(newRank).foreground = Color.RED
Game.newGame
} else {
val popup = Dialog.showConfirmation(gameScreen, "Game over! Do you want to play again?", "Game over", Dialog.Options.YesNo)
if (popup == Dialog.Result.No) {
frame.contents = menuScreen
GameSounds.stopMusic()
}
else newGame
}
endGame.enabled = frame.contents(0) == gameScreen
frame.repaint()
}
//The MainFrame of the program
val frame: MainFrame = new MainFrame {
title = "Pentaminoes"
resizable = false
location = new Point(200, 50)
preferredSize = windowSize
menuBar = new MenuBar {
contents += new Menu("Game") {
contents += startGame
contents += endGame
}
}
contents = menuScreen
menuScreen.requestFocus
listenTo(grid.mouse.clicks, grid.mouse.moves, grid.mouse.wheel)
listenTo(gameScreen.mouse.moves, gameScreen.keys)
listenTo(flipHorizontally, flipVertically, rotateClockwise, rotateCounterclockwise)
listenTo(playButton, scoreButton, menuButton, infoButton, backButton, quitButton)
reactions += {
//Place the next pentamino at the mouse position
case MouseClicked(_, point, _, _, _) => {
Game.placePentamino(point.x / blockSize, point.y / blockSize)
updateGrids()
showHypo()
updateLabels()
frame.repaint()
if (!Game.gameOn) gameOver
}
//Show the next move at the current mouse position
case MouseMoved(component, point, _) => {
if (component == grid) {
mousePosx = point.x / blockSize
mousePosy = point.y / blockSize
showHypo()
} else updateGrids()
frame.repaint()
gameScreen.requestFocus
}
//Reactions of all the buttons in the displays
case ButtonClicked(source) => {
if (source == flipHorizontally) Game.currentPentamino.flipHorizontal()
else if (source == flipVertically) Game.currentPentamino.flipVertical()
else if (source == rotateClockwise) Game.currentPentamino.rotateClockwise()
else if (source == rotateCounterclockwise) Game.currentPentamino.rotateCounterClockwise()
else if (source == playButton) newGame
else if (source == scoreButton) { this.contents = highscoreScreen; updateHighscores() }
else if (source == menuButton) this.contents = menuScreen
else if (source == infoButton) this.contents = infoScreen
else if (source == backButton) this.contents = menuScreen
else if (source == quitButton) dispose()
endGame.enabled = this.contents(0) == gameScreen
updateGrids()
frame.repaint()
gameScreen.requestFocus
}
      //Keyboard controls: move, rotate and place pentaminoes, and toggle music/sound effects.
case KeyPressed(_, key, _, _) => {
if (key == Key.A) Game.currentPentamino.rotateCounterClockwise()
else if (key == Key.D) Game.currentPentamino.rotateClockwise()
else if (key == Key.W) Game.currentPentamino.flipVertical()
else if (key == Key.S) Game.currentPentamino.flipHorizontal()
else if (key == Key.Up) mousePosy = Math.max(mousePosy - 1, 0)
else if (key == Key.Down) mousePosy = Math.min(mousePosy + 1, gridSize - 1)
else if (key == Key.Right) mousePosx = Math.min(mousePosx + 1, gridSize - 1)
else if (key == Key.Left) mousePosx = Math.max(mousePosx - 1, 0)
else if (key == Key.M) GameSounds.muteMusic()
else if (key == Key.N) GameSounds.muteEffects()
else if (key == Key.Enter) {
Game.placePentamino(mousePosx, mousePosy)
mousePosx = 3
mousePosy = 3
}
updateGrids()
showHypo()
updateLabels()
frame.repaint()
if (!Game.gameOn) gameOver
}
//Rotate pentaminoes with the mouse wheel
case MouseWheelMoved(_,_,_,rot) => {
if (rot > 0) Game.currentPentamino.rotateClockwise()
else if (rot < 0) Game.currentPentamino.rotateCounterClockwise()
updateGrids()
showHypo()
frame.repaint()
}
}
}
//This is called when the program is run.
def top: MainFrame = frame
}
|
Gugguru/Pentaminoes
|
src/gui/GameWindow.scala
|
Scala
|
artistic-2.0
| 13,431
|
package io.getquill.context.async.postgres
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.{ global => ec }
import scala.concurrent.Future
import scala.concurrent.duration.Duration
import io.getquill.context.sql.DepartmentsSpec
class DepartmentsPostgresAsyncSpec extends DepartmentsSpec {
val context = testContext
import testContext._
def await[T](future: Future[T]) = Await.result(future, Duration.Inf)
override def beforeAll =
await {
testContext.transaction { implicit ec =>
for {
_ <- testContext.run(query[Department].delete)
_ <- testContext.run(query[Employee].delete)
_ <- testContext.run(query[Task].delete)
_ <- testContext.run(liftQuery(departmentEntries).foreach(e => departmentInsert(e)))
_ <- testContext.run(liftQuery(employeeEntries).foreach(e => employeeInsert(e)))
_ <- testContext.run(liftQuery(taskEntries).foreach(e => taskInsert(e)))
} yield {}
}
}
"Example 8 - nested naive" in {
await(testContext.run(`Example 8 expertise naive`(lift(`Example 8 param`)))) mustEqual `Example 8 expected result`
}
"Example 9 - nested db" in {
await(testContext.run(`Example 9 expertise`(lift(`Example 9 param`)))) mustEqual `Example 9 expected result`
}
}
|
jcranky/quill
|
quill-async-postgres/src/test/scala/io/getquill/context/async/postgres/DepartmentsPostgresAsyncSpec.scala
|
Scala
|
apache-2.0
| 1,325
|
package com.teamisotope.techexpansion
import com.teamisotope.techexpansion.block.TEBlocks
import com.teamisotope.techexpansion.item.TEItems
import net.minecraftforge.fml.common.{Mod, SidedProxy}
import net.minecraftforge.fml.common.Mod._
import net.minecraftforge.fml.common.event._
import com.teamisotope.techexpansion.util._
import com.teamisotope.techexpansion.proxy._
import com.teamisotope.techexpansion.tab._
@Mod(modid=Ref.MODID,version=Ref.VERSION,modLanguage="scala",name=Ref.NAME,acceptedMinecraftVersions="1.10.2",dependencies="required-after:cofhcore@4.1.12.17")
object TechExpansion {
@SidedProxy(clientSide = Ref.CLIENT_PROXY, serverSide = Ref.SERVER_PROXY)
var proxy: CommonProxy = null
/* Tabs have temporary icons, until I have created the actual items that would represent them */
val tab_misc: TabTEMisc = new TabTEMisc()
val tab_equipment: TabTEEquipment = new TabTEEquipment()
val tab_blocks: TabTEBlocks = new TabTEBlocks()
val tab_resources: TabTEResources = new TabTEResources()
@EventHandler
def preInit(event: FMLPreInitializationEvent): Unit = {
proxy.preInit(event)
}
@EventHandler
def init(event: FMLInitializationEvent): Unit = {
proxy.init(event)
}
@EventHandler
def postInit(event: FMLPostInitializationEvent): Unit = {
proxy.postInit(event)
}
}
|
collaborationmods/TechExpansion
|
src/main/scala/com/teamisotope/techexpansion/TechExpansion.scala
|
Scala
|
gpl-3.0
| 1,329
|
import scala.annotation.tailrec
import scala.quoted._
import scala.quoted.autolift
object Macro {
inline def unrolledForeach(inline unrollSize: Int, seq: Array[Int])(f: => Int => Unit): Unit = // or f: Int => Unit
${unrolledForeachImpl('unrollSize, 'seq, 'f)}
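  // Hypothetical call site (not part of this file): the body is inlined
  // unrollSize times per iteration of the generated while loop, e.g.
  //   Macro.unrolledForeach(2, Array(1, 2, 3, 4))(i => println(i))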
private def unrolledForeachImpl(unrollSizeExpr: Expr[Int], seq: Expr[Array[Int]], f: Expr[Int => Unit]) (using QuoteContext): Expr[Unit] =
unrolledForeachImpl(unrollSizeExpr.unliftOrError, seq, f)
private def unrolledForeachImpl(unrollSize: Int, seq: Expr[Array[Int]], f: Expr[Int => Unit])(using QuoteContext): Expr[Unit] = '{
val size = $seq.length
assert(size % (${unrollSize}) == 0) // for simplicity of the implementation
var i = 0
while (i < size) {
println("<log> start loop")
${
@tailrec def loop(j: Int, acc: Expr[Unit]): Expr[Unit] =
if (j >= 0) loop(j - 1, '{ ${Expr.betaReduce(f)('{$seq(i + ${j})})}; $acc })
else acc
loop(unrollSize - 1, '{})
}
i += ${unrollSize}
}
}
}
|
som-snytt/dotty
|
tests/run-macros/quote-unrolled-foreach/quoted_1.scala
|
Scala
|
apache-2.0
| 1,039
|
/*
* The Bluejelly project, Copyright 2012.
*
* This source code is distributed under the terms of
* the BSD license, see the LICENSE file for details.
*/
package bluejelly.l4.test
import bluejelly.l4.Var
import bluejelly.l4.Expr
import bluejelly.l4.ELit
import bluejelly.l4.ECon
import bluejelly.l4.App
import bluejelly.l4.NApp
import bluejelly.l4.Let
import bluejelly.l4.LetRec
import bluejelly.l4.Eval
import bluejelly.l4.Match
import bluejelly.l4.Pat
import bluejelly.l4.PLit
import bluejelly.l4.PVar
import bluejelly.l4.PCon
import bluejelly.l4.Alt
import bluejelly.l4.Decl
import bluejelly.l4.FunDecl
import bluejelly.l4.Module
import bluejelly.l4.ELit
import bluejelly.l4.ECon
import bluejelly.l4.Note
/**
* General utilities for AST testing.
* @author ppedemon
*/
class AstUtils {
// ---------------------------------------------------------------------
// Check for module, function and expression isomorphisms
// ---------------------------------------------------------------------
type E = Map[Var,Var]
type M = Map[Var,FunDecl]
  /**
   * Are the given modules isomorphic? They are if both modules have the
   * same number of functions and, for every function f in m0, there is
   * an isomorphic function g in m1.
   */
def isoMod(m0:Module,m1:Module) = isoDecls(m0.decls, m1.decls)
private def isoDecls(ds0:List[Decl],ds1:List[Decl]) = {
val m0 = collectFuns(ds0)
val m1 = collectFuns(ds1)
m0.size == m1.size && m0.forall {
case (n,f) =>
if (m1 isDefinedAt n) {
val g = m1(n)
isoFun(f,g)
} else false
}
}
private def collectFuns(ds:List[Decl]):M = ds match {
case Nil => Map()
case (f@FunDecl(n,_,_))::ds => collectFuns(ds) + (n->f)
case _::ds => collectFuns(ds)
}
/**
* Two functions are isomorphic if they have the same number of
* arguments and if their bodies are isomorphic.
*/
def isoFun(f:FunDecl, g:FunDecl) = {
if (f.args.length != g.args.length) false else {
val e = (f.args,g.args).zipped.foldLeft(Map():E){case (m,(u,v)) => m + (u->v)}
iso(e)(f.body,g.body)
}
}
  /**
   * Check if the given two expressions are isomorphic. Our definition of
   * isomorphic is: the expressions are alpha-convertible. For example,
   * `let x = 1 in x` and `let y = 1 in y` are isomorphic.
   *
   * However, we are sensitive to qualifiers: with M the name of the
   * enclosing module, `f M.x` and `f x` are *not* isomorphic. So you must
   * be sure that you are passing two expressions where qualifiers won't
   * force false negatives.
   *
   * <p> Since we will use this for testing only, we can always be sure
   * that we are going to compare expressions where name qualifiers are
   * not an issue. We, in the role of testers using this class, will
   * take this as a precondition.
   */
def iso(e:E)(x:Expr,y:Expr):Boolean = (x,y) match {
case (ELit(x),ELit(y)) => x == y
case (ECon(c,xs),ECon(d,ys)) => c == d && isoList(e)(xs,ys)
case (App(f,xs),App(g,ys)) =>
val (extEnv,ok) = isoVar(e)(f,g)
if (ok) isoList(extEnv)(xs,ys) else false
case (NApp(f,xs),NApp(g,ys)) =>
val (extEnv,ok) = isoVar(e)(f,g)
if (ok) isoList(extEnv)(xs,ys) else false
case (Let(x,exp0,b0),Let(y,exp1,b1)) =>
val ok = iso(e)(exp0,exp1)
ok && iso(e + (x->y))(b0,b1)
case (Eval(x,exp0,b0),Eval(y,exp1,b1)) =>
val ok = iso(e)(exp0,exp1)
ok && iso(e + (x->y))(b0,b1)
case (LetRec(ds0,b0),LetRec(ds1,b1)) =>
val (extEnv,ok) = isoDecls(e)(ds0,ds1)
ok && iso(extEnv)(b0,b1)
    case (Match(u,as0),Match(v,as1)) =>
      val (extEnv,ok) = isoVar(e)(u,v)
      ok && as0.length == as1.length && (as0,as1).zipped.forall(isoAlt(extEnv))
// Ignore occurrence info
case (Note(_,x),y) => iso(e)(x,y)
case (x,Note(_,y)) => iso(e)(x,y)
case _ => false
}
def isoList(e:E)(xs:List[Expr], ys:List[Expr]) =
xs.length == ys.length && (xs,ys).zipped.forall(iso(e))
private def isoVar(e:E)(u:Var,v:Var) =
if (e isDefinedAt u) (e,e(u) == v) else (e + (u->v),true)
private def isoDecls(e:E)(xs:List[(Var,Expr)], ys:List[(Var,Expr)]) = {
val (ds0,es0) = xs.unzip
val (ds1,es1) = ys.unzip
val extEnv = (ds0,ds1).zipped.foldLeft(e){case (e,(u,v)) => e + (u->v)}
(extEnv, es0.length == es1.length && isoList(extEnv)(es0,es1))
}
private def isoPat(e:E)(p:Pat,q:Pat) = (p,q) match {
case (PLit(l0),PLit(l1)) => (e,l0 == l1)
case (PVar(u),PVar(v)) => (e + (u->v),true)
case (PCon(c,xs),PCon(d,ys)) =>
val extEnv = (xs,ys).zipped.foldLeft(e){case (e,(u,v)) => e + (u->v)}
(extEnv, c == d)
case _ => (e,false)
}
private def isoAlt(e:E)(a0:Alt,a1:Alt) = {
val (extEnv,ok) = isoPat(e)(a0.p,a1.p)
ok && iso(extEnv)(a0.e,a1.e)
}
// ---------------------------------------------------------------------
// Check for module, function and expression exact equality
// ---------------------------------------------------------------------
def eqMod(m0:Module,m1:Module)=
m0.n == m1.n && eqDecls(m0.decls, m1.decls)
private def eqDecls(ds0:List[Decl],ds1:List[Decl]) = {
val m0 = collectFuns(ds0)
val m1 = collectFuns(ds1)
m0.size == m1.size && m0.forall {
case (n,f) =>
if (m1 isDefinedAt n) {
val g = m1(n)
eqFun(f,g)
} else false
}
}
def eqFun(f:FunDecl,g:FunDecl) =
f.args == g.args && f.body == g.body
}
|
ppedemon/Bluejelly
|
bluejelly-l4/src/test/scala/bluejelly/l4/test/AstUtils.scala
|
Scala
|
bsd-3-clause
| 5,457
|
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.planner.logical.cardinality
import org.mockito.Mockito.when
import org.neo4j.cypher.internal.compiler.v2_3.planner.logical.Selectivity
import org.neo4j.cypher.internal.compiler.v2_3.planner.logical.plans.IdName
import org.neo4j.cypher.internal.compiler.v2_3.planner.{Predicate, Selections}
import org.neo4j.cypher.internal.compiler.v2_3.spi.GraphStatistics
import org.neo4j.cypher.internal.frontend.v2_3.ast._
import org.neo4j.cypher.internal.frontend.v2_3.helpers.NonEmptyList
import org.neo4j.cypher.internal.frontend.v2_3.test_helpers.CypherFunSuite
import org.neo4j.cypher.internal.frontend.v2_3.{InputPosition, LabelId, PropertyKeyId, SemanticTable}
class ExpressionSelectivityCalculatorTest extends CypherFunSuite with AstConstructionTestSupport {
test("Should consider parameter expressions when calculating index selectivity") {
implicit val semanticTable = SemanticTable()
semanticTable.resolvedLabelIds.put("Page", LabelId(0))
semanticTable.resolvedPropertyKeyNames.put("title", PropertyKeyId(0))
implicit val selections = Selections(Set(Predicate(Set(IdName("n")), HasLabels(ident("n"), Seq(LabelName("Page")_))_)))
val stats = mock[GraphStatistics]
when(stats.nodesWithLabelCardinality(None)).thenReturn(1000.0)
when(stats.indexSelectivity(LabelId(0), PropertyKeyId(0))).thenReturn(Some(Selectivity.of(0.1d).get))
val calculator = ExpressionSelectivityCalculator(stats, IndependenceCombiner)
val result = calculator(In(Property(ident("n"), PropertyKeyName("title")_)_, Parameter("titles")_)_)
result.factor should equal (0.92 +- 0.01)
}
test("Should peek inside sub predicates") {
implicit val semanticTable = SemanticTable()
semanticTable.resolvedLabelIds.put("Page", LabelId(0))
implicit val selections = Selections(Set(Predicate(Set(IdName("n")), HasLabels(ident("n"), Seq(LabelName("Page")_))_)))
val stats = mock[GraphStatistics]
when(stats.nodesWithLabelCardinality(None)).thenReturn(2000.0)
when(stats.nodesWithLabelCardinality(Some(LabelId(0)))).thenReturn(1000.0)
val calculator = ExpressionSelectivityCalculator(stats, IndependenceCombiner)
val result = calculator(PartialPredicate[HasLabels](HasLabels(ident("n"), Seq(LabelName("Page")_))_, mock[HasLabels]))
result.factor should equal(0.5)
}
test("Should look at range predicates that could benefit from using an index") {
implicit val semanticTable = SemanticTable()
semanticTable.resolvedLabelIds.put("Person", LabelId(0))
val n_is_Person = Predicate(Set(IdName("n")), HasLabels(ident("n"), Seq(LabelName("Person") _)) _)
val n_prop: Property = Property(ident("n"), PropertyKeyName("prop")_)_
val n_gt_3_and_lt_4 = Predicate(Set(IdName("n")), AndedPropertyInequalities(ident("n"), n_prop, NonEmptyList(
GreaterThan(n_prop, SignedDecimalIntegerLiteral("3")_)_,
LessThan(n_prop, SignedDecimalIntegerLiteral("4")_)_
)))
implicit val selections = Selections(Set(n_is_Person, n_gt_3_and_lt_4))
val stats = mock[GraphStatistics]
when(stats.nodesWithLabelCardinality(None)).thenReturn(2000.0)
when(stats.nodesWithLabelCardinality(Some(LabelId(0)))).thenReturn(1000.0)
val calculator = ExpressionSelectivityCalculator(stats, IndependenceCombiner)
val result = calculator(n_gt_3_and_lt_4.expr)
result.factor should equal(0.03)
}
test("Should optimize selectivity with respect to prefix length for STARTS WITH predicates") {
implicit val semanticTable = SemanticTable()
semanticTable.resolvedLabelIds.put("A", LabelId(0))
semanticTable.resolvedPropertyKeyNames.put("prop", PropertyKeyId(0))
implicit val selections = mock[Selections]
val label = LabelName("A")(InputPosition.NONE)
val propKey = PropertyKeyName("prop")(InputPosition.NONE)
when(selections.labelsOnNode(IdName("a"))).thenReturn(Set(label))
val stats = mock[GraphStatistics]
when(stats.indexSelectivity(LabelId(0), PropertyKeyId(0))).thenReturn(Some(Selectivity.of(.01).get))
when(stats.indexPropertyExistsSelectivity(LabelId(0), PropertyKeyId(0))).thenReturn(Some(Selectivity.ONE))
val calculator = ExpressionSelectivityCalculator(stats, IndependenceCombiner)
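    // Expected selectivity shrinks slightly as the prefix grows longer.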
val prefixes = Map("p" -> 0.23384596099184043,
"p2" -> 0.2299568541948447,
"p33" -> 0.22801230079634685,
"p5555" -> 0.22606774739784896,
"reallylong" -> 0.22429997158103274)
prefixes.foreach { case (prefix, selectivity) =>
val actual = calculator(StartsWith(Property(Identifier("a") _, propKey) _, StringLiteral(prefix)(InputPosition.NONE)) _)
assert( actual.factor === selectivity +- selectivity * 0.000000000000001)
}
}
test("Selectivity should never be worse than corresponding existence selectivity") {
implicit val semanticTable = SemanticTable()
semanticTable.resolvedLabelIds.put("A", LabelId(0))
semanticTable.resolvedPropertyKeyNames.put("prop", PropertyKeyId(0))
implicit val selections = mock[Selections]
val label = LabelName("A")(InputPosition.NONE)
val propKey = PropertyKeyName("prop")(InputPosition.NONE)
when(selections.labelsOnNode(IdName("a"))).thenReturn(Set(label))
val stats = mock[GraphStatistics]
when(stats.indexSelectivity(LabelId(0), PropertyKeyId(0))).thenReturn(Some(Selectivity.of(0.01).get))
val existenceSelectivity = .2285
when(stats.indexPropertyExistsSelectivity(LabelId(0), PropertyKeyId(0))).thenReturn(Some(Selectivity.of(existenceSelectivity).get))
val calculator = ExpressionSelectivityCalculator(stats, IndependenceCombiner)
val prefixes = Map("p" -> existenceSelectivity,
"p2" -> existenceSelectivity,
"p33" -> 0.22801230079634685,
"p5555" -> 0.22606774739784896,
"reallylong" -> 0.22429997158103274)
prefixes.foreach { case (prefix, selectivity) =>
val actual = calculator(StartsWith(Property(Identifier("a") _, propKey) _, StringLiteral(prefix)(InputPosition.NONE)) _)
assert( actual.factor === selectivity +- selectivity * 0.000000000000001)
}
}
}
|
HuangLS/neo4j
|
community/cypher/cypher-compiler-2.3/src/test/scala/org/neo4j/cypher/internal/compiler/v2_3/planner/logical/cardinality/ExpressionSelectivityCalculatorTest.scala
|
Scala
|
apache-2.0
| 7,099
|
/*
# Copyright 2016 Georges Lipka
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*/
package com.glipka.easyReactJS.react
import scala.scalajs.js
import scala.scalajs.js._
import org.scalajs.dom.html
import js.{ UndefOr, Any, Function => JFn }
import js.annotation.{ JSBracketAccess, JSName }
import js.{ Any => jAny }
// https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/react/react.d.ts
@js.native
trait Factory[P] extends js.Any {
def apply(props: P, children: Any*): ReactElement[P] = js.native
def apply(): ReactElement[P] = js.native
//def apply(props: Attributes, children: Any*): ReactElement[P] = null.asInstanceOf[ ReactElement[P]]
}
|
glipka/Easy-React-With-ScalaJS
|
src/main/scala/com/glipka/easyReactJS/react/Factory.scala
|
Scala
|
apache-2.0
| 1,167
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.streaming.dsl.plan
import akka.actor.ActorSystem
import org.apache.gearpump.streaming.partitioner.{CoLocationPartitioner, GroupByPartitioner, HashPartitioner, Partitioner}
import org.apache.gearpump.streaming.Processor
import org.apache.gearpump.streaming.task.Task
import org.apache.gearpump.util.Graph
/**
* This class is responsible for turning the high level
* [[org.apache.gearpump.streaming.dsl.scalaapi.Stream]] DSL into low level
* [[org.apache.gearpump.streaming.Processor]] API.
*/
class Planner {
/**
* This method interprets a Graph of [[Op]] and creates a Graph of
* [[org.apache.gearpump.streaming.Processor]].
*
   * It first traverses the Graph in reverse topological order, merging each
   * Op with its downstream Op according to the following rules.
*
* 1. The Op has only one outgoing edge and the downstream Op has only one incoming edge
* 2. Neither Op is [[ProcessorOp]]
* 3. The edge is [[Direct]]
*
* Finally the vertices of the optimized Graph are translated to Processors
* and the edges to Partitioners.
*/
def plan(dag: Graph[Op, OpEdge])
(implicit system: ActorSystem): Graph[Processor[_ <: Task], _ <: Partitioner] = {
val graph = optimize(dag)
graph.mapEdge { (_, edge, node2) =>
edge match {
case Shuffle =>
node2 match {
case op: GroupByOp[_, _] =>
new GroupByPartitioner(op.groupBy)
case _ => new HashPartitioner
}
case Direct =>
// FIXME: This is never used
new CoLocationPartitioner
}
}.mapVertex(_.toProcessor)
}
private def optimize(dag: Graph[Op, OpEdge])
(implicit system: ActorSystem): Graph[Op, OpEdge] = {
val graph = dag.copy
val nodes = graph.topologicalOrderIterator.toList.reverse
for (node <- nodes) {
val outGoingEdges = graph.outgoingEdgesOf(node)
for (edge <- outGoingEdges) {
merge(graph, edge._1, edge._3)
}
}
graph
}
private def merge(graph: Graph[Op, OpEdge], node1: Op, node2: Op)
(implicit system: ActorSystem): Unit = {
if (graph.outDegreeOf(node1) == 1 &&
graph.inDegreeOf(node2) == 1 &&
// For processor node, we don't allow it to merge with downstream operators
!node1.isInstanceOf[ProcessorOp[_ <: Task]] &&
!node2.isInstanceOf[ProcessorOp[_ <: Task]]) {
val (_, edge, _) = graph.outgoingEdgesOf(node1).head
if (edge == Direct) {
val chainedOp = node1.chain(node2)
graph.addVertex(chainedOp)
for (incomingEdge <- graph.incomingEdgesOf(node1)) {
graph.addEdge(incomingEdge._1, incomingEdge._2, chainedOp)
}
for (outgoingEdge <- graph.outgoingEdgesOf(node2)) {
graph.addEdge(chainedOp, outgoingEdge._2, outgoingEdge._3)
}
// Remove the old vertex
graph.removeVertex(node1)
graph.removeVertex(node2)
}
}
}
}
|
manuzhang/incubator-gearpump
|
streaming/src/main/scala/org/apache/gearpump/streaming/dsl/plan/Planner.scala
|
Scala
|
apache-2.0
| 3,781
|
def flatMap[A,B](f: Rand[A])(g: A => Rand[B]): Rand[B] =
rng => {
val (a, r1) = f(rng)
g(a)(r1) // We pass the new state along
}
def positiveLessThan(n: Int): Rand[Int] = {
flatMap(positiveInt) { i =>
val mod = i % n
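    // Retry when i + (n-1) overflows Int: those i values fall in the
    // truncated top band that would skew the modulo towards lower results.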
if (i + (n-1) - mod > 0) unit(mod) else positiveLessThan(n)
}
}
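
// For illustration, `map` can be recovered from `flatMap` (assuming the
// `unit` combinator defined earlier in the chapter):
def map[A,B](s: Rand[A])(f: A => B): Rand[B] =
  flatMap(s)(a => unit(f(a)))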
|
willcodejavaforfood/fpinscala
|
answerkey/state/10.answer.scala
|
Scala
|
mit
| 305
|
/*
* Copyright (c) 2015-2017 EpiData, Inc.
*/
package com.epidata.spark
import com.datastax.spark.connector._
import java.sql.Timestamp
import com.epidata.lib.models.{ Measurement => BaseMeasurement, MeasurementCleansed => BaseMeasurementCleansed, MeasurementSummary, SensorMeasurement => BaseSensorMeasurement, AutomatedTest => BaseAutomatedTest, MeasurementsKeys => BaseMeasurementsKeys }
import com.epidata.spark.ops.{ Identity, OutlierDetector, MeasStatistics, FillMissingValue }
import com.epidata.spark.utils.DataFrameUtils
import org.apache.spark.SparkContext
import org.apache.spark.api.java.JavaSparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{ DataFrame, SQLContext }
import org.apache.spark.sql.functions.lit
import org.apache.spark.streaming.Seconds
/**
* The context of an Epidata connection to Spark, constructed with a provided
* SparkContext.
*/
class EpidataContext(private val sparkContext: SparkContext) {
/** Constructor used from Java and Python. */
def this(javaSparkContext: JavaSparkContext) = this(javaSparkContext.sc)
private val sqlContext = new SQLContext(sparkContext)
// Configuration parameters.
private lazy val cassandraKeyspaceName = sparkContext.getConf.get("spark.epidata.cassandraKeyspaceName")
private lazy val measurementClass =
sparkContext.getConf.get("spark.epidata.measurementClass")
private lazy val kafkaBrokers = sparkContext.getConf.get("spark.epidata.kafkaBrokers", "localhost:9092")
private lazy val streamingBatchDuration: Int = sparkContext.getConf.get("spark.epidata.streaming.batchDuration", EpidataStreamingContext.BatchDurationInSecond.toString).toInt
def getCassandraKeyspaceName = cassandraKeyspaceName
def getKafkaBrokers = kafkaBrokers
def query(
fieldQuery: Map[String, List[String]],
beginTime: Timestamp,
endTime: Timestamp
): DataFrame = {
query(fieldQuery, beginTime, endTime, com.epidata.lib.models.Measurement.DBTableName)
}
private def getUnionRDD(
fieldQuery: Map[String, List[String]],
beginTime: Timestamp,
endTime: Timestamp,
tableName: String
): RDD[Measurement] = {
import MeasurementHelpers._
// Find the equality queries for the partition key fields.
val partitionFieldsQuery = genericPartitionFields
.map(partitionFieldsMap)
.map(fieldQuery)
val table = sparkContext.cassandraTable[Measurement](cassandraKeyspaceName, tableName)
// Find all epochs covered by the query.
val epochs = Measurement.epochForTs(beginTime) to Measurement.epochForTs(endTime)
    // Create an RDD for one partition (key fields plus epoch), using a CQL query.
def rddForPartition(partition: List[Any]): RDD[Measurement] =
table.where(
DataFrameUtils.whereStatementForTable(tableName),
partition ++ List(beginTime, endTime): _*
).withAscOrder
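    // One partition per combination of the four partition-key equality
    // values and each epoch in range.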
val partitions = for (
a <- partitionFieldsQuery(0);
b <- partitionFieldsQuery(1);
c <- partitionFieldsQuery(2);
d <- partitionFieldsQuery(3);
e <- epochs
) yield List(a, b, c, d, e)
    // Create and concatenate the RDDs for all partitions across the epoch range.
val unionRDD = partitions
.map(rddForPartition)
.reduceLeft(_ ++ _)
unionRDD
}
private def getUnionRDDMeasurementCleansed(
fieldQuery: Map[String, List[String]],
beginTime: Timestamp,
endTime: Timestamp,
tableName: String
): RDD[MeasurementCleansed] = {
import MeasurementHelpers._
// Find the equality queries for the partition key fields.
val partitionFieldsQuery = genericPartitionFields
.map(partitionFieldsMap)
.map(fieldQuery)
val table = sparkContext.cassandraTable[MeasurementCleansed](cassandraKeyspaceName, tableName)
// Find all epochs covered by the query.
val epochs = Measurement.epochForTs(beginTime) to Measurement.epochForTs(endTime)
    // Create an RDD for one partition (key fields plus epoch), using a CQL query.
def rddForPartition(partition: List[Any]): RDD[MeasurementCleansed] =
table.where(
DataFrameUtils.whereStatementForTable(tableName),
partition ++ List(beginTime, endTime): _*
).withAscOrder
val partitions = for (
a <- partitionFieldsQuery(0);
b <- partitionFieldsQuery(1);
c <- partitionFieldsQuery(2);
d <- partitionFieldsQuery(3);
e <- epochs
) yield List(a, b, c, d, e)
    // Create and concatenate the RDDs for all partitions across the epoch range.
val unionRDD = partitions
.map(rddForPartition)
.reduceLeft(_ ++ _)
unionRDD
}
private def getUnionRDDMeasurementSummary(
fieldQuery: Map[String, List[String]],
beginTime: Timestamp,
endTime: Timestamp,
tableName: String
): RDD[MeasurementSummary] = {
import MeasurementHelpers._
// Find the equality queries for the partition key fields.
val partitionFieldsQuery = genericPartitionFields
.map(partitionFieldsMap)
.map(fieldQuery)
val table = sparkContext.cassandraTable[MeasurementSummary](cassandraKeyspaceName, tableName)
    // Create an RDD for one partition, using a CQL query.
def rddForPartition(partition: List[Any]): RDD[MeasurementSummary] =
table.where(
DataFrameUtils.whereStatementForTable(tableName),
partition ++ List(beginTime, endTime): _*
).withAscOrder
val partitions = for (
a <- partitionFieldsQuery(0);
b <- partitionFieldsQuery(1);
c <- partitionFieldsQuery(2);
d <- partitionFieldsQuery(3)
) yield List(a, b, c, d)
    // Create and concatenate the RDDs for all partitions.
val unionRDD = partitions
.map(rddForPartition)
.reduceLeft(_ ++ _)
unionRDD
}
private def getDataFrame(
fieldQuery: Map[String, List[String]],
beginTime: Timestamp,
endTime: Timestamp,
tableName: String
): DataFrame = {
tableName match {
case BaseMeasurement.DBTableName =>
val unionRDD = getUnionRDD(fieldQuery, beginTime, endTime, tableName)
measurementClass match {
case BaseAutomatedTest.NAME => sqlContext.createDataFrame(unionRDD.map(AutomatedTest.measurementToAutomatedTest))
case BaseSensorMeasurement.NAME => sqlContext.createDataFrame(unionRDD.map(SensorMeasurement.measurementToSensorMeasurement))
}
case BaseMeasurementCleansed.DBTableName =>
val unionRDD = getUnionRDDMeasurementCleansed(fieldQuery, beginTime, endTime, tableName)
measurementClass match {
case BaseAutomatedTest.NAME => sqlContext.createDataFrame(unionRDD.map(AutomatedTestCleansed.measurementCleansedToAutomatedTestCleansed))
case BaseSensorMeasurement.NAME => sqlContext.createDataFrame(unionRDD.map(SensorMeasurementCleansed.measurementCleansedToSensorMeasurementCleansed))
}
case MeasurementSummary.DBTableName =>
val unionRDD = getUnionRDDMeasurementSummary(fieldQuery, beginTime, endTime, tableName)
measurementClass match {
case BaseAutomatedTest.NAME => sqlContext.createDataFrame(unionRDD.map(BaseAutomatedTest.measurementSummaryToAutomatedTestSummary))
case BaseSensorMeasurement.NAME => sqlContext.createDataFrame(unionRDD.map(BaseSensorMeasurement.measurementSummaryToSensorMeasurementSummary))
}
}
}
/**
* Read Measurements from Cassandra into a DataFrame. The Measurements
* matching the query and falling between beginTime and endTime are returned.
*
* @param fieldQuery Map indicating required values for specified fields.
* Some fields may be required, but the names of these
* fields will vary based on the system configuration.
*/
def query(
fieldQuery: Map[String, List[String]],
beginTime: Timestamp,
endTime: Timestamp,
tableName: String
): DataFrame = {
if (beginTime.getTime > endTime.getTime) {
      throw new IllegalArgumentException("beginTime must not be after endTime.")
}
if (!partitionFieldsMap.values.toSet.subsetOf(fieldQuery.keySet)) {
throw new IllegalArgumentException("Required field missing from fieldQuery. " +
s"Required fields: ${partitionFieldsMap.values.toList}")
}
    if (fieldQuery.values.exists(_.isEmpty)) {
throw new IllegalArgumentException(
"All fieldQuery entries must have at least one match value."
)
}
val dataFrame = getDataFrame(fieldQuery, beginTime, endTime, tableName)
if (!fieldQuery.keySet.subsetOf(dataFrame.columns.toSet)) {
throw new IllegalArgumentException("Unexpected field in fieldQuery.")
}
// Find the equality queries for the non partition key fields.
val nonpartitionFields = fieldQuery.keySet.diff(genericPartitionFields.map(partitionFieldsMap).toSet)
val nonpartitionFieldsQuery = fieldQuery.filterKeys(nonpartitionFields)
// Filter by any applicable non partition key fields.
val filtered = nonpartitionFieldsQuery.foldLeft(dataFrame)((df, filter) =>
df.filter(df.col(filter._1).isin(filter._2.map(lit(_)): _*)))
filtered
}
/** Query interface for Java and Python. */
def query(
fieldQuery: java.util.Map[String, java.util.List[String]],
beginTime: Timestamp,
endTime: Timestamp
): DataFrame = {
import scala.collection.JavaConversions._
query(fieldQuery.toMap.mapValues(_.toList), beginTime, endTime, BaseMeasurement.DBTableName)
}
/** Query interface for Java and Python. */
def queryMeasurementCleansed(
fieldQuery: java.util.Map[String, java.util.List[String]],
beginTime: Timestamp,
endTime: Timestamp
): DataFrame = {
import scala.collection.JavaConversions._
query(fieldQuery.toMap.mapValues(_.toList), beginTime, endTime, BaseMeasurementCleansed.DBTableName)
}
/** Query interface for Java and Python. */
def queryMeasurementSummary(
fieldQuery: java.util.Map[String, java.util.List[String]],
beginTime: Timestamp,
endTime: Timestamp
): DataFrame = {
import scala.collection.JavaConversions._
query(fieldQuery.toMap.mapValues(_.toList), beginTime, endTime, MeasurementSummary.DBTableName)
}
/** List the values of the currently saved partition key fields. */
def listKeys(): DataFrame = {
import MeasurementHelpers._
import AutomatedTestKey._
import SensorMeasurementKey._
val table = sparkContext.cassandraTable[MeasurementKey](cassandraKeyspaceName, BaseMeasurementsKeys.DBTableName)
measurementClass match {
case BaseAutomatedTest.NAME => sqlContext.createDataFrame(table.map(keyToAutomatedTest))
case BaseSensorMeasurement.NAME => sqlContext.createDataFrame(table.map(keyToSensorMeasurement))
}
}
@deprecated
def createStream(op: String, meas_names: List[String], params: java.util.Map[String, String]): EpidataStreamingContext = {
val esc = new EpidataStreamingContext(
this,
Seconds(streamingBatchDuration),
com.epidata.lib.models.Measurement.KafkaTopic
)
op match {
case "Identity" => esc.saveToCassandra(new Identity())
case "FillMissingValue" => esc.saveToCassandra(new FillMissingValue(meas_names, "rolling", 3))
case "OutlierDetector" => esc.saveToCassandra(new OutlierDetector("meas_value", "quartile"))
case "MeasStatistics" => esc.saveToCassandra(new MeasStatistics(meas_names, "standard"))
}
esc
}
def getSQLContext = sqlContext
def getSparkContext = sparkContext
private val genericPartitionFields = List("customer", "customer_site", "collection", "dataset")
private def partitionFieldsMap = measurementClass match {
case BaseAutomatedTest.NAME => Map(
"customer" -> "company",
"customer_site" -> "site",
"collection" -> "device_group",
"dataset" -> "tester"
)
case BaseSensorMeasurement.NAME => Map(
"customer" -> "company",
"customer_site" -> "site",
"collection" -> "station",
"dataset" -> "sensor"
)
case _ => throw new IllegalArgumentException(
"Invalid spark.epidata.measurementClass configuration."
)
}
}
|
epidataio/epidata-community
|
spark/src/main/scala/com/epidata/spark/EpidataContext.scala
|
Scala
|
apache-2.0
| 12,094
|
package com.github.alixba.vast
import scala.xml.Node
case class IconClickThrough(value: String) extends VASTElement {
/**
* Serializes this to a Node.
*/
def toXML: Node =
<IconClickThrough>{ value.asCData }</IconClickThrough>
}
object IconClickThrough extends VASTElementCompanion[IconClickThrough] {
/**
* Deserializes a Node to a T.
* The highest tag of the Node should match
* the T.
*
* {{{
* val elem = <Ad><SomeTags/></Ad>
* val ad = Ad.fromXML(elem)
* }}}
*/
def fromXML(node: Node): IconClickThrough =
IconClickThrough(node.text)
}
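// Hedged round-trip sketch (editorial addition); the URL is illustrative.
//
//   val ict = IconClickThrough.fromXML(<IconClickThrough>http://example.com</IconClickThrough>)
//   ict.toXML   // wraps the value in a CDATA section via asCData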
|
AlixBa/vast
|
src/main/scala/com/github/alixba/vast/IconClickThrough.scala
|
Scala
|
mit
| 599
|
package com.twitter.scalding.parquet
import org.slf4j.LoggerFactory
object HasColumnProjection {
val LOG = LoggerFactory.getLogger(this.getClass)
def requireNoSemiColon(glob: String) = {
require(!glob.contains(";"), "A column projection glob cannot contain a ; character")
}
}
trait HasColumnProjection {
import com.twitter.scalding.parquet.HasColumnProjection._
/**
* Deprecated. Use withColumnProjections, which uses a different glob syntax.
*
* The format for specifying columns is described here:
* https://github.com/apache/parquet-mr/blob/3df3372a1ee7b6ea74af89f53a614895b8078609/parquet_cascading.md#2-projection-pushdown
* (Note that this link is different from the one below in withColumnProjections)
*
* Note that the format described there says that multiple globs can be combined with a ; character.
* Instead, we use a Set() here and will eventually join the set on the ; character for you.
*/
@deprecated(message = "Use withColumnProjections, which uses a different glob syntax", since = "0.15.1")
def withColumns: Set[String] = Set()
/**
* The format for specifying columns is described here:
* https://github.com/apache/parquet-mr/blob/master/parquet_cascading.md#21-projection-pushdown-with-thriftscrooge-records
*
* Note that the format described there says that multiple globs can be combined with a ; character.
* Instead, we use a Set() here and will eventually join the set on the ; character for you.
*/
def withColumnProjections: Set[String] = Set()
/**
* Parquet accepts globs separated by the ; character
*/
protected[parquet] final def columnProjectionString: Option[ColumnProjectionString] = {
val deprecated = withColumns
val strict = withColumnProjections
require(deprecated.isEmpty || strict.isEmpty,
"Cannot provide both withColumns and withColumnProjections")
deprecated.foreach(requireNoSemiColon)
strict.foreach(requireNoSemiColon)
if (deprecated.nonEmpty) {
LOG.warn("withColumns is deprecated. Please use withColumnProjections, which uses a different glob syntax")
Some(DeprecatedColumnProjectionString(deprecated))
} else if (strict.nonEmpty) {
Some(StrictColumnProjectionString(strict))
} else {
None
}
}
}
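// Hedged usage sketch (editorial addition): a source mixing in
// HasColumnProjection would typically override withColumnProjections; the glob
// strings below are illustrative only.
//
//   class MyParquetSource extends HasColumnProjection {
//     override def withColumnProjections: Set[String] = Set("address/zip", "name")
//   }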
sealed trait ColumnProjectionString {
def globStrings: Set[String]
def asSemicolonString: String = globStrings.mkString(";")
}
final case class DeprecatedColumnProjectionString(globStrings: Set[String]) extends ColumnProjectionString
final case class StrictColumnProjectionString(globStrings: Set[String]) extends ColumnProjectionString
|
tdyas/scalding
|
scalding-parquet/src/main/scala/com/twitter/scalding/parquet/HasColumnProjection.scala
|
Scala
|
apache-2.0
| 2,643
|
package juju.infrastructure.local
import akka.actor.ActorRef
import akka.testkit.TestProbe
import juju.domain.{SagaFactory, Saga}
import juju.domain.Saga.{SagaCorrelationIdResolution, SagaHandlersResolution}
import juju.infrastructure.SagaRouter.SagaIsUp
import juju.messages.DomainEvent
import juju.testkit.LocalDomainSpec
import juju.testkit.infrastructure.SagaRouterSpec
import akka.pattern.gracefulStop
import scala.concurrent.duration._
import scala.reflect.ClassTag
class LocalSagaRouterSpec extends LocalDomainSpec("LocalSagaRouter") with SagaRouterSpec {
override protected def createSagaRouter[S <: Saga : ClassTag : SagaHandlersResolution : SagaCorrelationIdResolution : SagaFactory](tenant: String, probe: TestProbe): ActorRef = {
system.eventStream.subscribe(probe.ref, classOf[SagaIsUp])
LocalSagaRouter.localSagaRouterFactory(tenant).getOrCreate
}
override protected def publish(tenant: String, sagaRouterRef : ActorRef, event: DomainEvent, probe: TestProbe) = {
probe.send(sagaRouterRef, event)
}
override protected def shutdownRouter[S <: Saga : ClassTag](tenant: String, sagaRouterRef: ActorRef, probe: TestProbe): Unit = {
system.eventStream.unsubscribe(probe.ref, classOf[SagaIsUp])
gracefulStop(sagaRouterRef, 10 seconds)
}
}
|
brokersquare/juju
|
core/src/test/scala/juju/infrastructure/local/LocalSagaRouterSpec.scala
|
Scala
|
apache-2.0
| 1,283
|
package net.ruippeixotog.scalascraper.model
/** The result of a query to an [[Element]]. It works as a collection of `Element` instances and provides a way to
* further query the elements.
*/
trait ElementQuery[+E <: Element] extends Iterable[E] {
/** Executes an additional query over the elements of this query using a CSS selector.
*
* Semantically, the result of the returned composite query is equivalent to iterating over the elements of this
* query, applying the CSS selector to each individual node and flattening the result while eliminating duplicates.
*
* @param query
* the CSS selector used to select elements to be returned
* @return
* an `ElementQuery` instance representing the result of the composed query
*/
def select(query: String): ElementQuery[E]
}
private[model] class RootElementQuery[E <: Element](private val target: E, exec: String => Iterator[E])
extends ElementQuery[E] {
def iterator = Iterator(target)
def select(query: String): ElementQuery[E] =
new LazyElementQuery(query.split(","), target, exec)
override def equals(obj: Any) =
obj match {
case q: ElementQuery[_] => iterator.sameElements(q.iterator)
case _ => false
}
override def hashCode() = iterator.toSeq.hashCode()
override def toString() = s"RootElementQuery($target)"
}
private[model] class LazyElementQuery[E <: Element](
private val queries: Seq[String],
private val target: E,
exec: String => Iterator[E]
) extends ElementQuery[E] {
def iterator = exec(queries.mkString(","))
def select(query: String): ElementQuery[E] = {
val newQueries = for { q1 <- queries; q2 <- query.split(",") } yield s"$q1 $q2"
new LazyElementQuery(newQueries, target, exec)
}
override def equals(obj: Any) =
obj match {
case q: ElementQuery[_] => iterator.sameElements(q.iterator)
case _ => false
}
override def hashCode() = iterator.toSeq.hashCode()
override def toString() = s"LazyElementQuery($queries, $target)"
}
object ElementQuery {
def apply[E <: Element.Upper[E]](target: E): ElementQuery[E] =
new RootElementQuery(target, target.select(_).iterator)
def apply[E <: Element.Upper[E]](cssQuery: String, target: E): ElementQuery[E] =
new LazyElementQuery(cssQuery.split(",").toList, target, target.select(_).iterator)
def apply[E <: Element.Upper[E]](cssQuery: String, target: E, exec: String => Iterator[E]): ElementQuery[E] =
new LazyElementQuery(cssQuery.split(",").toList, target, exec)
}
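// Hedged usage sketch (editorial addition): select composes, so under the
// semantics documented above the two chained queries below yield the same
// elements; the element value and selectors are illustrative only.
//
//   val q = ElementQuery(someElement)
//   q.select("div").select("span")   // same elements as q.select("div span")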
|
ruippeixotog/scala-scraper
|
core/src/main/scala/net/ruippeixotog/scalascraper/model/ElementQuery.scala
|
Scala
|
mit
| 2,549
|
package com.eltimn.scamongo
/*
* Copyright 2010 Tim Nelson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
import java.util.Date
import java.util.regex.Pattern
import org.specs.Specification
import org.specs.runner.{Runner, JUnit}
import net.liftweb.json.DefaultFormats
import net.liftweb.json.JsonAST._
import net.liftweb.json.JsonParser._
import net.liftweb.json.JsonDSL._
import com.mongodb.{BasicDBObject, BasicDBObjectBuilder, DBObject}
//class DirectExampleTest extends Runner(Examples) with JUnit
object DirectExamples extends Specification {
doBeforeSpec {
// define the db
MongoDB.defineDb(DefaultMongoIdentifier, MongoAddress(MongoHost(), "test_direct"))
}
import com.mongodb.util.JSON // Mongo parser/serializer
val debug = false
def date(s: String) = DefaultFormats.dateFormat.parse(s).get
"Mongo tutorial example" in {
// build the DBObject
val doc = new BasicDBObject
doc.put("name", "MongoDB")
doc.put("type", "database")
doc.put("count", 1)
val info = new BasicDBObject
info.put("x", 203)
info.put("y", 102)
doc.put("info", info)
// use the Mongo instance directly
MongoDB.use(DefaultMongoIdentifier) ( db => {
val coll = db.getCollection("testCollection")
// save the doc to the db
coll.save(doc)
// get the doc back from the db and compare them
coll.findOne must_== doc
// upsert
doc.put("type", "document")
doc.put("count", 2)
val q = new BasicDBObject("name", "MongoDB") // the query to select the document(s) to update
val o = doc // the new object to update with, replaces the entire document, except possibly _id
val upsert = false // if the database should create the element if it does not exist
val apply = false // if an _id field should be added to the new object
coll.update(q, o, upsert, apply)
// get the doc back from the db and compare
coll.findOne.get("type") must_== "document"
coll.findOne.get("count") must_== 2
// modifier operations $inc, $set, $push...
val o2 = new BasicDBObject
o2.put("$inc", new BasicDBObject("count", 1)) // increment count by 1
o2.put("$set", new BasicDBObject("type", "docdb")) // set type
coll.update(q, o2, false, false)
// get the doc back from the db and compare
coll.findOne.get("type") must_== "docdb"
coll.findOne.get("count") must_== 3
if (!debug) {
// delete it
coll.remove(new BasicDBObject("_id", doc.get("_id")))
coll.find.count must_== 0
}
// server-side eval
val six = db.eval(" function() { return 3+3; } ")
six must_== 6
})
}
"Mongo tutorial 2 example" in {
// use a DBCollection directly
MongoDB.useCollection("iDoc") ( coll => {
// insert multiple documents
for (i <- List.range(1, 101)) {
coll.insert(new BasicDBObject().append("i", i))
}
// create an index
coll.createIndex(new BasicDBObject("i", 1)) // create index on "i", ascending
// count the docs
coll.getCount must_== 100
// get the count using a query
coll.getCount(new BasicDBObject("i", new BasicDBObject("$gt", 50))) must_== 50
// use a cursor to get all docs
val cur = coll.find
// do something with the cursor
while(cur.hasNext) {
val dbo = cur.next
if (debug) println(dbo)
}
cur.count must_== 100
// get a single document with a query ( i = 71 )
val query = new BasicDBObject
query.put("i", 71)
val cur2 = coll.find(query)
cur2.count must_== 1
cur2.next.get("i") must_== 71
// get a set of documents with a query
// e.g. find all where i > 50
val cur3 = coll.find(new BasicDBObject("i", new BasicDBObject("$gt", 50)))
cur3.count must_== 50
// range - 20 < i <= 30
val cur4 = coll.find(new BasicDBObject("i", new BasicDBObject("$gt", 20).append("$lte", 30)))
cur4.count must_== 10
// limiting result set
val cur5 = coll.find(new BasicDBObject("i", new BasicDBObject("$gt", 50))).limit(3)
var cntr5 = 0
while(cur5.hasNext) {
cur5.next
cntr5 += 1
}
cntr5 must_== 3
// skip
val cur6 = coll.find(new BasicDBObject("i", new BasicDBObject("$gt", 50))).skip(10)
var cntr6 = 0
while(cur6.hasNext) {
cntr6 += 1
cur6.next.get("i") must_== 60+cntr6
}
cntr6 must_== 40
/* skip and limit */
val cur7 = coll.find.skip(10).limit(20)
var cntr7 = 0
while(cur7.hasNext) {
cntr7 += 1
cur7.next.get("i") must_== 10+cntr7
}
cntr7 must_== 20
// sorting
val cur8 = coll.find.sort(new BasicDBObject("i", -1)) // descending
var cntr8 = 100
while(cur8.hasNext) {
cur8.next.get("i") must_== cntr8
cntr8 -= 1
}
// remove some docs by a query
coll.remove(new BasicDBObject("i", new BasicDBObject("$gt", 50)))
coll.find.count must_== 50
if (!debug) {
// delete the rest of the rows
coll.remove(new BasicDBObject("i", new BasicDBObject("$lte", 50)))
coll.find.count must_== 0
}
})
}
"Mongo useSession example" in {
// use a Mongo instance directly with a session
MongoDB.useSession ( db => {
val coll = db.getCollection("testCollection")
// create a unique index on name
coll.ensureIndex(new BasicDBObject("name", 1), new BasicDBObject("unique", true))
// build the DBObjects
val doc = new BasicDBObject
val doc2 = new BasicDBObject
val doc3 = new BasicDBObject
doc.put("name", "MongoSession")
doc.put("type", "db")
doc.put("count", 1)
doc2.put("name", "MongoSession")
doc2.put("type", "db")
doc2.put("count", 1)
doc3.put("name", "MongoDB")
doc3.put("type", "db")
doc3.put("count", 1)
// save the docs to the db
coll.save(doc)
db.getLastError.get("err") must beNull
coll.save(doc2) // this should return an error
db.getLastError.get("err").toString must startWith("E11000 duplicate key error index")
coll.save(doc3)
db.getLastError.get("err") must beNull
// query for the docs by type
val qry = new BasicDBObject("type", "db")
coll.find(qry).count must_== 2
// modifier operations $inc, $set, $push...
val o2 = new BasicDBObject
o2.put("$inc", new BasicDBObject("count", 1)) // increment count by 1
//o2.put("$set", new BasicDBObject("type", "docdb")) // set type
coll.update(qry, o2, false, false)
db.getLastError.get("updatedExisting") must_== true
/* The update method only updates one document. see:
http://jira.mongodb.org/browse/SERVER-268
*/
db.getLastError.get("n") must_== 1
/* this works now
// try updating against the unique key
val o3 = new BasicDBObject
o3.put("$set", new BasicDBObject("name", "MongoDB")) // set type
coll.update(qry, o3, true, false)
db.getLastError.get("err").toString must startWith("E12011 can't $inc/$set an indexed field")
db.getLastError.get("n") must_== 0
*/
// this update query won't find any docs to update
coll.update(new BasicDBObject("name", "None"), o2, false, false)
db.getLastError.get("updatedExisting") must_== false
db.getLastError.get("n") must_== 0
// regex query example
val key = "name"
val regex = "^Mongo"
val cur = coll.find(
BasicDBObjectBuilder.start.add(key, Pattern.compile(regex)).get)
cur.count must_== 2
// use regex and another dbobject
val cur2 = coll.find(
BasicDBObjectBuilder.start.add(key, Pattern.compile(regex)).add("count", 1).get)
cur2.count must_== 1
if (!debug) {
// delete them
coll.remove(new BasicDBObject("type", "db"))
db.getLastError.get("n") must_== 2
coll.find.count must_== 0
}
})
}
doAfterSpec {
if (!debug) {
/* drop the collections */
MongoDB.useCollection(DefaultMongoIdentifier, "testCollection") ( coll => {
coll.drop
})
MongoDB.useCollection("iDoc") ( coll => {
coll.drop
})
// drop the database
MongoDB.use {
db => db.dropDatabase()
}
}
// clear the mongo instances
MongoDB.close
}
}
|
eltimn/scamongo
|
src/test/scala/com/eltimn/scamongo/DirectExamples.scala
|
Scala
|
apache-2.0
| 8,536
|
package io.udash.bootstrap.utils
object BootstrapTags {
import scalatags.JsDom.all._
final val dataBackdrop = data("backdrop")
final val dataBind = data("bind")
final val dataContent = data("content")
final val dataDismiss = data("dismiss")
final val dataKeyboard = data("keyboard")
final val dataLabel = data("label")
final val dataParent = data("parent")
final val dataOriginalTitle = data("original-title")
final val dataRide = data("ride")
final val dataShow = data("show")
final val dataSlide = data("slide")
final val dataSlideTo = data("slide-to")
final val dataTarget = data("target")
final val dataToggle = data("toggle")
}
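// Hedged usage sketch (editorial addition): using one of these data attributes
// in a Scalatags template; the attribute value is illustrative.
//
//   import scalatags.JsDom.all._
//   val toggle = div(BootstrapTags.dataToggle := "dropdown", "Menu")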
|
UdashFramework/udash-core
|
bootstrap4/.js/src/main/scala/io/udash/bootstrap/utils/BootstrapTags.scala
|
Scala
|
apache-2.0
| 665
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.renewal
import forms.{EmptyForm, Form2, InvalidForm, ValidForm}
import jto.validation.{Path, ValidationError}
import models.Country
import models.moneyservicebusiness.MostTransactions
import org.scalatest.MustMatchers
import play.api.i18n.Messages
import utils.{AmlsViewSpec, AutoCompleteServiceMocks}
import views.Fixture
import views.html.renewal.most_transactions
class most_transactionsSpec extends AmlsViewSpec with MustMatchers {
trait ViewFixture extends Fixture with AutoCompleteServiceMocks {
lazy val most_transactions = app.injector.instanceOf[most_transactions]
implicit val requestWithToken = addTokenForView()
}
"most_transactions view" must {
"have correct title" in new ViewFixture {
val form2: ValidForm[MostTransactions] = Form2(MostTransactions(Seq.empty[Country]))
def view = most_transactions(form2, true, mockAutoComplete.getCountries)
doc.title must startWith(Messages("renewal.msb.most.transactions.title") + " - " + Messages("summary.renewal"))
}
"have correct headings" in new ViewFixture {
val form2: ValidForm[MostTransactions] = Form2(MostTransactions(Seq.empty[Country]))
def view = most_transactions(form2, true, mockAutoComplete.getCountries)
heading.html must be(Messages("renewal.msb.most.transactions.title"))
subHeading.html must include(Messages("summary.renewal"))
}
"show errors in the correct locations" in new ViewFixture {
val form2: InvalidForm = InvalidForm(Map.empty,
Seq(
(Path \ "mostTransactionsCountries") -> Seq(ValidationError("not a message Key"))
))
def view = most_transactions(form2, true, mockAutoComplete.getCountries)
errorSummary.html() must include("not a message Key")
doc.getElementById("mostTransactionsCountries")
.getElementsByClass("error-notification").first().html() must include("not a message Key")
}
"have a back link" in new ViewFixture {
def view = most_transactions(EmptyForm, true, mockAutoComplete.getCountries)
doc.getElementsByAttributeValue("class", "link-back") must not be empty
}
}
}
|
hmrc/amls-frontend
|
test/views/renewal/most_transactionsSpec.scala
|
Scala
|
apache-2.0
| 2,763
|
package io.github.mandar2812.dynaml.utils.sumac
import org.scalatest.FunSuite
import org.scalatest.Matchers
/**
* Test the Map parser combinator
* User: andrews
* Date: 3/24/14
*/
class MapCombinatorParserTest extends FunSuite with Matchers {
test("should not parse something wrong") {
an[IllegalArgumentException] should be thrownBy {
MapCombinatorParser("akdfaskdf")
}
}
test("should not parse unquoted things") {
an[IllegalArgumentException] should be thrownBy {
MapCombinatorParser("akdfaskdf:kaklfjd,;dlksjdf")
}
}
test("should parse a single entry") {
val parsed = MapCombinatorParser("key:value")
parsed("key") should be("value")
parsed.size should be(1)
}
test("should parse a set of entries") {
val parsed = MapCombinatorParser("key:value,foo:bar")
parsed("key") should be("value")
parsed("foo") should be("bar")
parsed.size should be(2)
}
test("should parse a entry with , in the key") {
val parsed = MapCombinatorParser(""""key,foo":value""")
parsed("key,foo") should be("value")
parsed.size should be(1)
}
test("should parse a entry with , in the value") {
val parsed = MapCombinatorParser("""key:"foo,value"""")
parsed("key") should be("foo,value")
parsed.size should be(1)
}
test("should parse a entry with : in the key") {
val parsed = MapCombinatorParser(""""key:foo":value""")
parsed("key:foo") should be("value")
parsed.size should be(1)
}
test("should parse a entry with : in the value") {
val parsed = MapCombinatorParser("""key:"foo:value"""")
parsed("key") should be("foo:value")
parsed.size should be(1)
}
test("should parse a entry with : or , in both key and values") {
val parsed = MapCombinatorParser(""""key,foo":"foo:value"""")
parsed("key,foo") should be("foo:value")
parsed.size should be(1)
}
test("should parse a sequence of entries with : or , in both key and values") {
val parsed = MapCombinatorParser(""""key,foo":"foo:value","foo:bar":foo,bar:"foo,bar"""")
parsed("key,foo") should be("foo:value")
parsed("foo:bar") should be("foo")
parsed("bar") should be("foo,bar")
parsed.size should be(3)
}
test("allow entries with a single double quote in them") {
val parsed = MapCombinatorParser("""key"bar:value,foo:bar""")
parsed("key\"bar") should be("value")
parsed("foo") should be("bar")
}
test("should be happy with single quotes") {
val parsed = MapCombinatorParser("""'key"bar':value,'foo:bar':"foo,value"""")
parsed("key\"bar") should be("value")
parsed("foo:bar") should be("foo,value")
}
}
|
transcendent-ai-labs/DynaML
|
dynaml-core/src/test/scala/io/github/mandar2812/dynaml/utils/sumac/MapCombinatorParserTest.scala
|
Scala
|
apache-2.0
| 2,682
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spark.scheduler
import java.util.Properties
import spark.scheduler.cluster.TaskInfo
import spark.util.Distribution
import spark.{Logging, SparkContext, TaskEndReason, Utils}
import spark.executor.TaskMetrics
sealed trait SparkListenerEvents
case class SparkListenerStageSubmitted(stage: Stage, taskSize: Int) extends SparkListenerEvents
case class StageCompleted(stageInfo: StageInfo) extends SparkListenerEvents
case class SparkListenerTaskEnd(task: Task[_], reason: TaskEndReason, taskInfo: TaskInfo,
taskMetrics: TaskMetrics) extends SparkListenerEvents
case class SparkListenerJobStart(job: ActiveJob, properties: Properties = null)
extends SparkListenerEvents
case class SparkListenerJobEnd(job: ActiveJob, jobResult: JobResult)
extends SparkListenerEvents
trait SparkListener {
/**
* Called when a stage is completed, with information on the completed stage
*/
def onStageCompleted(stageCompleted: StageCompleted) { }
/**
* Called when a stage is submitted
*/
def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted) { }
/**
* Called when a task ends
*/
def onTaskEnd(taskEnd: SparkListenerTaskEnd) { }
/**
* Called when a job starts
*/
def onJobStart(jobStart: SparkListenerJobStart) { }
/**
* Called when a job ends
*/
def onJobEnd(jobEnd: SparkListenerJobEnd) { }
}
/**
* Simple SparkListener that logs a few summary statistics when each stage completes
*/
class StatsReportListener extends SparkListener with Logging {
override def onStageCompleted(stageCompleted: StageCompleted) {
import spark.scheduler.StatsReportListener._
implicit val sc = stageCompleted
this.logInfo("Finished stage: " + stageCompleted.stageInfo)
showMillisDistribution("task runtime:", (info, _) => Some(info.duration))
//shuffle write
showBytesDistribution("shuffle bytes written:",(_,metric) => metric.shuffleWriteMetrics.map{_.shuffleBytesWritten})
//fetch & io
showMillisDistribution("fetch wait time:",(_, metric) => metric.shuffleReadMetrics.map{_.fetchWaitTime})
showBytesDistribution("remote bytes read:", (_, metric) => metric.shuffleReadMetrics.map{_.remoteBytesRead})
showBytesDistribution("task result size:", (_, metric) => Some(metric.resultSize))
//runtime breakdown
val runtimePcts = stageCompleted.stageInfo.taskInfos.map{
case (info, metrics) => RuntimePercentage(info.duration, metrics)
}
showDistribution("executor (non-fetch) time pct: ", Distribution(runtimePcts.map{_.executorPct * 100}), "%2.0f %%")
showDistribution("fetch wait time pct: ", Distribution(runtimePcts.flatMap{_.fetchPct.map{_ * 100}}), "%2.0f %%")
showDistribution("other time pct: ", Distribution(runtimePcts.map{_.other * 100}), "%2.0f %%")
}
}
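// Hedged usage sketch (editorial addition): registering the listener with a
// SparkContext. The addSparkListener method is assumed to exist in this Spark
// version; the master and app name are illustrative.
//
//   val sc = new SparkContext("local", "stats-demo")
//   sc.addSparkListener(new StatsReportListener)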
object StatsReportListener extends Logging {
//for profiling, the extremes are more interesting
val percentiles = Array[Int](0,5,10,25,50,75,90,95,100)
val probabilities = percentiles.map{_ / 100.0}
val percentilesHeader = "\t" + percentiles.mkString("%\t") + "%"
def extractDoubleDistribution(stage:StageCompleted, getMetric: (TaskInfo,TaskMetrics) => Option[Double]): Option[Distribution] = {
Distribution(stage.stageInfo.taskInfos.flatMap{
case ((info,metric)) => getMetric(info, metric)})
}
// Is there some way to set up the types so that I can get rid of this completely?
def extractLongDistribution(stage:StageCompleted, getMetric: (TaskInfo,TaskMetrics) => Option[Long]): Option[Distribution] = {
extractDoubleDistribution(stage, (info, metric) => getMetric(info,metric).map{_.toDouble})
}
def showDistribution(heading: String, d: Distribution, formatNumber: Double => String) {
val stats = d.statCounter
logInfo(heading + stats)
val quantiles = d.getQuantiles(probabilities).map{formatNumber}
logInfo(percentilesHeader)
logInfo("\t" + quantiles.mkString("\t"))
}
def showDistribution(heading: String, dOpt: Option[Distribution], formatNumber: Double => String) {
dOpt.foreach { d => showDistribution(heading, d, formatNumber)}
}
def showDistribution(heading: String, dOpt: Option[Distribution], format:String) {
def f(d:Double) = format.format(d)
showDistribution(heading, dOpt, f _)
}
def showDistribution(heading:String, format: String, getMetric: (TaskInfo,TaskMetrics) => Option[Double])
(implicit stage: StageCompleted) {
showDistribution(heading, extractDoubleDistribution(stage, getMetric), format)
}
def showBytesDistribution(heading:String, getMetric: (TaskInfo,TaskMetrics) => Option[Long])
(implicit stage: StageCompleted) {
showBytesDistribution(heading, extractLongDistribution(stage, getMetric))
}
def showBytesDistribution(heading: String, dOpt: Option[Distribution]) {
dOpt.foreach{dist => showBytesDistribution(heading, dist)}
}
def showBytesDistribution(heading: String, dist: Distribution) {
showDistribution(heading, dist, (d => Utils.memoryBytesToString(d.toLong)): Double => String)
}
def showMillisDistribution(heading: String, dOpt: Option[Distribution]) {
showDistribution(heading, dOpt, (d => StatsReportListener.millisToString(d.toLong)): Double => String)
}
def showMillisDistribution(heading: String, getMetric: (TaskInfo, TaskMetrics) => Option[Long])
(implicit stage: StageCompleted) {
showMillisDistribution(heading, extractLongDistribution(stage, getMetric))
}
val seconds = 1000L
val minutes = seconds * 60
val hours = minutes * 60
/**
* reformat a time interval in milliseconds to a prettier format for output
*/
def millisToString(ms: Long) = {
val (size, units) =
if (ms > hours) {
(ms.toDouble / hours, "hours")
} else if (ms > minutes) {
(ms.toDouble / minutes, "min")
} else if (ms > seconds) {
(ms.toDouble / seconds, "s")
} else {
(ms.toDouble, "ms")
}
"%.1f %s".format(size, units)
}
}
case class RuntimePercentage(executorPct: Double, fetchPct: Option[Double], other: Double)
object RuntimePercentage {
def apply(totalTime: Long, metrics: TaskMetrics): RuntimePercentage = {
val denom = totalTime.toDouble
val fetchTime = metrics.shuffleReadMetrics.map{_.fetchWaitTime}
val fetch = fetchTime.map{_ / denom}
val exec = (metrics.executorRunTime - fetchTime.getOrElse(0L)) / denom
val other = 1.0 - (exec + fetch.getOrElse(0d))
RuntimePercentage(exec, fetch, other)
}
}
|
lyogavin/spark
|
core/src/main/scala/spark/scheduler/SparkListener.scala
|
Scala
|
apache-2.0
| 7,302
|
package chat.tox.antox.wrapper
import im.tox.tox4j.core.data.ToxNickname
class GroupPeer(var key: PeerKey,
var name: ToxNickname,
var ignored: Boolean)
|
wiiam/Antox
|
app/src/main/scala/chat/tox/antox/wrapper/GroupPeer.scala
|
Scala
|
gpl-3.0
| 190
|
(0, (_./*resolved: true*/toInt)): Tuple2[Int, Int => Int] // red
(0, (_./*resolved: true*/toInt)): (Int, Int => Int) // okay
|
ilinum/intellij-scala
|
testdata/resolve2/bug3/SCL3592A.scala
|
Scala
|
apache-2.0
| 131
|
package com.atomist.tree.pathexpression
import java.util.Objects
import com.atomist.source.StringFileArtifact
import com.atomist.tree.utils.NodeUtils._
import com.atomist.tree.pathexpression.PathExpressionParsingConstants._
import com.atomist.util.scalaparsing.CommonTypesParser
/**
* Scala parser combinator for path expressions
*/
trait PathExpressionParser extends CommonTypesParser {
/**
* We allow '-' and '_' in navigation axes, as they are valid in
* GitHub branch names, and '.' as it may appear in file names
*/
private def legalNavigationOrChild: Parser[String] =
"""[\.a-zA-Z0-9_\-\$#]+""".r
private def nodeName: Parser[String] = identifierRefString(Set.empty, legalNavigationOrChild)
private def functionName: Parser[String] = identifierRefString(Set.empty, "[a-zA-Z][a-zA-Z0-9\\-]+".r)
private def objectType: Parser[String] = identifierRefString(Set.empty, ident)
private def child: Parser[AxisSpecifier] = opt(s"$ChildAxis::") ^^ {
case Some(_) => Child
case _ => Child
}
private def navigationAxis: Parser[AxisSpecifier] =
identifierRefString(StandardAxes, legalNavigationOrChild) <~ "::" ^^
(s => NavigationAxis(s))
private def nodeTypeTest: Parser[NodesWithTag] = objectType <~ "()" ^^ (p => NodesWithTag(p))
private def propertyTest: Parser[Predicate] =
"@" ~> nodeName ~ EqualsToken ~ (singleQuotedString | doubleQuotedString) ^^ {
case prop ~ op ~ literal => prop match {
case "name" =>
NodeNamePredicate(literal)
case "type" =>
NodeTypePredicate(literal)
case propName: String =>
PropertyValuePredicate(propName, literal)
}
}
private def nullLiteral: Parser[Object] = "null" ^^ (_ => null)
private def integer: Parser[Integer] = decimalNumber ^^ (s => s.toInt)
private def literal: Parser[Any] = nullLiteral | singleQuotedString | integer
private def methodInvocationTest: Parser[Predicate] = "." ~> nodeName ~ args ~ EqualsToken ~ literal ^^ {
case methodName ~ args ~ _ ~ literal =>
FunctionPredicate(s".$methodName", (n, among) => {
val rO = invokeMethodIfPresent[Any](n, methodName, args)
rO.exists(invoked => Objects.equals(literal, invoked))
})
}
private def arg: Parser[String] = singleQuotedString
private def args: Parser[Seq[String]] = "(" ~> repsep(arg, ",") <~ ")"
private def stringLiteralFunctionArg: Parser[StringLiteralFunctionArg] =
(singleQuotedString | doubleQuotedString) ^^ (
s => StringLiteralFunctionArg(s)
)
private def relativePathFunctionArg: Parser[RelativePathFunctionArg] =
nonEmptyRelativePathExpression ^^ (
s => RelativePathFunctionArg(s)
)
private def functionArg: Parser[FunctionArg] =
stringLiteralFunctionArg |
relativePathFunctionArg
private def functionArgs: Parser[Seq[FunctionArg]] = "(" ~> rep1sep(functionArg, ",") <~ ")"
private def booleanMethodInvocation: Parser[Predicate] = "." ~> functionName ~ args ^^ {
case methodName ~ args =>
FunctionPredicate(s".$methodName", (n, among) =>
invokeMethodIfPresent[Boolean](n, methodName, args).getOrElse(false)
)
}
private def functionCall: Parser[Predicate] = functionName ~ functionArgs ^^ {
case functionName ~ args =>
XPathStyleFunctionPredicate(functionName, args)
}
private def index: Parser[Predicate] = integer ^^ {
n => IndexPredicate(n)
}
private def truePredicate: Parser[Predicate] = "true" ^^ (_ => TruePredicate)
private def falsePredicate: Parser[Predicate] = "false" ^^ (_ => FalsePredicate)
private def nestedPathExpressionPredicate: Parser[Predicate] =
pathExpression ^^ (pe => NestedPathExpressionPredicate(pe))
private def predicateTerm: Parser[Predicate] =
methodInvocationTest |
propertyTest |
booleanMethodInvocation |
truePredicate |
falsePredicate |
functionCall |
index |
nestedPathExpressionPredicate
private def negatedPredicate: Parser[Predicate] = "not" ~> "(" ~> predicateExpression <~ ")" ^^ {
pred => NegationOfPredicate(pred)
}
private def logicalOp: Parser[String] = "and" | "or"
private def predicateAnd: Parser[Predicate] = predicateTerm ~ logicalOp ~ predicateExpression ^^ {
case a ~ "and" ~ b => a and b
case a ~ "or" ~ b => a or b
}
private def predicateExpression: Parser[Predicate] = predicateAnd | negatedPredicate | predicateTerm
private def predicate: Parser[Predicate] =
PredicateOpen ~> predicateExpression ~ PredicateClose ~ opt(PredicateOptional) ^^ {
case p ~ _ ~ Some(_) => OptionalPredicate(p)
case p ~ _ ~ None => p
}
private def nodeNameTest: Parser[NodeTest] = nodeName ^^
(s => NamedNodeTest(s))
private def allNodes: Parser[NodeTest] = "*" ^^ (_ => All)
private def nodeTest: Parser[NodeTest] = nodeTypeTest | nodeNameTest | allNodes
private def property: Parser[AxisSpecifier] = (s"$PropertyAxis::" | "@") ^^ (s => Attribute)
private def descendant: Parser[AxisSpecifier] = (s"$DescendantAxis::" | "/") ^^
(s => Descendant)
private def selfAxis: Parser[AxisSpecifier] = (s"$SelfAxis::" | ".") ^^
(s => Self)
private def axis: Parser[AxisSpecifier] =
property |
navigationAxis |
descendant |
selfAxis |
child
private def locationStep: Parser[LocationStep] = axis ~ nodeTest ~ rep(predicate) ^^ {
case a ~ t ~ preds => LocationStep(a, t, preds)
}
private val slashSeparator = "/"
def relativePathExpression: Parser[PathExpression] = (selfAxis | repsep(locationStep, slashSeparator)) ^^ {
case Self => PathExpression(List(LocationStep(Self, All, Nil)))
case steps: List[LocationStep]@unchecked => PathExpression(steps)
}
private def nonEmptyRelativePathExpression: Parser[PathExpression] = new Parser[PathExpression] {
override def apply(in: Input): ParseResult[PathExpression] = {
relativePathExpression(in) match {
case s: Success[PathExpression]@unchecked if s.result.locationSteps.nonEmpty => s
case s: Success[PathExpression]@unchecked => Failure(s"Path expression has no steps: [${s.result}]", in)
case x => x
}
}
}
def absolutePathExpression: Parser[PathExpression] = slashSeparator ~> relativePathExpression
def pathExpression: Parser[PathExpression] = absolutePathExpression | relativePathExpression
def parsePathExpression(expr: String): PathExpression = {
try {
parseTo(StringFileArtifact("<input>", expr), phrase(absolutePathExpression))
} catch {
case e: IllegalArgumentException =>
throw new IllegalArgumentException(s"Path expression '$expr' is invalid: [${e.getMessage}]", e)
}
}
}
/**
* Default implementation of PathExpressionParser. Import this
* class for default conversion from Strings to path expressions.
*/
object PathExpressionParser extends PathExpressionParser {
implicit def parseString(expr: String): PathExpression = PathExpressionParser.parsePathExpression(expr)
}
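// Hedged usage sketch (editorial addition): parsing an absolute path
// expression via the companion object; the expression string is illustrative.
//
//   import com.atomist.tree.pathexpression.PathExpressionParser._
//   val pe: PathExpression = parsePathExpression("/src//File()")
//   // Invalid input raises IllegalArgumentException with a descriptive message.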
private object PathExpressionParsingConstants {
val DotSeparator = "."
val SlashSeparator = "/"
val AmongSeparator = "$"
val SlashSlash = "//"
val PredicateOpen = "["
val PredicateClose = "]"
val PredicateOptional = "?"
val PropertyAxis = "property"
val ChildAxis = "child"
val DescendantAxis = "descendant"
val SelfAxis = "self"
/**
* Axes that can't be a property
*/
val StandardAxes = Set(PropertyAxis, ChildAxis, DescendantAxis, SelfAxis)
}
|
atomist/rug
|
src/main/scala/com/atomist/tree/pathexpression/PathExpressionParser.scala
|
Scala
|
gpl-3.0
| 7,551
|
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package models
import play.api.libs.json.Json
case class ReturnedProblem(device_id : String, nickname : String, problem : String)
object ReturnedProblem {
implicit val format = Json.format[ReturnedProblem]
}
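// Hedged usage sketch (editorial addition): the derived format round-trips the
// case class through Play JSON; the field values are illustrative.
//
//   Json.toJson(ReturnedProblem("dev-1", "kitchen-pi", "overheating"))
//   // => {"device_id":"dev-1","nickname":"kitchen-pi","problem":"overheating"}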
|
chrisjwwalker/cjww-diagnostics
|
app/models/ReturnedProblem.scala
|
Scala
|
apache-2.0
| 944
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.torch
import com.intel.analytics.bigdl.nn.Euclidean
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.RandomGenerator._
import scala.util.Random
@com.intel.analytics.bigdl.tags.Serial
class EuclideanSpec extends TorchSpec {
"A Euclidean " should "generate correct output and grad with input one dimension" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val input = Tensor[Double](7).apply1(e => Random.nextDouble())
val gradOutput = Tensor[Double](7).apply1(e => Random.nextDouble())
val code = "torch.manualSeed(" + seed + ")\n" +
"module = nn.Euclidean(7, 7)\n" +
"weight = module.weight\n" +
"module:zeroGradParameters()\n" +
"local i = 0\n" +
"while i < 5 do\n" +
"output = module:forward(input)\n" +
"gradInput = module:backward(input, gradOutput)\n" +
"i = i + 1\n" +
"end\n" +
"weight = module.weight\n" +
"gradWeight = module.gradWeight\n" +
"_repeat2 = module._repeat2\n"
val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput", "weight", "gradWeight", "_repeat2"))
val luaOutput1 = torchResult("output").asInstanceOf[Tensor[Double]]
val luaOutput2 = torchResult("gradInput").asInstanceOf[Tensor[Double]]
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaGradWeight = torchResult("gradWeight").asInstanceOf[Tensor[Double]]
val module = new Euclidean[Double](7, 7)
val start = System.nanoTime()
var output : Tensor[Double] = null
var gradInput : Tensor[Double] = null
var i = 0
while (i < 5) {
output = module.forward(input)
gradInput = module.backward(input, gradOutput)
i += 1
}
val weight = module.weight
val gradWeight = module.gradWeight
val end = System.nanoTime()
val scalaTime = end - start
weight should be(luaWeight)
output should be(luaOutput1)
gradInput should be(luaOutput2)
gradWeight should be(luaGradWeight)
println("Test case : Euclidean, Torch : " + luaTime +
" s, Scala : " + scalaTime / 1e9 + " s")
}
"A Euclidean " should "generate correct output and grad with input two dimensions" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val input = Tensor[Double](8, 7).apply1(e => Random.nextDouble())
val gradOutput = Tensor[Double](8, 7).apply1(e => Random.nextDouble())
val code = "torch.manualSeed(" + seed + ")\n" +
"module = nn.Euclidean(7, 7)\n" +
"weight = module.weight\n" +
"output = module:forward(input)\n" +
"module:zeroGradParameters()\n" +
"gradInput = module:backward(input,gradOutput)\n" +
"gradWeight = module.gradWeight\n" +
"_repeat2 = module._repeat2\n"
val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput", "weight", "gradWeight", "_repeat2"))
val luaOutput1 = torchResult("output").asInstanceOf[Tensor[Double]]
val luaOutput2 = torchResult("gradInput").asInstanceOf[Tensor[Double]]
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaGradWeight = torchResult("gradWeight").asInstanceOf[Tensor[Double]]
val module = new Euclidean[Double](7, 7)
val start = System.nanoTime()
val output = module.forward(input)
val gradInput = module.backward(input, gradOutput)
val weight = module.weight
val gradWeight = module.gradWeight
val end = System.nanoTime()
val scalaTime = end - start
weight should be(luaWeight)
output should be(luaOutput1)
gradInput should be(luaOutput2)
gradWeight should be(luaGradWeight)
println("Test case : Euclidean, Torch : " + luaTime +
" s, Scala : " + scalaTime / 1e9 + " s")
}
}
|
jenniew/BigDL
|
spark/dl/src/test/scala/com/intel/analytics/bigdl/torch/EuclideanSpec.scala
|
Scala
|
apache-2.0
| 4,495
|
package scorex.crypto.authds.avltree
import org.scalatest.propspec.AnyPropSpec
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import scorex.crypto.authds.avltree.batch._
import scorex.crypto.authds.{ADKey, ADValue, TwoPartyTests}
import scorex.crypto.hash.{Blake2b256, Digest32, Sha256}
class AVLDeleteSpecification extends AnyPropSpec with ScalaCheckDrivenPropertyChecks with TwoPartyTests {
val KL = 26
val VL = 8
property("Batch delete") {
val newProver = new BatchAVLProver[Digest32, Blake2b256.type](KL, Some(VL))
val aKey = ADKey @@ Sha256("key 1").take(KL)
val aValue = ADValue @@ Sha256("value 1").take(VL)
newProver.performOneOperation(Insert(aKey, aValue)).isSuccess shouldBe true
newProver.generateProof()
newProver.performOneOperation(Update(aKey, aValue)).isSuccess shouldBe true
newProver.generateProof()
newProver.performOneOperation(Remove(aKey)).isSuccess shouldBe true
newProver.performOneOperation(Update(aKey, aValue)).isSuccess shouldBe false
}
}
|
ScorexProject/scrypto
|
src/test/scala/scorex/crypto/authds/avltree/AVLDeleteSpecification.scala
|
Scala
|
cc0-1.0
| 1,043
|
/**
* Generated by Scrooge
* version: 3.13.0
* rev: 0921444211eb6b3d2ac9fd31a1bf189f94c6ae85
* built at: 20140325-114520
*/
package com.twitter.zipkin.gen
import com.twitter.scrooge.ThriftEnum
@javax.annotation.Generated(value = Array("com.twitter.scrooge.Compiler"))
case object ResultCode {
case object Ok extends com.twitter.zipkin.gen.ResultCode {
val value = 0
val name = "Ok"
}
case object TryLater extends com.twitter.zipkin.gen.ResultCode {
val value = 1
val name = "TryLater"
}
/**
* Find the enum by its integer value, as defined in the Thrift IDL.
* @throws NoSuchElementException if the value is not found.
*/
def apply(value: Int): com.twitter.zipkin.gen.ResultCode = {
value match {
case 0 => com.twitter.zipkin.gen.ResultCode.Ok
case 1 => com.twitter.zipkin.gen.ResultCode.TryLater
case _ => throw new NoSuchElementException(value.toString)
}
}
/**
* Find the enum by its integer value, as defined in the Thrift IDL.
* Returns None if the value is not found
*/
def get(value: Int): Option[com.twitter.zipkin.gen.ResultCode] = {
value match {
case 0 => scala.Some(com.twitter.zipkin.gen.ResultCode.Ok)
case 1 => scala.Some(com.twitter.zipkin.gen.ResultCode.TryLater)
case _ => scala.None
}
}
def valueOf(name: String): Option[com.twitter.zipkin.gen.ResultCode] = {
name.toLowerCase match {
case "ok" => scala.Some(com.twitter.zipkin.gen.ResultCode.Ok)
case "trylater" => scala.Some(com.twitter.zipkin.gen.ResultCode.TryLater)
case _ => scala.None
}
}
lazy val list: List[com.twitter.zipkin.gen.ResultCode] = scala.List[com.twitter.zipkin.gen.ResultCode](
com.twitter.zipkin.gen.ResultCode.Ok,
com.twitter.zipkin.gen.ResultCode.TryLater
)
}
@javax.annotation.Generated(value = Array("com.twitter.scrooge.Compiler"))
sealed trait ResultCode extends ThriftEnum with Serializable
|
pkoryzna/zipkin
|
zipkin-scrooge/target/src_managed/main/com/twitter/zipkin/gen/ResultCode.scala
|
Scala
|
apache-2.0
| 1,964
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.subscription
import org.apache.pulsar.client.api.PulsarClient
case class PClient(serviceUrl: String) extends PulsarClientService {
lazy val client: PulsarClient = PulsarClient.builder().serviceUrl(serviceUrl).build()
}
|
adarro/ddo-calc
|
incubating/ddo-provider/src/main/scala/io/truthencode/ddo/subscription/PClient.scala
|
Scala
|
apache-2.0
| 896
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel
package scala
import dsl.languages.LanguageFunction
/**
* Scala implementation for an Apache Camel Expression
*/
class ScalaExpression(val expression: Exchange => Any) extends Expression {
def evaluate(exchange: Exchange) = {
val value = expression(exchange)
value match {
case f : LanguageFunction => f.evaluate(exchange, classOf[Object])
case _ => value.asInstanceOf[Object]
}
}
def evaluate[Target](exchange: Exchange, toType: Class[Target]) = {
val value = evaluate(exchange)
exchange.getContext().getTypeConverter().convertTo(toType, value)
}
}
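// Hedged usage sketch (editorial addition): wrapping a plain Scala function as
// a Camel Expression; the header name is illustrative.
//
//   val headerExpr = new ScalaExpression(exchange => exchange.getIn.getHeader("id"))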
|
engagepoint/camel
|
components/camel-scala/src/main/scala/org/apache/camel/scala/ScalaExpression.scala
|
Scala
|
apache-2.0
| 1,428
|
package objektwerks.types
trait Semigroup[A] {
def append(x: A, y: => A): A
}
trait Monoid[A] extends Semigroup[A] {
def zero: A
}
trait Functor[F[_]] {
def map[A, B](fa: F[A])(f: A => B): F[B]
}
trait Monad[F[_]] extends Functor[F] {
def point[A](a: => A): F[A]
def flatMap[A, B](fa: F[A])(f: A => F[B]): F[B]
}
trait Applicative[F[_]] extends Functor[F] {
def point[A](a: => A): F[A]
def apply[A, B](fa: F[A])(f: F[A => B]): F[B]
override def map[A, B](fa: F[A])(f: A => B): F[B] = apply(fa)(point(f))
}
object CategoryTheory {
val adderMonoid = new Monoid[Int] {
override def zero: Int = 0
override def append(x: Int, y: => Int): Int = x + y
}
val listFunctor = new Functor[List] {
override def map[A, B](xs: List[A])(f: A => B): List[B] = xs map f
}
val optionMonad = new Monad[Option] {
override def point[A](a: => A): Option[A] = Option(a)
override def map[A, B](oa: Option[A])(f: A => B): Option[B] = oa map f
override def flatMap[A, B](oa: Option[A])(f: A => Option[B]): Option[B] = oa flatMap f
}
val optionApplicative = new Applicative[Option] {
override def point[A](a: => A): Option[A] = Some(a)
override def apply[A, B](fa: Option[A])(ff: Option[A => B]): Option[B] = (fa, ff) match {
case (None, _) => None
case (Some(_), None) => None
case (Some(a), Some(f)) => Some(f(a))
}
}
def isAssociative[T](op: (T, T) => T, x: T, y: T, z: T): Boolean = {
op(op(x, y), z) == op(x, op(y, z))
}
def isCommutative[T](op: (T, T) => T, x: T, y: T): Boolean = {
op(x, y) == op(y, x)
}
def isIdempotent[T](op: T => T, x: T): Boolean = {
val f = op
val g = op compose op
f(x) == g(x)
}
}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
class CategoryTheoryTest extends AnyFunSuite with Matchers {
import CategoryTheory._
test("monoid") {
assert(adderMonoid.zero == 0)
assert(adderMonoid.append(1, 1) == 2)
}
test("functor") {
val listOfNumbers = List(1, 2, 3)
val listOfStrings = listFunctor.map(listOfNumbers)(_.toString)
val expectedMorphism = List("1", "2", "3")
assert(listOfStrings == expectedMorphism)
}
test("monad") {
val option: Option[Int] = optionMonad.point(1)
val mappedOption: Option[Int] = optionMonad.map(option)(i => i * 3)
val flatMappedOption: Option[Int] = optionMonad.flatMap(option)(i => Some(i))
option.get shouldEqual 1
mappedOption.get shouldEqual 3
flatMappedOption.get shouldEqual 1
(option != mappedOption) shouldBe true
option shouldEqual flatMappedOption
}
test("applicative") {
val option: Option[Int] = optionApplicative.point(1)
val mappedOption: Option[Int] = optionApplicative.map(option)(i => i * 3)
option.get shouldEqual 1
mappedOption.get shouldEqual 3
}
test("is associative") {
isAssociative[Int](_ + _, 1, 2, 3) shouldBe true
isAssociative[Double](_ / _, 1, 2, 3) shouldBe false
}
test("is commutative") {
isCommutative[Int](_ + _, 3, 6) shouldBe true
isCommutative[String](_ + _, "a", "b") shouldBe false
}
test("is idempotent") {
def toUpper(s: String): String = s.toUpperCase
def increment(i: Int) = i + 1
isIdempotent(toUpper, "AbCdEfG") shouldBe true
isIdempotent(increment, 0) shouldBe false
}
}
|
objektwerks/scala
|
src/test/scala/objektwerks/types/CategoryTheoryTest.scala
|
Scala
|
apache-2.0
| 3,353
|
/*
* Scala classfile decoder (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.scalap
import scala.collection.mutable
import mutable.ListBuffer
object Arguments {
case class Parser(optionPrefix: Char) {
val options: mutable.Set[String] = new mutable.HashSet
val prefixes: mutable.Set[String] = new mutable.HashSet
val optionalArgs: mutable.Set[String] = new mutable.HashSet
val prefixedBindings: mutable.Map[String, Char] = new mutable.HashMap
val optionalBindings: mutable.Map[String, Char] = new mutable.HashMap
def argumentError(message: String): Unit = Console.println(message)
def withOption(option: String): Parser = {
options += option
this
}
def withOptionalArg(option: String): Parser = {
optionalArgs += option
this
}
def withOptionalBinding(option: String, separator: Char): Parser = {
optionalBindings(option) = separator
this
}
def withPrefixedArg(prefix: String): Parser = {
prefixes += prefix
this
}
def withPrefixedBinding(prefix: String, separator: Char): Parser = {
prefixedBindings(prefix) = separator
this
}
def parseBinding(str: String, separator: Char): (String, String) = (str indexOf separator) match {
case -1 => argumentError(s"missing '$separator' in binding '$str'") ; ("", "")
case idx => ((str take idx).trim, (str drop (idx + 1)).trim)
}
def parse(args: Array[String]): Arguments = {
val res = new Arguments
parse(args, res)
res
}
def parse(args: Array[String], res: Arguments): Unit = {
if (args != null) {
var i = 0
while (i < args.length)
if ((args(i) == null) || (args(i).length() == 0))
i += 1
else if (args(i).charAt(0) != optionPrefix) {
res.addOther(args(i))
i += 1
} else if (options(args(i))) {
res.addOption(args(i))
i += 1
} else if (optionalArgs contains args(i)) {
if ((i + 1) == args.length) {
argumentError(s"missing argument for '${args(i)}'")
i += 1
} else {
res.addArgument(args(i), args(i + 1))
i += 2
}
} else if (optionalBindings contains args(i)) {
if ((i + 1) == args.length) {
argumentError(s"missing argument for '${args(i)}'")
i += 1
} else {
res.addBinding(args(i),
parseBinding(args(i + 1), optionalBindings(args(i))))
i += 2
}
} else {
val iter = prefixes.iterator
val j = i
while ((i == j) && iter.hasNext) {
val prefix = iter.next()
if (args(i) startsWith prefix) {
res.addPrefixed(prefix, args(i).substring(prefix.length()).trim())
i += 1
}
}
if (i == j) {
val iter = prefixedBindings.keysIterator
while ((i == j) && iter.hasNext) {
val prefix = iter.next()
if (args(i) startsWith prefix) {
val arg = args(i).substring(prefix.length()).trim()
i += 1
res.addBinding(prefix,
parseBinding(arg, prefixedBindings(prefix)))
}
}
if (i == j) {
argumentError(s"unknown option '${args(i)}'")
i += 1
}
}
}
}
}
}
def parse(options: String*)(args: Array[String]): Arguments = {
val parser = new Parser('-')
options foreach parser.withOption
parser parse args
}
}
class Arguments {
private val options = new mutable.HashSet[String]
private val arguments = new mutable.HashMap[String, String]
private val prefixes = new mutable.HashMap[String, mutable.HashSet[String]]
private val bindings = new mutable.HashMap[String, mutable.HashMap[String, String]]
private val others = new ListBuffer[String]
def addOption(option: String): Unit = options += option
def addArgument(option: String, arg: String): Unit = arguments(option) = arg
def addPrefixed(prefix: String, arg: String): Unit =
prefixes.getOrElseUpdate(prefix, new mutable.HashSet) += arg
def addBinding(tag: String, key: String, value: String): Unit =
if (key.length > 0)
bindings.getOrElseUpdate(tag, new mutable.HashMap)(key) = value
def addBinding(tag: String, binding: (String, String)): Unit =
addBinding(tag, binding._1, binding._2)
def addOther(arg: String): Unit = others += arg
def contains(option: String): Boolean = options(option)
def getArgument(option: String): Option[String] = arguments get option
def getSuffixes(prefix: String): mutable.Set[String] =
prefixes.getOrElse(prefix, new mutable.HashSet)
def containsSuffix(prefix: String, suffix: String): Boolean =
prefixes get prefix exists (set => set(suffix))
def getBindings(tag: String): mutable.Map[String, String] =
bindings.getOrElse(tag, new mutable.HashMap)
def getBinding(option: String, key: String): Option[String] =
bindings get option flatMap (_ get key)
def getOthers: List[String] = others.toList
}
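// Hedged usage sketch (editorial addition): parsing a simple flag plus a
// positional argument; the option and file names are illustrative.
//
//   val parsed = Arguments.parse("-verbose")(Array("-verbose", "Foo.class"))
//   parsed contains "-verbose"   // true
//   parsed.getOthers             // List("Foo.class")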
|
scala/scala
|
src/scalap/scala/tools/scalap/Arguments.scala
|
Scala
|
apache-2.0
| 5,611
|
package com.nthportal.euler
package h0.t1
import com.nthportal.euler.util.calendar.Date
import com.nthportal.euler.util.calendar.DayOfWeek.{Monday, Sunday}
import com.nthportal.euler.util.calendar.Month.January
object Problem19 extends ProjectEulerProblem {
private val startingDate = Date(new January(1900), 1, Monday)
override def apply(): Long = {
dateStream(startingDate)
.dropWhile(_.year < 1901)
.takeWhile(_.year < 2001)
.count(d => d.day == 1 && d.dayOfWeek == Sunday)
}
private def dateStream(date: Date): Stream[Date] = date #:: dateStream(date.nextDay)
}
|
NthPortal/euler-n-scala
|
src/main/scala/com/nthportal/euler/h0/t1/Problem19.scala
|
Scala
|
mit
| 600
|
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.pattern
import akka.actor._
import akka.util.{ Timeout }
import akka.dispatch.sysmsg.{ Unwatch, Watch }
import scala.concurrent.Future
import scala.concurrent.duration.Duration
import scala.util.Success
import scala.concurrent.duration.FiniteDuration
trait GracefulStopSupport {
/**
* Returns a [[scala.concurrent.Future]] that will be completed with success (value `true`) when
* the target actor's existing messages have been processed and the actor has been
* terminated.
*
* Useful when you need to wait for termination or compose ordered termination of several actors,
* which should only be done outside of the ActorSystem as blocking inside Actors is discouraged.
*
* <b>IMPORTANT NOTICE:</b> the actor being terminated and its supervisor
* being informed of the availability of the deceased actor’s name are two
* distinct operations, which do not obey any reliable ordering. Especially
* the following will NOT work:
*
* {{{
* def receive = {
* case msg =>
* Await.result(gracefulStop(someChild, timeout), timeout)
* context.actorOf(Props(...), "someChild") // assuming that that was someChild’s name, this will NOT work
* }
* }}}
*
   * If the target actor isn't terminated within the timeout, the [[scala.concurrent.Future]]
   * is completed with failure [[akka.pattern.AskTimeoutException]].
*
* If you want to invoke specialized stopping logic on your target actor instead of PoisonPill, you can pass your
* stop command as a parameter:
* {{{
* gracefulStop(someChild, timeout, MyStopGracefullyMessage).onComplete {
   *   // Do something after someChild has been stopped
* }
* }}}
*/
def gracefulStop(target: ActorRef, timeout: FiniteDuration, stopMessage: Any = PoisonPill): Future[Boolean] = {
val internalTarget = target.asInstanceOf[InternalActorRef]
val ref = PromiseActorRef(internalTarget.provider, Timeout(timeout), target, stopMessage.getClass.getName)
internalTarget.sendSystemMessage(Watch(internalTarget, ref))
target.tell(stopMessage, Actor.noSender)
ref.result.future.transform(
{
case Terminated(t) if t.path == target.path ⇒ true
case _ ⇒ { internalTarget.sendSystemMessage(Unwatch(target, ref)); false }
},
t ⇒ { internalTarget.sendSystemMessage(Unwatch(target, ref)); t })(ref.internalCallingThreadExecutionContext)
}
}
|
jmnarloch/akka.js
|
akka-js-actor/jvm/src/main/scala/akka/pattern/GracefulStopSupport.scala
|
Scala
|
bsd-3-clause
| 2,548
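A short usage sketch for gracefulStop, following the blocking pattern the scaladoc above allows only outside of actors; the actor reference and timeouts are illustrative:

import akka.actor.ActorRef
import akka.pattern.gracefulStop
import scala.concurrent.Await
import scala.concurrent.duration._

object GracefulStopExample {
  def shutDown(worker: ActorRef): Unit = {
    // Sends the default PoisonPill and completes with true once Terminated is observed.
    val stopped = gracefulStop(worker, 5.seconds)
    Await.result(stopped, 6.seconds)
  }
}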
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Equality
import org.scalactic.Uniformity
import org.scalactic.Prettifier
import org.scalactic.StringNormalizations._
import SharedHelpers._
import FailureMessages.decorateToStringValue
import scala.collection.mutable.ListBuffer
import exceptions.TestFailedException
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
class ListShouldContainInOrderElementsOfSpec extends AnyFunSpec with Matchers {
private val prettifier = Prettifier.default
private def upperCase(value: Any): Any =
value match {
case l: List[_] => l.map(upperCase(_))
case s: String => s.toUpperCase
case c: Char => c.toString.toUpperCase.charAt(0)
case (s1: String, s2: String) => (s1.toUpperCase, s2.toUpperCase)
case e: java.util.Map.Entry[_, _] =>
(e.getKey, e.getValue) match {
case (k: String, v: String) => Entry(k.toUpperCase, v.toUpperCase)
case _ => value
}
case _ => value
}
val upperCaseStringEquality =
new Equality[String] {
def areEqual(a: String, b: Any): Boolean = upperCase(a) == upperCase(b)
}
//ADDITIONAL//
describe("a List") {
val fumList: List[String] = List("fex", "fum", "fum", "foe", "fie", "fie", "fie", "fee", "fee")
val toList: List[String] = List("happy", "happy", "happy", "birthday", "to", "you", "too")
describe("when used with contain inOrderElementsOf Seq(..)") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should contain inOrderElementsOf Seq("fum", "foe", "fie", "fee")
val e1 = intercept[TestFailedException] {
fumList should contain inOrderElementsOf Seq("fee", "fie", "foe", "fum")
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (FailureMessages.didNotContainAllElementsOfInOrder(prettifier, fumList, Seq("fee", "fie", "foe", "fum")))
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should contain inOrderElementsOf Seq("FUM", "FOE", "FIE", "FEE")
intercept[TestFailedException] {
fumList should contain inOrderElementsOf Seq("fee", "fie", "foe", "fum")
}
}
it("should use an explicitly provided Equality") {
(fumList should contain inOrderElementsOf Seq("FUM", "FOE", "FIE", "FEE")) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(fumList should contain inOrderElementsOf Seq("fee", "fie", "foe", "fum")) (decided by upperCaseStringEquality)
}
intercept[TestFailedException] {
fumList should contain inOrderElementsOf Seq(" FUM ", " FOE ", " FIE ", " FEE ")
}
(fumList should contain inOrderElementsOf Seq(" FUM ", " FOE ", " FIE ", " FEE ")) (after being lowerCased and trimmed)
}
it("should do nothing when RHS contain duplicated value") {
fumList should contain inOrderElementsOf Seq("fum", "fum", "foe", "fie", "fee")
}
}
describe("when used with (contain inOrderElementsOf Seq(..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (contain inOrderElementsOf Seq("fum", "foe", "fie", "fee"))
val e1 = intercept[TestFailedException] {
fumList should (contain inOrderElementsOf Seq("fee", "fie", "foe", "fum"))
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (FailureMessages.didNotContainAllElementsOfInOrder(prettifier, fumList, Seq("fee", "fie", "foe", "fum")))
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (contain inOrderElementsOf Seq("FUM", "FOE", "FIE", "FEE"))
intercept[TestFailedException] {
fumList should (contain inOrderElementsOf Seq("fee", "fie", "foe", "fum"))
}
}
it("should use an explicitly provided Equality") {
(fumList should (contain inOrderElementsOf Seq("FUM", "FOE", "FIE", "FEE"))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(fumList should (contain inOrderElementsOf Seq("fee", "fie", "foe", "fum"))) (decided by upperCaseStringEquality)
}
intercept[TestFailedException] {
fumList should (contain inOrderElementsOf Seq(" FUM ", " FOE ", " FIE ", " FEE "))
}
(fumList should (contain inOrderElementsOf Seq(" FUM ", " FOE ", " FIE ", " FEE "))) (after being lowerCased and trimmed)
}
it("should do nothing when RHS contain duplicated value") {
fumList should (contain inOrderElementsOf Seq("fum", "fum", "foe", "fie", "fum"))
}
}
describe("when used with not contain inOrderElementsOf Seq(..)") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
toList should not contain inOrderElementsOf (Seq("you", "to", "birthday", "happy"))
val e1 = intercept[TestFailedException] {
toList should not contain inOrderElementsOf (Seq("happy", "birthday", "to", "you"))
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (FailureMessages.containedAllElementsOfInOrder(prettifier, toList, Seq("happy", "birthday", "to", "you")))
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
toList should not contain inOrderElementsOf (Seq("YOU", "TO", "BIRTHDAY", "HAPPY"))
intercept[TestFailedException] {
toList should not contain inOrderElementsOf (Seq("HAPPY", "BIRTHDAY", "TO", "YOU"))
}
}
it("should use an explicitly provided Equality") {
(toList should not contain inOrderElementsOf (Seq("YOU", "TO", "BIRTHDAY", "HAPPY"))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(toList should not contain inOrderElementsOf (Seq("HAPPY", "BIRTHDAY", "TO", "YOU"))) (decided by upperCaseStringEquality)
}
toList should not contain inOrderElementsOf (Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))
intercept[TestFailedException] {
(toList should not contain inOrderElementsOf (Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))) (after being lowerCased and trimmed)
}
}
it("should do nothing when RHS contain duplicated value") {
toList should not contain inOrderElementsOf (Seq("fee", "fie", "foe", "fie", "fum"))
}
}
describe("when used with (not contain inOrderElementsOf Seq(..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
toList should (not contain inOrderElementsOf (Seq("you", "to", "birthday", "happy")))
val e1 = intercept[TestFailedException] {
toList should (not contain inOrderElementsOf (Seq("happy", "birthday", "to", "you")))
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (FailureMessages.containedAllElementsOfInOrder(prettifier, toList, Seq("happy", "birthday", "to", "you")))
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
toList should (not contain inOrderElementsOf (Seq("YOU", "TO", "BIRTHDAY", "HAPPY")))
intercept[TestFailedException] {
toList should (not contain inOrderElementsOf (Seq("HAPPY", "BIRTHDAY", "TO", "YOU")))
}
}
it("should use an explicitly provided Equality") {
(toList should (not contain inOrderElementsOf (Seq("YOU", "TO", "BIRTHDAY", "HAPPY")))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(toList should (not contain inOrderElementsOf (Seq("HAPPY", "BIRTHDAY", "TO", "YOU")))) (decided by upperCaseStringEquality)
}
toList should (not contain inOrderElementsOf (Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))
intercept[TestFailedException] {
(toList should (not contain inOrderElementsOf (Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")))) (after being lowerCased and trimmed)
}
}
it("should do nothing when RHS contain duplicated value") {
toList should (not contain inOrderElementsOf (Seq("fee", "fie", "foe", "fie", "fum")))
}
}
describe("when used with shouldNot contain inOrderElementsOf Seq(..)") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
toList shouldNot contain inOrderElementsOf Seq("you", "to", "birthday", "happy")
val e1 = intercept[TestFailedException] {
toList shouldNot contain inOrderElementsOf Seq("happy", "birthday", "to", "you")
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (FailureMessages.containedAllElementsOfInOrder(prettifier, toList, Seq("happy", "birthday", "to", "you")))
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
toList shouldNot contain inOrderElementsOf Seq("YOU", "TO", "BIRTHDAY", "HAPPY")
intercept[TestFailedException] {
toList shouldNot contain inOrderElementsOf Seq("HAPPY", "BIRTHDAY", "TO", "YOU")
}
}
it("should use an explicitly provided Equality") {
(toList shouldNot contain inOrderElementsOf Seq("YOU", "TO", "BIRTHDAY", "HAPPY")) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(toList shouldNot contain inOrderElementsOf Seq("HAPPY", "BIRTHDAY", "TO", "YOU")) (decided by upperCaseStringEquality)
}
toList shouldNot contain inOrderElementsOf Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")
intercept[TestFailedException] {
(toList shouldNot contain inOrderElementsOf Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU ")) (after being lowerCased and trimmed)
}
}
it("should do nothing when RHS contain duplicated value") {
toList shouldNot contain inOrderElementsOf Seq("fee", "fie", "foe", "fie", "fum")
}
}
describe("when used with shouldNot (contain inOrderElementsOf Seq(..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
toList shouldNot (contain inOrderElementsOf Seq("you", "to", "birthday", "happy"))
val e1 = intercept[TestFailedException] {
toList shouldNot (contain inOrderElementsOf Seq("happy", "birthday", "to", "you"))
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message.get should be (FailureMessages.containedAllElementsOfInOrder(prettifier, toList, Seq("happy", "birthday", "to", "you")))
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
toList shouldNot (contain inOrderElementsOf Seq("YOU", "TO", "BIRTHDAY", "HAPPY"))
intercept[TestFailedException] {
toList shouldNot (contain inOrderElementsOf Seq("HAPPY", "BIRTHDAY", "TO", "YOU"))
}
}
it("should use an explicitly provided Equality") {
(toList shouldNot (contain inOrderElementsOf Seq("YOU", "TO", "BIRTHDAY", "HAPPY"))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(toList shouldNot (contain inOrderElementsOf Seq("HAPPY", "BIRTHDAY", "TO", "YOU"))) (decided by upperCaseStringEquality)
}
toList shouldNot (contain inOrderElementsOf Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))
intercept[TestFailedException] {
(toList shouldNot (contain inOrderElementsOf Seq(" HAPPY ", " BIRTHDAY ", " TO ", " YOU "))) (after being lowerCased and trimmed)
}
}
it("should do nothing when RHS contain duplicated value") {
toList shouldNot (contain inOrderElementsOf Seq("fee", "fie", "foe", "fie", "fum"))
}
}
}
describe("a col of Lists") {
val list1s: Vector[List[Int]] = Vector( List(0, 1, 2, 2, 3), List(0, 1, 1, 2, 3, 3, 3), List(0, 1, 2, 3))
val lists: Vector[List[Int]] = Vector( List(0, 1, 2, 2, 3, 3, 3), List(0, 1, 1, 1, 2, 3), List(8, 2, 2, 3, 4))
val listsNil: Vector[List[Int]] = Vector( List(0, 1, 1, 1, 2, 2, 2, 3, 3, 3), List(0, 1, 2, 2, 3), Nil)
val hiLists: Vector[List[String]] = Vector( List("hello", "hi", "hi", "he"), List("hello", "hi", "he", "he", "he"), List("hello", "hi", "he"))
val toLists: Vector[List[String]] = Vector( List("nice", "to", "you"), List("nice", "to", "you"), List("nice", "to", "you"))
describe("when used with contain inOrderElementsOf Seq(..)") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should contain inOrderElementsOf Seq(1, 2, 3)
atLeast (2, lists) should contain inOrderElementsOf Seq(1, 2, 3)
atMost (2, lists) should contain inOrderElementsOf Seq(1, 2, 3)
no (lists) should contain inOrderElementsOf Seq(3, 4, 5)
val e1 = intercept[TestFailedException] {
all (lists) should contain inOrderElementsOf Seq(1, 2, 3)
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \n" +
          " at index 2, " + decorateToStringValue(prettifier, lists(2)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq(1, 2, 3)) + " in order (ListShouldContainInOrderElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \n" +
          "in " + decorateToStringValue(prettifier, lists)))
val e3 = intercept[TestFailedException] {
all (listsNil) should contain inOrderElementsOf Seq(1, 2, 3)
}
e3.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e3.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e3.message should be (Some("'all' inspection failed, because: \n" +
          " at index 2, " + decorateToStringValue(prettifier, listsNil(2)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq(1, 2, 3)) + " in order (ListShouldContainInOrderElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \n" +
          "in " + decorateToStringValue(prettifier, listsNil)))
}
it("should use the implicit Equality in scope") {
all (hiLists) should contain inOrderElementsOf Seq("hi", "he")
intercept[TestFailedException] {
all (hiLists) should contain inOrderElementsOf Seq("hi", "ho")
}
{
implicit val ise = upperCaseStringEquality
all (hiLists) should contain inOrderElementsOf Seq("HI", "HE")
intercept[TestFailedException] {
all (hiLists) should contain inOrderElementsOf Seq("HI", "HO")
}
}
}
it("should use an explicitly provided Equality") {
(all (hiLists) should contain inOrderElementsOf Seq("HI", "HE")) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(all (hiLists) should contain inOrderElementsOf Seq("HI", "HO")) (decided by upperCaseStringEquality)
}
implicit val ise = upperCaseStringEquality
(all (hiLists) should contain inOrderElementsOf Seq("hi", "he")) (decided by defaultEquality[String])
intercept[TestFailedException] {
(all (hiLists) should contain inOrderElementsOf Seq("hi", "ho")) (decided by defaultEquality[String])
}
}
it("should do nothing when RHS contain duplicated value") {
all (list1s) should contain inOrderElementsOf Seq(1, 2, 2, 3)
}
}
describe("when used with (contain inOrderElementsOf Seq(..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should (contain inOrderElementsOf Seq(1, 2, 3))
atLeast (2, lists) should (contain inOrderElementsOf Seq(1, 2, 3))
atMost (2, lists) should (contain inOrderElementsOf Seq(1, 2, 3))
no (lists) should (contain inOrderElementsOf Seq(3, 4, 5))
no (listsNil) should (contain inOrderElementsOf Seq(3, 4, 5))
val e1 = intercept[TestFailedException] {
all (lists) should (contain inOrderElementsOf Seq(1, 2, 3))
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \n" +
          " at index 2, " + decorateToStringValue(prettifier, lists(2)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq(1, 2, 3)) + " in order (ListShouldContainInOrderElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \n" +
          "in " + decorateToStringValue(prettifier, lists)))
val e4 = intercept[TestFailedException] {
all (listsNil) should (contain inOrderElementsOf Seq(1, 2, 3))
}
e4.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e4.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e4.message should be (Some("'all' inspection failed, because: \n" +
          " at index 2, " + decorateToStringValue(prettifier, listsNil(2)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq(1, 2, 3)) + " in order (ListShouldContainInOrderElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \n" +
          "in " + decorateToStringValue(prettifier, listsNil)))
}
it("should use the implicit Equality in scope") {
all (hiLists) should (contain inOrderElementsOf Seq("hi", "he"))
intercept[TestFailedException] {
all (hiLists) should (contain inOrderElementsOf Seq("he", "hi"))
}
{
implicit val ise = upperCaseStringEquality
all (hiLists) should (contain inOrderElementsOf Seq("HI", "HE"))
intercept[TestFailedException] {
all (hiLists) should (contain inOrderElementsOf Seq("HI", "HO"))
}
}
}
it("should use an explicitly provided Equality") {
(all (hiLists) should (contain inOrderElementsOf Seq("HI", "HE"))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(all (hiLists) should (contain inOrderElementsOf Seq("HI", "HO"))) (decided by upperCaseStringEquality)
}
implicit val ise = upperCaseStringEquality
(all (hiLists) should (contain inOrderElementsOf Seq("hi", "he"))) (decided by defaultEquality[String])
intercept[TestFailedException] {
(all (hiLists) should (contain inOrderElementsOf Seq("he", "hi"))) (decided by defaultEquality[String])
}
}
it("should do nothing when RHS contain duplicated value") {
all (list1s) should (contain inOrderElementsOf Seq(1, 2, 2, 3))
}
}
describe("when used with not contain inOrderElementsOf Seq(..)") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (toLists) should not contain inOrderElementsOf (Seq("you", "to"))
val e1 = intercept[TestFailedException] {
all (toLists) should not contain inOrderElementsOf (Seq("to", "you"))
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \n" +
          " at index 0, " + decorateToStringValue(prettifier, toLists(0)) + " contained all elements of " + decorateToStringValue(prettifier, Seq("to", "you")) + " in order (ListShouldContainInOrderElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \n" +
          "in " + decorateToStringValue(prettifier, toLists)))
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (toLists) should not contain inOrderElementsOf (Seq("YOU", "TO"))
intercept[TestFailedException] {
all (toLists) should not contain inOrderElementsOf (Seq("TO", "YOU"))
}
}
it("should use an explicitly provided Equality") {
(all (toLists) should not contain inOrderElementsOf (Seq("YOU", "TO"))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(all (toLists) should not contain inOrderElementsOf (Seq("TO", "YOU"))) (decided by upperCaseStringEquality)
}
all (toLists) should not contain inOrderElementsOf (Seq(" TO ", " YOU "))
intercept[TestFailedException] {
(all (toLists) should not contain inOrderElementsOf (Seq(" TO ", " YOU "))) (after being lowerCased and trimmed)
}
}
it("should do nothing when RHS contain duplicated value") {
all (toLists) should not contain inOrderElementsOf (Seq("fee", "fie", "foe", "fie", "fum"))
}
}
describe("when used with (not contain inOrderElementsOf Seq(..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (toLists) should (not contain inOrderElementsOf (Seq("you", "to")))
val e1 = intercept[TestFailedException] {
all (toLists) should (not contain inOrderElementsOf (Seq("to", "you")))
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \n" +
          " at index 0, " + decorateToStringValue(prettifier, toLists(0)) + " contained all elements of " + decorateToStringValue(prettifier, Seq("to", "you")) + " in order (ListShouldContainInOrderElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \n" +
          "in " + decorateToStringValue(prettifier, toLists)))
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (toLists) should (not contain inOrderElementsOf (Seq("YOU", "TO")))
intercept[TestFailedException] {
all (toLists) should (not contain inOrderElementsOf (Seq("TO", "YOU")))
}
}
it("should use an explicitly provided Equality") {
(all (toLists) should (not contain inOrderElementsOf (Seq("YOU", "TO")))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(all (toLists) should (not contain inOrderElementsOf (Seq("TO", "YOU")))) (decided by upperCaseStringEquality)
}
all (toLists) should (not contain inOrderElementsOf (Seq(" TO ", " YOU ")))
intercept[TestFailedException] {
(all (toLists) should (not contain inOrderElementsOf (Seq(" TO ", " YOU ")))) (after being lowerCased and trimmed)
}
}
it("should do nothing when RHS contain duplicated value") {
all (toLists) should (not contain inOrderElementsOf (Seq("fee", "fie", "foe", "fie", "fum")))
}
}
describe("when used with shouldNot contain inOrderElementsOf Seq(..)") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (toLists) shouldNot contain inOrderElementsOf Seq("you", "to")
val e1 = intercept[TestFailedException] {
all (toLists) shouldNot contain inOrderElementsOf Seq("to", "you")
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \n" +
          " at index 0, " + decorateToStringValue(prettifier, toLists(0)) + " contained all elements of " + decorateToStringValue(prettifier, Seq("to", "you")) + " in order (ListShouldContainInOrderElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \n" +
          "in " + decorateToStringValue(prettifier, toLists)))
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (toLists) shouldNot contain inOrderElementsOf Seq("YOU", "TO")
intercept[TestFailedException] {
all (toLists) shouldNot contain inOrderElementsOf Seq("TO", "YOU")
}
}
it("should use an explicitly provided Equality") {
(all (toLists) shouldNot contain inOrderElementsOf Seq("YOU", "TO")) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(all (toLists) shouldNot contain inOrderElementsOf Seq("TO", "YOU")) (decided by upperCaseStringEquality)
}
all (toLists) shouldNot contain inOrderElementsOf Seq(" TO ", " YOU ")
intercept[TestFailedException] {
(all (toLists) shouldNot contain inOrderElementsOf Seq(" TO ", " YOU ")) (after being lowerCased and trimmed)
}
}
it("should do nothing when RHS contain duplicated value") {
all (toLists) shouldNot contain inOrderElementsOf Seq("fee", "fie", "foe", "fie", "fum")
}
}
describe("when used with shouldNot (contain inOrderElementsOf Seq(..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (toLists) shouldNot (contain inOrderElementsOf Seq("you", "to"))
val e1 = intercept[TestFailedException] {
all (toLists) shouldNot (contain inOrderElementsOf Seq("to", "you"))
}
e1.failedCodeFileName.get should be ("ListShouldContainInOrderElementsOfSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
        e1.message should be (Some("'all' inspection failed, because: \n" +
          " at index 0, " + decorateToStringValue(prettifier, toLists(0)) + " contained all elements of " + decorateToStringValue(prettifier, Seq("to", "you")) + " in order (ListShouldContainInOrderElementsOfSpec.scala:" + (thisLineNumber - 5) + ") \n" +
          "in " + decorateToStringValue(prettifier, toLists)))
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (toLists) shouldNot (contain inOrderElementsOf Seq("YOU", "TO"))
intercept[TestFailedException] {
all (toLists) shouldNot (contain inOrderElementsOf Seq("TO", "YOU"))
}
}
it("should use an explicitly provided Equality") {
(all (toLists) shouldNot (contain inOrderElementsOf Seq("YOU", "TO"))) (decided by upperCaseStringEquality)
intercept[TestFailedException] {
(all (toLists) shouldNot (contain inOrderElementsOf Seq("TO", "YOU"))) (decided by upperCaseStringEquality)
}
all (toLists) shouldNot (contain inOrderElementsOf Seq(" TO ", " YOU "))
intercept[TestFailedException] {
(all (toLists) shouldNot (contain inOrderElementsOf Seq(" TO ", " YOU "))) (after being lowerCased and trimmed)
}
}
it("should do nothing when RHS contain duplicated value") {
all (toLists) shouldNot (contain inOrderElementsOf Seq("fee", "fie", "foe", "fie", "fum"))
}
}
}
}
|
scalatest/scalatest
|
jvm/scalatest-test/src/test/scala/org/scalatest/ListShouldContainInOrderElementsOfSpec.scala
|
Scala
|
apache-2.0
| 28,654
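For readers skimming the large spec above, a minimal standalone use of the same matcher; the class name and data are illustrative:

import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers

class InOrderElementsOfExample extends AnyFunSpec with Matchers {
  it("matches a subsequence in order, ignoring interleaved extras") {
    List(0, 1, 1, 2, 3) should contain inOrderElementsOf Seq(1, 2, 3)
    List(0, 1, 1, 2, 3) should not contain inOrderElementsOf (Seq(3, 2, 1))
  }
}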
|
package pl.touk.nussknacker.openapi.functional
import org.apache.http.entity.{InputStreamEntity, StringEntity}
import org.apache.http.impl.bootstrap.ServerBootstrap
import org.apache.http.protocol.{HttpContext, HttpRequestHandler}
import org.apache.http.{HttpRequest, HttpResponse}
import pl.touk.nussknacker.test.AvailablePortFinder
object StubService {
def withCustomerService[T](action: Int => T): T = {
AvailablePortFinder.withAvailablePortsBlocked(1) { ports =>
val port = ports.head
val server = ServerBootstrap.bootstrap()
.setListenerPort(port)
.registerHandler("/swagger", new HttpRequestHandler {
override def handle(request: HttpRequest, response: HttpResponse, context: HttpContext): Unit = {
response.setStatusCode(200)
response.setEntity(new InputStreamEntity(getClass.getResourceAsStream("/customer-swagger.json")))
}
})
.registerHandler("/customers/*", new HttpRequestHandler {
override def handle(request: HttpRequest, response: HttpResponse, context: HttpContext): Unit = {
val id = request.getRequestLine.getUri.replaceAll(".*customers/", "").toInt
response.setStatusCode(200)
response.setEntity(new StringEntity(s"""{"name": "Robert Wright", "id": $id, "category": "GOLD"}"""))
}
}).create()
try {
server.start()
action(port)
} finally {
server.stop()
}
}
}
}
|
TouK/nussknacker
|
components/openapi/src/it/scala/pl/touk/nussknacker/openapi/functional/StubService.scala
|
Scala
|
apache-2.0
| 1,489
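A usage sketch for the stub above; it relies only on StubService.withCustomerService and scala.io.Source, so the expected payload fragment is the only assumption:

object StubServiceExample {
  def main(args: Array[String]): Unit = {
    StubService.withCustomerService { port =>
      // The stub echoes the path id back inside a canned customer payload.
      val body = scala.io.Source.fromURL(s"http://localhost:$port/customers/42").mkString
      assert(body.contains("\"id\": 42"))
    }
  }
}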
|
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.config
import slamdata.Predef._
import argonaut._, Argonaut._
import monocle._, macros.Lenses
import scalaz._, Scalaz._
@Lenses final case class WebConfig(server: ServerConfig, metastore: Option[MetaStoreConfig])
object WebConfig {
implicit val configOps: ConfigOps[WebConfig] = new ConfigOps[WebConfig] {
val name = "web"
def metaStoreConfig = WebConfig.metastore
val default = MetaStoreConfig.default ∘ (ms => WebConfig(ServerConfig(ServerConfig.DefaultPort), ms.some))
}
implicit val codecJson: CodecJson[WebConfig] =
casecodec2(WebConfig.apply, WebConfig.unapply)("server", "metastore")
}
|
jedesah/Quasar
|
web/src/main/scala/quasar/config/WebConfig.scala
|
Scala
|
apache-2.0
| 1,242
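A round-trip sketch for the codec above, assuming ServerConfig takes a single port argument as in `default`:

import argonaut._, Argonaut._

object WebConfigExample {
  def main(args: Array[String]): Unit = {
    val cfg  = WebConfig(ServerConfig(8080), None)
    val text = cfg.asJson.spaces2                  // encoded via the implicit codecJson
    val back = Parse.decodeOption[WebConfig](text) // parse + decode in one step
    assert(back.contains(cfg))
  }
}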
|
package rocks.molarialessandro.coordinates.converters
import rocks.molarialessandro.coordinates.{HSBACoordinates, RGBACoordinates}
class RGBAToHSBAConverter extends Converter[RGBACoordinates, HSBACoordinates] {
override def convert(coordinates: RGBACoordinates): HSBACoordinates = {
null // TODO
}
}
|
alem0lars/claps
|
src/main/scala/rocks/molarialessandro/coordinates/converters/RGBAToHSBAConverter.scala
|
Scala
|
apache-2.0
| 310
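The conversion above is still a TODO; a standard RGB-to-HSB computation, sketched with plain doubles because the coordinate classes' field names are not shown here, would look roughly like this:

object RgbToHsbSketch {
  // r, g, b in [0, 1]; returns (hue in degrees, saturation, brightness).
  def rgbToHsb(r: Double, g: Double, b: Double): (Double, Double, Double) = {
    val max = math.max(r, math.max(g, b))
    val min = math.min(r, math.min(g, b))
    val delta = max - min
    val saturation = if (max == 0) 0.0 else delta / max
    val hue =
      if (delta == 0) 0.0
      else if (max == r) 60 * (((g - b) / delta) % 6)
      else if (max == g) 60 * (((b - r) / delta) + 2)
      else 60 * (((r - g) / delta) + 4)
    (if (hue < 0) hue + 360 else hue, saturation, max)
  }
}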
|
package es.own3dh2so4.model
/**
* Created by david on 27/05/17.
*/
case class Order(time: java.sql.Timestamp, orderId: Long, clientId: Long, symbol: String, amount: Int, price: Double, buy: Boolean)
|
own3dh2so4/spark-in-action-book
|
src/main/scala/es/own3dh2so4/model/Order.scala
|
Scala
|
apache-2.0
| 209
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.logical
import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.planner.plan.nodes.FlinkConventions
import org.apache.flink.table.planner.plan.nodes.logical.{FlinkLogicalCalc, FlinkLogicalLegacyTableSourceScan}
import org.apache.flink.table.planner.plan.optimize.program._
import org.apache.flink.table.planner.plan.rules.FlinkBatchRuleSets
import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.NonDeterministicUdf
import org.apache.flink.table.planner.utils.TableTestBase
import org.apache.calcite.plan.hep.HepMatchOrder
import org.apache.calcite.rel.rules._
import org.apache.calcite.tools.RuleSets
import org.junit.{Before, Test}
/**
* Test for [[FlinkCalcMergeRule]].
*/
class FlinkCalcMergeRuleTest extends TableTestBase {
private val util = batchTestUtil()
@Before
def setup(): Unit = {
val programs = new FlinkChainedProgram[BatchOptimizeContext]()
programs.addLast(
"table_ref",
FlinkHepRuleSetProgramBuilder.newBuilder
.setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE)
.setHepMatchOrder(HepMatchOrder.BOTTOM_UP)
.add(FlinkBatchRuleSets.TABLE_REF_RULES)
.build())
programs.addLast(
"logical",
FlinkVolcanoProgramBuilder.newBuilder
.add(RuleSets.ofList(
FilterToCalcRule.INSTANCE,
ProjectToCalcRule.INSTANCE,
FlinkCalcMergeRule.INSTANCE,
FlinkLogicalCalc.CONVERTER,
FlinkLogicalLegacyTableSourceScan.CONVERTER
))
.setRequiredOutputTraits(Array(FlinkConventions.LOGICAL))
.build())
util.replaceBatchProgram(programs)
util.addTableSource[(Int, Int, String)]("MyTable", 'a, 'b, 'c)
}
@Test
def testCalcMergeWithSameDigest(): Unit = {
util.verifyPlan("SELECT a, b FROM (SELECT * FROM MyTable WHERE a = b) t WHERE b = a")
}
@Test
def testCalcMergeWithNonDeterministicExpr1(): Unit = {
util.addFunction("random_udf", new NonDeterministicUdf)
val sqlQuery = "SELECT a, a1 FROM (SELECT a, random_udf(a) AS a1 FROM MyTable) t WHERE a1 > 10"
util.verifyPlan(sqlQuery)
}
@Test
def testCalcMergeWithNonDeterministicExpr2(): Unit = {
util.addFunction("random_udf", new NonDeterministicUdf)
val sqlQuery = "SELECT a FROM (SELECT a FROM MyTable) t WHERE random_udf(a) > 10"
util.verifyPlan(sqlQuery)
}
}
|
hequn8128/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/FlinkCalcMergeRuleTest.scala
|
Scala
|
apache-2.0
| 3,273
|
package uk.vitalcode.dateparser
import java.time.{DayOfWeek, LocalDate, LocalDateTime, LocalTime}
import uk.vitalcode.dateparser.DateTimeInterval.defaultTime
import uk.vitalcode.dateparser.token._
case object Analyser {
def analyse(tokens: List[DateToken]): List[DateTimeInterval] = {
val dateTokens = dates(tokens)
val dateRangeTokens = dateRanges(tokens)
val weekDayTokens = weekDays(tokens)
val timeTokens = times(tokens)
val timeRangeTokens = timeRanges(tokens)
val dateTimeRangeTokens = dateTimeRange(tokens)
(dateTokens, dateRangeTokens, weekDayTokens, timeTokens, timeRangeTokens, dateTimeRangeTokens) match {
case (_ , dateRange :: Nil, weekDays, Nil, Nil,_) =>
analyseDateRangeNoTimePatterns(dateRange, weekDays)
case (_ , dateRange :: Nil, weekDays, times, Nil,_) =>
analyseDateRangeTimePatterns(dateRange, weekDays, times)
case (_ , dateRange :: Nil, weekDays, _, timeRanges: List[TimeRange], _) =>
analyseDateRangeTimeTimeRangesPatterns(dateRange, weekDays, timeRanges)
case (date :: Nil, _, _, Nil, Nil, _) =>
analyseSingleNoTimePatterns(date)
case (date :: Nil, _, _, times: List[Time], Nil, _) =>
analyseSingleDateTimePatterns(date, times)
case (date :: Nil, _, _, _, timeRanges: List[TimeRange], _) =>
analyseSingleDateTimeRangesPatterns(date, timeRanges)
case (_, _, _, Nil, _, dateTimeRanges) =>
analyseDateTimeRangesNoTimePatterns(dateTimeRanges)
case (_, _, _, times, _, dateTimeRange :: Nil) =>
analyseDateTimeRangeTimesPatterns(dateTimeRange, times)
case _ => Nil
}
}
private def analyseSingleNoTimePatterns(date: Date): List[DateTimeInterval] =
List(DateTimeInterval.from(date.value, defaultTime))
private def analyseDateRangeNoTimePatterns(dateRange: DateRange, weekDays: Set[DayOfWeek]): List[DateTimeInterval] = {
DateTimeUtils.datesInRange(dateRange.from, dateRange.to, weekDays)
.map(localDate => DateTimeInterval(LocalDateTime.of(localDate, LocalTime.of(0, 0)), None))
}
private def analyseDateRangeTimePatterns(dateRange: DateRange, weekDays: Set[DayOfWeek], times: List[Time]): List[DateTimeInterval] = {
for {
localDate <- DateTimeUtils.datesInRange(dateRange.from, dateRange.to, weekDays)
time <- times
} yield DateTimeInterval(LocalDateTime.of(localDate, time.value), None)
}
private def analyseDateRangeTimeTimeRangesPatterns(dateRange: DateRange, weekDays: Set[DayOfWeek], timeRanges: List[TimeRange]) = {
for {
localDate <- DateTimeUtils.datesInRange(dateRange.from, dateRange.to, weekDays ++ timeRanges.collect {
case TimeRange(_,_, Some(weekDay),_) => weekDay
})
timeRange <- timeRanges if timeRange.weekDay.isEmpty || timeRange.weekDay.get == localDate.getDayOfWeek
} yield DateTimeInterval(
LocalDateTime.of(localDate, timeRange.from),
Some(LocalDateTime.of(localDate, timeRange.to))
)
}
private def analyseSingleDateTimeRangesPatterns(date: Date, timeRanges: List[TimeRange]): List[DateTimeInterval] = {
timeRanges.map(time => {
DateTimeInterval.from(date.value, time.from)
.to(date.value, time.to)
})
}
private def analyseSingleDateTimePatterns(date: Date, times: List[Time]): List[DateTimeInterval] = {
times.map(time => {
DateTimeInterval.from(date.value, time.value)
})
}
private def analyseDateTimeRangesNoTimePatterns(dateTimeRanges: List[DateTimeRange]): List[DateTimeInterval] = {
dateTimeRanges.flatMap {
case DateTimeRange(fromDate, None, fromTime, Some(toTime), _) =>
List(DateTimeInterval.from(fromDate, fromTime).to(fromDate, toTime))
case DateTimeRange(fromDate, None, fromTime, _, _) =>
List(DateTimeInterval.from(fromDate, fromTime))
case DateTimeRange(fromDate, Some(toDate), fromTime, None, _) =>
for {
localDate <- DateTimeUtils.datesInRange(fromDate, toDate)
} yield DateTimeInterval.from(localDate, fromTime)
case DateTimeRange(fromDate, Some(toDate), fromTime, Some(toTime), _) =>
for {
localDate <- DateTimeUtils.datesInRange(fromDate, toDate)
} yield DateTimeInterval.from(localDate, fromTime).to(localDate, toTime)
}
}
private def analyseDateTimeRangeTimesPatterns(dateTimeRange: DateTimeRange, times: List[Time]): List[DateTimeInterval] = {
val fromDate = dateTimeRange.fromDate
val toDate = dateTimeRange.toDate
val dateTimeRanges = dateTimeRange +: times.map(time => DateTimeRange(fromDate, toDate, time.value, None, time.index))
analyseDateTimeRangesNoTimePatterns(dateTimeRanges)
}
private def dates(dateTokens: List[DateToken]) = dateTokens.collect {
case d: Date => d
}
private def dateRanges(dateTokens: List[DateToken]) = dateTokens.collect {
case t: DateRange => t
}
private def times(dateTokens: List[DateToken]) = dateTokens.collect {
case t: Time => t
}
private def timeRanges(dateTokens: List[DateToken]) = dateTokens.collect {
case t: TimeRange => t
}
private def weekDays(dateTokens: List[DateToken]) = dateTokens.collect {
case t: WeekDay => t.value
}.toSet
private def dateTimeRange(dateTokens: List[DateToken]) = dateTokens.collect {
case t: DateTimeRange => t
}
}
|
vitalcode/date-time-range-parser
|
src/main/scala/uk/vitalcode/dateparser/Analyser.scala
|
Scala
|
mit
| 5,362
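A usage sketch for the analyser above; the token constructors are hypothetical (assumed to be Date(value, index) and Time(value, index)), so treat this as illustrative only:

import java.time.{LocalDate, LocalTime}
import uk.vitalcode.dateparser.token.{Date, Time}

object AnalyserExample {
  def main(args: Array[String]): Unit = {
    // One date plus one time hits the (date :: Nil, _, _, times, Nil, _) branch.
    val tokens = List(Date(LocalDate.of(2017, 6, 1), 0), Time(LocalTime.of(19, 30), 1))
    Analyser.analyse(tokens).foreach(println)
  }
}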
|
package models
case class CurrencyData(symbol: String, name: Option[String], rate: Float)
|
matiwinnetou/play-currency-rates
|
app/models/CurrencyData.scala
|
Scala
|
apache-2.0
| 91
|
package org.jetbrains.plugins.scala
package refactoring.move
import org.jetbrains.plugins.scala.util.TestUtils
import com.intellij.testFramework.{PlatformTestUtil, PsiTestUtil}
import com.intellij.openapi.vfs.{LocalFileSystem, VirtualFile}
import java.util
import java.io.File
import com.intellij.psi.impl.source.PostprocessReformattingAspect
import com.intellij.psi._
import com.intellij.psi.search.GlobalSearchScope
import collection.mutable.ArrayBuffer
import lang.psi.impl.{ScalaFileImpl, ScalaPsiManager}
import com.intellij.refactoring.move.moveClassesOrPackages.SingleSourceRootMoveDestination
import com.intellij.refactoring.PackageWrapper
import com.intellij.openapi.fileEditor.FileDocumentManager
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScObject}
import org.jetbrains.plugins.scala.base.ScalaLightPlatformCodeInsightTestCaseAdapter
import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager
import com.intellij.openapi.vfs.impl.VirtualFilePointerManagerImpl
import org.jetbrains.plugins.scala.settings.ScalaApplicationSettings
import org.jetbrains.plugins.scala.lang.refactoring.move.ScalaMoveClassesOrPackagesProcessor
/**
* @author Alefas
* @since 30.10.12
*/
class ScalaMoveClassTest extends ScalaLightPlatformCodeInsightTestCaseAdapter {
def testPackageObject() {
doTest("packageObject", Array("com.`package`"), "org")
}
def testPackageObject2() {
doTest("packageObject2", Array("com"), "org")
}
def testSimple() {
doTest("simple", Array("com.A"), "org")
}
def testSCL2625() {
doTest("scl2625", Array("somepackage.Dummy", "somepackage.MoreBusiness", "somepackage.Business", "somepackage.AnotherEnum"), "dest")
}
def testSCL4623() {
doTest("scl4623", Array("moveRefactoring.foo.B"), "moveRefactoring.bar")
}
def testSCL4613() {
doTest("scl4613", Array("moveRefactoring.foo.B"), "moveRefactoring.bar")
}
def testSCL4621() {
doTest("scl4621", Array("moveRefactoring.foo.O"), "moveRefactoring.bar")
}
def testSCL4619() {
doTest("scl4619", Array("foo.B"), "bar")
}
def testSCL4875() {
doTest("scl4875", Array("com.A"), "org")
}
def testSCL4878() {
doTest("scl4878", Array("org.B"), "com")
}
def testSCL4894() {
doTest("scl4894", Array("moveRefactoring.foo.B", "moveRefactoring.foo.BB"), "moveRefactoring.bar")
}
def testSCL4972() {
doTest("scl4972", Array("moveRefactoring.foo.B"), "moveRefactoring.bar")
}
def testWithCompanion() {
doTest("withCompanion", Array("source.A"), "target", Kinds.onlyClasses)
}
def testBothJavaAndScala() {
doTest("bothJavaAndScala", Array("org.A", "org.J"), "com")
}
// wait for fix SCL-6316
// def testWithoutCompanion() {
// doTest("withoutCompanion", Array("source.A"), "target", Kinds.onlyObjects, moveCompanion = false)
// }
def doTest(testName: String, classNames: Array[String], newPackageName: String, mode: Kinds.Value = Kinds.all, moveCompanion: Boolean = true) {
val root: String = TestUtils.getTestDataPath + "/move/" + testName
val rootBefore: String = root + "/before"
val rootDir: VirtualFile = PsiTestUtil.createTestProjectStructure(getProjectAdapter, getModuleAdapter, rootBefore, new util.HashSet[File]())
VirtualFilePointerManager.getInstance.asInstanceOf[VirtualFilePointerManagerImpl].storePointers()
val settings = ScalaApplicationSettings.getInstance()
val moveCompanionOld = settings.MOVE_COMPANION
settings.MOVE_COMPANION = moveCompanion
    try {
      performAction(classNames, newPackageName, mode)
    } finally {
      PsiTestUtil.removeSourceRoot(getModuleAdapter, rootDir)
      // Restore the flag even if the move itself failed.
      settings.MOVE_COMPANION = moveCompanionOld
    }
val rootAfter: String = root + "/after"
val rootDir2: VirtualFile = LocalFileSystem.getInstance.findFileByPath(rootAfter.replace(File.separatorChar, '/'))
VirtualFilePointerManager.getInstance.asInstanceOf[VirtualFilePointerManagerImpl].storePointers()
getProjectAdapter.getComponent(classOf[PostprocessReformattingAspect]).doPostponedFormatting()
PlatformTestUtil.assertDirectoriesEqual(rootDir2, rootDir)
}
private def performAction(classNames: Array[String], newPackageName: String, mode: Kinds.Value) {
val classes = new ArrayBuffer[PsiClass]()
for (name <- classNames) {
classes ++= ScalaPsiManager.instance(getProjectAdapter).getCachedClasses(GlobalSearchScope.allScope(getProjectAdapter), name).filter {
case o: ScObject if o.isSyntheticObject => false
case c: ScClass if mode == Kinds.onlyObjects => false
case o: ScObject if mode == Kinds.onlyClasses => false
case _ => true
}
}
val aPackage: PsiPackage = JavaPsiFacade.getInstance(getProjectAdapter).findPackage(newPackageName)
val dirs: Array[PsiDirectory] = aPackage.getDirectories(GlobalSearchScope.moduleScope(getModuleAdapter))
assert(dirs.length == 1)
ScalaFileImpl.performMoveRefactoring {
new ScalaMoveClassesOrPackagesProcessor(getProjectAdapter, classes.toArray,
new SingleSourceRootMoveDestination(PackageWrapper.create(JavaDirectoryService.getInstance.getPackage(dirs(0))), dirs(0)), true, true, null).run()
}
PsiDocumentManager.getInstance(getProjectAdapter).commitAllDocuments()
FileDocumentManager.getInstance.saveAllDocuments()
}
object Kinds extends Enumeration {
type Kinds = Value
val onlyObjects, onlyClasses, all = Value
}
}
|
consulo/consulo-scala
|
test/org/jetbrains/plugins/scala/refactoring/move/ScalaMoveClassTest.scala
|
Scala
|
apache-2.0
| 5,453
|
import sbt._
object MdocLibrary {
val fshell = "org.m-doc" %% "fshell" % "0.0.0-52-gb7ce9e2"
val renderingEngines = "org.m-doc" %% "rendering-engines" % "0.0.0-28-ge31be67"
val commonModel = "org.m-doc" %% "common-model" % "0.0.0-27-g451b84e"
}
|
m-doc/adelmo
|
project/MdocLibrary.scala
|
Scala
|
apache-2.0
| 252
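These coordinates would typically be pulled into build.sbt; a minimal sketch:

// build.sbt (sketch)
libraryDependencies ++= Seq(
  MdocLibrary.fshell,
  MdocLibrary.renderingEngines,
  MdocLibrary.commonModel
)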
|
package org.scalaide.ui.internal.editor
import org.eclipse.jdt.core.IJavaElement
import org.eclipse.jdt.internal.ui.javaeditor.ClassFileEditor
import org.eclipse.jdt.ui.actions.IJavaEditorActionDefinitionIds
import org.eclipse.jface.action.Action
import org.eclipse.jface.text.ITextSelection
import org.scalaide.core.internal.jdt.model.ScalaClassFile
import org.scalaide.core.internal.jdt.model.ScalaCompilationUnit
import org.scalaide.ui.internal.editor.decorators.implicits.ImplicitHighlightingPresenter
import org.scalaide.ui.internal.editor.decorators.semantichighlighting.TextPresentationEditorHighlighter
import org.scalaide.ui.internal.editor.decorators.semantichighlighting.TextPresentationHighlighter
import org.scalaide.ui.internal.editor.outline.OutlinePageEditorExtension
class ScalaClassFileEditor extends ClassFileEditor with ScalaCompilationUnitEditor with MarkOccurrencesEditorExtension with OutlinePageEditorExtension {
private lazy val implicitHighlighter = new ImplicitHighlightingPresenter(sourceViewer)
override def dispose() = {
super.dispose()
implicitHighlighter.dispose()
}
override def createPartControl(parent: org.eclipse.swt.widgets.Composite): Unit = {
super.createPartControl(parent)
if (isMarkingOccurrences())
installOccurrencesFinder(/*forceUpdate*/false)
getInteractiveCompilationUnit() match {
case scu: ScalaCompilationUnit => implicitHighlighter(scu)
case _ =>
}
}
override def getElementAt(offset : Int) : IJavaElement = {
getInputJavaElement match {
case scf : ScalaClassFile => scf.getElementAt(offset)
case _ => null
}
}
override def getCorrespondingElement(element : IJavaElement) : IJavaElement = {
getInputJavaElement match {
case scf : ScalaClassFile => scf.getCorrespondingElement(element).getOrElse(super.getCorrespondingElement(element))
case _ => super.getCorrespondingElement(element)
}
}
override protected def createActions(): Unit = {
super.createActions()
val openAction = new Action {
override def run: Unit = {
Option(getInputJavaElement) map (ScalaCompilationUnit.castFrom) foreach { scu =>
scu.followDeclaration(ScalaClassFileEditor.this, getSelectionProvider.getSelection.asInstanceOf[ITextSelection])
}
}
}
openAction.setActionDefinitionId(IJavaEditorActionDefinitionIds.OPEN_EDITOR)
setAction("OpenEditor", openAction)
}
override def createSemanticHighlighter: TextPresentationHighlighter =
TextPresentationEditorHighlighter(this, semanticHighlightingPreferences, _ => (), _ => ())
override def forceSemanticHighlightingOnInstallment: Boolean = true
}
|
scala-ide/scala-ide
|
org.scala-ide.sdt.core/src/org/scalaide/ui/internal/editor/ScalaClassFileEditor.scala
|
Scala
|
bsd-3-clause
| 2,701
|
package chess
package format
import cats.data.Validated
import chess.variant.Variant
object UciDump {
// a2a4, b8c6
def apply(force960Notation: Boolean)(replay: Replay): List[String] =
replay.chronoMoves map move(replay.setup.board.variant, force960Notation)
def apply(
moves: Seq[String],
initialFen: Option[FEN],
variant: Variant,
force960Notation: Boolean = false
): Validated[String, List[String]] =
if (moves.isEmpty) Validated.valid(Nil)
else Replay(moves, initialFen, variant) andThen (_.valid) map apply(force960Notation)
def move(variant: Variant, force960Notation: Boolean = false)(mod: MoveOrDrop): String =
mod match {
case Left(m) =>
m.castle.fold(m.toUci.uci) {
case ((kf, kt), (rf, _))
if force960Notation || kf == kt || variant.chess960 || variant.fromPosition =>
kf.key + rf.key
case ((kf, kt), _) => kf.key + kt.key
}
case Right(d) => d.toUci.uci
}
}
|
ornicar/scalachess
|
src/main/scala/format/UciDump.scala
|
Scala
|
mit
| 1,003
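A usage sketch for UciDump; it assumes the standard-rules variant object is chess.variant.Standard, as in scalachess:

import cats.data.Validated
import chess.format.UciDump
import chess.variant.Standard

object UciDumpExample {
  def main(args: Array[String]): Unit = {
    // SAN moves in, UCI moves out; an illegal move yields Validated.Invalid.
    UciDump(List("e4", "e5", "Nf3"), initialFen = None, variant = Standard) match {
      case Validated.Valid(uci)   => println(uci) // List(e2e4, e7e5, g1f3)
      case Validated.Invalid(err) => println(err)
    }
  }
}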
|
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ksmpartners.ernie.server.filter
import org.testng.annotations.{ BeforeClass, Test }
import com.ksmpartners.ernie.server.PropertyNames._
import net.liftweb.mocks.{ MockHttpServletResponse, MockHttpServletRequest }
import javax.servlet._
import java.io.{ ByteArrayOutputStream, FileInputStream, File }
import com.ksmpartners.ernie.util.Utility._
import org.apache.cxf.rs.security.saml.DeflateEncoderDecoder
import com.ksmpartners.ernie.util.Base64Util
import com.ksmpartners.ernie.server.filter.SAMLConstants._
import org.testng.Assert
import org.slf4j.{ LoggerFactory, Logger }
import com.ksmpartners.ernie.server.filter.SAMLFilter.SAMLHttpServletRequestWrapper
import com.ksmpartners.ernie.util.TestLogger
import net.liftweb.http.Req
import java.util
class FilterWrapperTest extends TestLogger {
private val log: Logger = LoggerFactory.getLogger("com.ksmpartners.ernie.server.filter.FilterWrapperTest")
private val readWriteMode = "read-write"
@BeforeClass
def setup() {
val ks = Thread.currentThread.getContextClassLoader.getResource("keystore.jks")
System.setProperty(keystoreLocProp, ks.getPath)
}
@Test
def goodAuthReturns200() {
val req = new MockHttpServletRequest
val resp = new MockResp
val filter = new FilterWrapper
val chain = new Chain
filter.init(new FilterConfig {
def getFilterName: String = "FilterWrapper"
def getInitParameterNames: util.Enumeration[_] = null
def getInitParameter(p1: String): String = ""
def getServletContext: ServletContext = null
})
req.headers += (authHeaderProp -> List(getSamlHeaderVal(readWriteMode)))
filter.doFilter(req, resp, chain)
Assert.assertEquals(resp.getStatusCode, 200)
}
def getSamlHeaderVal(mode: String): String = "SAML " + (new String(encodeToken(mode)))
def encodeToken(mode: String): Array[Byte] = {
val samlUrl = Thread.currentThread.getContextClassLoader.getResource("saml/" + mode + ".xml")
val samlFile = new File(samlUrl.getFile)
var bos: Array[Byte] = null
var deflatedToken: Array[Byte] = null
try_(new FileInputStream(samlFile)) { file =>
val fileBytes: Array[Byte] = new Array[Byte](file.available())
file.read(fileBytes)
deflatedToken = new DeflateEncoderDecoder().deflateToken(fileBytes)
}
val encodedToken = Base64Util.encode(deflatedToken)
try_(new ByteArrayOutputStream()) { os =>
os.write(encodedToken)
bos = os.toByteArray
}
bos
}
class MockResp extends MockHttpServletResponse(null, null) {
def getStatusCode: Int = statusCode
}
class Chain extends FilterChain {
var userName: String = ""
def doFilter(request: ServletRequest, response: ServletResponse) {
if (request.isInstanceOf[SAMLHttpServletRequestWrapper])
userName = request.asInstanceOf[SAMLHttpServletRequestWrapper].getRemoteUser
}
}
}
|
ksmpartners/ernie
|
ernie-server/src/test/scala/com/ksmpartners/ernie/server/filter/FilterWrapperTest.scala
|
Scala
|
apache-2.0
| 3,463
|
/*
* Copyright (c) 2019. Lorem ipsum dolor sit amet, consectetur adipiscing elit.
* Morbi non lorem porttitor neque feugiat blandit. Ut vitae ipsum eget quam lacinia accumsan.
* Etiam sed turpis ac ipsum condimentum fringilla. Maecenas magna.
* Proin dapibus sapien vel ante. Aliquam erat volutpat. Pellentesque sagittis ligula eget metus.
* Vestibulum commodo. Ut rhoncus gravida arcu.
*/
package com.wallace.demo.app.utils
import com.wallace.demo.app.UnitSpec
/**
* Created by Wallace on 2017/1/14.
*/
class StringFuncUtilsUnitSpec extends UnitSpec {
"Wallace Huang" should "test StringFuncUtils: empty elements" in {
val str = ",,,"
val expect = ""
    val result = StringFuncUtils.splitString(str, ",", "\"")
for (elem <- result) {
log.info("@" + elem + "@")
}
result.length shouldBe 3
result(0) shouldBe expect
result(2) shouldBe expect
}
"Wallace Huang" should "test StringFuncUtils: empty string" in {
val str = ""
val expect = ""
    val result = StringFuncUtils.splitString(str, ",", "\"")
for (elem <- result) {
log.info("@" + elem + "@")
}
result.length shouldBe 1
result.head shouldBe expect
}
"Wallace Huang" should "test StringFuncUtils: one element" in {
val str = "elem1"
val expect = "elem1"
    val result = StringFuncUtils.splitString(str, ",", "\"")
for (elem <- result) {
log.info("@" + elem + "@")
}
result.length shouldBe 1
result.head shouldBe expect
}
"Wallace Huang" should "test StringFuncUtils: two elements" in {
val str = "elem1,elem2"
val expect = "elem2"
    val result = StringFuncUtils.splitString(str, ",", "\"")
for (elem <- result) {
log.info("@" + elem + "@")
}
result.length shouldBe 2
result.last shouldBe expect
}
"Wallace Huang" should "test StringFuncUtils: three elements" in {
val str = "elem1,elem2,\\"elem3=1,elem4=2,elem5=3\\""
val expect = "elem3=1,elem4=2,elem5=3"
val result = StringFuncUtils.splitString(str, ",", "\\"")
for (elem <- result) {
log.info("@" + elem + "@")
}
result.length shouldBe 3
result.last shouldBe expect
}
"Wallace Huang" should "test StringFuncUtils: more than three elements" in {
val str = "elem1,elem2,\\"elem3=1,elem4=2,elem5=3\\",elem6,\\"elem7=4,elem8=5,elem9=6\\",elem10"
val expect1 = "elem3=1,elem4=2,elem5=3"
val expect2 = "elem7=4,elem8=5,elem9=6"
val result = StringFuncUtils.splitString(str, ",", "\\"")
for (elem <- result) {
log.info("@" + elem + "@")
}
result.length shouldBe 6
result(2) shouldBe expect1
result(4) shouldBe expect2
}
"Wallace Huang" should "do unit test for the function: concatStrUtils" in {
val input = "a1,b2,c3,d4,e5,4,n1,n11,n12,n13,n2,n21,n22,n23,n3,n31,n32,n33,n4,n41,n42,n43,f6,g8,h9"
val res = ConcatStringUtils.concatCols(input, 5, 4)
val expect = Array("a1", "b2", "c3", "d4", "e5", "4", "n1$n2$n3$n4", "n11$n21$n31$n41", "n12$n22$n32$n42", "n13$n23$n33$n43", "f6", "g8", "h9")
res shouldBe expect
}
"Wallace Huang" should "do one more unit test for the function: concatStrUtils" in {
val input = "a1,b2,c3,d4,4,n1,n11,n12,n2,n21,n22,n3,n31,n32,n4,n41,n42,f6,g8,h9"
val res = ConcatStringUtils.concatCols(input, 4, 3)
val expect = Array("a1", "b2", "c3", "d4", "4", "n1$n2$n3$n4", "n11$n21$n31$n41", "n12$n22$n32$n42", "f6", "g8", "h9")
res shouldBe expect
}
"Wallace Huang" should "do unit test for: countKeyWord" in {
val input = "Hello world and Hello again. It's wonderful day!"
val res: Map[String, Int] = StringFuncUtils.countKeyWord(input, " ")
res.foreach(x => log.info(s"KeyWord: ${x._1}, Count: ${x._2}"))
val expect = 2
res.getOrElse("Hello", "") shouldBe expect
res.getOrElse("wonderful", "") shouldBe 1
}
"Wallace Huang" should "do unit test for: convertStrToFixedFormat" in {
val res = StringFuncUtils.convertStrToFixedFormat("25525511135")
res.contains("255.255.11.135") shouldBe true
res.length shouldBe 2
}
"Wallace Huang" should "do unit test for: extractFields" in {
val res0: String = StringFuncUtils.extractFieldsJava("2018-4-8 17:19:19,666666,1,109.01,32.34,true,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50")
val res1: String = StringFuncUtils.extractFieldsScala("2018-4-8 17:19:19,666666,1,109.01,32.34,true,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50")
val expect: String = "2018-4-8 17:19:19,666666,1,true,1,109.01,32.34,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50"
runTimes = 1000000
val costTime1: Double = runtimeDuration(StringFuncUtils.extractFieldsJava("2018-4-8 17:19:19,666666,1,109.01,32.34,true,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50"), runTimes)
val costTime2: Double = runtimeDuration(StringFuncUtils.extractFieldsJava("2018-4-8 17:19:19,666666,1,109.01,32.34,true,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50"), runTimes)
val costTime3: Double = runtimeDuration(StringFuncUtils.extractFieldsScala("2018-4-8 17:19:19,666666,1,109.01,32.34,true,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50"), runTimes)
val costTime4: Double = runtimeDuration(StringFuncUtils.extractFieldsScala("2018-4-8 17:19:19,666666,1,109.01,32.34,true,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50"), runTimes)
val costTime5: Double = runtimeDuration(StringFuncUtils.extractFieldsJava("2018-4-8 17:19:19,666666,1,109.01,32.34,true,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50"), runTimes)
val costTime6: Double = runtimeDuration(StringFuncUtils.extractFieldsScala("2018-4-8 17:19:19,666666,1,109.01,32.34,true,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50"), runTimes)
log.info(s"[BenchmarkTest ### extractFieldsJava ] Times: $runTimes, CostTime: $costTime1 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime1 / 1000.0)}")
log.info(s"[BenchmarkTest ### extractFieldsJava ] Times: $runTimes, CostTime: $costTime2 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime2 / 1000.0)}")
log.info(s"[BenchmarkTest ### extractFieldsScala] Times: $runTimes, CostTime: $costTime3 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime3 / 1000.0)}")
log.info(s"[BenchmarkTest ### extractFieldsScala] Times: $runTimes, CostTime: $costTime4 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime4 / 1000.0)}")
log.info(s"[BenchmarkTest ### extractFieldsJava ] Times: $runTimes, CostTime: $costTime5 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime5 / 1000.0)}")
log.info(s"[BenchmarkTest ### extractFieldsScala] Times: $runTimes, CostTime: $costTime6 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime6 / 1000.0)}")
res0 shouldBe expect
res1 shouldBe expect
}
}
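// A minimal sketch (an assumption, not from the original repo) of the
// `runtimeDuration` helper referenced by the benchmark above: it evaluates
// a by-name block `times` times and returns the elapsed wall-clock time in
// milliseconds, matching how the log lines above divide by 1000.0 to get seconds.
object RuntimeDurationSketch {
  def runtimeDuration[T](block: => T, times: Int): Double = {
    val start = System.nanoTime()
    var i = 0
    while (i < times) { block; i += 1 }
    (System.nanoTime() - start) / 1e6 // elapsed time in ms
  }
}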
|
BiyuHuang/CodePrototypesDemo
|
demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/StringFuncUtilsUnitSpec.scala
|
Scala
|
apache-2.0
| 7,537
|
/*
* Copyright 2007-2008 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.liftweb.builtin.snippet;
import _root_.net.liftweb.http._
import S._
import _root_.scala.xml._
import _root_.net.liftweb.util.Helpers._
import _root_.net.liftweb.util.{Box, Full, Empty}
/**
* This class is a built in snippet that renders the messages (Errors, Warnings, Notices). Typically it is used in templates
* as a place holder for any messages set by user that are not associated with an ID.
*
* E.g. (child nodes are optional)
* <pre>
* <lift:snippet type="error_report">
* <lift:error_msg>Error! The details are:</lift:error_msg>
* <lift:error_class>errorBox</lift:error_class>
* <lift:warning_msg>Whoops, I had a problem:</lift:warning_msg>
* <lift:warning_class>warningBox</lift:warning_class>
* <lift:notice_msg>Note:</lift:notice_msg>
* <lift:notice_class>noticeBox</lift:notice_class>
* </lift:snippet>
* </pre>
*
*/
object Msgs extends DispatchSnippet {
def dispatch: DispatchIt = {
case _ => render
}
def render(styles: NodeSeq): NodeSeq = {
val f = noIdMessages _
    val msgs = List((f(S.errors),
                     (styles \\ "error_msg"), S.??("msg.error"),
                     ((styles \\ "error_class") ++
                      (styles \\ "error_msg" \\ "@class")), 0),
                    (f(S.warnings),
                     (styles \\ "warning_msg"), S.??("msg.warning"),
                     ((styles \\ "warning_class") ++
                      (styles \\ "warning_msg" \\ "@class")), 1),
                    (f(S.notices),
                     (styles \\ "notice_msg"), S.??("msg.notice"),
                     ((styles \\ "notice_class") ++
                      (styles \\ "notice_msg" \\ "@class")), 2)).flatMap
{
case (msg, titleList, defaultTitle, styleList, ord) =>
val title: String = titleList.toList. filter(_.prefix == "lift").
map(_.text.trim).filter(_.length > 0) headOr defaultTitle
val styles = styleList.toList.map(_.text.trim)
if (!styles.isEmpty) {
ord match {
case 0 => MsgsErrorMeta(Full(AjaxMessageMeta(Full(title),
Full(styles.mkString(" ")))))
case 1 => MsgsWarningMeta(Full(AjaxMessageMeta(Full(title),
Full(styles.mkString(" ")))))
case 2 => MsgsNoticeMeta(Full(AjaxMessageMeta(Full(title),
Full(styles.mkString(" ")))))
}
}
msg.toList.map(e => (<li>{e}</li>) ) match {
case Nil => Nil
case msgList => val ret = (<div>{title}<ul>{msgList}</ul></div>)
styles.foldLeft(ret)((xml, style) => xml % new UnprefixedAttribute("class", Text(style), Null))
}
}
<div>{msgs}</div> % ("id" -> LiftRules.noticesContainerId)
}
}
object MsgsNoticeMeta extends SessionVar[Box[AjaxMessageMeta]](Empty)
object MsgsWarningMeta extends SessionVar[Box[AjaxMessageMeta]](Empty)
object MsgsErrorMeta extends SessionVar[Box[AjaxMessageMeta]](Empty)
case class AjaxMessageMeta(title: Box[String], cssClass: Box[String])
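// Usage sketch (an illustration, not part of the original file): the snippet
// renders whatever messages the current request has accumulated, e.g.
//   S.error("Name is required")    // rendered under error_msg / error_class
//   S.warning("Password is weak")  // rendered under warning_msg / warning_class
//   S.notice("Profile saved")      // rendered under notice_msg / notice_class
// and the <lift:snippet type="error_report"> template then emits one <div>
// per non-empty message group.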
|
beni55/liftweb
|
lift/src/main/scala/net/liftweb/builtin/snippet/Msgs.scala
|
Scala
|
apache-2.0
| 3,844
|
package org.aguo.civsim.controller
import org.aguo.civsim.model.World
import org.aguo.civsim.view._
object ViewController {
def handleInput(input: String, world: World): World = input match {
case "b" | "buildings" => ViewBuildingScreen.render(world)
case "j" | "jobs" => ViewJobScreen.render(world)
case _ => UnknownScreen.render(world)
}
}
|
aguo777/civ-sim
|
src/main/scala/org/aguo/civsim/controller/ViewController.scala
|
Scala
|
mit
| 361
|
package org.jetbrains.plugins.scala
package lang.psi.light.scala
import com.intellij.psi.PsiElement
import com.intellij.psi.impl.light.LightElement
import org.jetbrains.plugins.scala.lang.psi.api.base.ScPrimaryConstructor
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScFunctionExpr
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScParameterClause}
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.{ScalaPsiElement, ScalaPsiUtil}
/**
* @author Alefas
* @since 03/04/14.
*/
class ScLightParameterClause(types: Seq[ScType], clause: ScParameterClause)
extends LightElement(clause.getManager, clause.getLanguage) with ScParameterClause {
override def isImplicit: Boolean = clause.isImplicit
override def parameters: Seq[ScParameter] = clause.parameters.zip(types).zipWithIndex.map {
case ((param, tp), i) => new ScLightParameter(param, tp, i)
}
override def effectiveParameters: Seq[ScParameter] = parameters
override def toString: String = "Light parameter clause"
override def addParameter(param: ScParameter): ScParameterClause =
throw new UnsupportedOperationException("Operation on light element")
override protected def findChildrenByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): Array[T] =
throw new UnsupportedOperationException("Operation on light element")
override protected def findChildByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): T =
throw new UnsupportedOperationException("Operation on light element")
override def owner: PsiElement = {
ScalaPsiUtil.getContextOfType(this, true, classOf[ScFunctionExpr], classOf[ScFunction], classOf[ScPrimaryConstructor])
}
}
|
loskutov/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/light/scala/ScLightParameterClause.scala
|
Scala
|
apache-2.0
| 1,817
|
package ru.wordmetrix.treeapproximator
import java.io.{File, InputStream}
import java.net.URI
import ru.wordmetrix.smartfile.SmartFile.fromFile
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
/**
 * A utility that prepares a sample of short text fragments.
 *
 * It downloads text from the given URLs and splits it into
 * fragments at lines that match a pattern.
*
* @author Elec
* @usage SamplingOfSections [ -regexp <REGEXP> ] { <URI> }+ [<Path to store>]
*/
object SamplingOfSections extends App {
override def main(args: Array[String]) {
val (regexp, files, path) = (args match {
case Array("-regexp", regexp, args@_*) => (regexp.r, args)
case Array("-mark", mark, args@_*) =>
(s".+$mark.+".r, args)
      case Array(args@_*) => ("^\\s*$".r, args)
}) match {
case (regexp, Seq(file, files@_*)) => (regexp,
(file :: files.toList.dropRight(1)).map(new URI(_)),
files.lastOption match {
case Some(x) => new File(x)
case None => new File(".")
})
case _ =>
printf("Enter "
+ "SamplingOfParagraphs [-mark <Mark> | -regexp <Regexp>]"
+ " { <uri> }+ [ <path> ]\\n")
sys.exit
(null, null, null)
}
def write(paragraph: List[String], n: Int, name: String) =
if (paragraph.length > 10)
(path /
s"${name}.%04d.txt".format(n))
.write(s"$n : ${name} : "
          + paragraph.mkString("\n") + "\n")
def output(lines: List[String], n: Int, name: String): Unit = if (n < 10000)
lines match {
case paragraph@line :: List() =>
write(paragraph, n, name)
case line :: lines =>
println(line)
lines.span(regexp.findFirstMatchIn(_).isEmpty) match {
case (List(), List()) =>
case (paragraph@List(_ @ _*), lines) =>
write (line :: paragraph, n, name)
output (lines, n + 1, name)
}
}
val end = Future.sequence(files.map { file =>
for {
content <- Future(file.toURL.getContent())
lines <- content match {
case content: InputStream =>
Future(io.Source.fromInputStream(content).getLines.toList)
case _ =>
Future.failed(new Exception("Invalid data type"))
}
} yield {
output(lines, 1, file.getPath().split("/").toList.last)
}
})
Await.ready(end, 1 minute)
}
}
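// Example invocation (hypothetical URLs and path, for illustration only):
//   SamplingOfSections -regexp "^Chapter .*" \
//       http://example.com/book1.txt http://example.com/book2.txt ./samples
// Each downloaded text is split at lines matching the pattern, and fragments
// longer than ten lines are written to numbered files under ./samples.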
|
electricmind/treeapproximator
|
src/main/scala/ru/wordmetrix/treeapproximator/SamplingOfSections.scala
|
Scala
|
apache-2.0
| 2,535
|
package org.pico.statsd.impl
import java.net.{InetAddress, InetSocketAddress}
object StaticAddressResolution {
/**
* Lookup the address for the given host name and cache the result.
*
* @param hostname the host name of the targeted StatsD server
* @param port the port of the targeted StatsD server
    * @return a function that returns the cached result of the lookup
    * @throws Exception if the lookup fails, i.e. {@link UnknownHostException}
*/
def apply(hostname: String, port: Int): () => InetSocketAddress = {
val address = new InetSocketAddress(InetAddress.getByName(hostname), port)
() => address
}
}
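// Usage sketch (hypothetical host and port, for illustration only):
//   val resolve = StaticAddressResolution("statsd.example.com", 8125)
//   resolve() // returns the same InetSocketAddress on every call;
//             // the DNS lookup happened once, at construction time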
|
pico-works/pico-statsd
|
pico-statsd/src/main/scala/org/pico/statsd/impl/StaticAddressResolution.scala
|
Scala
|
mit
| 645
|
package chapter14
object Exercise4 extends App {
sealed abstract class Item
case class Product(name: String, price: Double) extends Item
case class Multiple(count: Int, item: Item) extends Item
def price(item: Item): Double = item match {
case Product(_, result) => result
case Multiple(count, subItem) => count * price(subItem)
}
val item = Multiple(10, Multiple(2, Product("Blackwell Toaster", 29.95)))
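  // price(item) = 10 * (2 * 29.95) = 599.0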
println(price(item))
}
|
vsuharnikov/books-exercises
|
scala/scala-for-the-impatient/src/main/scala/chapter14/Exercise4.scala
|
Scala
|
mit
| 456
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ml.dmlc.mxnet.module
import ml.dmlc.mxnet.DType.DType
import ml.dmlc.mxnet._
import ml.dmlc.mxnet.module.DataParallelExecutorGroup.Builder
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
private object DataParallelExecutorGroup {
private val logger: Logger = LoggerFactory.getLogger(classOf[DataParallelExecutorGroup])
// Load a list of arrays into a list of arrays specified by slices
private def loadGeneralMulti(data: Seq[NDArray],
targets: Seq[Array[((Int, Int), NDArray)]],
majorAxis: Seq[Int]): Unit = {
for (((dSrc, dTargets), axis) <- data zip targets zip majorAxis) {
for (((sliceIdxStart, sliceIdxStop), dDst) <- dTargets) {
if (axis >= 0) {
// copy slice
val shape = dSrc.shape
val begin = Array.fill(shape.length)(0)
val end = shape.toArray
begin(axis) = sliceIdxStart
end(axis) = sliceIdxStop
if (dSrc.context == dDst.context) {
NDArray.crop(Map(
"begin" -> new Shape(begin),
"end" -> new Shape(end),
"out" -> dDst))(dSrc)
} else {
// on different device, crop and then do cross device copy
val dDstCopy: NDArray = NDArray.crop(Map(
"begin" -> new Shape(begin),
"end" -> new Shape(end)))(dSrc)
dDstCopy.copyTo(dDst)
}
} else {
dSrc.copyTo(dDst)
}
}
}
}
private def loadGeneral(data: Seq[NDArray], targets: Seq[NDArray]): Unit = {
for ((dSrc, dTarget) <- data zip targets) {
dSrc.copyTo(dTarget)
}
}
// Load data into sliced arrays
private def loadData(batch: DataBatch,
targets: Seq[Array[((Int, Int), NDArray)]],
majorAxis: Seq[Int]): Unit = {
loadGeneralMulti(batch.data, targets, majorAxis)
}
// Load label into sliced arrays
private def loadLabel(batch: DataBatch,
targets: Seq[Array[((Int, Int), NDArray)]],
majorAxis: Seq[Int]): Unit = {
loadGeneralMulti(batch.label, targets, majorAxis)
}
// Merge outputs that lives on multiple context into one,
// so that they look like living on one context.
private def mergeMultiContext(outputs: IndexedSeq[IndexedSeq[NDArray]], majorAxis: Seq[Int])
: IndexedSeq[NDArray] = {
(outputs zip majorAxis).map { case (tensors, axis) =>
if (axis >= 0) {
NDArray.concatenate(tensors, axis = axis, alwaysCopy = false)
} else {
        // negative axis means there is no batch_size axis, and all the
        // results should be the same on each device. We simply take the first one,
        // without checking that they are actually the same
tensors(0)
}
}
}
private object Builder {
private[module] def convertGradReq(
gradReq: String, argNames: IndexedSeq[String], paramNames: IndexedSeq[String],
fixedParamNames: Set[String], dataNames: Seq[String], inputsNeedGrad: Boolean)
: Map[String, String] = {
require(argNames != null)
require(paramNames != null)
require(fixedParamNames != null)
require(dataNames != null)
argNames.map(k => {
if (paramNames.contains(k)) {
(k, if (fixedParamNames.contains(k)) "null" else gradReq)
} else if (dataNames.contains(k)) {
(k, if (inputsNeedGrad) gradReq else "null")
} else {
(k, "null")
}
}).toMap
}
}
class Builder private[module](private val symbol: Symbol,
private val contexts: Array[Context],
private val paramNames: IndexedSeq[String]) {
private var workLoadList: IndexedSeq[Float] = null
private var dataShapes: IndexedSeq[DataDesc] = null
private var labelShapes: Option[IndexedSeq[DataDesc]] = None
private var forTraining: Boolean = true
private var inputsNeedGrad: Boolean = false
private var sharedGroup: Option[DataParallelExecutorGroup] = None
private var inputTypes: Option[Map[String, DType]] = None
private var fixedParamNames: Set[String] = Set.empty[String]
private var gradReqs: Map[String, String] = null
val argNames = symbol.listArguments()
def setWorkLoadList(workLoad: IndexedSeq[Float]): Builder = {
this.workLoadList = workLoad
this
}
def setDataShapes(shapes: IndexedSeq[DataDesc]): Builder = {
require(shapes != null)
this.dataShapes = shapes
this
}
def setDataShapesByName(shapes: IndexedSeq[(String, Shape)]): Builder = {
require(shapes != null)
this.dataShapes = shapes.map { case (k, s) => new DataDesc(k, s) }
this
}
def setLabelShapes(shapes: IndexedSeq[DataDesc]): Builder = {
this.labelShapes = Option(shapes)
this
}
def setLabelShapesByName(shapes: IndexedSeq[(String, Shape)]): Builder = {
this.labelShapes = Option(shapes).map(shapesInst =>
shapesInst.map { case (k, s) => new DataDesc(k, s) }
)
this
}
def setForTraining(forTraining: Boolean): Builder = {
this.forTraining = forTraining
this
}
def setInputsNeedGrad(needGrad: Boolean): Builder = {
this.inputsNeedGrad = needGrad
this
}
def setSharedGroup(sharedGroup: DataParallelExecutorGroup): Builder = {
this.sharedGroup = Option(sharedGroup)
this
}
def setInputTypes(inputTypes: Map[String, DType]): Builder = {
this.inputTypes = Option(inputTypes)
this
}
def setFixedParamNames(fixedParamNames: Set[String]): Builder = {
this.fixedParamNames = Option(fixedParamNames).getOrElse(Set.empty[String])
this
}
def setGradReq(gradReq: Map[String, String]): Builder = {
require(dataShapes != null)
val gradReqTmp = mutable.HashMap.empty[String, String]
val dataNames = dataShapes.map(_.name)
      for (k <- argNames) {
        if (paramNames.contains(k)) {
          gradReqTmp.put(k, if (fixedParamNames.contains(k)) "null" else "write")
        } else if (dataNames.contains(k)) {
          gradReqTmp.put(k, if (inputsNeedGrad) "write" else "null")
        } else {
          gradReqTmp.put(k, "null")
        }
      }
      // user-supplied requirements override the defaults computed above
      gradReqTmp ++= gradReq
      this.gradReqs = gradReqTmp.toMap
this
}
def setGradReq(gradReq: String): Builder = {
require(dataShapes != null)
val dataNames = dataShapes.map(_.name)
this.gradReqs = Builder.convertGradReq(
gradReq, argNames, paramNames, fixedParamNames, dataNames, inputsNeedGrad)
this
}
def setGradReq(gradReq: Seq[(String, String)]): Builder = {
require(gradReq.size == argNames.size)
this.gradReqs = gradReq.toMap
this
}
def build(): DataParallelExecutorGroup = {
new DataParallelExecutorGroup(
symbol, contexts, workLoadList, dataShapes, labelShapes, paramNames, forTraining,
inputsNeedGrad, sharedGroup, inputTypes, fixedParamNames, this.gradReqs)
}
}
}
/**
* DataParallelExecutorGroup is a group of executors that lives on a group of devices.
* This is a helper class used to implement data parallelism. Each mini-batch will
* be split and run on the devices.
* @param symbol The common symbolic computation graph for all executors.
* @param contexts A list of contexts.
* @param workLoadList If not `None`, could be a list of numbers that
* specify the workload to be assigned to different context.
* Larger number indicate heavier workload.
* @param dataShapes Should be a list of (name, shape) tuples, for the shapes of data.
* Note the order is important and should be the same as the order that
* the `DataIter` provide the data.
* @param labelShapes Should be a list of (name, shape) tuples, for the shapes of label.
* Note the order is important and should be the same as the order that
* the `DataIter` provide the label.
* @param paramNames A list of strings, indicating the names of parameters
* (e.g. weights, filters, etc.) in the computation graph.
 * @param forTraining Indicate whether the executors should be bound for training.
* When not doing training, the memory for gradients will not be allocated.
* @param inputsNeedGrad Indicate whether the gradients for the input data should be computed.
* This is currently not used.
* It will be useful for implementing composition of modules.
* @param sharedGroup Default is `None`. This is used in bucketing. When not `None`,
* it should be a executor group corresponding to a different bucket.
* In other words, it will correspond to a different symbol but
* with the same set of parameters (e.g. unrolled RNNs with different lengths).
 *                    In this case, much of the memory will be shared.
* @param inputTypes Default is `None`. When not `None`,
* can be used to specify the data type for each of the data/label inputs.
* @param fixedParamNames Indicate parameters to be fixed during training.
* Parameters in this list will not allocate space for gradient,
* nor do gradient calculation.
* @param gradReq Requirement for gradient accumulation. Can be 'write', 'add', or 'null',
 *                and can be specified for each argument.
*/
class DataParallelExecutorGroup private[module](
symbol: Symbol,
contexts: Array[Context],
workLoadList: IndexedSeq[Float],
dataShapes: IndexedSeq[DataDesc],
labelShapes: Option[IndexedSeq[DataDesc]] = None,
private[module] val paramNames: IndexedSeq[String],
forTraining: Boolean,
inputsNeedGrad: Boolean,
sharedGroup: Option[DataParallelExecutorGroup] = None,
inputTypes: Option[Map[String, DType]] = None,
fixedParamNames: Set[String] = Set.empty[String],
gradReq: Map[String, String] = null) {
require(symbol != null)
require(contexts != null)
private val argNames = symbol.listArguments()
private val auxNames = symbol.listAuxiliaryStates()
private val gradReqRun =
if (!forTraining) {
val dataNames = dataShapes.map(_.name)
Builder.convertGradReq("null",
argNames, paramNames, fixedParamNames, dataNames, inputsNeedGrad)
} else {
gradReq
}
private val sharedDataArrays: Array[mutable.Map[String, NDArray]] =
sharedGroup.map(_.sharedDataArrays).getOrElse(
Array.fill(contexts.length)(mutable.Map.empty[String, NDArray]))
private var batchSize: Int = -1
private var slices: Array[(Int, Int)] = null
private var execs: Array[Executor] = null
private var dataArrays: Seq[Array[((Int, Int), NDArray)]] = null
private var labelArrays: Option[Seq[Array[((Int, Int), NDArray)]]] = None
private[module] var paramArrays: IndexedSeq[Array[NDArray]] = null
private[module] var gradArrays: IndexedSeq[Array[NDArray]] = null
private[module] var auxArrays: IndexedSeq[Array[NDArray]] = null
private var inputGradArrays: IndexedSeq[Array[NDArray]] = null
private val dataLayouts = decideSlices(dataShapes)
private val labelLayouts =
// call it to make sure labels has the same batch size as data
    if (labelShapes.isDefined) decideSlices(labelShapes.get)
else null
private val outputLayouts = symbol.listOutputs().map(name =>
DataDesc.getBatchAxis(symbol.get(name).attr("__layout__"))
)
bindExec(dataShapes, labelShapes, sharedGroup)
def getBatchSize: Int = batchSize
/**
* Decide the slices for each context according to the workload.
* @param dataShapes list of DataDesc(name, shape) specifying
* the shapes for the input data or label.
*/
private def decideSlices(dataShapes: Seq[DataDesc]): Seq[Int] = {
require(dataShapes.size > 0)
val majorAxis = dataShapes.map(data => DataDesc.getBatchAxis(Option(data.layout)))
for ((dataDesc, axis) <- dataShapes.zip(majorAxis)) {
if (axis != -1) {
val batchSize = dataDesc.shape(axis)
if (this.batchSize != -1) {
require(batchSize == this.batchSize,
s"all data must have the same batch size: $batchSize," +
s"but ${dataDesc.name} has shape ${dataDesc.shape}")
} else {
this.batchSize = batchSize
require(this.workLoadList != null)
this.slices = ExecutorManager.splitInputSlice(this.batchSize, this.workLoadList)
}
}
}
majorAxis
}
/**
* Bind executors on their respective devices.
* @param dataShapes DataDesc for input data.
* @param labelShapes DataDesc for input labels.
* @param sharedGroup
*/
def bindExec(dataShapes: Seq[DataDesc], labelShapes: Option[Seq[DataDesc]],
sharedGroup: Option[DataParallelExecutorGroup]): Unit = {
execs = (0 until contexts.length).map(i =>
bindIthExec(i, dataShapes, labelShapes, sharedGroup)
).toArray
// convenient data structures
dataArrays = dataShapes.map(dataDesc =>
this.execs.zipWithIndex.map { case (e, i) => (this.slices(i), e.argDict(dataDesc.name)) }
)
labelArrays = labelShapes.map(shapes =>
shapes.map(labelDesc =>
this.execs.zipWithIndex.map { case (e, i) => (this.slices(i), e.argDict(labelDesc.name)) }
)
)
paramArrays = argNames.zipWithIndex.withFilter {
case (name, i) => paramNames.contains(name)
}.map { case (name, i) =>
execs.map(_.argArrays(i))
}
gradArrays =
if (forTraining) {
argNames.zipWithIndex.withFilter {
case (name, i) => paramNames.contains(name)
}.map { case (name, i) =>
execs.map(_.gradArrays(i))
}
} else {
null
}
val dataNames = dataShapes.map(_.name)
inputGradArrays =
if (inputsNeedGrad) {
argNames.zipWithIndex.withFilter {
case (name, i) => dataNames.contains(name)
}.map { case (name, i) =>
execs.map(_.gradArrays(i))
}
} else {
null
}
auxArrays = (0 until auxNames.length).map(i => execs.map(_.auxArrays(i)))
}
/**
* Assign, i.e. copy parameters to all the executors.
* @param argParams A dictionary of name to `NDArray` parameter mapping.
* @param auxParams A dictionary of name to `NDArray` auxiliary variable mapping.
*/
def setParams(argParams: Map[String, NDArray], auxParams: Map[String, NDArray]): Unit = {
execs.foreach(_.copyParamsFrom(argParams, auxParams))
}
/**
* Copy data from each executor to `arg_params` and `aux_params`.
* @param argParams target parameter arrays
* @param auxParams target aux arrays
* Note this function will inplace update the NDArrays in arg_params and aux_params.
*/
def getParams(argParams: Map[String, NDArray], auxParams: Map[String, NDArray]): Unit = {
for ((name, block) <- paramNames.zip(paramArrays)) {
val weight = (block.map(_.copyTo(Context.cpu())).reduce((a: NDArray, b: NDArray) =>
(a + b).disposeDeps()
) / block.length).disposeDeps()
val weightNewType = weight.asType(argParams(name).dtype)
weightNewType.copyTo(argParams(name))
weight.dispose()
weightNewType.dispose()
}
for ((name, block) <- auxNames.zip(auxArrays)) {
val weight = (block.map(_.copyTo(Context.cpu())).reduce((a: NDArray, b: NDArray) =>
(a + b).disposeDeps()
) / block.length).disposeDeps()
val weightNewType = weight.asType(auxParams(name).dtype)
weightNewType.copyTo(auxParams(name))
weight.dispose()
weightNewType.dispose()
}
}
/**
* Split `dataBatch` according to workload and run forward on each devices.
* @param dataBatch
* @param isTrain The hint for the backend, indicating whether we are during training phase.
* Default is `None`, then the value `self.for_training` will be used.
*/
def forward(dataBatch: DataBatch, isTrain: Option[Boolean] = None): Unit = {
DataParallelExecutorGroup.loadData(dataBatch, dataArrays, dataLayouts)
val isTrainOpt = isTrain.getOrElse(this.forTraining)
labelArrays.foreach(labels => {
require(!isTrainOpt || dataBatch.label != null)
if (dataBatch.label != null) {
require(labelLayouts != null)
DataParallelExecutorGroup.loadLabel(dataBatch, labels, labelLayouts)
}
})
execs.foreach(_.forward(isTrainOpt))
}
// Get the shapes of the outputs.
def getOutputShapes: IndexedSeq[(String, Shape)] = {
val outputs = execs(0).outputs
val shapes = outputs.map(_.shape)
(symbol.listOutputs() zip shapes zip outputLayouts) map { case ((key, theShape), axis) =>
val shape = theShape.toArray
if (axis >= 0) {
shape(axis) = batchSize
}
(key, Shape(shape))
}
}
/**
* Get outputs of the previous forward computation.
* @return In the case when data-parallelism is used,
* the outputs will be collected from multiple devices.
* The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
* those `NDArray` might live on different devices.
*/
def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
(0 until execs(0).outputs.length).map(i => execs.map(_.outputs(i)).toIndexedSeq)
}
/**
* Get outputs of the previous forward computation.
* @return In the case when data-parallelism is used,
* the outputs will be merged from multiple devices,
* as they look like from a single executor.
* The results will look like `[out1, out2]`
*/
def getOutputsMerged(): IndexedSeq[NDArray] = {
DataParallelExecutorGroup.mergeMultiContext(getOutputs(), outputLayouts)
}
/**
* Get the gradients to the inputs, computed in the previous backward computation.
* @return In the case when data-parallelism is used,
* the grads will be collected from multiple devices.
* The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
* those `NDArray` might live on different devices.
*/
def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
require(inputsNeedGrad)
inputGradArrays.map(_.toIndexedSeq)
}
/**
* Get the gradients to the inputs, computed in the previous backward computation.
* @return In the case when data-parallelism is used,
* the grads will be merged from multiple devices,
* as they look like from a single executor.
* The results will look like `[grad1, grad2]`
*/
def getInputGradsMerged(): IndexedSeq[NDArray] = {
DataParallelExecutorGroup.mergeMultiContext(getInputGrads(), dataLayouts)
}
/**
* Run backward on all devices. A backward should be called after
* a call to the forward function. Backward cannot be called unless
* `this.for_training` is `True`.
* @param outGrads Gradient on the outputs to be propagated back.
* This parameter is only needed when bind is called
* on outputs that are not a loss function.
*/
def backward(outGrads: Array[NDArray] = null): Unit = {
require(forTraining, "re-bind with forTraining = true to run backward")
for (((exec, islice), i) <- (execs zip slices).zipWithIndex) {
val outGradsSlice =
if (outGrads != null) {
(outGrads zip outputLayouts).map { case (grad, axis) =>
if (axis >= 0) {
val ogMySlice: NDArray = NDArray.slice_axis(
Map("axis" -> axis, "begin" -> islice._1, "end" -> islice._2))(grad)
ogMySlice.asInContext(contexts(i))
} else {
grad.copyTo(contexts(i))
}
}
} else {
Array.empty[NDArray]
}
exec.backward(outGrads = outGradsSlice)
}
}
/**
* Accumulate the performance according to `eval_metric` on all devices.
* @param evalMetric The metric used for evaluation.
* @param labels Typically comes from `label` of a `DataBatch`.
*/
def updateMetric(evalMetric: EvalMetric, labels: IndexedSeq[NDArray]): Unit = {
for ((texec, islice) <- this.execs zip this.slices) {
val labelsSlice =
(labels zip this.labelLayouts) map { case (label, axis) =>
if (axis == 0) {
label.slice(islice)
} else if (axis > 0) {
val labelMySlice: NDArray = NDArray.slice_axis(Map(
"axis" -> axis, "begin" -> islice._1, "end" -> islice._2))(label)
.asInContext(label.context)
labelMySlice
} else {
label
}
}
evalMetric.update(labelsSlice, texec.outputs)
}
}
// Internal utility function to bind the i-th executor.
private def bindIthExec(i: Int, dataShapes: Seq[DataDesc],
labelShapes: Option[Seq[DataDesc]],
sharedGroup: Option[DataParallelExecutorGroup]): Executor = {
val dataShapesSliced = slicedShape(dataShapes, i, dataLayouts)
val labelShapesSliced = labelShapes.map(slicedShape(_, i, labelLayouts))
val sharedExec = sharedGroup.map(_.execs(i))
val context = contexts(i)
val sharedDataArrays = this.sharedDataArrays(i)
val inputShapes
= dataShapesSliced.toMap ++ labelShapesSliced.getOrElse(Map.empty[String, Shape])
val (argShapes, _, auxShapes) = symbol.inferShape(inputShapes)
require(argShapes != null, "shape inference failed")
val inputTypesGot = inputTypes.getOrElse(inputShapes.map { case (k, v) =>
(k, Base.MX_REAL_TYPE)
})
val (argTypes, _, auxTypes) = symbol.inferType(inputTypesGot)
require(argTypes != null, "type inference failed")
val argArrays = ArrayBuffer.empty[NDArray]
val gradArrayMap = mutable.HashMap.empty[String, NDArray]
// create or borrow arguments and gradients
for (j <- 0 until argNames.length) {
val name = argNames(j)
val argArr =
if (paramNames.contains(name)) {
// model parameter
sharedExec match {
case None =>
val argArr = NDArray.zeros(argShapes(j), context, dtype = argTypes(j))
if (gradReqRun(name) != "null") {
val gradArr = NDArray.zeros(argShapes(j), context, dtype = argTypes(j))
gradArrayMap.put(name, gradArr)
}
argArr
case Some(sharedExecInst) =>
val argArr = sharedExecInst.argDict(name)
require(argArr.shape == argShapes(j))
require(argArr.dtype == argTypes(j))
if (gradReqRun(name) != "null") {
gradArrayMap.put(name, sharedExecInst.gradDict(name))
}
argArr
}
} else {
// data or label
val argArr = getOrReshape(name, sharedDataArrays, argShapes(j), argTypes(j), context)
// data might also need grad if inputs_need_grad is True
if (gradReqRun(name) != "null") {
gradArrayMap.put(name,
getOrReshape(s"grad of $name", sharedDataArrays, argShapes(j), argTypes(j), context))
}
argArr
}
argArrays.append(argArr)
}
// create or borrow aux variables
val auxArrays =
sharedExec match {
case None => (auxShapes zip auxTypes).map { case (s, t) =>
NDArray.zeros(s, context, dtype = t)
}.toArray
case Some(sharedExecInst) =>
for ((arr, j) <- sharedExecInst.auxArrays.zipWithIndex) {
require(auxShapes(j) == arr.shape)
require(auxTypes(j) == arr.dtype)
}
sharedExecInst.auxArrays.map(identity)
}
symbol.bind(ctx = context, args = argArrays.toSeq, argsGrad = gradArrayMap.toMap,
gradsReq = gradReqRun, auxStates = auxArrays.toSeq, group2ctx = null,
sharedExec = sharedExec.orNull)
}
/**
* Get the sliced shapes for the i-th executor.
* @param shapes : The original (name, shape) pairs.
* @param i Which executor we are dealing with.
* @param majorAxis
*/
private def slicedShape(shapes: Seq[DataDesc], i: Int, majorAxis: Seq[Int])
: Seq[(String, Shape)] = {
(shapes zip majorAxis).map { case (DataDesc(k, shape, _ , _), axis) =>
val shapeArr = shape.toArray
if (axis >= 0) {
shapeArr(axis) = slices(i)._2 - slices(i)._1
}
(k, Shape(shapeArr))
}
}
// Install monitor on all executors
def installMonitor(monitor: Monitor): Unit = {
execs.foreach(monitor.install)
}
// Internal helper to get a memory block or re-use by re-shaping
private def getOrReshape(name: String,
sharedDataArrays: mutable.Map[String, NDArray],
argShape: Shape,
argType: DType,
context: Context): NDArray = {
if (sharedDataArrays.contains(name)) {
val argArr = sharedDataArrays(name)
if (argArr.shape.product >= argShape.product) {
// nice, we can directly re-use this data blob
require(argArr.dtype == argType)
argArr.reshape(argShape)
} else {
DataParallelExecutorGroup.logger.warn(s"bucketing: data $name has a shape $argShape," +
s"which is larger than already allocated shape ${argArr.shape}." +
"Need to re-allocate. Consider putting default_bucket_key to be the bucket" +
"taking the largest input for better memory sharing.")
val argArrNew = NDArray.zeros(argShape, context, dtype = argType)
// replace existing shared array because the new one is bigger
sharedDataArrays.put(name, argArrNew)
argArrNew
}
} else {
val argArrNew = NDArray.zeros(argShape, context, dtype = argType)
sharedDataArrays.put(name, argArrNew)
argArrNew
}
}
}
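// A minimal construction sketch (illustrative only; the Builder constructor is
// private[module], so this only compiles inside ml.dmlc.mxnet.module, and
// `net`, `paramNames`, the shapes and `batch` below are assumed to come from
// an already-defined network and data iterator):
//
//   val group = new Builder(net, Array(Context.gpu(0), Context.gpu(1)), paramNames)
//     .setWorkLoadList(IndexedSeq(1f, 1f)) // split each mini-batch evenly across the two GPUs
//     .setDataShapesByName(IndexedSeq("data" -> Shape(64, 3, 224, 224)))
//     .setLabelShapesByName(IndexedSeq("softmax_label" -> Shape(64)))
//     .setForTraining(true)
//     .setGradReq("write")
//     .build()
//   group.forward(batch)   // loads slices onto each device and runs forward
//   group.backward()       // propagates gradients on every device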
|
lxn2/mxnet
|
scala-package/core/src/main/scala/ml/dmlc/mxnet/module/DataParallelExecutorGroup.scala
|
Scala
|
apache-2.0
| 26,991
|
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.reactive.Observable
import scala.concurrent.duration._
import scala.concurrent.duration.Duration.Zero
object MiscIsEmptySuite extends BaseOperatorSuite {
def createObservable(sourceCount: Int) = Some {
val shouldBeEmpty = (sourceCount % 2) == 0
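    // encode the boolean as a sum the base suite can assert on: empty -> 2, non-empty -> 1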
val sum = if (shouldBeEmpty) 2L else 1L
val source =
if (shouldBeEmpty)
Observable.empty
else
Observable.range(0L, sourceCount.toLong)
val o = source.isEmpty.map(x => if (x) 2L else 1L)
Sample(o, 1, sum, Zero, Zero)
}
def observableInError(sourceCount: Int, ex: Throwable) = Some {
val o = Observable.raiseError(ex).isEmpty.map(x => if (x) 1L else 0L)
Sample(o, 0, 0, Zero, Zero)
}
def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) =
None
override def cancelableObservables() = {
val source1 = Observable.empty
.delayOnComplete(1.second)
.isEmpty
.map(x => if (x) 2L else 1L)
val source2 = Observable
.now(1)
.delayOnNext(1.second)
.isEmpty
.map(x => if (x) 2L else 1L)
Seq(Sample(source1, 0, 0, Zero, Zero), Sample(source2, 0, 0, Zero, Zero))
}
}
|
alexandru/monifu
|
monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/MiscIsEmptySuite.scala
|
Scala
|
apache-2.0
| 1,873
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import org.junit.Test
import junit.framework.Assert._
import org.scalatest.junit.JUnit3Suite
import kafka.utils.TestUtils
class KafkaConfigTest extends JUnit3Suite {
@Test
def testLogRetentionTimeHoursProvided() {
val props = TestUtils.createBrokerConfig(0, 8181)
props.put("log.retention.hours", "1")
val cfg = new KafkaConfig(props)
assertEquals(60L * 60L * 1000L, cfg.logRetentionTimeMillis)
}
@Test
def testLogRetentionTimeMinutesProvided() {
val props = TestUtils.createBrokerConfig(0, 8181)
props.put("log.retention.minutes", "30")
val cfg = new KafkaConfig(props)
assertEquals(30 * 60L * 1000L, cfg.logRetentionTimeMillis)
}
@Test
def testLogRetentionTimeNoConfigProvided() {
val props = TestUtils.createBrokerConfig(0, 8181)
val cfg = new KafkaConfig(props)
assertEquals(24 * 7 * 60L * 60L * 1000L, cfg.logRetentionTimeMillis)
}
@Test
def testLogRetentionTimeBothMinutesAndHoursProvided() {
val props = TestUtils.createBrokerConfig(0, 8181)
props.put("log.retention.minutes", "30")
props.put("log.retention.hours", "1")
val cfg = new KafkaConfig(props)
    assertEquals(30 * 60L * 1000L, cfg.logRetentionTimeMillis)
}
@Test
def testAdvertiseDefaults() {
val port = 9999
val hostName = "fake-host"
val props = TestUtils.createBrokerConfig(0, port)
props.put("host.name", hostName)
val serverConfig = new KafkaConfig(props)
assertEquals(serverConfig.advertisedHostName, hostName)
assertEquals(serverConfig.advertisedPort, port)
}
@Test
def testAdvertiseConfigured() {
val port = 9999
val advertisedHostName = "routable-host"
val advertisedPort = 1234
val props = TestUtils.createBrokerConfig(0, port)
props.put("advertised.host.name", advertisedHostName)
props.put("advertised.port", advertisedPort.toString)
val serverConfig = new KafkaConfig(props)
assertEquals(serverConfig.advertisedHostName, advertisedHostName)
assertEquals(serverConfig.advertisedPort, advertisedPort)
}
@Test
def testUncleanLeaderElectionDefault() {
val props = TestUtils.createBrokerConfig(0, 8181)
val serverConfig = new KafkaConfig(props)
assertEquals(serverConfig.uncleanLeaderElectionEnable, true)
}
@Test
def testUncleanElectionDisabled() {
val props = TestUtils.createBrokerConfig(0, 8181)
props.put("unclean.leader.election.enable", String.valueOf(false))
val serverConfig = new KafkaConfig(props)
assertEquals(serverConfig.uncleanLeaderElectionEnable, false)
}
@Test
def testUncleanElectionEnabled() {
val props = TestUtils.createBrokerConfig(0, 8181)
props.put("unclean.leader.election.enable", String.valueOf(true))
val serverConfig = new KafkaConfig(props)
assertEquals(serverConfig.uncleanLeaderElectionEnable, true)
}
@Test
def testUncleanElectionInvalid() {
val props = TestUtils.createBrokerConfig(0, 8181)
props.put("unclean.leader.election.enable", "invalid")
intercept[IllegalArgumentException] {
new KafkaConfig(props)
}
}
}
|
stealthly/kafka
|
core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala
|
Scala
|
apache-2.0
| 3,960
|
package spire
package object syntax {
object cfor extends CforSyntax
object literals extends LiteralsSyntax
object eq extends EqSyntax
object partialOrder extends PartialOrderSyntax
object order extends OrderSyntax
object signed extends SignedSyntax
object truncatedDivision extends TruncatedDivisionSyntax
object involution extends InvolutionSyntax
object isReal extends IsRealSyntax
object convertableFrom extends ConvertableFromSyntax
object semigroupoid extends SemigroupoidSyntax
object groupoid extends GroupoidSyntax
object semigroup extends SemigroupSyntax
object monoid extends MonoidSyntax
object group extends GroupSyntax
object additiveSemigroup extends AdditiveSemigroupSyntax
object additiveMonoid extends AdditiveMonoidSyntax
object additiveGroup extends AdditiveGroupSyntax
object multiplicativeSemigroup extends MultiplicativeSemigroupSyntax
object multiplicativeMonoid extends MultiplicativeMonoidSyntax
object multiplicativeGroup extends MultiplicativeGroupSyntax
object semiring extends SemiringSyntax
object rig extends RigSyntax
object rng extends RngSyntax
object ring extends RingSyntax
object gcdRing extends GCDRingSyntax
object euclideanRing extends EuclideanRingSyntax
object field extends FieldSyntax
object nroot extends NRootSyntax
object trig extends TrigSyntax
object leftModule extends LeftModuleSyntax
object rightModule extends RightModuleSyntax
object cModule extends CModuleSyntax
object vectorSpace extends VectorSpaceSyntax
object metricSpace extends MetricSpaceSyntax
object normedVectorSpace extends NormedVectorSpaceSyntax
object innerProductSpace extends InnerProductSpaceSyntax
object coordinateSpace extends CoordinateSpaceSyntax
object lattice extends LatticeSyntax
object heyting extends HeytingSyntax
object bool extends BoolSyntax
object bitString extends BitStringSyntax
object partialAction extends PartialActionSyntax
object action extends ActionSyntax
object torsor extends TorsorSyntax
object integral extends IntegralSyntax
object fractional extends FractionalSyntax
object numeric extends NumericSyntax
object all extends AllSyntax
object unbound extends UnboundSyntax
object interval extends IntervalSyntax
}
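// Usage sketch: each object above simply mixes in the corresponding *Syntax
// trait, so importing a member object's contents enables that family of
// implicit operators, e.g.
//   import spire.syntax.order._   // <, >, compare, ... for any Order[A]
//   import spire.syntax.ring._    // +, -, * for any Ring[A]
//   import spire.syntax.all._     // everything at once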
|
adampingel/spire
|
core/src/main/scala/spire/syntax/package.scala
|
Scala
|
mit
| 2,285
|
package api
import com.google.inject.AbstractModule
import com.google.inject.multibindings.Multibinder
import org.reflections.Reflections
import play.twirl.api.TemplateMagic.javaCollectionToScala
import play.api.Logger
class IntegrationsBindingModule extends AbstractModule {
override def configure(): Unit = {
val b = Multibinder.newSetBinder(binder, classOf[Integration])
val classes = new Reflections().getSubTypesOf(classOf[Integration])
    classes.toSeq.foreach { c =>
if (c.isAnnotationPresent(classOf[javax.inject.Singleton])) {
Logger.debug(s"Binding an integration: ${c.getCanonicalName}")
b.addBinding().to(c)
}
}
}
}
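// Binding sketch (hypothetical class, for illustration): any Integration
// subtype annotated with @Singleton is picked up reflectively and added to
// the multibound set:
//
//   @javax.inject.Singleton
//   class SlackIntegration extends Integration { /* ... */ }
//
// Consumers then inject the whole set, e.g.
//   class Hub @Inject() (integrations: java.util.Set[Integration])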
|
JetChat/JetChat
|
app/api/IntegrationsBindingModule.scala
|
Scala
|
apache-2.0
| 696
|
package com.twitter.finagle.http.filter
import com.twitter.finagle.{Service, SimpleFilter}
import com.twitter.finagle.http.{Request, Response, Status}
import com.twitter.util.Future
/**
* Validate request filter:
* 400 Bad Request if the parameters are invalid.
*/
@deprecated("Being removed due to its limited utility", "2017-01-11")
class ValidateRequestFilter[REQUEST <: Request]
extends SimpleFilter[REQUEST, Response] {
def apply(request: REQUEST, service: Service[REQUEST, Response]): Future[Response] = {
if (request.params.isValid) service(request)
else Future.value(Response(request.version, Status.BadRequest))
}
}
@deprecated("Being removed due to its limited utility", "2017-01-11")
object ValidateRequestFilter extends ValidateRequestFilter[Request]
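// Usage sketch: compose the filter in front of a service (`myService` is
// illustrative):
//   val guarded: Service[Request, Response] =
//     ValidateRequestFilter.andThen(myService)
// Requests with invalid query parameters get a 400 before reaching myService.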
|
spockz/finagle
|
finagle-http/src/main/scala/com/twitter/finagle/http/filter/ValidateRequestFilter.scala
|
Scala
|
apache-2.0
| 787