| code (string, length 5–1M) | repo_name (string, length 5–109) | path (string, length 6–208) | language (string, 1 class) | license (string, 15 classes) | size (int64, 5–1M) |
|---|---|---|---|---|---|
import collection.mutable.{Stack, StackProxy}
class A extends StackProxy[Int] {
val self = Stack[Int]()
}
object Test {
def main(args: Array[String]) {
val a = new A
a push 3
a push 4
a push 5
a.push(6, 7, 8)
println(a)
a.pop
}
}
| felixmulder/scala | test/files/run/t5428.scala | Scala | bsd-3-clause | 278 |
/*
Lodo is a layered to-do list (Outliner)
Copyright (C) 2015 Keith Morrow.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License v3 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package lodo
import java.util.UUID
import japgolly.scalajs.react.ReactComponentB
import japgolly.scalajs.react.vdom.prefix_<^._
object Sidebar {
case class Props(b: Dashboard.Backend, itemMap: ItemMap,
selectedNotebook: Option[UUID] = None,
isAdding: Boolean = false,
isSidebarShown: Boolean = false,
isCompleteHidden: Boolean = false,
isQuickAdd: Boolean = false)
val sidebar = ReactComponentB[Props]("Sidebar")
.render({ P =>
<.div(^.cls := "container-fluid",
<.div(^.cls := "row",
<.div(^.classSet1("col-sm-4 col-md-3 sidebar", ("sidebar-shown", P.isSidebarShown)),
NotebookSelector(NotebookSelector.Props(P.b, P.itemMap, P.selectedNotebook, P.isAdding, P.isCompleteHidden, P.isQuickAdd))
)
)
)
}).build
def apply(props: Props) = sidebar(props)
}
| k3d3/lodo | lodo/js/src/main/scala/components/Sidebar.scala | Scala | agpl-3.0 | 1,569 |
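A hedged sketch of how a caller might construct the component's `Props`; `backend` and `items` are hypothetical values supplied by the surrounding dashboard, and mounting is omitted:

```scala
// Hypothetical call site; backend: Dashboard.Backend and items: ItemMap
// are assumed to be provided elsewhere.
val props = Sidebar.Props(
  b = backend,
  itemMap = items,
  isSidebarShown = true // remaining flags keep their defaults
)
val vdom = Sidebar(props) // apply() delegates to the built ReactComponentB
```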
package io.protoless.messages.encoders
import scala.annotation.implicitNotFound
import shapeless.HList
import io.protoless.messages.Encoder
@implicitNotFound("No CustomMappingEncoder found for type ${A} and ${L}.")
@annotation.inductive
trait CustomMappingEncoder[A, L <: HList] extends Encoder[A]
/**
* Utilities for [[CustomMappingEncoder]]
*/
object CustomMappingEncoder {
def apply[A, L <: HList](implicit instance: CustomMappingEncoder[A, L]): CustomMappingEncoder[A, L] = instance
}
| julien-lafont/protoless | modules/core/src/main/scala/io/protoless/messages/encoders/CustomMappingEncoder.scala | Scala | apache-2.0 | 508 |
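The `apply` method above is the standard "summoner" idiom for typeclass-style traits: it resolves the implicit instance without the caller having to name it. A self-contained sketch of the same idiom with a made-up `Show` typeclass (not part of protoless):

```scala
trait Show[A] { def show(a: A): String }

object Show {
  // Summoner: Show[Int] materializes the implicit instance in scope.
  def apply[A](implicit instance: Show[A]): Show[A] = instance

  implicit val intShow: Show[Int] = (a: Int) => a.toString
}

val rendered = Show[Int].show(42) // "42"
```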
package com.twitter.finatra.http.tests
import com.twitter.finatra.test.LocalFilesystemTestUtils.{createFile, writeStringToFile}
/** Tests when the mustache template directory is different from the local doc root directory */
class LocalTemplateRootTest extends AbstractTemplateRootTest {
def templateRootDirectory(baseFolderName: String): String =
s"${baseFolderName}src/main/resources/templates"
/* same as the root */
def fullTemplatePath(baseFolderName: String): String = templateRootDirectory(baseFolderName)
def setup(baseFolderName: String): Unit = {
val templateRootDirectory: String = s"${baseFolderName}src/main/resources/templates"
// create src/main/resources/templates directory and add files
val templates = createFile(templateRootDirectory)
writeStringToFile(createFile(templates, "testuser.mustache"), testUserMustacheString)
writeStringToFile(
createFile(templates, "testuser2.mustache"),
testUser2MustacheString
)
writeStringToFile(createFile(templates, "testHtml.mustache"), testHtmlMustacheString)
}
}
| twitter/finatra | http-mustache/src/test/scala/com/twitter/finatra/http/tests/LocalTemplateRootTest.scala | Scala | apache-2.0 | 1,081 |
package org.phillipgreenii.codedependencytracker
class DummyEntityRelationshipExtractor extends EntityRelationshipExtractor[FileEntity] {
def apply(fileEntity: FileEntity): List[Relationship[FileEntity, _]] = {
List(new Relationship[FileEntity, UriEntity](fileEntity.asReference(), new EntityReference[UriEntity](fileEntity.source.mkString), "likes"))
}
}
| phillipgreenii/code-dependency-tracker | src/it/scala/org/phillipgreenii/codedependencytracker/DummyEntityRelationshipExtractor.scala | Scala | mit | 366 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.raster.iterators
import org.apache.accumulo.core.data._
import org.apache.accumulo.core.iterators.{IteratorEnvironment, SortedKeyValueIterator}
/**
* This is an Index Only Iterator, to be used in situations where the data records are
* not useful enough to pay the penalty of decoding when using the
* SpatioTemporalIntersectingIterator.
*
* This iterator returns as its nextKey the key for the index. nextValue is
* the value for the INDEX, mapped into a SimpleFeature
*/
class IndexIterator
extends GeomesaFilteringIterator
with HasFeatureType
with SetTopIndexUnique
with SetTopIndexFilterUnique
with SetTopIndexTransformUnique
with SetTopIndexFilterTransformUnique {
var setTopOptimized: (Key) => Unit = null
override def init(source: SortedKeyValueIterator[Key, Value],
options: java.util.Map[String, String],
env: IteratorEnvironment) = {
super.init(source, options, env)
initFeatureType(options)
init(featureType, options)
// pick the execution path once based on the filters and transforms we need to apply
// see org.locationtech.geomesa.core.iterators.IteratorFunctions
setTopOptimized = (stFilter, transform, checkUniqueId) match {
case (null, null, null) => setTopIndexInclude
case (null, null, _) => setTopIndexUnique
case (_, null, null) => setTopIndexFilter
case (_, null, _) => setTopIndexFilterUnique
case (null, _, null) => setTopIndexTransform
case (null, _, _) => setTopIndexTransformUnique
case (_, _, null) => setTopIndexFilterTransform
case (_, _, _) => setTopIndexFilterTransformUnique
}
}
override def setTopConditionally(): Unit = setTopOptimized(source.getTopKey)
}
| nagavallia/geomesa | geomesa-accumulo/geomesa-accumulo-raster/src/main/scala/org/locationtech/geomesa/raster/iterators/IndexIterator.scala | Scala | apache-2.0 | 2,283 |
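`init` above selects one execution path up front instead of re-testing `stFilter` and `transform` for every key the iterator visits. A self-contained sketch of that dispatch idiom with simplified types (not the GeoMesa API):

```scala
// Choose a handler once, based on which optional stages are configured;
// per-key work then involves no further null/Option checks.
def pickHandler(filter: Option[String => Boolean],
                transform: Option[String => String]): String => Unit =
  (filter, transform) match {
    case (None, None)       => k => println(k)
    case (Some(f), None)    => k => if (f(k)) println(k)
    case (None, Some(t))    => k => println(t(k))
    case (Some(f), Some(t)) => k => if (f(k)) println(t(k))
  }

val handle = pickHandler(Some(_.startsWith("idx")), None)
handle("idx-001") // printed; configuration was inspected only once
```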
package com.outr.arango.api
import com.outr.arango.api.model._
import io.youi.client.HttpClient
import io.youi.http.HttpMethod
import io.youi.net._
import io.circe.Json
import scala.concurrent.{ExecutionContext, Future}
object APIViewViewNameRename {
def put(client: HttpClient, viewName: String)(implicit ec: ExecutionContext): Future[Json] = client
.method(HttpMethod.Put)
.path(path"/_api/view/{view-name}/rename".withArguments(Map("view-name" -> viewName)), append = true)
.call[Json]
}
| outr/arangodb-scala | api/src/main/scala/com/outr/arango/api/APIViewViewNameRename.scala | Scala | mit | 513 |
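A hedged usage sketch; `client` is assumed to be an already-configured `io.youi.client.HttpClient` pointing at an ArangoDB instance:

```scala
import scala.concurrent.{ExecutionContext, Future}
import io.circe.Json

implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.global

// Issues PUT /_api/view/oldName/rename and yields the raw JSON response.
val renamed: Future[Json] = APIViewViewNameRename.put(client, viewName = "oldName")
```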
/*
* Copyright (C) 2012 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.method.evolution
import fr.iscpif.mgo._
import org.openmole.core.workflow.builder.TaskBuilder
import org.openmole.core.workflow.data._
import org.openmole.core.workflow.domain._
import org.openmole.core.workflow.sampling._
import org.openmole.core.workflow.task._
object ScalingGAPopulationTask {
def apply(evolution: GAAlgorithm)(
population: Prototype[Population[evolution.G, evolution.P, evolution.F]])(implicit plugins: PluginSet) = {
val (_evolution, _population) = (evolution, population)
new TaskBuilder { builder ⇒
addInput(population)
evolution.inputsPrototypes foreach { i ⇒ addOutput(i.toArray) }
evolution.outputPrototypes foreach { o ⇒ addOutput(o.toArray) }
def toTask = new ScalingGAPopulationTask with Built {
val evolution = _evolution
val population = _population.asInstanceOf[Prototype[Population[evolution.G, evolution.P, evolution.F]]]
}
}
}
}
sealed abstract class ScalingGAPopulationTask extends Task {
val evolution: GAAlgorithm
val population: Prototype[Population[evolution.G, evolution.P, evolution.F]]
override def process(context: Context) =
evolution.toVariables(context(population), context)
}
| ISCPIF/PSEExperiments | openmole-src/openmole/plugins/org.openmole.plugin.method.evolution/src/main/scala/org/openmole/plugin/method/evolution/ScalingGAPopulationTask.scala | Scala | agpl-3.0 | 2,024 |
package org.jetbrains.plugins.scala.testingSupport.scalatest.scala2_11.scalatest3_0_1
import org.jetbrains.plugins.scala.testingSupport.scalatest.ScalaTestSelectedTests
class Scalatest2_11_3_0_1_SelectedTestsTest extends Scalatest2_11_3_0_1_Base with ScalaTestSelectedTests
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/testingSupport/scalatest/scala2_11/scalatest3_0_1/Scalatest2_11_3_0_1_SelectedTestsTest.scala | Scala | apache-2.0 | 276 |
/*
* Copyright (c) 2017. Yuriy Stul
*/
package com.stulsoft.ysps.pvariance
/** Playing with upper bound.
*
* An upper type bound T <: A declares that type variable T refers to a subtype of type A.
*
* @see [[http://docs.scala-lang.org/tutorials/tour/upper-type-bounds.html Upper Type Bounds]]
* @author Yuriy Stul
*/
object OfficeUpperBound extends App {
test1()
def test1(): Unit = {
println("==>test1")
val workers = Seq(
new OfficeWorker[YoungWorker](new YoungWorker("young1")),
new OfficeWorker[MiddleWorker](new MiddleWorker("middle1")),
new OfficeWorker[SeniorWorker](new SeniorWorker("senior1"))
)
for (worker <- workers) worker.doWork()
println("<==test1")
}
def test2(): Unit = {
println("==>test2")
val workers = Seq(
new OfficeWorker[YoungWorker](new YoungWorker("young21")),
new OfficeWorker[YoungWorker](new MiddleWorker("middle21")),
new OfficeWorker[YoungWorker](new SeniorWorker("senior21")),
// Error new OfficeWorker[MiddleWorker](new YoungWorker("young22")),
new OfficeWorker[MiddleWorker](new MiddleWorker("middle22")),
new OfficeWorker[MiddleWorker](new SeniorWorker("senior22")),
// Error new OfficeWorker[SeniorWorker](new YoungWorker("young23")),
// Error new OfficeWorker[SeniorWorker](new MiddleWorker("middle23")),
new OfficeWorker[SeniorWorker](new SeniorWorker("senior2"))
)
for (worker <- workers) worker.doWork()
println("<==test2")
}
trait Worker {
def name: String
def order: String
}
class YoungWorker(workerName: String) extends Worker {
override def name: String = workerName
override def order: String = "I did it!"
}
class MiddleWorker(workerName: String) extends YoungWorker(workerName) {
override def order: String = "I'll try"
}
class SeniorWorker(workerName: String) extends MiddleWorker(workerName) {
override def order: String = "Everything will be done"
}
/**
 * Class with an upper type bound.
 *
 * @param worker a worker
 * @tparam T the worker type; may be any subtype of Worker
 */
class OfficeWorker[T <: Worker](worker: T) {
def doWork(): Unit = {
println(s"${worker.name} says ${worker.order} ")
}
}
}
| ysden123/ysps | src/main/scala/com/stulsoft/ysps/pvariance/OfficeUpperBound.scala | Scala | mit | 2,264 |
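To underline what the upper bound buys: the compiler rejects any type argument that is not a subtype of `Worker`. A short sketch reusing the classes above:

```scala
import com.stulsoft.ysps.pvariance.OfficeUpperBound._

// Fine: SeniorWorker <: YoungWorker <: Worker.
val ok = new OfficeWorker[YoungWorker](new SeniorWorker("s"))

// Does not compile: String is not a subtype of Worker.
// val bad = new OfficeWorker[String]("oops")
```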
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.executionplan.builders
abstract sealed class QueryToken[T](val token: T) {
def solved: Boolean
def unsolved = !solved
def solve: QueryToken[T] = Solved(token)
def map[B](f : T => B):QueryToken[B] = if (solved) Solved(f(token)) else Unsolved(f(token))
}
case class Solved[T](t: T) extends QueryToken[T](t) {
val solved = true
}
case class Unsolved[T](t: T) extends QueryToken[T](t) {
val solved = false
}
| dksaputra/community | cypher/src/main/scala/org/neo4j/cypher/internal/executionplan/builders/QueryToken.scala | Scala | gpl-3.0 | 1,252 |
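An illustrative sketch of the two-state machine these case classes implement (values are hypothetical):

```scala
val token: QueryToken[String] = Unsolved("n.name = 'Neo'")
val solved = token.solve          // Solved("n.name = 'Neo'")
val mapped = solved.map(_.length) // map preserves solved-ness: still Solved
assert(token.unsolved && solved.solved && mapped.solved)
```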
/*
* Copyright (c) 2015-2017 EpiData, Inc.
*/
package models
import com.epidata.lib.models.{ SensorMeasurement => BaseSensorMeasurement, Measurement }
import play.api.Logger
import play.api.libs.json._
import _root_.util.Ordering
import service.{ Configs, KafkaService, DataService }
import scala.collection.convert.WrapAsScala
import java.util.Date
import scala.language.implicitConversions
object SensorMeasurement {
import com.epidata.lib.models.SensorMeasurement._
val name: String = "SensorMeasurement"
private def keyForMeasurementTopic(measurement: BaseSensorMeasurement): String = {
val key =
s"""
|${measurement.customer}${DataService.Delim}
|${measurement.customer_site}${DataService.Delim}
|${measurement.collection}${DataService.Delim}
|${measurement.dataset}${DataService.Delim}
|${measurement.epoch}
""".stripMargin
DataService.getMd5(key)
}
/**
 * Insert a sensor measurement into the database.
 * @param sensorMeasurement The SensorMeasurement to insert.
 */
def insert(sensorMeasurement: BaseSensorMeasurement) = MeasurementService.insert(sensorMeasurement)
def insert(sensorMeasurementList: List[BaseSensorMeasurement]) = MeasurementService.bulkInsert(sensorMeasurementList)
def insertRecordFromKafka(str: String) = {
BaseSensorMeasurement.jsonToSensorMeasurement(str) match {
case Some(sensorMeasurement) => insert(sensorMeasurement)
case _ => Logger.error("Bad json format!")
}
}
/**
 * Insert a measurement into Kafka.
 * @param sensorMeasurement The Measurement to insert.
 */
def insertToKafka(sensorMeasurement: BaseSensorMeasurement): Unit = {
val key = keyForMeasurementTopic(sensorMeasurement)
val value = BaseSensorMeasurement.toJson(sensorMeasurement)
KafkaService.sendMessage(Measurement.KafkaTopic, key, value)
}
def insertToKafka(sensorMeasurementList: List[BaseSensorMeasurement]): Unit = {
sensorMeasurementList.foreach(m => insertToKafka(m))
if (Configs.twoWaysIngestion) {
insert(sensorMeasurementList)
}
}
/**
* Find sensor measurements in the database matching the specified parameters.
* @param company
* @param site
* @param station
* @param sensor
* @param beginTime Beginning of query time interval, inclusive
* @param endTime End of query time interval, exclusive
* @param ordering Timestamp ordering of results, if specified.
*/
@Deprecated
def find(
company: String,
site: String,
station: String,
sensor: String,
beginTime: Date,
endTime: Date,
ordering: Ordering.Value,
tableName: String = com.epidata.lib.models.Measurement.DBTableName
): List[BaseSensorMeasurement] = MeasurementService.find(company, site, station, sensor, beginTime, endTime, ordering, tableName)
.map(measurementToSensorMeasurement)
/** Convert a list of SensorMeasurement to a json representation. */
def toJson(sensorMeasurements: List[BaseSensorMeasurement]): String = BaseSensorMeasurement.toJson(sensorMeasurements)
}
| epidataio/epidata-community | play/app/models/SensorMeasurement.scala | Scala | apache-2.0 | 3,099 |
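`keyForMeasurementTopic` hashes the identifying fields so all measurements of one series map to the same Kafka partition key. A self-contained sketch of the idea using the plain JDK `MessageDigest` (not the project's `DataService` helper):

```scala
import java.security.MessageDigest

def md5Key(fields: String*): String =
  MessageDigest.getInstance("MD5")
    .digest(fields.mkString("|").getBytes("UTF-8"))
    .map("%02x".format(_))
    .mkString

// Same series => same key => same partition.
md5Key("acme", "site-1", "collection", "dataset", "1533623651")
```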
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.entity.test
import akka.http.scaladsl.model.ContentTypes
import common.StreamLogging
import spray.json._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import org.apache.openwhisk.core.WhiskConfig
import org.apache.openwhisk.core.entity.Attachments.{Attached, Inline}
import org.apache.openwhisk.core.entity.ExecManifest.ImageName
import org.apache.openwhisk.core.entity.{
BlackBoxExec,
CodeExecAsAttachment,
CodeExecAsString,
Exec,
ExecManifest,
WhiskAction
}
import scala.collection.mutable
@RunWith(classOf[JUnitRunner])
class ExecTests extends FlatSpec with Matchers with StreamLogging with BeforeAndAfterAll {
behavior of "exec deserialization"
val config = new WhiskConfig(ExecManifest.requiredProperties)
ExecManifest.initialize(config)
override protected def afterAll(): Unit = {
ExecManifest.initialize(config)
super.afterAll()
}
it should "read existing code string as attachment" in {
val json = """{
| "name": "action_tests_name2",
| "_id": "anon-Yzycx8QnIYDp3Tby0Fnj23KcMtH/action_tests_name2",
| "publish": false,
| "annotations": [],
| "version": "0.0.1",
| "updated": 1533623651650,
| "entityType": "action",
| "exec": {
| "kind": "nodejs:10",
| "code": "foo",
| "binary": false
| },
| "parameters": [
| {
| "key": "x",
| "value": "b"
| }
| ],
| "limits": {
| "timeout": 60000,
| "memory": 256,
| "logs": 10
| },
| "namespace": "anon-Yzycx8QnIYDp3Tby0Fnj23KcMtH"
|}""".stripMargin.parseJson.asJsObject
val action = WhiskAction.serdes.read(json)
action.exec should matchPattern { case CodeExecAsAttachment(_, Inline("foo"), None, false) => }
}
it should "properly determine binary property" in {
val j1 = """{
| "kind": "nodejs:10",
| "code": "SGVsbG8gT3BlbldoaXNr",
| "binary": false
|}""".stripMargin.parseJson.asJsObject
Exec.serdes.read(j1) should matchPattern {
case CodeExecAsAttachment(_, Inline("SGVsbG8gT3BlbldoaXNr"), None, true) =>
}
val j2 = """{
| "kind": "nodejs:10",
| "code": "while (true)",
| "binary": false
|}""".stripMargin.parseJson.asJsObject
Exec.serdes.read(j2) should matchPattern {
case CodeExecAsAttachment(_, Inline("while (true)"), None, false) =>
}
// "binary" is absent here, so it is inferred from the code content ("while (true)" is not base64, hence false)
val j3 = """{
| "kind": "nodejs:10",
| "code": "while (true)"
|}""".stripMargin.parseJson.asJsObject
Exec.serdes.read(j3) should matchPattern {
case CodeExecAsAttachment(_, Inline("while (true)"), None, false) =>
}
}
it should "read code stored as attachment" in {
val json = """{
| "kind": "java:8",
| "code": {
| "attachmentName": "foo:bar",
| "attachmentType": "application/java-archive",
| "length": 32768,
| "digest": "sha256-foo"
| },
| "binary": true,
| "main": "hello"
|}""".stripMargin.parseJson.asJsObject
Exec.serdes.read(json) should matchPattern {
case CodeExecAsAttachment(_, Attached("foo:bar", _, Some(32768), Some("sha256-foo")), Some("hello"), true) =>
}
}
it should "read code stored as jar property" in {
val j1 = """{
| "kind": "nodejs:10",
| "jar": "SGVsbG8gT3BlbldoaXNr",
| "binary": false
|}""".stripMargin.parseJson.asJsObject
Exec.serdes.read(j1) should matchPattern {
case CodeExecAsAttachment(_, Inline("SGVsbG8gT3BlbldoaXNr"), None, true) =>
}
}
it should "read existing code string as string with old manifest" in {
val oldManifestJson =
"""{
| "runtimes": {
| "nodejs": [
| {
| "kind": "nodejs:10",
| "default": true,
| "image": {
| "prefix": "openwhisk",
| "name": "nodejs6action",
| "tag": "latest"
| },
| "deprecated": false,
| "stemCells": [{
| "initialCount": 2,
| "memory": "256 MB"
| }]
| }
| ]
| }
|}""".stripMargin.parseJson.compactPrint
val oldConfig =
new TestConfig(Map(WhiskConfig.runtimesManifest -> oldManifestJson), ExecManifest.requiredProperties)
ExecManifest.initialize(oldConfig)
val j1 = """{
| "kind": "nodejs:10",
| "code": "SGVsbG8gT3BlbldoaXNr",
| "binary": false
|}""".stripMargin.parseJson.asJsObject
Exec.serdes.read(j1) should matchPattern {
case CodeExecAsString(_, "SGVsbG8gT3BlbldoaXNr", None) =>
}
//Reset config back
ExecManifest.initialize(config)
}
behavior of "blackbox exec deserialization"
it should "read existing code string as attachment" in {
val json = """{
| "name": "action_tests_name2",
| "_id": "anon-Yzycx8QnIYDp3Tby0Fnj23KcMtH/action_tests_name2",
| "publish": false,
| "annotations": [],
| "version": "0.0.1",
| "updated": 1533623651650,
| "entityType": "action",
| "exec": {
| "kind": "blackbox",
| "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
| "code": "foo",
| "binary": false
| },
| "parameters": [
| {
| "key": "x",
| "value": "b"
| }
| ],
| "limits": {
| "timeout": 60000,
| "memory": 256,
| "logs": 10
| },
| "namespace": "anon-Yzycx8QnIYDp3Tby0Fnj23KcMtH"
|}""".stripMargin.parseJson.asJsObject
val action = WhiskAction.serdes.read(json)
action.exec should matchPattern { case BlackBoxExec(_, Some(Inline("foo")), None, false, false) => }
}
it should "properly determine binary property" in {
val j1 = """{
| "kind": "blackbox",
| "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
| "code": "SGVsbG8gT3BlbldoaXNr",
| "binary": false
|}""".stripMargin.parseJson.asJsObject
Exec.serdes.read(j1) should matchPattern {
case BlackBoxExec(_, Some(Inline("SGVsbG8gT3BlbldoaXNr")), None, false, true) =>
}
val j2 = """{
| "kind": "blackbox",
| "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
| "code": "while (true)",
| "binary": false
|}""".stripMargin.parseJson.asJsObject
Exec.serdes.read(j2) should matchPattern {
case BlackBoxExec(_, Some(Inline("while (true)")), None, false, false) =>
}
//Empty code should resolve as None
val j3 = """{
| "kind": "blackbox",
| "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
| "code": " "
|}""".stripMargin.parseJson.asJsObject
Exec.serdes.read(j3) should matchPattern {
case BlackBoxExec(_, None, None, false, false) =>
}
val j4 = """{
| "kind": "blackbox",
| "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
| "code": {
| "attachmentName": "foo:bar",
| "attachmentType": "application/octet-stream",
| "length": 32768,
| "digest": "sha256-foo"
| },
| "binary": true,
| "main": "hello"
|}""".stripMargin.parseJson.asJsObject
Exec.serdes.read(j4) should matchPattern {
case BlackBoxExec(_, Some(Attached("foo:bar", _, Some(32768), Some("sha256-foo"))), Some("hello"), false, true) =>
}
}
behavior of "blackbox exec serialization"
it should "serialize with inline attachment" in {
val bb = BlackBoxExec(
ImageName.fromString("docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1").get,
Some(Inline("foo")),
None,
false,
false)
val js = Exec.serdes.write(bb)
val js2 = """{
| "kind": "blackbox",
| "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
| "binary": false,
| "code": "foo"
|}""".stripMargin.parseJson.asJsObject
js shouldBe js2
}
it should "serialize with attached attachment" in {
val bb = BlackBoxExec(
ImageName.fromString("docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1").get,
Some(Attached("foo", ContentTypes.`application/octet-stream`, Some(42), Some("sha1-42"))),
None,
false,
true)
val js = Exec.serdes.write(bb)
val js2 = """{
| "kind": "blackbox",
| "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
| "binary": true,
| "code": {
| "attachmentName": "foo",
| "attachmentType": "application/octet-stream",
| "length": 42,
| "digest": "sha1-42"
| }
|}""".stripMargin.parseJson.asJsObject
js shouldBe js2
}
private class TestConfig(val props: Map[String, String], requiredProperties: Map[String, String])
extends WhiskConfig(requiredProperties) {
override protected def getProperties() = mutable.Map(props.toSeq: _*)
}
}
| jeremiaswerner/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/entity/test/ExecTests.scala | Scala | apache-2.0 | 11,292 |
/**
*
*/
package plainer.items
/**
* @author eddie
*
*/
object SetCreator {
  // `args: _*` expands the varargs into individual elements;
  // `Set(args)` would create a single-element Set containing the Seq itself.
  def getMutable(args: String*) = scala.collection.mutable.Set(args: _*)
  def getImmutable(args: String*) = scala.collection.immutable.Set(args: _*)
}
| zeroed/plainer.scala | src/plainer/items/SetCreator.scala | Scala | gpl-3.0 | 229 |
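The `: _*` ascription above matters: without it the whole varargs `Seq` becomes a single set element. A quick sketch of the difference:

```scala
val args = Seq("a", "b")
Set(args)     // Set(Seq("a", "b")) -- one element: the Seq itself
Set(args: _*) // Set("a", "b")      -- the elements, as intended
```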
/*
* Copyright 2015 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package repository
import java.sql._
import java.text.SimpleDateFormat
import java.util.UUID
import javax.sql.DataSource
import scala.util.{ Failure, Success, Try }
import scala.concurrent._
object JdbcConnection {
type Seq[+A] = scala.collection.immutable.Seq[A]
val Seq = scala.collection.immutable.Seq
type List[+A] = scala.collection.immutable.List[A]
val List = scala.collection.immutable.List
type Vector[+A] = scala.collection.immutable.Vector[A]
val Vector = scala.collection.immutable.Vector
implicit class RowData(rs: ResultSet) {
def apply(columnNumber: Int): Any = rs.getObject(columnNumber)
def apply(columnName: String): Any = rs.getObject(columnName)
def toIterator[E](rowMapper: ResultSet ⇒ E): Iterator[E] = new Iterator[E] {
  override def hasNext: Boolean = rs.next()
  override def next(): E = rowMapper(rs)
}
}
implicit class DateFormatter(date: java.sql.Date) {
def print: String = new SimpleDateFormat("yyyy-MM-dd").format(date)
}
implicit class TimeFormatter(time: java.sql.Time) {
def print: String = new SimpleDateFormat("HH:mm:ss.SSS").format(time)
}
implicit class TimestampFormatter(timestamp: java.sql.Timestamp) {
def print: String = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS").format(timestamp)
}
implicit class NullOption(v: Try[String]) {
def toOpt: Option[String] = v.toOption.find(_ != "null")
}
implicit class PimpedRowData(row: ResultSet) {
def str(name: String): String = Option(row(name).asInstanceOf[String]).map(_.trim).orNull
def str(index: Int): String = Option(row(index).asInstanceOf[String]).map(_.trim).orNull
def strOpt(name: String): Option[String] = Option(str(name))
def strOpt(index: Int): Option[String] = Option(str(index))
def int(name: String): Int = row(name).asInstanceOf[Int]
def int(index: Int): Int = row(index).asInstanceOf[Int]
def intOpt(name: String): Option[Int] = Try(int(name)).toOption
def intOpt(index: Int): Option[Int] = Try(int(index)).toOption
def long(name: String): Long = row(name).asInstanceOf[Long]
def long(index: Int): Long = row(index).asInstanceOf[Long]
def longOpt(name: String): Option[Long] = Try(long(name)).toOption
def longOpt(index: Int): Option[Long] = Try(long(index)).toOption
def float(name: String): Float = row(name).asInstanceOf[Float]
def float(index: Int): Float = row(index).asInstanceOf[Float]
def floatOpt(name: String): Option[Float] = Try(float(name)).toOption
def floatOpt(index: Int): Option[Float] = Try(float(index)).toOption
def double(name: String): Double = row(name).asInstanceOf[Double]
def double(index: Int): Double = row(index).asInstanceOf[Double]
def doubleOpt(index: Int): Option[Double] = Try(double(index)).toOption
def doubleOpt(name: String): Option[Double] = Try(double(name)).toOption
def bi(name: String): BigInt = row(name).asInstanceOf[BigInt]
def bi(index: Int): BigInt = row(index).asInstanceOf[BigInt]
def biOpt(name: String): Option[BigInt] = Try(bi(name)).toOption
def biOpt(index: Int): Option[BigInt] = Try(bi(index)).toOption
def bd(name: String): BigDecimal = BigDecimal(row(name).asInstanceOf[java.math.BigDecimal])
def bd(index: Int): BigDecimal = BigDecimal(row(index).asInstanceOf[java.math.BigDecimal])
def bdOpt(name: String): Option[BigDecimal] = Option(bd(name))
def bdOpt(index: Int): Option[BigDecimal] = Option(bd(index))
def date(name: String): Date = row(name).asInstanceOf[Date]
def date(index: Int): Date = row(index).asInstanceOf[Date]
def dateOpt(name: String): Option[Date] = Option(date(name))
def dateOpt(index: Int): Option[Date] = Option(date(index))
def dateStr(name: String): String = date(name).print
def dateStr(index: Int): String = date(index).print
def dateStrOpt(name: String): Option[String] = Option(date(name)).map(_.print)
def dateStrOpt(index: Int): Option[String] = Option(date(index)).map(_.print)
def dateTime(name: String): Timestamp = row(name).asInstanceOf[Timestamp]
def dateTime(index: Int): Timestamp = row(index).asInstanceOf[Timestamp]
def dateTimeStr(name: String): String = dateTime(name).print
def dateTimeStr(index: Int): String = dateTime(index).print
def dateTimeStrOpt(name: String): Option[String] = Option(dateTime(name)).map(_.print)
def dateTimeStrOpt(index: Int): Option[String] = Option(dateTime(index)).map(_.print)
def bool(name: String): Boolean = row(name).asInstanceOf[Boolean]
def bool(index: Int): Boolean = row(index).asInstanceOf[Boolean]
def boolOpt(name: String): Option[Boolean] = Try(bool(name)).toOption
def boolOpt(index: Int): Option[Boolean] = Try(bool(index)).toOption
def uuid(name: String): UUID = row(name).asInstanceOf[UUID]
def uuid(index: Int): UUID = row(index).asInstanceOf[UUID]
def uuidOpt(name: String): Option[UUID] = Try(uuid(name)).toOption
def uuidOpt(index: Int): Option[UUID] = Try(uuid(index)).toOption
def uuidStr(name: String): String = uuid(name).toString
def uuidStr(index: Int): String = uuid(index).toString
}
//
// SqlInterpolation
//
case class SqlAndArgs(sql: String, args: Seq[Any]) {
def +(that: SqlAndArgs): SqlAndArgs = {
SqlAndArgs(sql + " " + that.sql, args ++ that.args)
}
def stripMargin: SqlAndArgs = SqlAndArgs(sql.stripMargin, args)
}
// A quote immediately before the interpolation ('$arg) splices the value into the SQL text instead of binding it as a ? parameter
implicit class SqlInterpolationHelper(val sc: StringContext) {
def q(args: Any*): SqlAndArgs = {
var actualArgs: List[Any] = List()
val parts = sc.parts.iterator.toList
val inserts = args.iterator.toList
val pi = parts.zip(inserts)
val sql = pi.foldLeft("")((a: String, b: (String, Any)) ⇒ {
if (b._1.endsWith("'")) {
a + b._1.dropRight(1) + b._2
} else {
if (b._2.isInstanceOf[List[Any]]) {
val list = b._2.asInstanceOf[List[Any]]
actualArgs = list.reverse ++ actualArgs
a + b._1 + ("?," * list.length).dropRight(1)
} else {
actualArgs = b._2 :: actualArgs
a + b._1 + "?"
}
}
})
val extra = if (pi.length < parts.length) parts.reverse.head.toString else ""
SqlAndArgs(sql + extra, actualArgs.reverse)
}
}
}
trait JdbcConnection {
import JdbcConnection._
def dataSource: DataSource
def withConnection[R](f: Connection ⇒ R): Try[R] = blocking {
import resource._
managed[Connection](dataSource.getConnection)
.map { (conn: Connection) ⇒ f(conn) }
.either match {
case Left(cause) ⇒ Failure(cause.head)
case Right(connection) ⇒ Success(connection)
}
}
def withStatement[R](f: Statement ⇒ R): Try[R] =
withConnection[R] { (conn: Connection) ⇒ f(conn.createStatement) }
def withPreparedStatement[R](query: String)(f: PreparedStatement ⇒ R): Try[R] =
withConnection[R] { (conn: Connection) ⇒ f(conn.prepareStatement(query)) }
def withPreparedResultSet[R](query: String, values: Seq[Any])(implicit f: ResultSet ⇒ R): Try[R] =
withPreparedStatement[R](query) { (preparedStatement: PreparedStatement) ⇒
f(withValueInsertion(values, preparedStatement).executeQuery())
}
def withResultSet[R](query: String)(f: ResultSet ⇒ R): Try[R] =
withStatement[R] { (statement: Statement) ⇒ f(statement.executeQuery(query)) }
def queryForList[E](query: String)(implicit rowMapper: ResultSet ⇒ E): Try[Seq[E]] =
withResultSet(query)(_.toIterator(rowMapper).toList)
/**
 * Runs the interpolated query and returns the mapped rows.
 * @param interpolation the SQL and its bind arguments
 * @tparam A the element type produced by the row mapper
 * @return the mapped rows, wrapped in Try
 */
def queryForList[A](interpolation: SqlAndArgs)(implicit rowMapper: ResultSet ⇒ A): Try[Seq[A]] =
queryForList(interpolation.sql, interpolation.args)
def queryForList[E](query: String, values: Seq[Any])(implicit rowMapper: ResultSet ⇒ E): Try[Seq[E]] =
withPreparedResultSet(query, values)(_.toIterator(rowMapper).toList)
def queryForObject[E](query: String)(implicit rowMapper: ResultSet ⇒ E): Try[E] =
withResultSet(query) { rs ⇒
rs.next()
rowMapper(rs)
}
/**
 * Runs the interpolated query and maps the first row of the result.
 * @param interpolation the SQL and its bind arguments
 * @param rowMapper maps the current row to an element
 * @tparam A the element type produced by the row mapper
 * @return the mapped first row, wrapped in Try
 */
def queryForObject[A](interpolation: SqlAndArgs)(implicit rowMapper: ResultSet ⇒ A): Try[A] =
queryForObject(interpolation.sql, interpolation.args)
def queryForObject[E](query: String, values: Seq[Any])(implicit rowMapper: ResultSet ⇒ E): Try[E] =
withPreparedResultSet(query, values) { rs ⇒
rs.next()
rowMapper(rs)
}
def mapSingle[A](interpolation: SqlAndArgs)(implicit rowMapper: ResultSet ⇒ A): Try[Option[A]] =
queryForList(
if (interpolation.sql.contains("LIMIT 1"))
interpolation else interpolation.copy(sql = interpolation.sql + " LIMIT 1")
)(rowMapper).map(_.headOption)
def mapQuery[A](interpolation: SqlAndArgs)(implicit rowMapper: ResultSet ⇒ A): Try[Seq[A]] =
queryForList(interpolation)
def executeQueryId[A](interpolation: SqlAndArgs)(implicit rowMapper: ResultSet ⇒ A): Try[A] =
executeQuery(interpolation.sql, interpolation.args).map { rs ⇒
rs.next()
rowMapper(rs)
}
private def withValueInsertion(values: Seq[Any], preparedStatement: PreparedStatement): PreparedStatement = {
  values.zipWithIndex.map(t ⇒ (t._1, t._2 + 1)).foreach {
    // null is matched first for clarity; a typed pattern like `any: Any` would not match it anyway
    case (null, index) ⇒ preparedStatement.setNull(index, Types.NULL)
    case (int: Int, index) ⇒ preparedStatement.setInt(index, int)
    case (long: Long, index) ⇒ preparedStatement.setLong(index, long)
    case (double: Double, index) ⇒ preparedStatement.setDouble(index, double)
    case (boolean: Boolean, index) ⇒ preparedStatement.setBoolean(index, boolean)
    case (float: Float, index) ⇒ preparedStatement.setFloat(index, float)
    case (byte: Byte, index) ⇒ preparedStatement.setByte(index, byte)
    case (short: Short, index) ⇒ preparedStatement.setShort(index, short)
    case (timestamp: Timestamp, index) ⇒ preparedStatement.setTimestamp(index, timestamp)
    case (date: Date, index) ⇒ preparedStatement.setDate(index, date)
    case (date: java.util.Date, index) ⇒ preparedStatement.setDate(index, new Date(date.getTime))
    case (any, index) ⇒ preparedStatement.setString(index, any.toString)
  }
  preparedStatement
}
preparedStatement
}
/**
* Executes the SQL query in this PreparedStatement object and returns the ResultSet object generated by the query.
* This is used generally for reading the content of the database. The output will be in the form of ResultSet.
* Generally SELECT statement is used.
* @param interpolation
* @return
*/
def executeQuery(interpolation: SqlAndArgs): Try[ResultSet] =
executeQuery(interpolation.sql, interpolation.args)
/**
* Executes the SQL query in this PreparedStatement object and returns the ResultSet object generated by the query.
* This is used generally for reading the content of the database. The output will be in the form of ResultSet.
* Generally SELECT statement is used.
* @param query
* @return
*/
def executeQuery(query: String): Try[ResultSet] =
withStatement[ResultSet] { (statement: Statement) ⇒
statement.executeQuery(query)
}
/**
* Executes the SQL query in this PreparedStatement object and returns the ResultSet object generated by the query.
* This is used generally for reading the content of the database. The output will be in the form of ResultSet.
* Generally SELECT statement is used.
* @param query
* @param values
* @return
*/
def executeQuery(query: String, values: Seq[Any]): Try[ResultSet] =
withPreparedStatement[ResultSet](query) { preparedStatement ⇒
withValueInsertion(values, preparedStatement).executeQuery()
}
/**
* Executes the SQL statement in this PreparedStatement object, which must be an SQL INSERT, UPDATE or DELETE statement;
* or an SQL statement that returns nothing, such as a DDL statement. This is generally used for altering the databases.
* Generally DROP TABLE or DATABASE, INSERT into TABLE, UPDATE TABLE, DELETE from TABLE statements will be used in this.
* The output will be in the form of int. This int value denotes the number of rows affected by the query.
* @param query
* @return
*/
def executeUpdate(query: String): Try[Int] =
withStatement[Int] { (statement: Statement) ⇒
statement.executeUpdate(query)
}
/**
* Executes the SQL statement in this PreparedStatement object, which must be an SQL INSERT, UPDATE or DELETE statement;
* or an SQL statement that returns nothing, such as a DDL statement. This is generally used for altering the databases.
* Generally DROP TABLE or DATABASE, INSERT into TABLE, UPDATE TABLE, DELETE from TABLE statements will be used in this.
* The output will be in the form of int. This int value denotes the number of rows affected by the query.
* @param interpolation
* @return
*/
def executeUpdate(interpolation: SqlAndArgs): Try[Int] =
executeUpdate(interpolation.sql, interpolation.args)
/**
* Executes the SQL statement in this PreparedStatement object, which must be an SQL INSERT, UPDATE or DELETE statement;
* or an SQL statement that returns nothing, such as a DDL statement. This is generally used for altering the databases.
* Generally DROP TABLE or DATABASE, INSERT into TABLE, UPDATE TABLE, DELETE from TABLE statements will be used in this.
* The output will be in the form of int. This int value denotes the number of rows affected by the query.
* @param query
* @param values
* @return
*/
def executeUpdate(query: String, values: Seq[Any]): Try[Int] =
withPreparedStatement[Int](query) { (preparedStatement: PreparedStatement) ⇒
withValueInsertion(values, preparedStatement).executeUpdate()
}
/**
* Executes the SQL statement in this PreparedStatement object, which may be any kind of SQL statement,
* This will return a boolean. TRUE indicates the result is a ResultSet and FALSE indicates
* it has the int value which denotes number of rows affected by the query. It can be used for executing stored procedures.
* @param interpolation
* @return
*/
def execute(interpolation: SqlAndArgs): Try[Boolean] =
execute(interpolation.sql, interpolation.args)
/**
* Executes the SQL statement in this PreparedStatement object, which may be any kind of SQL statement,
* This will return a boolean. TRUE indicates the result is a ResultSet and FALSE indicates
* it has the int value which denotes number of rows affected by the query. It can be used for executing stored procedures.
* @param query
* @param values
* @return
*/
def execute(query: String, values: Seq[Any]): Try[Boolean] =
withPreparedStatement[Boolean](query) { preparedStatement ⇒
withValueInsertion(values, preparedStatement).execute()
}
/**
* Executes the SQL statement in this PreparedStatement object, which may be any kind of SQL statement,
* This will return a boolean. TRUE indicates the result is a ResultSet and FALSE indicates
* it has the int value which denotes number of rows affected by the query. It can be used for executing stored procedures.
* @param query
* @return
*/
def execute(query: String): Try[Boolean] =
withStatement[Boolean] { statement ⇒
statement.execute(query)
}
}
| dnvriend/jboss-wildfly-test | helloworld/src/main/scala/repository/JdbcConnection.scala | Scala | apache-2.0 | 16,330 |
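A hedged end-to-end sketch of the intended usage: mix in the trait, supply a `DataSource`, and combine the `q` interpolator with an implicit row mapper. The `users` table and the injected pool `ds` are assumptions:

```scala
import java.sql.ResultSet
import javax.sql.DataSource
import scala.util.Try

class UserRepository(ds: DataSource) extends JdbcConnection {
  import JdbcConnection._

  override def dataSource: DataSource = ds

  // Row mapper picked up implicitly by queryForList / queryForObject.
  implicit val nameMapper: ResultSet => String = rs => rs.str("name")

  // $age is bound as a ? parameter by the q interpolator.
  def namesOlderThan(age: Int): Try[Seq[String]] =
    queryForList(q"SELECT name FROM users WHERE age > $age")
}
```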
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package noop.interpreter
import java.io.{FileWriter, File}
/**
* @author alexeagle@google.com (Alex Eagle)
*/
trait TempFileFixture {
  def withTempFile(filename: String, content: String)(testFunction: => Any): Unit = {
    val tempFile = new File(filename)
    try {
      val writer = new FileWriter(tempFile)
      try writer.write(content)
      finally writer.close() // close the writer even if the write fails
      testFunction
    } finally {
      tempFile.delete()
    }
  }
}
| masterx2/noop | interpreter/src/test/scala/noop/interpreter/TempFileFixture.scala | Scala | apache-2.0 | 1,027 |
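A minimal sketch of how a suite might use the fixture (test body and file name are hypothetical):

```scala
class ExampleSpec extends TempFileFixture {
  def testParsesSource(): Unit =
    withTempFile("Hello.noop", "class Hello {}") {
      // exercise code that reads "Hello.noop" from disk;
      // the file is deleted once this block completes
    }
}
```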
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js Test Suite       **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013-2015, LAMP/EPFL  **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/      **
** /____/\___/_/ |_/____/_/ | |__/ /____/                              **
**                          |/____/                                    **
\*                                                                      */
package org.scalajs.testinterface
import scala.scalajs.js
import scala.scalajs.concurrent.QueueExecutionContext
import scala.scalajs.js.annotation.JSName
import js.URIUtils.{decodeURIComponent, encodeURIComponent}
import scala.collection.mutable
import scala.concurrent.{Future, Promise}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Try
import sbt.testing._
protected[testinterface] object HTMLRunner extends js.JSApp {
private val classLoader = new ScalaJSClassLoader(js.Dynamic.global)
private object EventCounter {
private val isErrorStatus = Set(Status.Error, Status.Failure)
val counts = mutable.Map.empty[Status, Int].withDefaultValue(0)
class Handler extends EventHandler {
private[this] var _hasErrors = false
def handle(event: Event): Unit = {
val status = event.status
_hasErrors ||= isErrorStatus(status)
counts(status) += 1
}
def hasErrors: Boolean = _hasErrors
}
}
def main(): Unit = {
/* Note: Test filtering is currently done based on the fully qualified name
* of a test. While this is reasonable in most cases, there could be a test
* that is run by multiple test frameworks.
*/
val (testFilter, optExcludedHash): (TaskDef => Boolean, Option[Int]) = {
val search = dom.document.location.search.stripPrefix("?")
search.split("&").map(decodeURIComponent).toList match {
case "i" :: excludedHash :: included =>
val includeSet = included.toSet
(t => includeSet.contains(t.fullyQualifiedName),
Some(excludedHash.toInt))
case "e" :: excluded =>
val excludeSet = excluded.toSet
(t => !excludeSet.contains(t.fullyQualifiedName), None)
case _ =>
// Invalid parameter. Run everything.
(_ => true, None)
}
}
val allTests = TestDetector.detectTests()
val totalTestCount = allTests.map(_._2.size).sum
val excludedTests = allTests.flatMap(_._2.filterNot(testFilter))
val ui = new UI(excludedTests, totalTestCount)
// Warn if test set changed.
def excludedHash = excludedTests.map(_.fullyQualifiedName).toSet.##
if (optExcludedHash.exists(_ != excludedHash)) {
ui.warnTestSetChanged()
}
val oks = for {
(framework, taskDefs) <- allTests
} yield {
runTests(framework, taskDefs.filter(testFilter), ui)
}
// Report event counts.
Future.sequence(oks).map(and).onComplete(ui.done)
}
private def runTests(framework: Framework,
taskDefs: Seq[TaskDef], ui: UI): Future[Boolean] = {
def runAllTasks(tasks: Seq[Task]): Future[Boolean] = {
val oks = tasks.map { task =>
for {
(ok, newTasks) <- scheduleTask(task, ui)
newOk <- runAllTasks(newTasks)
} yield ok && newOk
}
Future.sequence(oks).map(and)
}
val runner = framework.runner(Array(), Array(), classLoader)
val tasks = runner.tasks(taskDefs.toArray)
for (ok <- runAllTasks(tasks)) yield {
val resultStr = runner.done()
if (resultStr.nonEmpty)
ui.reportFrameworkResult(ok, framework.name, resultStr)
ok
}
}
private def scheduleTask(task: Task, ui: UI): Future[(Boolean, Array[Task])] = {
val uiBox = ui.newTestTask(task.taskDef.fullyQualifiedName)
val handler = new EventCounter.Handler
// Schedule test via timeout so we yield to the UI event thread.
val newTasks = Promise[Array[Task]]
val invocation = Future(task.execute(handler, Array(uiBox.logger),
newTasks.success))(QueueExecutionContext.timeouts())
val result = for {
_ <- invocation
tasks <- newTasks.future
} yield {
(!handler.hasErrors, tasks)
}
result.map(_._1).onComplete(uiBox.done)
result.recover {
case _ => (false, Array[Task]())
}
}
private class UI(excludedTaskDefs: Seq[TaskDef], totalTestCount: Int) {
// State.
private var _done = false
private val runningTests = mutable.Buffer.empty[RunningTest]
private val excludedTests = mutable.Buffer.empty[Test]
// UI Elements.
// Top level container. Mainly to prevent code under test from accidentally
// modifying our UI.
private val container = dom.document.body.newElement()
private val rootBox = new RootBox(excludedTaskDefs.size, totalTestCount)
private[this] var nextFailureLocation: MoveTarget =
if (excludedTaskDefs.nonEmpty) new ExcludedTestBox()
else rootBox
updateCounts()
trait TestTask {
def logger: Logger
def done(ok: Try[Boolean]): Unit
}
def newTestTask(testName: String): TestTask = {
val task = new RunningTest(testName)
runningTests += task
task
}
def done(ok: Try[Boolean]): Unit = {
_done = true
ok.failed.foreach { t =>
rootBox.log("Test framework crashed during execution:", "error")
rootBox.log(t.toString, "error")
}
rootBox.done(ok.getOrElse(false))
updateCounts()
}
def warnTestSetChanged(): Unit = {
val line = rootBox.log("", "warn")
// Note: The following is not entirely true. The warning will also appear
// if tests have been removed.
line.newTextNode("There are new excluded tests in your project. You " +
"may wish to ")
line.newLink("?", "Run all")
line.newTextNode(" to rediscover all available tests.")
}
def reportFrameworkResult(ok: Boolean,
framework: String, result: String): Unit = {
rootBox.log(s"$framework reported $result", statusClass(ok))
}
private def updateCounts(): Unit = {
import EventCounter.counts
// Construct report string.
val countStr = {
val total = counts.values.sum
val countStrs = {
s"Total: $total" +:
Status.values.map(status => s"$status: ${counts(status)}")
}
countStrs.mkString(", ")
}
if (_done) {
rootBox.counterLineText = countStr
} else {
rootBox.counterLineText = "Running... " + countStr
}
}
private trait Test {
def testName: String
def selected: Boolean
def selected_=(v: Boolean): Unit
def failed: Boolean
}
private trait MoveTarget {
def setNextSibling(that: TestBox): Unit
}
private class RunningTest(val testName: String) extends Test with TestTask {
private val box = new TestBox(testName)
box.checkbox.onclick = rootBox.updateCheckbox
private var _ok = false
def done(ok: Try[Boolean]): Unit = {
ok.failed.foreach { t =>
logger.error("Test framework crashed during test:")
logger.trace(t)
}
_ok = ok.getOrElse(false)
updateCounts()
box.done(_ok)
if (!_ok) {
box.expand()
nextFailureLocation.setNextSibling(box)
nextFailureLocation = box
}
}
def selected: Boolean = box.checkbox.checked
def selected_=(v: Boolean): Unit = box.checkbox.checked = v
def failed: Boolean = !_ok
val logger: Logger = new Logger {
val ansiCodesSupported = false
def error(msg: String): Unit = {
box.log(msg, "error")
box.expand()
}
def warn(msg: String): Unit = box.log(msg, "warn")
def info(msg: String): Unit = box.log(msg, "info")
def debug(msg: String): Unit = box.log(msg, "debug")
def trace(t: Throwable): Unit = error(t.toString)
}
}
private class TestBox(caption: String) extends MoveTarget {
private val box = container.newElement(clss = "test-box")
private val header = box.newElement(clss = "test-box-header")
private val expandLink = header.newLink(href = "#", text = "[+]")
expandLink.onclick = { () => toggleExpand(); false }
private val headerCaption = header.newTextNode(" " + caption)
val checkbox = header.newCheckbox(checked = true)
private val body = box.newElement(clss = "test-box-body")
private[this] var expanded = false
def done(ok: Boolean): Unit = {
header.className += " " + statusClass(ok)
headerCaption.textContent += (if (ok) " - Passed" else " - Failed")
}
def expand(): Unit = {
if (!expanded)
toggleExpand()
}
def log(msg: String, clss: String): dom.Element =
body.newElement(clss = s"log $clss", text = msg, tpe = "pre")
def setNextSibling(that: TestBox): Unit = {
this.box.insertAdjacentElement("afterend", that.box)
}
private def toggleExpand(): Unit = {
expanded = !expanded
expandLink.textContent = if (expanded) "[-]" else "[+]"
body.style.display = if (expanded) "block" else "none"
}
}
private class RootBox(excludedTestCount: Int,
totalTestCount: Int) extends MoveTarget {
private val box = {
val caption = {
if (excludedTestCount == 0) {
s"Total Test Suites: $totalTestCount"
} else {
val selectedCount = totalTestCount - excludedTestCount
s"Selected Test Suites $selectedCount (Total: $totalTestCount)"
}
}
new TestBox(caption)
}
box.expand()
box.checkbox.onclick = testUpdater(runningTests, box.checkbox)
private val counterLine = box.log("", "info")
def counterLineText: String = counterLine.textContent
def counterLineText_=(v: String): Unit = counterLine.textContent = v
def done(ok: Boolean): Unit = {
box.done(ok)
counterLine.className = "log " + statusClass(ok)
val rerunLine = box.log("Next: ", statusClass(ok))
if (!ok) {
rerunLine.newLink(runLink(_.failed), "Run failed")
rerunLine.newTextNode(" | ")
}
rerunLine.newLink("#", "Run selected").onclick = { () =>
dom.document.location.search = runLink(_.selected)
false
}
rerunLine.newTextNode(" | ")
rerunLine.newLink("?", "Run all")
}
val updateCheckbox: js.Function0[Boolean] =
checkboxUpdater(runningTests, box.checkbox)
def log(msg: String, clss: String): dom.Element = box.log(msg, clss)
def setNextSibling(that: TestBox): Unit = box.setNextSibling(that)
private def runLink(condition: Test => Boolean): String = {
val (included, excluded) =
(runningTests ++ excludedTests).partition(condition)
val params = {
if (included.size < excluded.size) {
// Create an include list.
val excludedHash = excluded.map(_.testName).toSet.##.toString
Seq("i", excludedHash) ++ included.map(_.testName)
} else {
// Create an exclude list.
"e" +: excluded.map(_.testName)
}
}
params.map(encodeURIComponent).mkString("?", "&", "")
}
}
private class ExcludedTestBox extends MoveTarget {
private val box = {
val count = excludedTaskDefs.size
new TestBox(s"Excluded Test Suites ($count)")
}
private val updateCheckbox: js.Function0[Boolean] =
checkboxUpdater(excludedTests, box.checkbox)
box.checkbox.checked = false
box.checkbox.onclick = testUpdater(excludedTests, box.checkbox)
for (taskDef <- excludedTaskDefs) {
excludedTests += new ExcludedTest(taskDef.fullyQualifiedName)
}
def setNextSibling(that: TestBox): Unit = box.setNextSibling(that)
private class ExcludedTest(val testName: String) extends Test {
private val logLine = box.log("", "info")
private val checkbox = logLine.newCheckbox(checked = false)
checkbox.onclick = updateCheckbox
logLine.newTextNode(" " + testName)
def selected: Boolean = checkbox.checked
def selected_=(v: Boolean): Unit = checkbox.checked = v
def failed: Boolean = false
}
}
private def statusClass(ok: Boolean): String =
if (ok) "success" else "error"
private def checkboxUpdater(tests: Seq[Test],
checkbox: dom.Checkbox): js.Function0[Boolean] = { () =>
val all = tests.forall(_.selected)
val indet = !all && tests.exists(_.selected)
checkbox.indeterminate = indet
if (!indet)
checkbox.checked = all
true
}
private def testUpdater(tests: Seq[Test],
checkbox: dom.Checkbox): js.Function0[Boolean] = { () =>
tests.foreach(_.selected = checkbox.checked)
true
}
}
// Mini dom facade.
private object dom { // scalastyle:ignore
@JSName("document")
@js.native
object document extends js.Object { // scalastyle:ignore
def body: Element = js.native
def createElement(tag: String): Element = js.native
def createTextNode(tag: String): Node = js.native
val location: Location = js.native
}
@js.native
trait Node extends js.Object {
var textContent: String = js.native
}
@js.native
trait Element extends Node {
def appendChild(child: Node): Unit = js.native
def setAttribute(name: String, value: String): Unit = js.native
var className: String = js.native
val style: Style = js.native
var onclick: js.Function0[Boolean] = js.native
def insertAdjacentElement(location: String, element: Element): Unit = js.native
}
@js.native
trait Checkbox extends Element {
var checked: Boolean = js.native
var indeterminate: Boolean = js.native
}
@js.native
trait Style extends js.Object {
var display: String = js.native
}
@js.native
trait Location extends js.Object {
var search: String = js.native
}
implicit class RichElement(val element: Element) extends AnyVal {
def newElement(clss: String = "", text: String = "",
tpe: String = "div"): dom.Element = {
val el = document.createElement(tpe)
if (clss.nonEmpty)
el.className = clss
if (text.nonEmpty)
el.textContent = text
element.appendChild(el)
el
}
def newLink(href: String, text: String): dom.Element = {
val el = newElement(tpe = "a", text = text)
el.setAttribute("href", href)
el
}
def newCheckbox(checked: Boolean): dom.Checkbox = {
val el = newElement(tpe = "input").asInstanceOf[dom.Checkbox]
el.setAttribute("type", "checkbox")
el.checked = checked
el
}
def newTextNode(text: String): Node = {
val n = document.createTextNode(text)
element.appendChild(n)
n
}
}
}
private def and(xs: Seq[Boolean]): Boolean = xs.fold(true)(_ && _)
}
| lrytz/scala-js | test-interface/src/main/scala/org/scalajs/testinterface/HTMLRunner.scala | Scala | bsd-3-clause | 15,368 |
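The runner encodes its test selection in the page's query string: `?e&A&B` excludes suites A and B, while `?i&<hash>&C` runs only C and carries a hash of the excluded set so a changed test set can be detected. A JVM-side sketch of the same encoding (java.net.URLEncoder standing in for the js URIUtils):

```scala
import java.net.URLEncoder

def includeLink(included: Seq[String], excluded: Seq[String]): String = {
  val excludedHash = excluded.toSet.##.toString
  (Seq("i", excludedHash) ++ included)
    .map(URLEncoder.encode(_, "UTF-8"))
    .mkString("?", "&", "")
}

includeLink(Seq("org.example.FooTest"), Seq("org.example.BarTest"))
// e.g. "?i&<hash>&org.example.FooTest"
```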
/* Copyright 2015 White Label Personal Clouds Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.welcomer.framework.pico.dsl
import scala.concurrent.ExecutionContext
import scala.util.Failure
import scala.util.Success
import scala.util.Try
import akka.actor.ActorRef
import me.welcomer.framework.extensions.ReflectionExtension
import me.welcomer.framework.pico.PicoRuleset
import me.welcomer.framework.pico.PicoRulesetContainer
trait PicoRulesetContainerDSL extends AnyRef { self: PicoRulesetContainer =>
def loadRulesets(rulesets: Set[String])(implicit ec: ExecutionContext): Map[String, Try[ActorRef]] = {
rulesets.size match {
case size if (size > 0) => {
log.info(s"Loading rulesets ($size)..")
val rulesetRefs = rulesets.foldLeft(Map[String, Try[ActorRef]]()) { (rulesetRefs, ruleset) => rulesetRefs + (ruleset -> loadRuleset(ruleset)) }
log.info("Loading rulesets complete.")
rulesetRefs
}
case _ => {
log.info(s"No rulesets to load.")
Map[String, Try[ActorRef]]()
}
}
}
def loadRuleset(name: String): Try[ActorRef] = {
log.info("Loading ruleset {}..", name)
Try(
useRulesetCalled(name) match {
case Success(ruleset) => {
context.actorOf(
PicoRuleset.props(ruleset, _picoServices),
ruleset.getCanonicalName())
}
case Failure(e) => {
e match {
case _: ClassNotFoundException => log.error("ClassNotFoundException while attempting to load ruleset {}", name)
case _ => log.error(e, "Error while attempting to load ruleset {}", name)
}
throw e
}
})
}
// TODO: Refactor the ruleset scope into settings somehow? (put pico settings in picoServices?)
def useRulesetCalled(className: String): Try[Class[_ <: PicoRuleset]] = {
val fqcn = "me.welcomer.rulesets." + className
ReflectionExtension(context.system).classFor[PicoRuleset](fqcn)
}
}
| welcomer/framework | src/main/scala/me/welcomer/framework/pico/dsl/PicoRulesetContainerDSL.scala | Scala | apache-2.0 | 2,556 |
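`useRulesetCalled` resolves a ruleset class by convention from a fixed package. A self-contained sketch of the reflection idiom underneath (plain `Class.forName` instead of the Akka-based `ReflectionExtension`):

```scala
import scala.util.Try

// Resolve a class by fully qualified name, requiring it to be a subtype.
def classFor[T](fqcn: String, expected: Class[T]): Try[Class[_ <: T]] =
  Try(Class.forName(fqcn).asSubclass(expected))

classFor("java.util.ArrayList", classOf[java.util.AbstractList[_]])
// Success(class java.util.ArrayList)
```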
package skuber.apps
import org.specs2.mutable.Specification
import skuber.{Container, LabelSelector, ObjectMeta, Pod, ReplicationController, Scale}
import play.api.libs.json.Json
/**
* @author David O'Riordan
*/
class ScaleSpec extends Specification {
"This is a unit specification for the skuber Scale class. ".txt
"A Scale object can be constructed from a name and replica count" >> {
val scale = Scale.named("example").withSpecReplicas(10)
scale.spec.replicas mustEqual Some(10)
scale.name mustEqual "example"
scale.status mustEqual None
}
"A scale object can be written to Json and then read back again successfully" >> {
    val scale = Scale(
apiVersion="autoscaling/v1",
metadata=ObjectMeta(name="example", namespace="na"),
spec=Scale.Spec(replicas=Some(10))
)
val readScale = Json.fromJson[Scale](Json.toJson(scale)).get
readScale mustEqual scale
}
"A scale object can be read directly from a JSON string" >> {
val scaleJsonStr = """
{
"kind": "Scale",
"apiVersion": "extensions/v1beta1",
"metadata": {
"name": "redis-master",
"namespace": "default",
"selfLink": "/apis/extensions/v1beta1/namespaces/default/replicationcontrollers/redis-master/scale",
"creationTimestamp": "2015-12-29T11:55:14Z"
},
"spec": {
"replicas": 1
},
"status": {
"replicas": 1,
"targetSelector": "redis-master"
}
}
"""
val scale = Json.parse(scaleJsonStr).as[Scale]
scale.kind mustEqual "Scale"
scale.name mustEqual "redis-master"
scale.spec.replicas mustEqual Some(1)
scale.status mustEqual Some(Scale.Status(replicas=1, selector=None, targetSelector=Some("redis-master")))
}
"A scale object can contain NO replicas" >> {
val scaleJsonObj =
"""
|{
| "kind": "Scale",
| "apiVersion": "extensions/v1beta1",
| "metadata": {
| "name": "redis-master",
| "namespace": "default",
| "selfLink": "/apis/extensions/v1beta1/namespaces/default/replicationcontrollers/redis-master/scale",
| "creationTimestamp": "2015-12-29T11:55:14Z"
| },
| "spec": {
| },
| "status": {
| "replicas": 1,
| "targetSelector": "redis-master"
| }
|}
""".stripMargin
val scale = Json.parse(scaleJsonObj).as[Scale]
scale.kind mustEqual "Scale"
scale.name mustEqual "redis-master"
scale.spec.replicas mustEqual None
}
}
|
doriordan/skuber
|
client/src/test/scala/skuber/ext/ScaleSpec.scala
|
Scala
|
apache-2.0
| 2,518
|
/*
* Copyright (C) 2012 The Regents of The University California.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shark.memstore2
import org.apache.hadoop.hive.serde2.`lazy`.ByteArrayRef
import org.apache.hadoop.hive.serde2.objectinspector.primitive._
import org.apache.hadoop.io._
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector
import org.scalatest.FunSuite
import shark.memstore2.column._
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector
import shark.memstore2.column.Implicits._
import java.nio.ByteOrder
class ColumnIteratorSuite extends FunSuite {
val PARALLEL_MODE = true
test("void column") {
val builder = new VoidColumnBuilder
builder.initialize(5, "void")
builder.append(null, null)
builder.append(null, null)
builder.append(null, null)
val buf = builder.build()
val iter = ColumnIterator.newIterator(buf)
iter.next()
assert(iter.current == null)
iter.next()
assert(iter.current == null)
iter.next()
assert(iter.current == null)
}
test("boolean column") {
var builder = new BooleanColumnBuilder
testColumn(
Array[java.lang.Boolean](true, false, true, true, true),
builder,
PrimitiveObjectInspectorFactory.javaBooleanObjectInspector,
PrimitiveObjectInspectorFactory.writableBooleanObjectInspector,
classOf[BooleanColumnIterator])
assert(builder.stats.min === false)
assert(builder.stats.max === true)
builder = new BooleanColumnBuilder
testColumn(
Array[java.lang.Boolean](null, false, null, true, true),
builder,
PrimitiveObjectInspectorFactory.javaBooleanObjectInspector,
PrimitiveObjectInspectorFactory.writableBooleanObjectInspector,
classOf[BooleanColumnIterator],
true)
assert(builder.stats.min === false)
assert(builder.stats.max === true)
builder = new BooleanColumnBuilder
builder.compressionSchemes = Seq(new RLE())
val a = Array.ofDim[java.lang.Boolean](100)
Range(0,100).foreach { i =>
      a(i) = if (i < 10) true else if (i < 80) false else null
}
testColumn(
a,
builder,
PrimitiveObjectInspectorFactory.javaBooleanObjectInspector,
PrimitiveObjectInspectorFactory.writableBooleanObjectInspector,
classOf[BooleanColumnIterator],
true)
}
test("byte column") {
var builder = new ByteColumnBuilder
testColumn(
Array[java.lang.Byte](1.toByte, 2.toByte, 15.toByte, 55.toByte, 0.toByte, 40.toByte),
builder,
PrimitiveObjectInspectorFactory.javaByteObjectInspector,
PrimitiveObjectInspectorFactory.writableByteObjectInspector,
classOf[ByteColumnIterator])
assert(builder.stats.min === 0.toByte)
assert(builder.stats.max === 55.toByte)
builder = new ByteColumnBuilder
testColumn(
Array[java.lang.Byte](null, 2.toByte, 15.toByte, null, 0.toByte, null),
builder,
PrimitiveObjectInspectorFactory.javaByteObjectInspector,
PrimitiveObjectInspectorFactory.writableByteObjectInspector,
classOf[ByteColumnIterator],
true)
assert(builder.stats.min === 0.toByte)
assert(builder.stats.max === 15.toByte)
builder = new ByteColumnBuilder
builder.compressionSchemes = Seq(new RLE())
testColumn(
Array[java.lang.Byte](null, 2.toByte, 2.toByte, null, 4.toByte, 4.toByte,4.toByte,5.toByte),
builder,
PrimitiveObjectInspectorFactory.javaByteObjectInspector,
PrimitiveObjectInspectorFactory.writableByteObjectInspector,
classOf[ByteColumnIterator],
true)
}
test("short column") {
var builder = new ShortColumnBuilder
testColumn(
Array[java.lang.Short](1.toShort, 2.toShort, -15.toShort, 355.toShort, 0.toShort, 40.toShort),
builder,
PrimitiveObjectInspectorFactory.javaShortObjectInspector,
PrimitiveObjectInspectorFactory.writableShortObjectInspector,
classOf[ShortColumnIterator])
assert(builder.stats.min === -15.toShort)
assert(builder.stats.max === 355.toShort)
builder = new ShortColumnBuilder
testColumn(
Array[java.lang.Short](1.toShort, 2.toShort, -15.toShort, null, 0.toShort, null),
builder,
PrimitiveObjectInspectorFactory.javaShortObjectInspector,
PrimitiveObjectInspectorFactory.writableShortObjectInspector,
classOf[ShortColumnIterator],
true)
assert(builder.stats.min === -15.toShort)
assert(builder.stats.max === 2.toShort)
testColumn(
Array[java.lang.Short](1.toShort, 2.toShort, 2.toShort, null, 1.toShort, 1.toShort),
builder,
PrimitiveObjectInspectorFactory.javaShortObjectInspector,
PrimitiveObjectInspectorFactory.writableShortObjectInspector,
classOf[ShortColumnIterator],
true)
}
test("int column") {
var builder = new IntColumnBuilder
testColumn(
Array[java.lang.Integer](0, 1, 2, 5, 134, -12, 1, 0, 99, 1),
builder,
PrimitiveObjectInspectorFactory.javaIntObjectInspector,
PrimitiveObjectInspectorFactory.writableIntObjectInspector,
classOf[IntColumnIterator])
assert(builder.stats.min === -12)
assert(builder.stats.max === 134)
builder = new IntColumnBuilder
testColumn(
Array[java.lang.Integer](null, 1, 2, 5, 134, -12, null, 0, 99, 1),
builder,
PrimitiveObjectInspectorFactory.javaIntObjectInspector,
PrimitiveObjectInspectorFactory.writableIntObjectInspector,
classOf[IntColumnIterator],
true)
assert(builder.stats.min === -12)
assert(builder.stats.max === 134)
builder = new IntColumnBuilder
builder.compressionSchemes = Seq(new RLE())
val a = Array.ofDim[java.lang.Integer](100)
Range(0,100).foreach { i =>
      a(i) = if (i < 10) 10 else if (i < 80) 11 else null
}
testColumn(
a,
builder,
PrimitiveObjectInspectorFactory.javaIntObjectInspector,
PrimitiveObjectInspectorFactory.writableIntObjectInspector,
classOf[IntColumnIterator],
true)
}
test("long column") {
var builder = new LongColumnBuilder
testColumn(
Array[java.lang.Long](1L, -345345L, 15L, 0L, 23445456L),
builder,
PrimitiveObjectInspectorFactory.javaLongObjectInspector,
PrimitiveObjectInspectorFactory.writableLongObjectInspector,
classOf[LongColumnIterator])
assert(builder.stats.min === -345345L)
assert(builder.stats.max === 23445456L)
builder = new LongColumnBuilder
testColumn(
Array[java.lang.Long](null, -345345L, 15L, 0L, null),
builder,
PrimitiveObjectInspectorFactory.javaLongObjectInspector,
PrimitiveObjectInspectorFactory.writableLongObjectInspector,
classOf[LongColumnIterator],
true)
assert(builder.stats.min === -345345L)
assert(builder.stats.max === 15L)
builder = new LongColumnBuilder
builder.compressionSchemes = Seq(new RLE())
val a = Array.ofDim[java.lang.Long](100)
Range(0,100).foreach { i =>
      a(i) = if (i < 10) 10 else if (i < 80) 11 else null
}
testColumn(
a,
builder,
PrimitiveObjectInspectorFactory.javaLongObjectInspector,
PrimitiveObjectInspectorFactory.writableLongObjectInspector,
classOf[LongColumnIterator],
true)
}
test("float column") {
var builder = new FloatColumnBuilder
testColumn(
Array[java.lang.Float](1.1.toFloat, -2.5.toFloat, 20000.toFloat, 0.toFloat, 15.0.toFloat),
builder,
PrimitiveObjectInspectorFactory.javaFloatObjectInspector,
PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
classOf[FloatColumnIterator])
assert(builder.stats.min === -2.5.toFloat)
assert(builder.stats.max === 20000.toFloat)
builder = new FloatColumnBuilder
testColumn(
Array[java.lang.Float](1.1.toFloat, null, 20000.toFloat, null, 15.0.toFloat),
builder,
PrimitiveObjectInspectorFactory.javaFloatObjectInspector,
PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
classOf[FloatColumnIterator],
true)
assert(builder.stats.min === 1.1.toFloat)
assert(builder.stats.max === 20000.toFloat)
}
test("double column") {
var builder = new DoubleColumnBuilder
testColumn(
Array[java.lang.Double](1.1, 2.2, -2.5, 20000, 0, 15.0),
builder,
PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
classOf[DoubleColumnIterator])
assert(builder.stats.min === -2.5)
assert(builder.stats.max === 20000)
builder = new DoubleColumnBuilder
testColumn(
Array[java.lang.Double](1.1, 2.2, -2.5, null, 0, 15.0),
builder,
PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
classOf[DoubleColumnIterator],
true)
assert(builder.stats.min === -2.5)
assert(builder.stats.max === 15.0)
}
test("string column") {
var builder = new StringColumnBuilder
testColumn(
Array[Text](new Text("a"), new Text(""), new Text("b"), new Text("Abcdz")),
builder,
PrimitiveObjectInspectorFactory.writableStringObjectInspector,
PrimitiveObjectInspectorFactory.writableStringObjectInspector,
classOf[StringColumnIterator],
false,
(a, b) => (a.equals(b.toString))
)
assert(builder.stats.min.toString === "")
assert(builder.stats.max.toString === "b")
builder = new StringColumnBuilder
testColumn(
Array[Text](new Text("a"), new Text(""), null, new Text("b"), new Text("Abcdz"), null),
builder,
PrimitiveObjectInspectorFactory.writableStringObjectInspector,
PrimitiveObjectInspectorFactory.writableStringObjectInspector,
classOf[StringColumnIterator],
false,
(a, b) => if (a == null) b == null else (a.toString.equals(b.toString))
)
assert(builder.stats.min.toString === "")
assert(builder.stats.max.toString === "b")
builder = new StringColumnBuilder
builder.compressionSchemes = Seq(new RLE())
testColumn(
Array[Text](new Text("a"), new Text("a"), null, new Text("b"), new Text("b"), new Text("Abcdz")),
builder,
PrimitiveObjectInspectorFactory.writableStringObjectInspector,
PrimitiveObjectInspectorFactory.writableStringObjectInspector,
classOf[StringColumnIterator],
false,
(a, b) => if (a == null) b == null else (a.toString.equals(b.toString))
)
}
test("timestamp column") {
val ts1 = new java.sql.Timestamp(0)
val ts2 = new java.sql.Timestamp(500)
ts2.setNanos(400)
val ts3 = new java.sql.Timestamp(1362561610000L)
var builder = new TimestampColumnBuilder
testColumn(
Array(ts1, ts2, ts3),
builder,
PrimitiveObjectInspectorFactory.javaTimestampObjectInspector,
PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
classOf[TimestampColumnIterator],
false,
(a, b) => (a.equals(b))
)
assert(builder.stats.min.equals(ts1))
assert(builder.stats.max.equals(ts3))
builder = new TimestampColumnBuilder
testColumn(
Array(ts1, ts2, null, ts3, null),
builder,
PrimitiveObjectInspectorFactory.javaTimestampObjectInspector,
PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
classOf[TimestampColumnIterator],
true,
(a, b) => (a.equals(b))
)
assert(builder.stats.min.equals(ts1))
assert(builder.stats.max.equals(ts3))
}
test("Binary Column") {
val b1 = new BytesWritable()
b1.set(Array[Byte](0,1,2), 0, 3)
val builder = new BinaryColumnBuilder
testColumn(
Array[BytesWritable](b1),
builder,
PrimitiveObjectInspectorFactory.writableBinaryObjectInspector,
PrimitiveObjectInspectorFactory.writableBinaryObjectInspector,
classOf[BinaryColumnIterator],
false,
compareBinary)
assert(builder.stats.isInstanceOf[ColumnStats.NoOpStats[_]])
def compareBinary(x: Object, y: Object): Boolean = {
val xdata = x.asInstanceOf[Array[Byte]]
val ywritable = y.asInstanceOf[BytesWritable]
val ydata = ywritable.getBytes()
val length = ywritable.getLength()
if (length != xdata.length) {
false
} else {
val ydatapruned = new Array[Byte](length)
System.arraycopy(ydata, 0, ydatapruned, 0, length)
java.util.Arrays.equals(xdata, ydatapruned)
}
}
}
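  /** Builds a column from testData via the given builder, then iterates it
    * back and checks every value against the original using compareFunc.
    * When PARALLEL_MODE is on, the read-back is repeated from several
    * threads to exercise concurrent iteration. */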
def testColumn[T, U <: ColumnIterator](
testData: Array[_ <: Object],
builder: ColumnBuilder[T],
oi: ObjectInspector,
writableOi: AbstractPrimitiveWritableObjectInspector,
iteratorClass: Class[U],
expectEWAHWrapper: Boolean = false,
compareFunc: (Object, Object) => Boolean = (a, b) => a == b) {
builder.initialize(testData.size, "")
testData.foreach { x => builder.append(x, oi)}
val buf = builder.build()
def executeOneTest() {
val iter = ColumnIterator.newIterator(buf)
(0 until testData.size).foreach { i =>
iter.next()
val expected = testData(i)
val reality = writableOi.getPrimitiveJavaObject(iter.current)
//println ("at position " + i + " expected " + expected + ", but saw " + reality)
assert((expected == null && reality == null) || compareFunc(reality, expected),
"at position " + i + " expected " + expected + ", but saw " + reality)
}
}
if (PARALLEL_MODE) {
// parallelize to test concurrency
(1 to 10).par.foreach { parallelIndex => executeOneTest() }
} else {
executeOneTest()
}
}
}
|
stefanvanwouw/puppet-shark
|
files/shark-0.9.0/src/test/scala/shark/memstore2/ColumnIteratorSuite.scala
|
Scala
|
mit
| 14,179
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.joinquery
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
/**
* Test Class for join query on multiple datatypes
*/
class AllDataTypesTestCaseJoin extends QueryTest with BeforeAndAfterAll {
// scalastyle:off lineLength
override def beforeAll {
sql("CREATE TABLE alldatatypestableJoin (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED AS carbondata TBLPROPERTIES('TABLE_BLOCKSIZE'='4')")
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE alldatatypestableJoin OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\\"')""");
sql("CREATE TABLE alldatatypestableJoin_hive (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int)row format delimited fields terminated by ','")
sql(s"LOAD DATA local inpath '$resourcesPath/datawithoutheader.csv' INTO TABLE alldatatypestableJoin_hive");
}
test("select empno,empname,utilization,count(salary),sum(empno) from alldatatypestableJoin where empname in ('arvind','ayushi') group by empno,empname,utilization") {
checkAnswer(
sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypestableJoin where empname in ('arvind','ayushi') group by empno,empname,utilization"),
sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypestableJoin_hive where empname in ('arvind','ayushi') group by empno,empname,utilization"))
}
test("select e.empid from employee e inner join manager m on e.mgrid=m.empid") {
sql("drop table if exists employee")
sql("create table employee(name string, empid string, mgrid string, mobileno bigint) STORED AS carbondata")
sql(s"load data inpath '$resourcesPath/join/emp.csv' into table employee options('fileheader'='name,empid,mgrid,mobileno')")
sql("drop table if exists manager")
sql("create table manager(name string, empid string, mgrid string, mobileno bigint) STORED AS carbondata")
sql(s"load data inpath '$resourcesPath/join/mgr.csv' into table manager options('fileheader'='name,empid,mgrid,mobileno')")
checkAnswer(
sql("select e.empid from employee e inner join manager m on e.mgrid=m.empid"),
Seq(Row("t23717"))
)
}
test("Union with alias fails") {
sql("DROP TABLE IF EXISTS carbon_table1")
sql("DROP TABLE IF EXISTS carbon_table2")
sql("CREATE TABLE carbon_table1(shortField smallint,intField int,bigintField bigint,doubleField double,stringField string,timestampField timestamp,decimalField decimal(18,2),dateField date,charField char(5),floatField float) STORED AS carbondata ")
sql("CREATE TABLE carbon_table2(shortField smallint,intField int,bigintField bigint,doubleField double,stringField string,timestampField timestamp,decimalField decimal(18,2),dateField date,charField char(5),floatField float) STORED AS carbondata ")
val path1 = s"$resourcesPath/join/data1.csv"
val path2 = s"$resourcesPath/join/data2.csv"
sql(
s"""
LOAD DATA LOCAL INPATH '$path1'
INTO TABLE carbon_table1
options('FILEHEADER'='shortField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData','COMPLEX_DELIMITER_LEVEL_1'='#')
""".stripMargin)
sql(
s"""
LOAD DATA LOCAL INPATH '$path2'
INTO TABLE carbon_table2
options('FILEHEADER'='shortField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData','COMPLEX_DELIMITER_LEVEL_1'='#')
""".stripMargin)
checkAnswer(sql("""SELECT t.a a FROM (select charField a from carbon_table1 t1 union all select charField a from carbon_table2 t2) t order by a """),
Seq(Row("aaa"), Row("bbb"), Row("ccc"), Row("ddd"))
)
// Drop table
sql("DROP TABLE IF EXISTS carbon_table1")
sql("DROP TABLE IF EXISTS carbon_table2")
}
test("join with aggregate plan") {
checkAnswer(sql("SELECT c1.empno,c1.empname, c2.empno FROM (SELECT empno,empname FROM alldatatypestableJoin GROUP BY empno,empname) c1 FULL JOIN " +
"(SELECT empno FROM alldatatypestableJoin GROUP BY empno) c2 ON c1.empno = c2.empno"),
sql("SELECT c1.empno,c1.empname, c2.empno FROM (SELECT empno,empname FROM alldatatypestableJoin_hive GROUP BY empno,empname) c1 FULL JOIN " +
"(SELECT empno FROM alldatatypestableJoin_hive GROUP BY empno) c2 ON c1.empno = c2.empno"))
}
override def afterAll {
sql("drop table alldatatypestableJoin")
sql("drop table alldatatypestableJoin_hive")
sql("drop table if exists manager")
sql("drop table if exists employee")
}
// scalastyle:on lineLength
}
|
zzcclp/carbondata
|
integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/joinquery/AllDataTypesTestCaseJoin.scala
|
Scala
|
apache-2.0
| 5,949
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming
import java.util.Locale
import org.apache.hadoop.fs.Path
import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd}
import org.apache.spark.sql.{AnalysisException, DataFrame}
import org.apache.spark.sql.execution.DataSourceScanExec
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
import org.apache.spark.util.Utils
class FileStreamSinkSuite extends StreamTest {
import testImplicits._
override def beforeAll(): Unit = {
super.beforeAll()
spark.sessionState.conf.setConf(SQLConf.ORC_IMPLEMENTATION, "native")
}
override def afterAll(): Unit = {
try {
spark.sessionState.conf.unsetConf(SQLConf.ORC_IMPLEMENTATION)
} finally {
super.afterAll()
}
}
test("unpartitioned writing and batch reading") {
val inputData = MemoryStream[Int]
val df = inputData.toDF()
val outputDir = Utils.createTempDir(namePrefix = "stream.output").getCanonicalPath
val checkpointDir = Utils.createTempDir(namePrefix = "stream.checkpoint").getCanonicalPath
var query: StreamingQuery = null
try {
query =
df.writeStream
.option("checkpointLocation", checkpointDir)
.format("parquet")
.start(outputDir)
inputData.addData(1, 2, 3)
failAfter(streamingTimeout) {
query.processAllAvailable()
}
val outputDf = spark.read.parquet(outputDir).as[Int]
checkDatasetUnorderly(outputDf, 1, 2, 3)
} finally {
if (query != null) {
query.stop()
}
}
}
test("SPARK-21167: encode and decode path correctly") {
val inputData = MemoryStream[String]
val ds = inputData.toDS()
val outputDir = Utils.createTempDir(namePrefix = "stream.output").getCanonicalPath
val checkpointDir = Utils.createTempDir(namePrefix = "stream.checkpoint").getCanonicalPath
val query = ds.map(s => (s, s.length))
.toDF("value", "len")
.writeStream
.partitionBy("value")
.option("checkpointLocation", checkpointDir)
.format("parquet")
.start(outputDir)
try {
// The output is partitioned by "value", so the value will appear in the file path.
// This is to test if we handle spaces in the path correctly.
inputData.addData("hello world")
failAfter(streamingTimeout) {
query.processAllAvailable()
}
val outputDf = spark.read.parquet(outputDir)
checkDatasetUnorderly(outputDf.as[(Int, String)], ("hello world".length, "hello world"))
} finally {
query.stop()
}
}
test("partitioned writing and batch reading") {
val inputData = MemoryStream[Int]
val ds = inputData.toDS()
val outputDir = Utils.createTempDir(namePrefix = "stream.output").getCanonicalPath
val checkpointDir = Utils.createTempDir(namePrefix = "stream.checkpoint").getCanonicalPath
var query: StreamingQuery = null
try {
query =
ds.map(i => (i, i * 1000))
.toDF("id", "value")
.writeStream
.partitionBy("id")
.option("checkpointLocation", checkpointDir)
.format("parquet")
.start(outputDir)
inputData.addData(1, 2, 3)
failAfter(streamingTimeout) {
query.processAllAvailable()
}
val outputDf = spark.read.parquet(outputDir)
val expectedSchema = new StructType()
.add(StructField("value", IntegerType, nullable = false))
.add(StructField("id", IntegerType))
assert(outputDf.schema === expectedSchema)
// Verify that MetadataLogFileIndex is being used and the correct partitioning schema has
// been inferred
    val hadoopFsRelations = outputDf.queryExecution.analyzed.collect {
      case LogicalRelation(baseRelation: HadoopFsRelation, _, _, _) => baseRelation
    }
    assert(hadoopFsRelations.size === 1)
    assert(hadoopFsRelations.head.location.isInstanceOf[MetadataLogFileIndex])
    assert(hadoopFsRelations.head.partitionSchema.exists(_.name == "id"))
    assert(hadoopFsRelations.head.dataSchema.exists(_.name == "value"))
// Verify the data is correctly read
checkDatasetUnorderly(
outputDf.as[(Int, Int)],
(1000, 1), (2000, 2), (3000, 3))
/** Check some condition on the partitions of the FileScanRDD generated by a DF */
def checkFileScanPartitions(df: DataFrame)(func: Seq[FilePartition] => Unit): Unit = {
val getFileScanRDD = df.queryExecution.executedPlan.collect {
case scan: DataSourceScanExec if scan.inputRDDs().head.isInstanceOf[FileScanRDD] =>
scan.inputRDDs().head.asInstanceOf[FileScanRDD]
}.headOption.getOrElse {
fail(s"No FileScan in query\\n${df.queryExecution}")
}
func(getFileScanRDD.filePartitions)
}
// Read without pruning
checkFileScanPartitions(outputDf) { partitions =>
// There should be as many distinct partition values as there are distinct ids
assert(partitions.flatMap(_.files.map(_.partitionValues)).distinct.size === 3)
}
// Read with pruning, should read only files in partition dir id=1
checkFileScanPartitions(outputDf.filter("id = 1")) { partitions =>
val filesToBeRead = partitions.flatMap(_.files)
assert(filesToBeRead.map(_.filePath).forall(_.contains("/id=1/")))
assert(filesToBeRead.map(_.partitionValues).distinct.size === 1)
}
// Read with pruning, should read only files in partition dir id=1 and id=2
checkFileScanPartitions(outputDf.filter("id in (1,2)")) { partitions =>
val filesToBeRead = partitions.flatMap(_.files)
assert(!filesToBeRead.map(_.filePath).exists(_.contains("/id=3/")))
assert(filesToBeRead.map(_.partitionValues).distinct.size === 2)
}
} finally {
if (query != null) {
query.stop()
}
}
}
test("partitioned writing and batch reading with 'basePath'") {
withTempDir { outputDir =>
withTempDir { checkpointDir =>
val outputPath = outputDir.getAbsolutePath
val inputData = MemoryStream[Int]
val ds = inputData.toDS()
var query: StreamingQuery = null
try {
query =
ds.map(i => (i, -i, i * 1000))
.toDF("id1", "id2", "value")
.writeStream
.partitionBy("id1", "id2")
.option("checkpointLocation", checkpointDir.getAbsolutePath)
.format("parquet")
.start(outputPath)
inputData.addData(1, 2, 3)
failAfter(streamingTimeout) {
query.processAllAvailable()
}
val readIn = spark.read.option("basePath", outputPath).parquet(s"$outputDir/*/*")
checkDatasetUnorderly(
readIn.as[(Int, Int, Int)],
(1000, 1, -1), (2000, 2, -2), (3000, 3, -3))
} finally {
if (query != null) {
query.stop()
}
}
}
}
}
// This tests whether FileStreamSink works with aggregations. Specifically, it tests
  // whether the correct streaming QueryExecution (i.e. IncrementalExecution) is used
  // to execute the trigger for writing data to the file sink. See SPARK-18440 for more details.
test("writing with aggregation") {
// Since FileStreamSink currently only supports append mode, we will test FileStreamSink
// with aggregations using event time windows and watermark, which allows
// aggregation + append mode.
val inputData = MemoryStream[Long]
val inputDF = inputData.toDF.toDF("time")
val outputDf = inputDF
.selectExpr("CAST(time AS timestamp) AS timestamp")
.withWatermark("timestamp", "10 seconds")
.groupBy(window($"timestamp", "5 seconds"))
.count()
.select("window.start", "window.end", "count")
val outputDir = Utils.createTempDir(namePrefix = "stream.output").getCanonicalPath
val checkpointDir = Utils.createTempDir(namePrefix = "stream.checkpoint").getCanonicalPath
var query: StreamingQuery = null
try {
query =
outputDf.writeStream
.option("checkpointLocation", checkpointDir)
.format("parquet")
.start(outputDir)
def addTimestamp(timestampInSecs: Int*): Unit = {
inputData.addData(timestampInSecs.map(_ * 1L): _*)
failAfter(streamingTimeout) {
query.processAllAvailable()
}
}
def check(expectedResult: ((Long, Long), Long)*): Unit = {
val outputDf = spark.read.parquet(outputDir)
.selectExpr(
"CAST(start as BIGINT) AS start",
"CAST(end as BIGINT) AS end",
"count")
checkDataset(
outputDf.as[(Long, Long, Long)],
expectedResult.map(x => (x._1._1, x._1._2, x._2)): _*)
}
addTimestamp(100) // watermark = None before this, watermark = 100 - 10 = 90 after this
check() // nothing emitted yet
addTimestamp(104, 123) // watermark = 90 before this, watermark = 123 - 10 = 113 after this
      check((100L, 105L) -> 2L) // no-data-batch emits results on 100-105
addTimestamp(140) // wm = 113 before this, emit results on 100-105, wm = 130 after this
check((100L, 105L) -> 2L, (120L, 125L) -> 1L) // no-data-batch emits results on 120-125
} finally {
if (query != null) {
query.stop()
}
}
}
test("Update and Complete output mode not supported") {
val df = MemoryStream[Int].toDF().groupBy().count()
val outputDir = Utils.createTempDir(namePrefix = "stream.output").getCanonicalPath
withTempDir { dir =>
def testOutputMode(mode: String): Unit = {
val e = intercept[AnalysisException] {
df.writeStream.format("parquet").outputMode(mode).start(dir.getCanonicalPath)
}
Seq(mode, "not support").foreach { w =>
assert(e.getMessage.toLowerCase(Locale.ROOT).contains(w))
}
}
testOutputMode("update")
testOutputMode("complete")
}
}
test("parquet") {
    testFormat(None) // should not throw an error, as the format defaults to parquet when not specified
testFormat(Some("parquet"))
}
test("orc") {
testFormat(Some("orc"))
}
test("text") {
testFormat(Some("text"))
}
test("json") {
testFormat(Some("json"))
}
def testFormat(format: Option[String]): Unit = {
val inputData = MemoryStream[Int]
val ds = inputData.toDS()
val outputDir = Utils.createTempDir(namePrefix = "stream.output").getCanonicalPath
val checkpointDir = Utils.createTempDir(namePrefix = "stream.checkpoint").getCanonicalPath
var query: StreamingQuery = null
try {
val writer = ds.map(i => (i, i * 1000)).toDF("id", "value").writeStream
if (format.nonEmpty) {
writer.format(format.get)
}
query = writer.option("checkpointLocation", checkpointDir).start(outputDir)
} finally {
if (query != null) {
query.stop()
}
}
}
test("FileStreamSink.ancestorIsMetadataDirectory()") {
val hadoopConf = spark.sessionState.newHadoopConf()
def assertAncestorIsMetadataDirectory(path: String): Unit =
assert(FileStreamSink.ancestorIsMetadataDirectory(new Path(path), hadoopConf))
def assertAncestorIsNotMetadataDirectory(path: String): Unit =
assert(!FileStreamSink.ancestorIsMetadataDirectory(new Path(path), hadoopConf))
assertAncestorIsMetadataDirectory(s"/${FileStreamSink.metadataDir}")
assertAncestorIsMetadataDirectory(s"/${FileStreamSink.metadataDir}/")
assertAncestorIsMetadataDirectory(s"/a/${FileStreamSink.metadataDir}")
assertAncestorIsMetadataDirectory(s"/a/${FileStreamSink.metadataDir}/")
assertAncestorIsMetadataDirectory(s"/a/b/${FileStreamSink.metadataDir}/c")
assertAncestorIsMetadataDirectory(s"/a/b/${FileStreamSink.metadataDir}/c/")
assertAncestorIsNotMetadataDirectory(s"/a/b/c")
assertAncestorIsNotMetadataDirectory(s"/a/b/c/${FileStreamSink.metadataDir}extra")
}
test("SPARK-20460 Check name duplication in schema") {
Seq((true, ("a", "a")), (false, ("aA", "Aa"))).foreach { case (caseSensitive, (c0, c1)) =>
withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
val inputData = MemoryStream[(Int, Int)]
val df = inputData.toDF()
val outputDir = Utils.createTempDir(namePrefix = "stream.output").getCanonicalPath
val checkpointDir = Utils.createTempDir(namePrefix = "stream.checkpoint").getCanonicalPath
var query: StreamingQuery = null
try {
query =
df.writeStream
.option("checkpointLocation", checkpointDir)
.format("json")
.start(outputDir)
inputData.addData((1, 1))
failAfter(streamingTimeout) {
query.processAllAvailable()
}
} finally {
if (query != null) {
query.stop()
}
}
val errorMsg = intercept[AnalysisException] {
spark.read.schema(s"$c0 INT, $c1 INT").json(outputDir).as[(Int, Int)]
}.getMessage
assert(errorMsg.contains("Found duplicate column(s) in the data schema: "))
}
}
}
test("SPARK-23288 writing and checking output metrics") {
Seq("parquet", "orc", "text", "json").foreach { format =>
val inputData = MemoryStream[String]
val df = inputData.toDF()
withTempDir { outputDir =>
withTempDir { checkpointDir =>
var query: StreamingQuery = null
var numTasks = 0
var recordsWritten: Long = 0L
var bytesWritten: Long = 0L
try {
spark.sparkContext.addSparkListener(new SparkListener() {
override def onTaskEnd(taskEnd: SparkListenerTaskEnd) {
val outputMetrics = taskEnd.taskMetrics.outputMetrics
recordsWritten += outputMetrics.recordsWritten
bytesWritten += outputMetrics.bytesWritten
numTasks += 1
}
})
query =
df.writeStream
.option("checkpointLocation", checkpointDir.getCanonicalPath)
.format(format)
.start(outputDir.getCanonicalPath)
inputData.addData("1", "2", "3")
inputData.addData("4", "5")
failAfter(streamingTimeout) {
query.processAllAvailable()
}
spark.sparkContext.listenerBus.waitUntilEmpty(streamingTimeout.toMillis)
assert(numTasks > 0)
assert(recordsWritten === 5)
// This is heavily file type/version specific but should be filled
assert(bytesWritten > 0)
} finally {
if (query != null) {
query.stop()
}
}
}
}
}
}
}
|
bravo-zhang/spark
|
sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala
|
Scala
|
apache-2.0
| 15,884
|
package net.combinatory.rtm
/* (c) rtm-scala contributors, 2012. All rights reserved. */
import Responses._
import cc.spray.json._
object Invoke {
/*
  /** Right represents the data from the node we are interested in.
   * Left is failure with the fail message.
* A sample error response:
* <rsp stat="fail">
* <err msg="Invalid frob - did you authenticate?" code="101"></err>
* </rsp>
*/
def extractNode(resp: String, nodeName: String): Either[String, String] = {
val xml = scala.xml.XML.loadString(resp)
val stat = xml \\\\ "rsp" \\\\ "@stat" toString;
if (stat == "ok") Right((xml \\\\ "rsp" \\\\ nodeName).text)
//TODO add the err code
else Left(stat +" - "+ (xml \\\\ "rsp" \\\\ "err" \\\\ "@msg" toString))
}
def getTasks(token: String) = {
val allParams = ("auth_token", token) :: Nil
Http runMethod (Methods.taskGetList, allParams)
}
*/
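  // Behaviour sketch for the commented-out extractNode (values hypothetical):
  //   extractNode("""<rsp stat="ok"><frob>abc</frob></rsp>""", "frob")
  //     == Right("abc")
  //   extractNode("""<rsp stat="fail"><err msg="Invalid frob" code="101"/></rsp>""", "frob")
  //     == Left("fail - Invalid frob")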
}
|
comb/rtm-scala
|
src/main/scala/net/combinatory/rtm/Invoke.scala
|
Scala
|
apache-2.0
| 908
|
package org.edla.scalafxml.demo
import scalafx.beans.property.{ BooleanProperty, IntegerProperty }
import scalafx.collections.ObservableBuffer
import scalafx.scene.control.SingleSelectionModel
// scalastyle:off magic.number
class AudioConfigModel {
/** The minimum audio volume in decibels */
val minDecibels = 0.0
/** The maximum audio volume in decibels */
val maxDecibels = 160.0
/** The selected audio volume in decibels */
val selectedDBs = IntegerProperty(0)
/** Indicates whether audio is muted */
val muting = BooleanProperty(false)
/**
* List of some musical genres
*/
val genres = ObservableBuffer(
"Chamber",
"Country",
"Cowbell",
"Metal",
"Polka",
"Rock"
)
  /** A reference to the selection model used by the genre ChoiceBox */
var genreSelectionModel: SingleSelectionModel[String] = _
/**
* Adds a change listener to the selection model of the ChoiceBox, and contains
* code that executes when the selection in the ChoiceBox changes.
*/
def addListenerToGenreSelectionModel(): Unit = {
this.genreSelectionModel.selectedIndex.onChange({
selectedDBs.value = this.genreSelectionModel.selectedIndex() match {
case 0 ⇒ 80
case 1 ⇒ 100
case 2 ⇒ 150
case 3 ⇒ 140
case 4 ⇒ 120
case 5 ⇒ 130
}
})
()
}
}
|
newca12/scalafxml-demo
|
src/main/scala/org/edla/scalafxml/demo/AudioConfigModel.scala
|
Scala
|
gpl-3.0
| 1,415
|
package com.sg.pnx.graphics.glbuffers
import com.sg.pnx.reality.EntityBuilder
import com.sg.pnx.graphics.util.GLUtil
/**
* Created by bodie on 7/28/14.
*/
object VertexBufferBuilder {
def buildVertexBuffer( vertices: Array[Float] = Array[Float]( ),
parentId: Int = -1,
entityId: Int = EntityBuilder.getId( ) ): VertexBuffer = {
val( vaoId, vaoIndex, vboId ) = Buffers.addVerts( vertices )
val newVB = new VertexBuffer(
vboId = vboId,
vaoId = vaoId,
vaoIndex = vaoIndex,
vertexCount = vertices.length,
eId = entityId )
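    // Note (assumption): Buffers.registerVertexBuffer is taken to return the
    // registered VertexBuffer, since it is the final expression of a method
    // whose declared result type is VertexBuffer.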
Buffers.registerVertexBuffer( newVB.eId, newVB )
}
}
|
synapse-garden/phoenix
|
src/main/scala/com/sg/pnx/graphics/glbuffers/VertexBufferBuilder.scala
|
Scala
|
mit
| 702
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.validation
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala._
import org.apache.flink.table.api.{TableException, Types}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.runtime.stream.table.TestAppendSink
import org.apache.flink.table.utils.MemoryTableSinkUtil.UnsafeMemoryAppendTableSink
import org.apache.flink.table.utils.TableTestBase
import org.junit.Test
class TableSinksValidationTest extends TableTestBase {
@Test(expected = classOf[TableException])
def testAppendSinkOnUpdatingTable(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Int, Long, String)]("MyTable", 'id, 'num, 'text)
t.groupBy('text)
.select('text, 'id.count, 'num.sum)
// must fail because table is not append-only
.writeToSink(new TestAppendSink)
}
@Test(expected = classOf[TableException])
def testSinkTableRegistrationUsingExistedTableName(): Unit = {
val util = streamTestUtil()
util.addTable[(Int, String)]("TargetTable", 'id, 'text)
val fieldNames = Array("a", "b", "c")
val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.INT, Types.LONG)
// table name already registered
util.tableEnv
.registerTableSink("TargetTable", fieldNames, fieldTypes, new UnsafeMemoryAppendTableSink)
}
@Test(expected = classOf[TableException])
def testRegistrationWithInconsistentFieldNamesAndTypesLength(): Unit = {
val util = streamTestUtil()
// inconsistent length of field names and types
val fieldNames = Array("a", "b", "c")
val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.LONG)
util.tableEnv
.registerTableSink("TargetTable", fieldNames, fieldTypes, new UnsafeMemoryAppendTableSink)
}
}
|
PangZhi/flink
|
flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/validation/TableSinksValidationTest.scala
|
Scala
|
apache-2.0
| 2,624
|
package bank5
import akka.actor.{ ActorSystem, Actor, Props, ActorRef }
import akka.dispatch.Future
case object Start
case object RegisterSender
// Use bank.prop in the code or Bank() or Bank(-1)
// See http://doc.akka.io/docs/akka/snapshot/scala/actors.html#Recommended_Practices
object Bank {
def props(): Props = Props(new Bank())
def apply(): Props = Props(new Bank())
}
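// Usage sketch (actor system creation omitted; names hypothetical):
//   val bank = system.actorOf(Bank(), "bank")
//   bank ! Start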
class Bank() extends Actor {
import context.dispatcher
var dest: ActorRef = _
def receive = {
case Start => {
dest = sender
      val account1 = context.actorOf(Account("Freddy", 250), "Account_Freddy") // Create child actors that will host the accounts
      val account2 = context.actorOf(Account("Charlie", 250), "Account_Charlie")
      val account3 = context.actorOf(Account("Johnny", 0), "Account_Johnny")
account1 ! Transfer(account3, 250) // Freddy makes a transaction to Johnny for an amount of 250.
account2 ! Transfer(account3, 250) // Charlie makes a transaction to Johnny for an amount of 250.
      account3 ! Balance // Query the balance of Johnny's account, should be 500
}
case amount: Int => {
println(Console.YELLOW + Console.BOLD+"BANK: registered an amount of %d".format(amount) + Console.RESET)
dest ! amount
}
case _ => println(Console.YELLOW + Console.BOLD+"BANK: 'FATAL ERROR'"+Console.RESET)
}
}
|
Tjoene/thesis
|
benchmark/src/main/scala/bank5/Bank.scala
|
Scala
|
gpl-2.0
| 1,455
|
package utils
import javax.inject.Singleton
import com.google.inject.Inject
import controllers.WebJarAssets
import play.api.http.HttpErrorHandler
import play.api.i18n.{I18nSupport, MessagesApi}
import play.api.mvc.Results._
import play.api.mvc._
import scala.concurrent._
@Singleton
class ErrorHandler @Inject()(val messagesApi: MessagesApi)(implicit assets: WebJarAssets) extends HttpErrorHandler with I18nSupport {
def onClientError(request: RequestHeader, statusCode: Int, message: String) = {
Future.successful(
if(statusCode == play.api.http.Status.NOT_FOUND) {
Ok(views.html.errors.notFound(request))
} else {
Status(statusCode)("A client error occurred: " + message)
}
)
}
def onServerError(request: RequestHeader, exception: Throwable) = {
Future.successful(
InternalServerError("A server error occurred: " + exception.getMessage)
)
}
}
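// Registration sketch: Play picks this handler up when configured in
// application.conf, e.g. play.http.errorHandler = "utils.ErrorHandler"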
|
asciiu/halo
|
arbiter/app/utils/ErrorHandler.scala
|
Scala
|
mit
| 914
|
/**
* sbt-osgi-manager - OSGi development bridge based on Bnd and Tycho.
*
* Copyright (c) 2016 Alexey Aksenov ezh@ezh.msk.ru
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sbt.osgi.manager.support
import java.net.{ URL, URLClassLoader }
import org.digimead.sbt.util.SLF4JBridge
import sbt.osgi.manager.Plugin
import scala.language.implicitConversions
class PluginClassLoader(internal: Array[URL], external: Array[URL], parent: ClassLoader, reloadPrefix: Seq[String])
extends URLClassLoader(internal, parent) with SLF4JBridge.Loader {
protected val externalURLClassLoader = new URLClassLoader(external)
override def getResource(name: String): URL =
Option(findResource(name)).getOrElse(parent.getResource(name))
@throws(classOf[ClassNotFoundException])
override def loadClass(name: String): Class[_] =
loadClass(name, true)
@throws(classOf[ClassNotFoundException])
override def loadClass(name: String, resolve: Boolean): Class[_] = {
val result = name match {
case this.SLF4JBinderTargetName ⇒
// transform with ASM
val bytecode = loadSLF4JBinder(name)
defineClass(name, bytecode, 0, bytecode.length)
case name if reloadPrefix.find(name.startsWith).nonEmpty ⇒
Option(findLoadedClass(name)) getOrElse {
try findClass(name) catch {
case e: ClassNotFoundException ⇒
// reload with this class loader
loadClass(parent.loadClass(name))
}
}
case name if PluginClassLoader.passToParent.find(name.startsWith).nonEmpty ⇒
// pass to parent
parent.loadClass(name)
case name ⇒
        // try to load the class ourselves; on failure, delegate to the parent
        // only when the name is resolvable via externalURLClassLoader
Option(findLoadedClass(name)) getOrElse {
try {
val clazz = findClass(name)
            // debugging aid (left disabled):
// if (name.startsWith("org.osgi"))
// println(clazz.getProtectionDomain.getCodeSource.getLocation)
clazz
} catch {
case e: ClassNotFoundException ⇒
              // Gate on externalURLClassLoader: if it cannot resolve the name
              // either, its ClassNotFoundException propagates; otherwise fall
              // through to the parent
externalURLClassLoader.loadClass(name)
parent.loadClass(name)
case e: Throwable ⇒
if (Plugin.debug.nonEmpty)
println("PluginClassLoader exception: " + e)
parent.loadClass(name)
}
}
}
if (resolve) resolveClass(result)
result
}
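  /** Re-defines an already loaded class inside this class loader by reading
    * its bytecode from the classpath, so the resulting Class is owned by
    * this loader rather than its parent. */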
def loadClass(clazz: Class[_]): Class[_] = {
val is = clazz.getResourceAsStream('/' + clazz.getName().replace('.', '/') + ".class")
val bytes = Stream.continually(is.read).takeWhile(_ != -1).map(_.toByte).toArray
defineClass(clazz.getName, bytes, 0, bytes.length)
}
}
object PluginClassLoader {
implicit def PluginClassLoader2implementation(p: PluginClassLoader.type): PluginClassLoader = p.inner
/** PluginClassLoader implementation. */
lazy val inner = {
getClass.getClassLoader() match {
case loader: URLClassLoader ⇒
val pluginJar = getClass.getProtectionDomain.getCodeSource.getLocation
val internalURLs = pluginJar +: loader.getURLs().filter { url ⇒ PluginClassLoader.internalLibraries.find { url.toString().contains }.nonEmpty }
val externalURLs = loader.getURLs().filter { url ⇒ PluginClassLoader.externalLibraries.find { url.toString().contains }.nonEmpty }
new PluginClassLoader(internalURLs, externalURLs, loader, Seq("sbt.osgi.manager.tycho", "sbt.osgi.manager.bnd"))
case classLoader ⇒
throw new IllegalStateException("Unable to create PluginClassLoader with unexpected parent class loader " + classLoader.getClass)
}
}
/** List of URL parts of libraries that PluginClassLoader must load itself. */
val internalLibraries = Seq(
"org.apache.maven.wagon/wagon-provider-api",
"org.apache.maven/maven-aether-provider",
"org.apache.maven/maven-artifact",
"org.apache.maven/maven-builder-support",
"org.apache.maven/maven-compat",
"org.apache.maven/maven-core",
"org.apache.maven/maven-embedder",
"org.apache.maven/maven-model",
"org.apache.maven/maven-model-builder",
"org.apache.maven/maven-plugin-api",
"org.apache.maven/maven-repository-metadata",
"org.apache.maven/maven-settings",
"org.apache.maven/maven-settings-builder",
"org.codehaus.plexus/plexus-archiver",
"org.codehaus.plexus/plexus-classworlds",
"org.codehaus.plexus/plexus-component-annotations",
"org.codehaus.plexus/plexus-interpolation",
"org.codehaus.plexus/plexus-io",
"org.codehaus.plexus/plexus-utils",
"org.digimead/sbt-osgi-manager",
"org.eclipse.aether/aether-api",
"org.eclipse.aether/aether-connector-basic",
"org.eclipse.aether/aether-impl",
"org.eclipse.aether/aether-spi",
"org.eclipse.aether/aether-transport-file",
"org.eclipse.aether/aether-transport-http",
"org.eclipse.aether/aether-transport-wagon",
"org.eclipse.aether/aether-util",
"org.eclipse.sisu/org.eclipse.sisu.inject",
"org.eclipse.sisu/org.eclipse.sisu.plexus",
"org.eclipse.tycho/org.eclipse.osgi",
"org.eclipse.tycho/org.eclipse.osgi.compatibility.state",
"org.eclipse.tycho/org.eclipse.tycho.core.shared",
"org.eclipse.tycho/org.eclipse.tycho.embedder.shared",
"org.eclipse.tycho/org.eclipse.tycho.p2.resolver.shared",
"org.eclipse.tycho/org.eclipse.tycho.p2.tools.shared",
"org.eclipse.tycho/sisu-equinox-api",
"org.eclipse.tycho/sisu-equinox-embedder",
"org.eclipse.tycho/tycho-core",
"org.eclipse.tycho/tycho-embedder-api",
"org.eclipse.tycho/tycho-metadata-model",
"org.eclipse.tycho/tycho-p2-facade",
"org.slf4j/jcl-over-slf4j",
"org.slf4j/slf4j-api",
"org.sonatype.aether/aether-api",
"org.sonatype.aether/aether-impl",
"org.sonatype.aether/aether-spi",
"org.sonatype.aether/aether-util",
"org.sonatype.plexus/plexus-cipher",
"org.sonatype.plexus/plexus-sec-dispatcher")
/** List of URL parts of libraries that PluginClassLoader must delegate to parent. */
val externalLibraries = Seq(
"aopalliance/aopalliance",
"biz.aQute.bnd/bndlib",
"com.google.guava/guava",
"com.google.inject/guice",
"com.jcraft/jsch",
"com.thoughtworks.paranamer/paranamer",
"commons-cli/commons-cli",
"commons-codec/commons-codec",
"commons-io/commons-io",
"de.pdark/decentxml",
"javax.annotation/jsr250-api",
"javax.enterprise/cdi-api",
"javax.inject/javax.inject",
"jline/jline",
"org.apache.commons/commons-compress",
"org.apache.commons/commons-lang3",
"org.apache.httpcomponents/httpclient",
"org.apache.httpcomponents/httpcore",
"org.digimead/digi-sbt-util",
"org.fusesource.jansi/jansi",
"org.json4s/json4s-ast",
"org.json4s/json4s-core",
//"org.osgi/org.osgi.annotation",
//"org.osgi/org.osgi.core",
//"org.osgi/org.osgi.enterprise",
"org.ow2.asm/asm",
"org.ow2.asm/asm-commons",
"org.ow2.asm/asm-tree",
"org.scala-lang.modules/scala-pickling",
"org.scala-sbt.ivy/ivy",
"org.scala-sbt/actions",
"org.scala-sbt/api",
"org.scala-sbt/apply-macro",
"org.scala-sbt/cache",
"org.scala-sbt/classfile",
"org.scala-sbt/classpath",
"org.scala-sbt/collections",
"org.scala-sbt/command",
"org.scala-sbt/compile",
"org.scala-sbt/compiler-integration",
"org.scala-sbt/compiler-interface",
"org.scala-sbt/compiler-ivy-integration",
"org.scala-sbt/completion",
"org.scala-sbt/control",
"org.scala-sbt/cross",
"org.scala-sbt/incremental-compiler",
"org.scala-sbt/interface",
"org.scala-sbt/io",
"org.scala-sbt/ivy",
"org.scala-sbt/launcher-interface",
"org.scala-sbt/logging",
"org.scala-sbt/logic",
"org.scala-sbt/main-settings",
"org.scala-sbt/main",
"org.scala-sbt/persist",
"org.scala-sbt/process",
"org.scala-sbt/relation",
"org.scala-sbt/run",
"org.scala-sbt/sbt",
"org.scala-sbt/serialization",
"org.scala-sbt/task-system",
"org.scala-sbt/tasks",
"org.scala-sbt/test-agent",
"org.scala-sbt/test-interface",
"org.scala-sbt/testing",
"org.scala-sbt/tracking",
"org.scala-tools.sbinary/sbinary",
"org.scalamacros/quasiquotes",
"org.spire-math/jawn-parser",
"org.spire-math/json4s-support")
/** List of class names than passes to parent class loader. */
val passToParent = Seq(
"java.",
"org.apache.maven.model.Dependency",
"org.digimead.sbt.util.StaticLoggerBinder",
"org.slf4j.ILoggerFactory",
"org.slf4j.spi.LoggerFactoryBinder",
"org.eclipse.tycho.core.ee.shared.ExecutionEnvironmentConfiguration",
"sbt.",
"scala.")
}
|
digimead/sbt-osgi-manager
|
src/main/scala/sbt/osgi/manager/support/PluginClassLoader.scala
|
Scala
|
apache-2.0
| 9,292
|
package com.twitter.util
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class CloseAwaitablyTest extends FunSuite {
def make() = new Closable with CloseAwaitably {
val p = new Promise[Unit]
var n = 0
def close(deadline: Time) = closeAwaitably {
n += 1
p
}
}
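  // closeAwaitably memoizes the first close: the returned Future is cached,
  // so repeated close() calls run the body exactly once (asserted below).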
test("close") {
val c = make()
assert(c.n == 0)
val f = c.close(Time.now)
assert(f != c.p)
assert(c.n == 1)
assert(c.close(Time.now) == f)
assert(c.n == 1)
assert(f.poll == None)
c.p.setDone()
assert(f.poll == Some(Return.Unit))
}
test("Await.ready") {
val c = make()
val t = new Thread {
start()
override def run() {
Await.ready(c)
}
}
c.close(Time.now)
assert(t.isAlive)
c.p.setDone()
t.join(10000)
assert(!t.isAlive)
}
}
|
luciferous/util
|
util-core/src/test/scala/com/twitter/util/AwaitableTest.scala
|
Scala
|
apache-2.0
| 910
|
package extruder.instances
import cats.Eq
import cats.instances.all._
import cats.laws.discipline.InvariantTests
import cats.syntax.applicative._
import extruder.core.{Decoder, Settings}
import extruder.data.Validation
import extruder.map._
import org.scalacheck.Arbitrary
import org.scalatest.FunSuite
import org.typelevel.discipline.scalatest.Discipline
class DecoderInstancesSuite extends FunSuite with Discipline {
import DecoderInstancesSuite._
checkAll("DecoderT", InvariantTests[Decoder[Validation, Settings, ?, Map[String, String]]].invariant[Int, Int, Int])
}
object DecoderInstancesSuite {
implicit def decoderTArb[A](
implicit arb: Arbitrary[A]
): Arbitrary[Decoder[Validation, Settings, A, Map[String, String]]] =
Arbitrary(
arb.arbitrary
.map(a => Decoder.make[Validation, Settings, A, Map[String, String]]((_, _, _, _) => a.pure[Validation]))
)
implicit def decoderTEq[A: Eq]: Eq[Decoder[Validation, Settings, A, Map[String, String]]] =
Eq.by(_.read(List.empty, defaultSettings, None, Map.empty))
}
|
janstenpickle/extruder
|
core/src/test/scala/extruder/instances/DecoderInstancesSuite.scala
|
Scala
|
mit
| 1,059
|
/*
* Copyright 2015-2020 Noel Welsh
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package doodle
package interact
package animation
import cats.Invariant
import cats.syntax.invariant._
import doodle.core.Angle
import doodle.interact.easing.Easing
/**
* An interpolator constructs a transducer from a starting value, a stopping value,
* and the number of elements or steps to produce between these values.
*/
trait Interpolator[A] {
/**
* Enumerate a half-open interval, starting with start and ending with stop.
* The uneased case allows exact computation of the interval while the easing
* will probably introduce numeric error.
*/
def halfOpen(start: A, stop: A, steps: Long): Transducer[A]
/**
* Enumerate a half-open interval, starting with start and ending with stop,
* and passed through the given easing.
*/
def halfOpen(start: A, stop: A, steps: Long, easing: Easing): Transducer[A]
/**
   * Interpolate a closed interval, starting with start and ending with stop.
   * The uneased case allows exact computation of the
* interval while the easing will probably introduce numeric error.
*/
def closed(start: A, stop: A, steps: Long): Transducer[A]
/**
   * Interpolate a closed interval, starting with start and ending with stop,
   * passed through the given easing.
*/
def closed(start: A, stop: A, steps: Long, easing: Easing): Transducer[A]
}
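// Usage sketch (relies on the Double instance defined below):
//   import doodle.interact.animation.Interpolator._
//   // five evenly spaced values in [0.0, 1.0): 0.0, 0.2, 0.4, 0.6, 0.8
//   val t = doubleInterpolator.halfOpen(0.0, 1.0, 5)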
object Interpolator {
/**
* Invariant functor instance for Interpolator
*/
implicit object interpolatorInvariant extends Invariant[Interpolator] {
def imap[A,B](fa: Interpolator[A])(f: A => B)(g: B => A): Interpolator[B] =
new Interpolator[B] {
def halfOpen(start: B, stop: B, steps: Long): Transducer[B] =
fa.halfOpen(g(start), g(stop), steps).map(f)
def halfOpen(start: B, stop: B, steps: Long, easing: Easing): Transducer[B] =
fa.halfOpen(g(start), g(stop), steps, easing).map(f)
def closed(start: B, stop: B, steps: Long): Transducer[B] =
fa.closed(g(start), g(stop), steps).map(f)
def closed(start: B, stop: B, steps: Long, easing: Easing): Transducer[B] =
fa.closed(g(start), g(stop), steps, easing).map(f)
}
}
/**
* Perform Kahan summation given the total so far, the value to add to the
* total, and the error term (which starts at 0.0). Returns the updated total
* and the new error term.
*
* Kahan's algorithm is a way to sum floating point numbers that reduces error
* compared to straightforward addition.
*/
def kahanSum(total: Double, x: Double, error: Double): (Double, Double) = {
val y = x - error
val nextTotal = total + y
val nextError = (nextTotal - total) - y
(nextTotal, nextError)
}
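  // Worked sketch: summing 0.1 ten times. A naive fold accumulates rounding
  // error; threading the error term through kahanSum compensates for it:
  //   val (total, _) = (1 to 10).foldLeft((0.0, 0.0)) {
  //     case ((t, e), _) => kahanSum(t, 0.1, e)
  //   }
  //   // the compensated total carries less rounding error than a naive sum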
/**
* Interpolator instance for Double
*/
implicit val doubleInterpolator: Interpolator[Double] =
new Interpolator[Double] {
def halfOpen(
start: Double,
stop: Double,
steps: Long
): Transducer[Double] =
if (start == stop) Transducer.empty
else
new Transducer[Double] {
// State is the current value and the number of steps
type State = (Double, Long)
val increment = (stop - start) / steps
val initial: State = (start, 0)
def next(current: State): State = {
val (x, s) = current
(x + increment, s + 1)
}
def output(state: State): Double = {
val (x, _) = state
x
}
def stopped(state: State): Boolean = {
val (x, s) = state
if (s >= steps) true
else if (stop >= start) (x >= stop)
else (x <= stop)
}
}
def halfOpen(
start: Double,
stop: Double,
steps: Long,
easing: Easing
): Transducer[Double] =
if (start == stop) Transducer.empty
else
new Transducer[Double] {
// The state consists of a number between [0, 1) that we project to
// [start, stop) and the number of steps taken. We count steps so we
// can stop exactly at the right time, which otherwise due to numeric
// error may not happen.
type State = (Double, Long)
val increment = 1.0 / steps
val initial: State = (0.0, 0)
// Convert [0, 1) to [start, stop)
def project(x: Double): Double =
start + (easing(x) * (stop - start))
def next(current: State): State = {
val (x, s) = current
(x + increment, s + 1)
}
def output(state: State): Double = {
val (x, _) = state
project(x)
}
def stopped(state: State): Boolean = {
val (x, s) = state
if (s >= steps) true
else (x >= 1.0)
}
}
def closed(
start: Double,
stop: Double,
steps: Long
): Transducer[Double] =
if (start == stop) Transducer.pure(stop)
else
new Transducer[Double] {
// State = (Current value, Steps, Error)
// Error is for Kahan summation
type State = (Double, Long, Double)
val increment = (stop - start) / (steps - 1)
val initial: State = (start, 0, 0.0)
def next(current: State): State = {
val (total, steps, error) = current
val (nextTotal, nextError) = kahanSum(total, increment, error)
(nextTotal, steps + 1, nextError)
}
def output(state: State): Double = {
val (total, s, _) = state
if (s + 1 >= steps) stop
else total
}
def stopped(state: State): Boolean = {
val (_, s, _) = state
(s >= steps)
}
}
def closed(
start: Double,
stop: Double,
steps: Long,
easing: Easing
): Transducer[Double] =
if (start == stop) Transducer.pure(stop)
else
new Transducer[Double] {
// The state consists of a number between [0, 1] that we project to
// [start, stop], the number of steps taken, and the error for Kahan
// summation. We count steps so we can stop exactly at the right
// time, which otherwise due to numeric error may not happen.
type State = (Double, Long, Double)
val increment = 1.0 / (steps - 1)
val initial: State = (0.0, 0, 0.0)
// Convert [0, 1] to [start, stop]
def project(x: Double): Double =
start + (easing(x) * (stop - start))
def next(current: State): State = {
              val (total, count, error) = current
              val (nextTotal, nextError) = kahanSum(total, increment, error)
              (nextTotal, count + 1, nextError)
}
def output(state: State): Double = {
val (total, s, _) = state
if (s + 1 >= steps) stop
else project(total)
}
def stopped(state: State): Boolean = {
val (_, s, _) = state
(s >= steps)
}
}
}
/**
* Interpolator instance for Angle
*/
implicit val angleInterpolator: Interpolator[Angle] =
doubleInterpolator.imap(turns => Angle.turns(turns))(angle => angle.toTurns)
}
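
// Illustrative usage sketch (added for this edit, not part of the original
// file). It drains a Transducer by stepping the initial/next/output/stopped
// state machine defined in this file, and demonstrates kahanSum.
object InterpolatorExample {
  def drain(t: Transducer[Double]): List[Double] = {
    val builder = List.newBuilder[Double]
    var state = t.initial
    while (!t.stopped(state)) {
      builder += t.output(state)
      state = t.next(state)
    }
    builder.result()
  }

  // Kahan demo: naively adding 0.1 ten million times drifts from the exact
  // total, while threading the error term through kahanSum stays closer.
  def kahanDemo(): Unit = {
    var naive = 0.0
    var total = 0.0
    var error = 0.0
    var i = 0
    while (i < 10000000) {
      naive += 0.1
      val (t, e) = Interpolator.kahanSum(total, 0.1, error)
      total = t
      error = e
      i += 1
    }
    println(s"naive = $naive, kahan = $total")
  }

  // closed(0.0, 1.0, 5) yields 0.0, 0.25, 0.5, 0.75, 1.0;
  // halfOpen(0.0, 1.0, 5) yields 0.0, 0.2, 0.4, 0.6, 0.8 (modulo rounding).
  def main(args: Array[String]): Unit = {
    println(drain(Interpolator.doubleInterpolator.closed(0.0, 1.0, 5)))
    println(drain(Interpolator.doubleInterpolator.halfOpen(0.0, 1.0, 5)))
    kahanDemo()
  }
}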
|
underscoreio/doodle
|
interact/shared/src/main/scala/doodle/interact/animation/Interpolator.scala
|
Scala
|
apache-2.0
| 8,214
|
/*
* Copyright 2016 Guy Van den Broeck and Wannes Meert (UCLA and KU Leuven)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.ucla.cs.starai.forclift.languages
import java.io.{FileNotFoundException, IOException}
import collection._
import scala.util.parsing.combinator._
import scala.io._
import edu.ucla.cs.starai.forclift.compiler._
import edu.ucla.cs.starai.forclift._
import edu.ucla.cs.starai.forclift.constraints._
import edu.ucla.cs.starai.forclift.languages._
import edu.ucla.cs.starai.forclift.inference._
class FactorGraphParser extends JavaTokenParsers with ModelParser {
def parseModel(theoryString: String) = parseFactorGraph(theoryString)
/**
* If you want the same namespace for this atom and a theory, then use the same Parser object.
*/
def parseAtom(str: String): Atom = parseAll(atom, str).get
def applyParsers[T](parsers: List[Parser[T]], line: String): T = parsers match {
case parser :: rest => {
val result = parseAll(parser, line)
// println(parser.toString +" on "+line +" gives ")
// println(result)
if (result.successful) result.get
else applyParsers(rest, line)
}
case Nil => throw new IllegalArgumentException("Failed to parse: " + line)
}
def domainSizes: DomainSizes = domainMap.values.map {
case (k, v) =>
(k.asInstanceOf[Domain], DomainSize(v, k.asInstanceOf[Domain]))
}.toMap
def predicateWeights: PredicateWeights = weightMap.toMap
lazy val kwType: Parser[String] = "predicate".r
lazy val kwDomain: Parser[String] = "domain".r
val domainMap = new mutable.HashMap[String, (RootDomain, Int)]
lazy val domainLine: Parser[Domain] = (
kwDomain ~ """[A-Z][a-zA-Z0-9]*""".r ~ wholeNumber ~ opt("""\\{""".r ~ repsep(constant, ",") ~ """\\}""".r) ^^ {
case _ ~ name ~ size ~ constantsOption => {
val constants = if (constantsOption.isEmpty) List[Constant]() else {
val Some(_ ~ c ~ _) = constantsOption
c
}
domainMap.getOrElseUpdate(name, (new RootDomain(name, constants.toList), size.toInt))._1
}
}
| failure("Illegal domain"))
val predicateMap = new mutable.HashMap[(String, Int), Predicate]
val weightMap = new mutable.HashMap[Predicate, Weights]
lazy val typeLine: Parser[Predicate] = (
kwType ~ predicate ~ opt("(" ~ rep1sep("""[A-Z][a-zA-Z0-9]*""".r, ",") ~ ")") ~ weights ^^ {
case _ ~ predicateName ~ argList ~ weights =>
val domainNames = argList match {
case Some(_ ~ domainNames ~ _) => domainNames
case None => List()
}
val domains = domainNames.map { domainName =>
domainMap.getOrElse(domainName, throw new IllegalStateException("Unknown domain " + domainName))._1
}
val arity = domains.size
assume(!predicateMap.contains((predicateName, arity)))
val predicate = new Predicate(Symbol(predicateName), arity, domains)
predicateMap((predicateName, arity)) = predicate
weightMap(predicate) = weights
predicate
}
| failure("Illegal type"))
lazy val weights: Parser[Weights] = opt(floatingPointNumber ~ floatingPointNumber) ^^ {
case None => Weights(1, 1)
case Some(p ~ n) => Weights(p.toDouble, n.toDouble)
}
lazy val literal: Parser[(Boolean, Atom)] = (
atom ^^ {
(true, _)
}
| """[!¬]""".r ~ atom ^^ {
case _ ~ a => (false, a)
}
| failure("Illegal literal"))
lazy val atom: Parser[Atom] = (
predicate ~ opt("(" ~> rep1sep(term, ",") <~ ")")
^^ {
case name ~ None => {
val predicate = predicateMap.getOrElseUpdate((name, 0), new Predicate(Symbol(name), 0))
predicate()
}
case name ~ Some(arglist) => {
val predicate = predicateMap.getOrElseUpdate((name, arglist.size), new Predicate(Symbol(name), arglist.size))
assume((arglist zip predicate.domains).forall {
case (a, d) =>
!a.isInstanceOf[Constant] || d.contains(a.asInstanceOf[Constant])
})
predicate(arglist: _*)
}
}
| failure("Illegal atom"))
lazy val term: Parser[Term] = (
constant | variable | failure("Illegal term"))
lazy val constant: Parser[Constant] = """[a-z0-9]+""".r ^^ { Constant(_) } | failure("Illegal constant")
val varMap = new mutable.HashMap[String, Var]
lazy val variable: Parser[Var] = (
"""[A-Z][a-zA-Z0-9]*""".r ^^ { name =>
varMap.getOrElseUpdate(name, new Var)
}
| failure("Illegal variable"))
val predicate = """([a-z0-9_{}])+""".r
def parseFactorGraph(str: String): FactorGraph = {
val sourceFile = Source.fromString(str)
try {
parseFactorGraph(sourceFile)
} catch {
      case e: FileNotFoundException => throw new Exception(s"Can't find file $str")
      case e: IOException => throw new Exception(s"Problem with file $str: " + e.getMessage)
}
finally {
sourceFile.close()
}
}
def parseFactorGraph(source: Source): FactorGraph = {
val lineTypes = List(domainLine, typeLine, ifthenelse, ifthen, conjunctiveFactor, disjunctiveFactor, query)
val lines = source.getLines.map { _.trim }.filter { _.nonEmpty }.map { line =>
applyParsers(lineTypes, line)
}.toList
val parFactors = lines.collect { case bf: BrazFactor => bf }
val factorGraph = new FactorGraph(parFactors.toList, domainSizes, predicateWeights)
factorGraph
}
lazy val factorGraph: Parser[FactorGraph] = (
rep(brazLine) ^^ { lines =>
FactorGraph(lines.collect { case bf: BrazFactor => bf }, DomainSizes.empty, PredicateWeights.empty)
}
| failure("Illegal FactorGraph"))
lazy val brazLine = domainLine | typeLine | parfactor | failure("Illegal Braz line")
  // order: specific to general?
lazy val parfactor: Parser[BrazFactor] = (ifthenelse | ifthen | conjunctiveFactor | disjunctiveFactor | query | failure("Illegal BrazFactor"))
lazy val query: Parser[Query] = (
brazLiteral ^^ { Query(_) }
| failure("Illegal Query"))
lazy val conjunctiveFactor: Parser[ConjunctiveFactor] = (
rep1sep(brazLiteral, """and""".r) ~ floatingPointNumber ~ floatingPointNumber ^^ {
case literals ~ weight ~ negWeight => {
ConjunctiveFactor(literals, weight.toDouble, negWeight.toDouble)
}
}
| failure("Illegal conjunctive factor"))
lazy val disjunctiveFactor: Parser[DisjunctiveFactor] = (
rep1sep(brazLiteral, ("""or""".r | """v""".r)) ~ opt(floatingPointNumber ~ floatingPointNumber) ^^ {
case literals ~ Some(weight ~ negWeight) => {
DisjunctiveFactor(literals, weight.toDouble, negWeight.toDouble)
}
case literals ~ None => {
DisjunctiveFactor(literals, 1, 0)
}
}
| failure("Illegal disjunctive factor"))
lazy val ifthen: Parser[IfThen] = (
"if\\\\b".r ~ brazLiteral ~ "then\\\\b".r ~ brazLiteral ~ floatingPointNumber ^^ {
case _ ~ il ~ _ ~ tl ~ w => IfThen(il, tl, w.toDouble)
}
| failure("Illegal IfThen"))
lazy val ifthenelse: Parser[IfThenElse] = (
"if\\\\b".r ~ brazLiteral ~ "then\\\\b".r ~ brazLiteral ~ floatingPointNumber ~ "else\\\\b".r ~ floatingPointNumber ^^ {
case _ ~ il ~ _ ~ tl ~ p ~ _ ~ q => IfThenElse(il, tl, p.toDouble, q.toDouble)
}
| failure("Illegal IfThenElse"))
lazy val brazLiteral: Parser[(Boolean, Atom)] = (
not(reserved) ~> literal
// literal
| failure("Illegal BrazLiteral"))
lazy val reserved: Parser[String] = (kwIf | kwThen | kwElse | kwAnd | kwOr | kwType | kwDomain)
val kwIf: Parser[String] = "if".r
val kwThen: Parser[String] = "then".r
val kwElse: Parser[String] = "else".r
val kwAnd: Parser[String] = "and".r
val kwOr: Parser[String] = "or".r
}
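
// Illustrative usage sketch (added for this edit, not part of the original
// file). The model text is hypothetical but follows the line grammar defined
// above: a domain line, a predicate line with weights, and a disjunctive
// parfactor (which defaults to weights 1/0 when none are given).
object FactorGraphParserExample {
  def main(args: Array[String]): Unit = {
    val model =
      """domain Person 2 {alice, bob}
        |predicate friends(Person,Person) 1.0 0.5
        |friends(X,Y) or friends(Y,X)
        |""".stripMargin
    val parser = new FactorGraphParser
    val factorGraph = parser.parseFactorGraph(model)
    println(factorGraph)
  }
}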
|
UCLA-StarAI/Forclift
|
src/main/scala/edu/ucla/cs/starai/forclift/languages/FactorGraphParser.scala
|
Scala
|
apache-2.0
| 8,251
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.controller
import akka.Done
import akka.actor.{ActorSystem, CoordinatedShutdown}
import akka.event.Logging.InfoLevel
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.server.Route
import kamon.Kamon
import org.apache.openwhisk.common.Https.HttpsConfig
import org.apache.openwhisk.common._
import org.apache.openwhisk.core.{ConfigKeys, WhiskConfig}
import org.apache.openwhisk.core.connector.MessagingProvider
import org.apache.openwhisk.core.containerpool.logging.LogStoreProvider
import org.apache.openwhisk.core.database.{ActivationStoreProvider, CacheChangeNotification, RemoteCacheInvalidation}
import org.apache.openwhisk.core.entitlement._
import org.apache.openwhisk.core.entity.ActivationId.ActivationIdGenerator
import org.apache.openwhisk.core.entity.ExecManifest.Runtimes
import org.apache.openwhisk.core.entity._
import org.apache.openwhisk.core.loadBalancer.LoadBalancerProvider
import org.apache.openwhisk.http.{BasicHttpService, BasicRasService}
import org.apache.openwhisk.spi.SpiLoader
import pureconfig._
import spray.json.DefaultJsonProtocol._
import spray.json._
import pureconfig.generic.auto._
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits
import scala.concurrent.duration.DurationInt
import scala.util.{Failure, Success}
/**
* The Controller is the service that provides the REST API for OpenWhisk.
*
* It extends the BasicRasService so it includes a ping endpoint for monitoring.
*
* Akka sends messages to akka Actors -- the Controller is an Actor, ready to receive messages.
*
 * It is possible to deploy a hot-standby controller. Each controller needs its own instance id; instance ids are
 * numbered consecutively, starting at 0.
 * The state and cache of each controller are not shared with the other controllers.
 * If the base controller crashes, the hot-standby controller will be used. After the base controller is up again,
 * it will be used again. Because the cache is empty after a restart, there are no problems with inconsistency.
 * The only problem that could occur is that the base controller is not reachable but does not restart. After switching
 * back to the base controller, there could be an inconsistency in the cache (e.g. if a user has updated an action). This
 * inconsistency resolves itself once the cached item is removed, 5 minutes after it was generated.
*
* Uses the Akka routing DSL: http://doc.akka.io/docs/akka-http/current/scala/http/routing-dsl/overview.html
*
* @param config A set of properties needed to run an instance of the controller service
* @param instance if running in scale-out, a unique identifier for this instance in the group
* @param verbosity logging verbosity
* @param executionContext Scala runtime support for concurrent operations
*/
class Controller(val instance: ControllerInstanceId,
runtimes: Runtimes,
implicit val whiskConfig: WhiskConfig,
implicit val actorSystem: ActorSystem,
implicit val logging: Logging)
extends BasicRasService {
TransactionId.controller.mark(
this,
LoggingMarkers.CONTROLLER_STARTUP(instance.asString),
s"starting controller instance ${instance.asString}",
logLevel = InfoLevel)
/**
* A Route in Akka is technically a function taking a RequestContext as a parameter.
*
* The "~" Akka DSL operator composes two independent Routes, building a routing tree structure.
*
* @see http://doc.akka.io/docs/akka-http/current/scala/http/routing-dsl/routes.html#composing-routes
*/
override def routes(implicit transid: TransactionId): Route = {
super.routes ~ {
(pathEndOrSingleSlash & get) {
complete(info)
}
} ~ apiV1.routes ~ swagger.swaggerRoutes ~ internalInvokerHealth
}
// initialize datastores
private implicit val authStore = WhiskAuthStore.datastore()
private implicit val entityStore = WhiskEntityStore.datastore()
private implicit val cacheChangeNotification = Some(new CacheChangeNotification {
    val remoteCacheInvalidation = new RemoteCacheInvalidation(whiskConfig, "controller", instance)
    override def apply(k: CacheKey) = {
      remoteCacheInvalidation.invalidateWhiskActionMetaData(k)
      remoteCacheInvalidation.notifyOtherInstancesAboutInvalidation(k)
}
})
// initialize backend services
private implicit val loadBalancer =
SpiLoader.get[LoadBalancerProvider].instance(whiskConfig, instance)
logging.info(this, s"loadbalancer initialized: ${loadBalancer.getClass.getSimpleName}")(TransactionId.controller)
private implicit val entitlementProvider =
SpiLoader.get[EntitlementSpiProvider].instance(whiskConfig, loadBalancer, instance)
private implicit val activationIdFactory = new ActivationIdGenerator {}
private implicit val logStore = SpiLoader.get[LogStoreProvider].instance(actorSystem)
private implicit val activationStore =
SpiLoader.get[ActivationStoreProvider].instance(actorSystem, logging)
// register collections
Collection.initialize(entityStore)
/** The REST APIs. */
implicit val controllerInstance = instance
private val apiV1 = new RestAPIVersion(whiskConfig, "api", "v1")
private val swagger = new SwaggerDocs(Uri.Path.Empty, "infoswagger.json")
/**
* Handles GET /invokers - list of invokers
* /invokers/healthy/count - nr of healthy invokers
   *             /invokers/ready - 200 if the number of healthy invokers is at or above the expected value
   *                             - 500 if the number of healthy invokers is below the expected value
*
* @return JSON with details of invoker health or count of healthy invokers respectively.
*/
protected[controller] val internalInvokerHealth = {
implicit val executionContext = actorSystem.dispatcher
(pathPrefix("invokers") & get) {
pathEndOrSingleSlash {
complete {
loadBalancer
.invokerHealth()
.map(_.map(i => i.id.toString -> i.status.asString).toMap.toJson.asJsObject)
}
} ~ path("healthy" / "count") {
complete {
loadBalancer
.invokerHealth()
.map(_.count(_.status == InvokerState.Healthy).toJson)
}
} ~ path("ready") {
onSuccess(loadBalancer.invokerHealth()) { invokersHealth =>
val all = invokersHealth.size
val healthy = invokersHealth.count(_.status == InvokerState.Healthy)
val ready = Controller.readyState(all, healthy, Controller.readinessThreshold.getOrElse(1))
if (ready)
complete(JsObject("healthy" -> s"$healthy/$all".toJson))
else
complete(InternalServerError -> JsObject("unhealthy" -> s"${all - healthy}/$all".toJson))
}
}
}
}
// controller top level info
private val info = Controller.info(
whiskConfig,
TimeLimit.config,
MemoryLimit.config,
LogLimit.config,
runtimes,
List(apiV1.basepath()))
}
/**
* Singleton object provides a factory to create and start an instance of the Controller service.
*/
object Controller {
protected val protocol = loadConfigOrThrow[String]("whisk.controller.protocol")
protected val interface = loadConfigOrThrow[String]("whisk.controller.interface")
protected val readinessThreshold = loadConfig[Double]("whisk.controller.readiness-fraction")
val topicPrefix = loadConfigOrThrow[String](ConfigKeys.kafkaTopicsPrefix)
val userEventTopicPrefix = loadConfigOrThrow[String](ConfigKeys.kafkaTopicsUserEventPrefix)
// requiredProperties is a Map whose keys define properties that must be bound to
// a value, and whose values are default values. A null value in the Map means there is
// no default value specified, so it must appear in the properties file
def requiredProperties =
ExecManifest.requiredProperties ++
RestApiCommons.requiredProperties ++
SpiLoader.get[LoadBalancerProvider].requiredProperties ++
EntitlementProvider.requiredProperties
private def info(config: WhiskConfig,
timeLimit: TimeLimitConfig,
memLimit: MemoryLimitConfig,
logLimit: MemoryLimitConfig,
runtimes: Runtimes,
apis: List[String]) =
JsObject(
"description" -> "OpenWhisk".toJson,
"support" -> JsObject(
"github" -> "https://github.com/apache/openwhisk/issues".toJson,
"slack" -> "http://slack.openwhisk.org".toJson),
"api_paths" -> apis.toJson,
"limits" -> JsObject(
"actions_per_minute" -> config.actionInvokePerMinuteLimit.toInt.toJson,
"triggers_per_minute" -> config.triggerFirePerMinuteLimit.toInt.toJson,
"concurrent_actions" -> config.actionInvokeConcurrentLimit.toInt.toJson,
"sequence_length" -> config.actionSequenceLimit.toInt.toJson,
"min_action_duration" -> timeLimit.min.toMillis.toJson,
"max_action_duration" -> timeLimit.max.toMillis.toJson,
"min_action_memory" -> memLimit.min.toBytes.toJson,
"max_action_memory" -> memLimit.max.toBytes.toJson,
"min_action_logs" -> logLimit.min.toBytes.toJson,
"max_action_logs" -> logLimit.max.toBytes.toJson),
"runtimes" -> runtimes.toJson)
  def readyState(allInvokers: Int, healthyInvokers: Int, readinessThreshold: Double): Boolean = {
    // use floating-point division: integer division would only ever yield 0 or 1,
    // so any fractional threshold would behave like "all invokers must be healthy"
    if (allInvokers > 0) (healthyInvokers.toDouble / allInvokers) >= readinessThreshold else false
  }
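  // Worked example (illustrative): readyState(4, 3, 0.75) is true while
  // readyState(4, 2, 0.75) is false; with the fallback threshold of 1 (used
  // when whisk.controller.readiness-fraction is unset) every invoker must be
  // healthy.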
def main(args: Array[String]): Unit = {
implicit val actorSystem = ActorSystem("controller-actor-system")
implicit val logger = new AkkaLogging(akka.event.Logging.getLogger(actorSystem, this))
start(args)
}
def start(args: Array[String])(implicit actorSystem: ActorSystem, logger: Logging): Unit = {
ConfigMXBean.register()
Kamon.init()
// Prepare Kamon shutdown
CoordinatedShutdown(actorSystem).addTask(CoordinatedShutdown.PhaseActorSystemTerminate, "shutdownKamon") { () =>
logger.info(this, s"Shutting down Kamon with coordinated shutdown")
Kamon.stopModules().map(_ => Done)(Implicits.global)
}
// extract configuration data from the environment
val config = new WhiskConfig(requiredProperties)
val port = config.servicePort.toInt
    // if deploying multiple instances (scale out), the instance number must be passed as the first command-line argument
require(args.length >= 1, "controller instance required")
val instance = ControllerInstanceId(args(0))
def abort(message: String) = {
logger.error(this, message)
actorSystem.terminate()
Await.result(actorSystem.whenTerminated, 30.seconds)
sys.exit(1)
}
if (!config.isValid) {
abort("Bad configuration, cannot start.")
}
val msgProvider = SpiLoader.get[MessagingProvider]
Seq(
(topicPrefix + "completed" + instance.asString, "completed", Some(ActivationEntityLimit.MAX_ACTIVATION_LIMIT)),
(topicPrefix + "health", "health", None),
(topicPrefix + "cacheInvalidation", "cache-invalidation", None),
(userEventTopicPrefix + "events", "events", None)).foreach {
case (topic, topicConfigurationKey, maxMessageBytes) =>
if (msgProvider.ensureTopic(config, topic, topicConfigurationKey, maxMessageBytes).isFailure) {
abort(s"failure during msgProvider.ensureTopic for topic $topic")
}
}
ExecManifest.initialize(config) match {
case Success(_) =>
val controller = new Controller(instance, ExecManifest.runtimesManifest, config, actorSystem, logger)
val httpsConfig =
if (Controller.protocol == "https") Some(loadConfigOrThrow[HttpsConfig]("whisk.controller.https")) else None
BasicHttpService.startHttpService(controller.route, port, httpsConfig, interface)(actorSystem)
case Failure(t) =>
abort(s"Invalid runtimes manifest: $t")
}
}
}
|
style95/openwhisk
|
core/controller/src/main/scala/org/apache/openwhisk/core/controller/Controller.scala
|
Scala
|
apache-2.0
| 12,814
|
/*
* Copyright 2013 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
import org.typelevel.ci._
object AuthScheme {
val Basic = ci"Basic"
val Digest = ci"Digest"
val Bearer = ci"Bearer"
val OAuth = ci"OAuth"
}
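
// Illustrative usage sketch (added for this edit, not part of the original
// file): scheme tokens compare case-insensitively, so a raw "bearer" from an
// Authorization header matches AuthScheme.Bearer.
object AuthSchemeExample {
  def isBearer(rawScheme: String): Boolean =
    CIString(rawScheme) == AuthScheme.Bearer
}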
|
rossabaker/http4s
|
core/shared/src/main/scala/org/http4s/AuthScheme.scala
|
Scala
|
apache-2.0
| 764
|
package scala.meta.intellij
import com.intellij.openapi.project.Project
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScAnnotation
import scala.meta.trees.TreeConverter
class IDEAContext(project: =>Project) extends TreeConverter {
override def getCurrentProject = project
// annotations filtering isn't required in converter tests
override protected val annotationToSkip: ScAnnotation = null
}
|
loskutov/intellij-scala
|
src/scala/meta/intellij/IDEAContext.scala
|
Scala
|
apache-2.0
| 412
|
package com.airbnb.aerosolve.training
import com.airbnb.aerosolve.core.KDTreeNode
import com.airbnb.aerosolve.core.KDTreeNodeType
import com.airbnb.aerosolve.training.KDTree.KDTreeBuildOptions
import org.junit.Test
import org.slf4j.LoggerFactory
import org.junit.Assert.assertEquals
import org.junit.Assert.assertTrue
import scala.collection.mutable.ArrayBuffer
class KDTreeTest {
val log = LoggerFactory.getLogger("KDTreeTest")
@Test def buildTreeTest: Unit = {
val pts = ArrayBuffer[(Double, Double)]()
for (x <- -20 to 20) {
for (y <- 1 to 5) {
pts.append((x.toDouble, y.toDouble))
}
}
val options = KDTreeBuildOptions(maxTreeDepth = 16, minLeafCount = 1)
val tree = KDTree(options, pts.toArray)
val nodes = tree.nodes
log.info("Num nodes = %d".format(nodes.size))
// Since the x dimension is largest we expect the first node to be an xsplit
assertEquals(KDTreeNodeType.X_SPLIT, nodes(0).nodeType)
assertEquals(-1.81, nodes(0).splitValue, 0.1)
assertEquals(1, nodes(0).leftChild)
    assertEquals(2, nodes(0).rightChild)
    // Ensure every point is bounded in the box of the kdtree
for (pt <- pts) {
val res = tree.query(pt)
for (idx <- res) {
assert(pt._1 >= nodes(idx).minX)
assert(pt._1 <= nodes(idx).maxX)
assert(pt._2 >= nodes(idx).minY)
assert(pt._2 <= nodes(idx).maxY)
}
}
// Ensure all nodes are sensible
for (node <- nodes) {
assert(node.count > 0)
assert(node.minX <= node.maxX)
assert(node.minY <= node.maxY)
if (node.nodeType != KDTreeNodeType.LEAF) {
assert(node.leftChild >= 0 && node.leftChild < nodes.size)
assert(node.rightChild >= 0 && node.rightChild < nodes.size)
}
}
}
}
|
aglne/aerosolve
|
training/src/test/scala/com/airbnb/aerosolve/training/KDTreeTest.scala
|
Scala
|
apache-2.0
| 1,784
|
package org.bone.ircballoon
import org.bone.ircballoon.model._
import org.bone.ircballoon.actor.message._
import I18N.i18n._
import ImageUtil._
import org.eclipse.swt.widgets.{List => SWTList, _}
import org.eclipse.swt.layout._
import org.eclipse.swt.events._
import org.eclipse.swt.graphics._
import org.eclipse.swt.custom.StyledText
import org.eclipse.swt.custom.StackLayout
import org.eclipse.swt.custom.ScrolledComposite
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent._
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import org.eclipse.swt._
import scala.collection.mutable.ListBuffer
class VoteStatusWin(parent: Shell, candidates: List[String], duration: Int) extends SWTHelper
{
case class VoteBar(label: Label, bar: ProgressBar, votes: Label)
implicit val display = Display.getDefault
var timeRemaining: Int = duration * 60
var isVoting: Boolean = true
val shell = new Shell(parent, SWT.DIALOG_TRIM|SWT.RESIZE)
val candidateGroup = createGroup(shell, tr("Votes Status"), 3, 4)
val gridLayout = new GridLayout(4, false)
val voteBars: Vector[VoteBar] = createVoteBar(candidates)
val timeLabel: Label = createTimeLabel()
val resetTimeButton: Button = createResetTimeButton()
val stopVoteButton: Button = createStopVoteButton()
val closeButton: Button = createCloseButton()
def displayFinishWindow()
{
val thread = new Thread() {
override def run() {
runByThread {
val messageBox = new MessageBox(shell, SWT.OK|SWT.ICON_INFORMATION)
SoundUtils.playSound("/sound/finish.wav")
          messageBox.setMessage(tr("Vote finished!"))
messageBox.open()
}
}
}
thread.start()
}
def stopVote()
{
isVoting = false
timeRemaining = 0
timeLabel.setText(tr("Vote finished"))
resetTimeButton.setEnabled(false)
stopVoteButton.setEnabled(false)
closeButton.setEnabled(true)
displayFinishWindow()
}
def formatTime(timeInSeconds: Int): String =
{
def addPrefixZero(value: Int) = if (value < 10) "0" + value.toString else value.toString
val seconds = timeInSeconds % 60
val minutes = timeInSeconds / 60
addPrefixZero(minutes) + ":" + addPrefixZero(seconds)
}
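  // Worked example (illustrative): formatTime(125) yields "02:05", since both
  // the minute and second components are zero-padded to two digits.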
def createTimeLabel(): Label =
{
val label = new Label(shell, SWT.LEFT)
label.setText(tr("Time Remaining: %s") format(formatTime(duration * 60)))
label
}
def createCloseButton(): Button =
{
val data = new GridData(SWT.RIGHT, SWT.FILL, false, false)
val button = new Button(shell, SWT.PUSH)
button.setLayoutData(data)
button.setText(tr("Close"))
button.setImage(MyIcon.close)
button.addSelectionListener { e: SelectionEvent =>
shell.dispose()
}
button.setEnabled(false)
button
}
def createStopVoteButton(): Button =
{
val data = new GridData(SWT.RIGHT, SWT.FILL, false, false)
val button = new Button(shell, SWT.PUSH)
button.setLayoutData(data)
button.setText(tr("Stop Vote"))
button.addSelectionListener { e: SelectionEvent =>
MainWindow.controller ! StopVoting
}
button
}
def createResetTimeButton(): Button =
{
val data = new GridData(SWT.RIGHT, SWT.FILL, true, false)
val button = new Button(shell, SWT.PUSH)
button.setLayoutData(data)
button.setText(tr("Reset Time"))
button.addSelectionListener { e: SelectionEvent =>
MainWindow.controller ! ResetTime
timeRemaining = duration * 60
timeLabel.setText(tr("Time Remaining: %s") format(formatTime(timeRemaining)))
}
button.setEnabled(true)
button
}
  def createVoteBar(candidates: List[String]): Vector[VoteBar] =
{
candidates.zipWithIndex.map { case (option, i) =>
val barData = new GridData(SWT.FILL, SWT.NONE, true, false)
val label = new Label(candidateGroup, SWT.LEFT)
val bar = new ProgressBar(candidateGroup, SWT.SMOOTH|SWT.HORIZONTAL)
val votes = new Label(candidateGroup, SWT.LEFT)
label.setText(s"$i. $option")
bar.setSelection(0)
bar.setMaximum(100)
bar.setLayoutData(barData)
votes.setText(tr("%s votes") format(0))
      VoteBar(label, bar, votes)
}.toVector
}
def decreaseTimeLabel()
{
if (isVoting && timeRemaining > 0) {
display.timerExec(1000, new Runnable() {
override def run() {
if (!timeLabel.isDisposed && timeRemaining > 0 && isVoting) {
timeRemaining -= 1
timeLabel.setText(tr("Time Remaining: %s") format(formatTime(timeRemaining)))
decreaseTimeLabel()
}
}
})
}
}
def updateFinalVote(voteStatus: List[(String, Int)])
{
runByThread {
updateVoteBar(voteStatus)
stopVote()
}
}
def updateVoteBar(voteStatus: List[(String, Int)])
{
runByThread {
val totalVotes = voteStatus.map(_._2).sum.toDouble
for (((voteTo, votes), i) <- voteStatus.zipWithIndex) {
val percentage = ((votes / totalVotes) * 100).toInt
voteBars(i).bar.setSelection(percentage)
voteBars(i).votes.setText(tr("%s votes") format(votes))
}
}
}
def open()
{
shell.setLayout(gridLayout)
shell.setText(tr("Vote Status"))
shell.setSize(600, 400)
shell.open()
decreaseTimeLabel()
}
}
|
brianhsu/IRCBalloon
|
src/main/scala/ui/VoteStatusWin.scala
|
Scala
|
gpl-3.0
| 5,354
|
package com.duffmanstudios.memory
/**
 * Translates card numbers selected by the player into (column, row) coordinates
* on the game board. This was added as it is more user friendly than selecting
* a card by coordinates.
*
* @author Iain Duff
*/
object CardToBoardTranslator {
def translateCardNumberIntoBoardGridLocation(cardNumber: Int, numCardsInGame: Int) = {
checkNumberValid(cardNumber, numCardsInGame)
//subtract 1, as board numbers count from 0 but users enter card numbers from 1, for ease of use
val cardNumberFromZero = cardNumber - 1
val rowNumber = cardNumberFromZero / 4
val columnNumber = cardNumberFromZero % 4
(columnNumber, rowNumber)
}
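  // Worked example (illustrative): on a 16-card board laid out 4 cards per
  // row, card number 6 becomes zero-based index 5, giving row 5 / 4 = 1 and
  // column 5 % 4 = 1, so the method returns (1, 1).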
private def checkNumberValid(cardNumber: Int, numCardsInGame: Int) = {
if (cardNumber < 1) {
throw new IllegalArgumentException("The card number must be 1 or above (value entered=" + cardNumber + ")")
} else if (cardNumber > numCardsInGame) {
      throw new IllegalArgumentException("The card number must not exceed the total number of cards (value entered=" +
        cardNumber + "; number of cards=" + numCardsInGame + ")")
}
}
}
|
iainduff91/memory-backend
|
src/main/java/com/duffmanstudios/memory/CardToBoardTranslator.scala
|
Scala
|
gpl-2.0
| 1,156
|
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.intg
import akka.event.slf4j.SLF4JLogging
import java.io.File
import akka.testkit._
import java.io.InputStream
import java.util.Scanner
import org.ensime.api._
import org.ensime.core._
import org.ensime.fixture._
import org.ensime.util._
import org.ensime.util.file._
import org.scalatest.Matchers
import scala.concurrent.{ Await, Future, Promise }
import scala.util.{ Properties, Try }
import scala.concurrent.duration._
// must be refreshing as the tests don't clean up after themselves properly
class DebugTest extends EnsimeSpec
with IsolatedEnsimeConfigFixture
with IsolatedTestKitFixture
with IsolatedProjectFixture
with DebugTestUtils {
val original = EnsimeConfigFixture.DebugTestProject.copy(
javaLibs = Nil // no need to index the JRE
)
"Debug - stepping" should "be able to step over/in/out" taggedAs Debugger in withEnsimeConfig { implicit config =>
withTestKit { implicit testkit =>
withProject { (project, asyncHelper) =>
implicit val p = (project, asyncHelper)
// Start on line 8, which is first line in main method
withDebugSession(
"stepping.BasicStepping",
"stepping/BasicStepping.scala",
8
) { (threadId, breakpointsFile) =>
import testkit._
// Should be able to step over a method call
project ! DebugNextReq(threadId)
expectMsg(remaining, "Failed to step over line!", TrueResponse)
// Should be able to step into a method call
project ! DebugStepReq(threadId)
expectMsg(remaining, "Failed to step into method call!", TrueResponse)
// Should be able to step out of a method call
project ! DebugStepOutReq(threadId)
expectMsg(remaining, "Failed to step out of method call!", TrueResponse)
}
}
}
}
"Breakpoints" should "trigger/continue" taggedAs Debugger in withEnsimeConfig { implicit config =>
withTestKit { implicit testkit =>
withProject { (project, asyncHelper) =>
implicit val p = (project, asyncHelper)
withDebugSession(
"breakpoints.Breakpoints",
"breakpoints/Breakpoints.scala",
32
) { (threadId, breakpointsFile) =>
import testkit._
// NOTE: Can encounter scala/Predef.scala if picking stack trace
// at arbitrary point
project ! DebugBacktraceReq(threadId, 0, 3)
expectMsgType[DebugBacktrace] should matchPattern {
case DebugBacktrace(List(
DebugStackFrame(0, List(), 0, "breakpoints.Breakpoints", "mainTest",
LineSourcePosition(`breakpointsFile`, 32), _),
DebugStackFrame(1, List(
DebugStackLocal(0, "args", "Array(length = 0)[<EMPTY>]", "java.lang.String[]")
), 1, "breakpoints.Breakpoints$", "main",
LineSourcePosition(`breakpointsFile`, 42), _),
DebugStackFrame(2, List(), 1, "breakpoints.Breakpoints", "main",
LineSourcePosition(`breakpointsFile`, _), _)
), `threadId`, "main") =>
}
project ! DebugSetBreakReq(breakpointsFile, 11)
expectMsg(TrueResponse)
project ! DebugSetBreakReq(breakpointsFile, 13)
expectMsg(TrueResponse)
project ! DebugContinueReq(threadId)
expectMsg(TrueResponse)
asyncHelper.expectMsg(DebugBreakEvent(threadId, "main", breakpointsFile, 11))
project ! DebugContinueReq(threadId)
expectMsg(TrueResponse)
asyncHelper.expectMsg(DebugBreakEvent(threadId, "main", breakpointsFile, 13))
project ! DebugClearBreakReq(breakpointsFile, 11)
expectMsg(TrueResponse)
project ! DebugContinueReq(threadId)
expectMsg(TrueResponse)
asyncHelper.expectMsg(DebugBreakEvent(threadId, "main", breakpointsFile, 13))
project ! DebugSetBreakReq(breakpointsFile, 11)
expectMsg(TrueResponse)
project ! DebugClearBreakReq(breakpointsFile, 13)
expectMsg(TrueResponse)
project ! DebugContinueReq(threadId)
expectMsg(TrueResponse)
asyncHelper.expectMsg(DebugBreakEvent(threadId, "main", breakpointsFile, 11))
project ! DebugContinueReq(threadId)
expectMsg(TrueResponse)
asyncHelper.expectMsg(DebugBreakEvent(threadId, "main", breakpointsFile, 11))
project ! DebugContinueReq(threadId)
expectMsg(TrueResponse)
}
}
}
}
it should "list/clear" taggedAs Debugger in withEnsimeConfig { implicit config =>
withTestKit { implicit testkit =>
withProject { (project, asyncHelper) =>
implicit val p = (project, asyncHelper)
withDebugSession(
"breakpoints.Breakpoints",
"breakpoints/Breakpoints.scala",
32
) {
case (threadId, breakpointsFile) =>
import testkit._
project ! DebugListBreakpointsReq
expectMsgType[BreakpointList] should matchPattern {
case BreakpointList(Nil, Nil) =>
}
// break in main
project ! DebugSetBreakReq(breakpointsFile, 11)
expectMsg(TrueResponse)
project ! DebugSetBreakReq(breakpointsFile, 13)
expectMsg(TrueResponse)
// breakpoints should now be active
project ! DebugListBreakpointsReq
inside(expectMsgType[BreakpointList]) {
case BreakpointList(activeBreakpoints, pendingBreakpoints) =>
activeBreakpoints should contain theSameElementsAs Set(
Breakpoint(breakpointsFile, 11), Breakpoint(breakpointsFile, 13)
)
pendingBreakpoints shouldBe empty
}
// check clear works again
project ! DebugClearAllBreaksReq
expectMsg(TrueResponse)
project ! DebugListBreakpointsReq
expectMsgType[BreakpointList] should matchPattern {
case BreakpointList(Nil, Nil) =>
}
}
}
}
}
"Debug variables" should "inspect variables" taggedAs Debugger in withEnsimeConfig { implicit config =>
withTestKit { implicit testkit =>
withProject { (project, asyncHelper) =>
implicit val p = (project, asyncHelper)
withDebugSession(
"variables.ReadVariables",
"variables/ReadVariables.scala",
21
) { (threadId, variablesFile) =>
// boolean local
getVariableValue(threadId, "a") should matchPattern {
case DebugPrimitiveValue("true", "boolean") =>
}
// char local
getVariableValue(threadId, "b") should matchPattern {
case DebugPrimitiveValue("'c'", "char") =>
}
// short local
getVariableValue(threadId, "c") should matchPattern {
case DebugPrimitiveValue("3", "short") =>
}
// int local
getVariableValue(threadId, "d") should matchPattern {
case DebugPrimitiveValue("4", "int") =>
}
// long local
getVariableValue(threadId, "e") should matchPattern {
case DebugPrimitiveValue("5", "long") =>
}
// float local
getVariableValue(threadId, "f") should matchPattern {
case DebugPrimitiveValue("1.0", "float") =>
}
// double local
getVariableValue(threadId, "g") should matchPattern {
case DebugPrimitiveValue("2.0", "double") =>
}
// String local
inside(getVariableValue(threadId, "h")) {
case DebugStringInstance("\\"test\\"", debugFields, "java.lang.String", _) =>
exactly(1, debugFields) should matchPattern {
case DebugClassField(_, "value", "char[]", "Array(length = 4)['t','e','s',...]") =>
}
}
// primitive array local
getVariableValue(threadId, "i") should matchPattern {
case DebugArrayInstance(3, "int[]", "int", _) =>
}
// type local
inside(getVariableValue(threadId, "j")) {
case DebugObjectInstance(summary, debugFields, "scala.collection.immutable.$colon$colon", _) =>
summary should startWith("Instance of scala.collection.immutable.$colon$colon")
exactly(1, debugFields) should matchPattern {
case DebugClassField(_, head, "java.lang.Object", summary) if (
(head == "head" || head == "scala$collection$immutable$$colon$colon$$hd") &&
summary.startsWith("Instance of java.lang.Integer")
) =>
}
}
// object array local
getVariableValue(threadId, "k") should matchPattern {
case DebugArrayInstance(3, "java.lang.Object[]", "java.lang.Object", _) =>
}
}
}
}
}
they should "be able to convert variables to string representations" taggedAs Debugger in withEnsimeConfig { implicit config =>
withTestKit { implicit testkit =>
withProject { (project, asyncHelper) =>
implicit val p = (project, asyncHelper)
withDebugSession(
"variables.ReadVariables",
"variables/ReadVariables.scala",
21
) { (threadId, variablesFile) =>
// boolean local
getVariableAsString(threadId, "a").text should be("true")
// char local
getVariableAsString(threadId, "b").text should be("'c'")
// short local
getVariableAsString(threadId, "c").text should be("3")
// int local
getVariableAsString(threadId, "d").text should be("4")
// long local
getVariableAsString(threadId, "e").text should be("5")
// float local
getVariableAsString(threadId, "f").text should be("1.0")
// double local
getVariableAsString(threadId, "g").text should be("2.0")
// String local
getVariableAsString(threadId, "h").text should be("\\"test\\"")
// primitive array local
getVariableAsString(threadId, "i").text should be("Array(length = 3)[1,2,3]")
// type local
getVariableAsString(threadId, "j").text should
startWith("Instance of scala.collection.immutable.$colon$colon")
// object array local
val objArrayText = getVariableAsString(threadId, "k").text
objArrayText should startWith("Array(length = 3)")
objArrayText should include("Instance of variables.One")
objArrayText should include("Instance of java.lang.Boolean")
objArrayText should include("Instance of java.lang.Integer")
}
}
}
}
they should "set variable values" taggedAs Debugger in withEnsimeConfig { implicit config =>
withTestKit { implicit testkit =>
withProject { (project, asyncHelper) =>
implicit val p = (project, asyncHelper)
withDebugSession(
"variables.WriteVariables",
"variables/WriteVariables.scala",
21
) { (threadId, variablesFile) =>
import testkit._
/* boolean local */ {
val n = "a"
project ! DebugLocateNameReq(threadId, n)
val location = testkit.expectMsgType[DebugStackSlot]
project ! DebugSetValueReq(location, "false")
expectMsg(remaining, s"Unable to set boolean for '$n'!", TrueResponse)
getVariableValue(threadId, n) should matchPattern {
case DebugPrimitiveValue("false", "boolean") =>
}
}
/* char local */ {
val n = "b"
project ! DebugLocateNameReq(threadId, n)
val location = testkit.expectMsgType[DebugStackSlot]
project ! DebugSetValueReq(location, "a")
expectMsg(remaining, s"Unable to set char for '$n'!", TrueResponse)
getVariableValue(threadId, n) should matchPattern {
case DebugPrimitiveValue("'a'", "char") =>
}
}
/* short local */ {
val n = "c"
project ! DebugLocateNameReq(threadId, n)
val location = testkit.expectMsgType[DebugStackSlot]
project ! DebugSetValueReq(location, "99")
expectMsg(remaining, s"Unable to set short for '$n'!", TrueResponse)
getVariableValue(threadId, n) should matchPattern {
case DebugPrimitiveValue("99", "short") =>
}
}
/* int local */ {
val n = "d"
project ! DebugLocateNameReq(threadId, n)
val location = testkit.expectMsgType[DebugStackSlot]
project ! DebugSetValueReq(location, "99")
expectMsg(remaining, s"Unable to set int for '$n'!", TrueResponse)
getVariableValue(threadId, n) should matchPattern {
case DebugPrimitiveValue("99", "int") =>
}
}
/* long local */ {
val n = "e"
project ! DebugLocateNameReq(threadId, n)
val location = testkit.expectMsgType[DebugStackSlot]
project ! DebugSetValueReq(location, "99")
expectMsg(remaining, s"Unable to set long for '$n'!", TrueResponse)
getVariableValue(threadId, n) should matchPattern {
case DebugPrimitiveValue("99", "long") =>
}
}
/* float local */ {
val n = "f"
project ! DebugLocateNameReq(threadId, n)
val location = testkit.expectMsgType[DebugStackSlot]
project ! DebugSetValueReq(location, "99.5")
expectMsg(remaining, s"Unable to set float for '$n'!", TrueResponse)
getVariableValue(threadId, n) should matchPattern {
case DebugPrimitiveValue("99.5", "float") =>
}
}
/* double local */ {
val n = "g"
project ! DebugLocateNameReq(threadId, n)
val location = testkit.expectMsgType[DebugStackSlot]
project ! DebugSetValueReq(location, "99.5")
expectMsg(remaining, s"Unable to set double for '$n'!", TrueResponse)
getVariableValue(threadId, n) should matchPattern {
case DebugPrimitiveValue("99.5", "double") =>
}
}
/* string local */ {
val n = "h"
project ! DebugLocateNameReq(threadId, n)
val location = testkit.expectMsgType[DebugStackSlot]
project ! DebugSetValueReq(location, "\\"fish\\"")
expectMsg(remaining, s"Unable to set string for '$n'!", TrueResponse)
getVariableValue(threadId, n) should matchPattern {
case DebugStringInstance("\\"fish\\"", _, "java.lang.String", _) =>
}
}
}
}
}
}
"Debug backtrace" should "generate backtrace" taggedAs Debugger in withEnsimeConfig { implicit config =>
withTestKit { implicit testkit =>
withProject { (project, asyncHelper) =>
implicit val p = (project, asyncHelper)
withDebugSession(
"debug.Backtrace",
"debug/Backtrace.scala",
13
) { (threadId, f) =>
import testkit._
project ! DebugBacktraceReq(threadId, 0, 20)
val backTrace = expectMsgType[DebugBacktrace]
// just some sanity assertions
assert(backTrace.frames.forall(_.className.startsWith("debug.Backtrace")))
assert(backTrace.frames.forall(_.pcLocation.file.toString.endsWith("Backtrace.scala")))
}
}
}
}
}
trait DebugTestUtils {
this: ProjectFixture with Matchers with EnsimeConfigFixture =>
/**
* Launches a new JVM using the given class name as the entrypoint. Pauses
* the JVM at the specified source path and line.
*
* @param className containing the main method
* @param fileName to place the breakpoint
* @param breakLine where to start the session in the fileName
* @param func The test code to execute (gives the debug thread id of the main thread and file handle)
*/
def withDebugSession(
className: String,
fileName: String,
breakLine: Int
)(
func: (DebugThreadId, File) => Any
)(
implicit
config: EnsimeConfig,
testkit: TestKitFix,
// don't take an implicit TestActorRef or it steals the implicit sender
p: (TestActorRef[Project], TestProbe)
): Any = {
import testkit._
val resolvedFile = scalaMain(config) / fileName
val project = p._1
val asyncHelper = p._2
project ! DebugSetBreakReq(resolvedFile, breakLine)
expectMsg(TrueResponse)
val vm = VMStarter(config, className)
try {
Await.result(vm._4, (5 seconds).dilated)
project ! DebugAttachReq(vm._2, vm._3.toString)
expectMsg(DebugVmSuccess())
asyncHelper.expectMsg(DebugVMStartEvent)
val gotOnStartup = asyncHelper.expectMsgType[EnsimeServerMessage]
// weird! we sometimes see a duplicate break event instantly, not really expected
val additionalOnStartup = Try(asyncHelper.expectMsgType[EnsimeServerMessage](1 second)).toOption.toSeq
// but it doesn't always come through
val allEvents = gotOnStartup +: additionalOnStartup
val threadId = allEvents.flatMap {
case DebugBreakEvent(foundThreadId, "main", `resolvedFile`, `breakLine`) =>
List(foundThreadId)
case _ =>
Nil
}.headOption.getOrElse(fail("Cannot work out main threadId"))
project ! DebugClearBreakReq(resolvedFile, breakLine)
expectMsg(TrueResponse)
func(threadId, resolvedFile)
} finally {
// Attempt graceful shutdown (disposes of JVM, clearing all requests
// to let it finish naturally)
project ! DebugStopReq
// If shutdown fails, attempt to forcefully kill the process
Try(expectMsgPF() { case TrueResponse => })
.failed.foreach(_ => vm._1.destroy())
}
}
def getVariableValue(
threadId: DebugThreadId,
variableName: String
)(implicit
testkit: TestKitFix,
p: (TestActorRef[Project], TestProbe)): DebugValue = {
import testkit._
val project = p._1
project ! DebugLocateNameReq(threadId, variableName)
val vLoc = expectMsgType[DebugLocation]
project ! DebugValueReq(vLoc)
expectMsgType[DebugValue]
}
def getVariableAsString(
threadId: DebugThreadId,
variableName: String
)(implicit
testkit: TestKitFix,
p: (TestActorRef[Project], TestProbe)): StringResponse = {
import testkit._
val project = p._1
project ! DebugLocateNameReq(threadId, variableName)
val vLoc = expectMsgType[DebugLocation]
project ! DebugToStringReq(threadId, vLoc)
expectMsgType[StringResponse]
}
def checkTopStackFrame(threadId: DebugThreadId, className: String, method: String, line: Int)(implicit testkit: TestKitFix, p: (TestActorRef[Project], TestProbe)): Unit = {
import testkit._
val project = p._1
project ! DebugBacktraceReq(threadId, 0, 1)
expectMsgType[DebugBacktrace] should matchPattern {
case DebugBacktrace(List(DebugStackFrame(0, _, 1, `className`, `method`,
LineSourcePosition(_, `line`), _)),
`threadId`, "main") =>
}
}
}
// only for test because only the build tool really knows how to launch the JVM
object VMStarter extends SLF4JLogging {
def logLines(src: InputStream): Future[Unit] = {
val promise = Promise[Unit]()
new Thread(new Runnable() {
override def run(): Unit = {
val sc = new Scanner(src)
while (sc.hasNextLine) {
if (!promise.isCompleted) promise.trySuccess(())
log.info("DEBUGGING_PROCESS:" + sc.nextLine())
}
}
}).start()
promise.future
}
def java: String =
if (Properties.isWin) Properties.javaHome + """\\bin\\javaw.exe"""
else Properties.javaHome + "/bin/java"
def apply(config: EnsimeConfig, clazz: String): (Process, String, Int, Future[Unit]) = {
import collection.JavaConverters._
// would be nice to have ephemeral debug ports
val port = 5000 + scala.util.Random.nextInt(1000)
val classpath = (config.compileClasspath ++ config.targetClasspath).mkString(File.pathSeparator)
val args = Seq(
java,
"-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=" + port,
"-Xms32m", "-Xmx64m",
"-classpath", classpath,
clazz
)
val process = new ProcessBuilder(
args.asJava
).redirectErrorStream(true).start()
val logging = logLines(process.getInputStream)
(process, "127.0.0.1", port, logging)
}
}
|
d1egoaz/ensime-sbt
|
src/sbt-test/sbt-ensime/ensime-server/core/src/it/scala/org/ensime/intg/DebugTest.scala
|
Scala
|
apache-2.0
| 21,511
|
package com.sksamuel.elastic4s.search.queries
import com.sksamuel.elastic4s.requests.common.Operator
import com.sksamuel.elastic4s.testkit.DockerTests
import com.sksamuel.elastic4s.{ElasticDsl, Indexable}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scala.util.Try
class CombinedFieldsQueryTest extends AnyWordSpec with Matchers with DockerTests with ElasticDsl {
case class Game(name: String, alternatives: String*)
implicit object GameIndexable extends Indexable[Game] {
def quote(value: String): String = s""""${value}""""
override def json(t: Game): String = s""" { "name": "${t.name}", "alternatives": [ ${t.alternatives.map(quote).mkString(",")} ] } """
}
Try {
client.execute {
ElasticDsl.deleteIndex("boardgames")
}.await
}
client.execute {
bulk(
indexInto("boardgames") source Game("Imperial Settlers"),
indexInto("boardgames") source Game("Imperial Settlers - Empires of the North"),
indexInto("boardgames") source Game("Catan", "Settlers of Catan"),
indexInto("boardgames") source Game("Imperial"),
indexInto("boardgames") source Game("Star Wars: Imperial Assault")
).refreshImmediately
}.await
"combined fields query" should {
"perform query" in {
val resp = client.execute {
search("boardgames") query {
combinedFieldsQuery("imperial settlers", Seq("name", "alternatives"))
}
}.await.result
resp.totalHits shouldBe 5
}
"support and operator" in {
val resp = client.execute {
search("boardgames") query {
combinedFieldsQuery("imperial settlers", Seq("name", "alternatives")).operator(Operator.AND)
}
}.await.result
resp.totalHits shouldBe 2
}
"support minimumShouldMatch" in {
val resp = client.execute {
search("boardgames") query {
combinedFieldsQuery("imperial wars of the north", Seq("name", "alternatives")).minimumShouldMatch("2")
}
}.await.result
resp.totalHits shouldBe 2
}
}
}
|
sksamuel/elastic4s
|
elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/search/queries/CombinedFieldsQueryTest.scala
|
Scala
|
apache-2.0
| 2,092
|
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.registry.impl
import java.util.Collections
import java.util.Optional
import java.util.concurrent.ExecutionException
import akka.actor.ActorRef
import akka.actor.ActorSystem
import akka.actor.Props
import akka.pattern.ask
import com.lightbend.lagom.javadsl.api.ServiceAcl
import com.lightbend.lagom.javadsl.api.transport.Method
import com.lightbend.lagom.javadsl.api.transport.NotFound
import org.scalatest.Matchers
import org.scalatest.WordSpecLike
import akka.NotUsed
import java.util.concurrent.TimeUnit
import java.net.URI
import akka.util.Timeout
import com.lightbend.lagom.internal.javadsl.registry.RegisteredService
import com.lightbend.lagom.internal.javadsl.registry.ServiceRegistry
import com.lightbend.lagom.internal.javadsl.registry.ServiceRegistryService
import scala.concurrent.Await
import scala.concurrent.duration._
class ServiceRegistryImplSpec extends WordSpecLike with Matchers {
private val testTimeoutInSeconds = 5
  private implicit val testTimeout = Timeout(testTimeoutInSeconds.seconds)
  "A service registry" should {
    "allow registering a service" in withServiceRegistry() { registry =>
val expectedUrl = new URI("http://localhost:9000")
val serviceName = "fooservice"
registry.register(serviceName).invoke(ServiceRegistryService.of(expectedUrl, Collections.emptyList[ServiceAcl]))
val registeredUrl = registry
.lookup(serviceName, Optional.empty())
.invoke(NotUsed)
.toCompletableFuture()
.get(
testTimeoutInSeconds,
TimeUnit.SECONDS
)
assertResult(expectedUrl)(registeredUrl)
    }
    "allow registering the same service twice (idempotent)" in withServiceRegistry() { registry =>
val expectedUrl = new URI("http://localhost:9000")
val serviceName = "fooservice"
registry
.register(serviceName)
.invoke(ServiceRegistryService.of(expectedUrl, Collections.emptyList[ServiceAcl]))
.toCompletableFuture()
.get(testTimeoutInSeconds, TimeUnit.SECONDS)
registry
.register(serviceName)
.invoke(ServiceRegistryService.of(expectedUrl, Collections.emptyList[ServiceAcl]))
.toCompletableFuture()
.get(testTimeoutInSeconds, TimeUnit.SECONDS)
val registeredUrl = registry
.lookup(serviceName, Optional.empty())
.invoke(NotUsed)
.toCompletableFuture()
.get(
testTimeoutInSeconds,
TimeUnit.SECONDS
)
assertResult(expectedUrl)(registeredUrl)
}
"throw NotFound for services that aren't registered" in withServiceRegistry() { registry =>
val ee = the[ExecutionException] thrownBy registry
.lookup("fooservice", Optional.empty())
.invoke(NotUsed)
.toCompletableFuture
.get(
testTimeoutInSeconds,
TimeUnit.SECONDS
)
ee.getCause shouldBe a[NotFound]
    }
    "disallow registering a different endpoint for the same name" in withServiceRegistry() { registry =>
val url1 = new URI("http://localhost:9000")
val url2 = new URI("http://localhost:9001")
val serviceName = "fooservice"
registry
.register(serviceName)
.invoke(ServiceRegistryService.of(url1, Collections.emptyList[ServiceAcl]))
.toCompletableFuture
.get(testTimeoutInSeconds, TimeUnit.SECONDS)
intercept[ExecutionException] {
registry
.register(serviceName)
.invoke(ServiceRegistryService.of(url2, Collections.emptyList[ServiceAcl]))
.toCompletableFuture
.get(testTimeoutInSeconds, TimeUnit.SECONDS)
}
    }
    "allow retrieving the full list of registered services" in {
val url = new URI("http://localhost:9000")
val name = "fooservice"
val service = ServiceRegistryService.of(url, Collections.emptyList[ServiceAcl])
val registeredService = Map(name -> service)
val expectedRegisteredServices: List[RegisteredService] = List(
RegisteredService.of(name, service.uris().get(0), Optional.empty()),
RegisteredService.of(name, service.uris().get(0), Optional.of("http"))
)
// SUT
withServiceRegistry(registeredService) { registry =>
val registered =
registry.registeredServices().invoke().toCompletableFuture().get(testTimeoutInSeconds, TimeUnit.SECONDS)
// List(RegisteredService{name=fooservice, url=http://localhost:9000})
// List(RegisteredService{name=fooservice, url=http://localhost:9000}, RegisteredService{name=fooservice, url=http://localhost:9000})
import scala.collection.JavaConverters._
assertResult(expectedRegisteredServices)(registered.asScala.toList)
}
}
"default to well-known port for http URLs if no port number provided" ignore withServiceRegistryActor() { actor =>
val registry = new ServiceRegistryImpl(actor)
registry
.register("fooservice")
.invoke(
ServiceRegistryService.of(
URI.create("http://localhost"),
Collections.singletonList(new ServiceAcl(Optional.of(Method.GET), Optional.of("/")))
)
)
.toCompletableFuture
.get(testTimeoutInSeconds, TimeUnit.SECONDS)
Await.result(actor ? ServiceRegistryActor.Route("GET", "/", None), testTimeoutInSeconds.seconds) match {
case ServiceRegistryActor.Found(address) =>
address.getHost should ===("localhost")
address.getPort should ===(80)
}
}
"default to well-known port for https URLs if no port number provided" ignore withServiceRegistryActor() { actor =>
val registry = new ServiceRegistryImpl(actor)
registry
.register("fooservice")
.invoke(
ServiceRegistryService.of(
URI.create("https://localhost"),
Collections.singletonList(new ServiceAcl(Optional.of(Method.GET), Optional.of("/")))
)
)
.toCompletableFuture
.get(testTimeoutInSeconds, TimeUnit.SECONDS)
Await.result(actor ? ServiceRegistryActor.Route("GET", "/", None), testTimeoutInSeconds.seconds) match {
case ServiceRegistryActor.Found(address) =>
address.getHost should ===("localhost")
address.getPort should ===(443)
}
}
"be able to register URLs that have no port and no ACLs" ignore withServiceRegistry() { registry =>
registry
.register("fooservice")
.invoke(ServiceRegistryService.of(URI.create("tcp://localhost"), Collections.emptyList[ServiceAcl]))
.toCompletableFuture
.get(testTimeoutInSeconds, TimeUnit.SECONDS)
val registeredUrl = registry
.lookup("fooservice", Optional.empty())
.invoke(NotUsed)
.toCompletableFuture
.get(testTimeoutInSeconds, TimeUnit.SECONDS)
registeredUrl should ===(URI.create("tcp://localhost"))
}
def withServiceRegistry[T](
registeredServices: Map[String, ServiceRegistryService] = Map.empty
)(body: ServiceRegistry => T): T = {
withServiceRegistryActor(registeredServices) { actor =>
      body(new ServiceRegistryImpl(actor))
}
}
def withServiceRegistryActor[T](
registeredServices: Map[String, ServiceRegistryService] = Map.empty
)(body: ActorRef => T): T = {
val system = ActorSystem()
try {
val actor = system.actorOf(Props(new ServiceRegistryActor(new UnmanagedServices(registeredServices))))
body(actor)
} finally {
system.terminate()
}
}
}
}
|
ignasi35/lagom
|
dev/service-registry/service-locator/src/test/scala/com/lightbend/lagom/registry/impl/ServiceRegistryImplSpec.scala
|
Scala
|
apache-2.0
| 7,758
|
/*
* Copyright (c) 2016. Fengguo (Hugo) Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.cit.intellij.jawa.hierarchy
import com.intellij.ide.hierarchy.call.CallHierarchyNodeDescriptor
import com.intellij.ide.hierarchy.{HierarchyNodeDescriptor, HierarchyTreeStructure}
import com.intellij.openapi.project.Project
import com.intellij.psi.search.searches.OverridingMethodsSearch
import com.intellij.psi.{PsiElement, PsiMethod, _}
import com.intellij.util.ArrayUtil
import org.argus.cit.intellij.jawa.lang.psi.api.base.JawaReference
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/**
* @author <a href="mailto:fgwei521@gmail.com">Fengguo Wei</a>
*/
class JawaCalleeMethodsTreeStructure(project: Project, method: PsiMethod, myScopeType: String)
extends HierarchyTreeStructure(project, new CallHierarchyNodeDescriptor(project, null, method, true, false)) {
protected final def buildChildren(descriptor: HierarchyNodeDescriptor): Array[AnyRef] = {
val enclosingElement: PsiMember = descriptor.asInstanceOf[CallHierarchyNodeDescriptor].getEnclosingElement
val method: PsiMethod = enclosingElement match {
case method: PsiMethod => method
case _ => return ArrayUtil.EMPTY_OBJECT_ARRAY
}
val methods: ArrayBuffer[PsiMethod] = new ArrayBuffer[PsiMethod]
val body = method.getBody
JawaCalleeMethodsTreeStructure.visitor(body, methods)
val baseMethod: PsiMethod = getBaseDescriptor.asInstanceOf[CallHierarchyNodeDescriptor].getTargetElement.asInstanceOf[PsiMethod]
val baseClass: PsiClass = baseMethod.getContainingClass
val methodToDescriptorMap: mutable.HashMap[PsiMethod, CallHierarchyNodeDescriptor] =
new mutable.HashMap[PsiMethod, CallHierarchyNodeDescriptor]
val result: ArrayBuffer[CallHierarchyNodeDescriptor] = new ArrayBuffer[CallHierarchyNodeDescriptor]
for (calledMethod <- methods if isInScope(baseClass, calledMethod, myScopeType)) {
methodToDescriptorMap.get(calledMethod) match {
case Some(d) => d.incrementUsageCount()
case _ =>
val d = new CallHierarchyNodeDescriptor(myProject, descriptor, calledMethod, false, false)
methodToDescriptorMap.put(calledMethod, d)
result += d
}
}
val overridingMethods: Array[PsiMethod] =
OverridingMethodsSearch.search(method, method.getUseScope, true).toArray(PsiMethod.EMPTY_ARRAY)
for (overridingMethod <- overridingMethods if isInScope(baseClass, overridingMethod, myScopeType)) {
val node: CallHierarchyNodeDescriptor = new CallHierarchyNodeDescriptor(myProject, descriptor, overridingMethod, false, false)
if (!result.contains(node)) result += node
}
result.toArray
}
}
object JawaCalleeMethodsTreeStructure {
private[hierarchy] def visitor(element: PsiElement, methods: ArrayBuffer[PsiMethod]): Unit = {
if (element == null) return
element match {
case ref: JawaReference =>
val resolve = ref.resolve()
resolve match {
case meth: PsiMethod => methods += meth
case _ =>
}
case callExpression: PsiMethodCallExpression =>
val methodExpression: PsiReferenceExpression = callExpression.getMethodExpression
val method: PsiMethod = methodExpression.resolve.asInstanceOf[PsiMethod]
if (method != null) {
methods += method
}
case newExpression: PsiNewExpression =>
val method: PsiMethod = newExpression.resolveConstructor
if (method != null) {
methods += method
}
case _ =>
}
for (child <- element.getChildren) {
visitor(child, methods)
}
}
}
|
arguslab/argus-cit-intellij
|
src/main/scala/org/argus/cit/intellij/jawa/hierarchy/JawaCalleeMethodsTreeStructure.scala
|
Scala
|
epl-1.0
| 3,954
|
package pregnaware.utils
import com.typesafe.scalalogging.StrictLogging
import spray.http.{HttpMethods, HttpMethod, HttpHeaders, SomeOrigins}
import spray.routing._
/** Wraps a Route in the appropriate CORS header to allow cross-origin resource sharing */
object CorsWrapper extends Directives with StrictLogging {
def allowOrigins(allowedOrigins : Set[String]): Directive0 = mapInnerRoute { innerRoute =>
val originHeader = optionalHeaderValueByType[HttpHeaders.Origin](())
originHeader { originOption =>
val origin = originOption match {
case Some(l) =>
val validHost = l.originList.find(x => allowedOrigins.contains(x.host.host))
validHost match {
            case Some(host) => logger.info(s"Allowing cross-origin request from host: $host")
            case None => logger.info(s"Blocking cross-origin request from hosts: ${l.originList}")
}
validHost
case None =>
None
}
origin match {
case Some(o) =>
val accessControlAllowHeader =
HttpHeaders.`Access-Control-Allow-Headers`("Origin", "X-Requested-With", "Content-Type", "Accept")
val accessControlAllowMethods =
HttpHeaders.`Access-Control-Allow-Methods`(
HttpMethods.GET, HttpMethods.PUT, HttpMethods.POST, HttpMethods.DELETE)
val accessControlAllowOrigin =
HttpHeaders.`Access-Control-Allow-Origin`(SomeOrigins(Seq(o)))
respondWithHeaders(accessControlAllowHeader, accessControlAllowOrigin, accessControlAllowMethods) {
options { complete { "" } } ~ innerRoute
}
case None => innerRoute
}
}
}
}
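// Hedged usage sketch (added for illustration; not part of the original file).
// It shows how a service might wrap its route in the CORS directive. The
// "status" path and the allowed host names below are assumptions.
object CorsWrapperUsageSketch extends Directives {
  // Only requests whose Origin host matches one of these entries get the
  // Access-Control-* headers; everything else falls through to the bare route.
  val route: Route =
    CorsWrapper.allowOrigins(Set("localhost", "app.example.com")) {
      path("status") {
        get {
          complete("ok")
        }
      }
    }
}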
|
jds106/pregnaware
|
service/src/main/scala/pregnaware/utils/CorsWrapper.scala
|
Scala
|
mit
| 1,723
|
import dotty.tools.dotc.ast.Trees.Thicket
import dotty.tools.dotc.ast.tpd._
object Labels {
def main(args: Array[String]): Unit = {
var i = 10
while(i>0) {
var j = 0
while(j<i) {
println(j +" " + i)
j = j + 1
}
      i = i - 1
    }
pattern(1)
pattern(2)
pattern(3)
}
def pattern(a: Int) = a match {
case 1 if (a>0) => println("one")
case t@2 => println("two" + t)
case _ => println("default")
}
def flatten(trees: Tree): Int = {
trees match {
case Thicket(elems) =>
while (trees ne trees) {
}
case tree =>
33
}
55
}
}
|
yusuke2255/dotty
|
tests/pos/Labels.scala
|
Scala
|
bsd-3-clause
| 598
|
/*
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler
package operator
package user
import java.lang.{ Iterable => JIterable }
import java.util.{ List => JList }
import java.util.concurrent.atomic.AtomicLong
import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.reflect.ClassTag
import org.objectweb.asm.Type
import org.objectweb.asm.signature.SignatureVisitor
import com.asakusafw.lang.compiler.analyzer.util.GroupOperatorUtil
import com.asakusafw.lang.compiler.model.graph.{ OperatorInput, UserOperator }
import com.asakusafw.runtime.core.Result
import com.asakusafw.runtime.flow.{ ArrayListBuffer, FileMapListBuffer, ListBuffer }
import com.asakusafw.runtime.core.GroupView
import com.asakusafw.runtime.model.DataModel
import com.asakusafw.spark.compiler.spi.{ OperatorCompiler, OperatorType }
import com.asakusafw.spark.runtime.fragment.user._
import com.asakusafw.spark.tools.asm._
import com.asakusafw.spark.tools.asm.MethodBuilder._
import com.asakusafw.spark.tools.asm4s._
import com.asakusafw.vocabulary.attribute.BufferType
import com.asakusafw.vocabulary.operator.{ CoGroup, GroupSort }
class CoGroupOperatorCompiler extends UserOperatorCompiler {
override def support(
operator: UserOperator)(
implicit context: OperatorCompiler.Context): Boolean = {
(operator.annotationDesc.resolveClass == classOf[CoGroup]
|| operator.annotationDesc.resolveClass == classOf[GroupSort])
}
override def operatorType: OperatorType = OperatorType.CoGroupType
override def compile(
operator: UserOperator)(
implicit context: OperatorCompiler.Context): Type = {
assert(support(operator),
s"The operator type is not supported: ${operator.annotationDesc.resolveClass.getSimpleName}"
+ s" [${operator}]")
assert(operator.inputs.size > 0,
s"The size of inputs should be greater than 0: ${operator.inputs.size} [${operator}]")
assert(
operator.methodDesc.parameterClasses
.zip(operator.inputs.takeWhile(_.getInputUnit != OperatorInput.InputUnit.WHOLE)
.map(_ => classOf[JList[_]])
++: operator.inputs.dropWhile(_.getInputUnit != OperatorInput.InputUnit.WHOLE)
.collect {
case input: OperatorInput if input.getInputUnit == OperatorInput.InputUnit.WHOLE =>
classOf[GroupView[_]]
}
++: operator.outputs.map(_ => classOf[Result[_]])
++: operator.arguments.map(_.resolveClass))
.forall {
case (method, model) => method.isAssignableFrom(model)
},
s"The operator method parameter types are not compatible: (${
operator.methodDesc.parameterClasses.map(_.getName).mkString("(", ",", ")")
}, ${
(operator.inputs.takeWhile { input =>
input.getInputUnit != OperatorInput.InputUnit.WHOLE
}.map(_ => classOf[JList[_]])
++: operator.inputs.dropWhile { input =>
input.getInputUnit != OperatorInput.InputUnit.WHOLE
}.collect {
case input: OperatorInput if input.getInputUnit == OperatorInput.InputUnit.WHOLE =>
classOf[GroupView[_]]
}
++: operator.outputs.map(_ => classOf[Result[_]])
++: operator.arguments.map(_.resolveClass)).map(_.getName).mkString("(", ",", ")")
}) [${operator}]")
val builder = new CoGroupOperatorFragmentClassBuilder(operator)
context.addClass(builder)
}
}
private class CoGroupOperatorFragmentClassBuilder(
operator: UserOperator)(
implicit context: OperatorCompiler.Context)
extends UserOperatorFragmentClassBuilder(
classOf[IndexedSeq[Iterator[_]]].asType,
operator.implementationClass.asType,
operator.inputs,
operator.outputs)(
None,
classOf[CoGroupOperatorFragment].asType) {
override def defCtor()(implicit mb: MethodBuilder): Unit = {
val thisVar :: _ :: fragmentVars = mb.argVars
thisVar.push().invokeInit(
superType,
buildIndexedSeq { builder =>
for {
input <- operator.inputs
} {
val bufferType = GroupOperatorUtil.getBufferType(input)
builder += pushNew0(
bufferType match {
case BufferType.HEAP =>
ListLikeBufferClassBuilder.getOrCompile(input.dataModelType, spill = false)
case BufferType.SPILL =>
ListLikeBufferClassBuilder.getOrCompile(input.dataModelType, spill = true)
case BufferType.VOLATILE =>
classOf[IterableBuffer[_]].asType
})
}
},
buildIndexedSeq { builder =>
fragmentVars.foreach {
builder += _.push()
}
})
}
override def defMethods(methodDef: MethodDef): Unit = {
super.defMethods(methodDef)
methodDef.newMethod(
"cogroup",
Seq(
classOf[IndexedSeq[JIterable[_ <: DataModel[_]]]].asType,
classOf[IndexedSeq[Result[_]]].asType),
new MethodSignatureBuilder()
.newParameterType {
_.newClassType(classOf[IndexedSeq[_]].asType) {
_.newTypeArgument(SignatureVisitor.INSTANCEOF) {
_.newClassType(classOf[JIterable[_]].asType) {
_.newTypeArgument(SignatureVisitor.EXTENDS) {
_.newClassType(classOf[DataModel[_]].asType) {
_.newTypeArgument()
}
}
}
}
}
}
.newParameterType {
_.newClassType(classOf[IndexedSeq[_]].asType) {
_.newTypeArgument(SignatureVisitor.INSTANCEOF) {
_.newClassType(classOf[Result[_]].asType) {
_.newTypeArgument()
}
}
}
}
.newVoidReturnType()) { implicit mb =>
val thisVar :: buffersVar :: outputsVar :: _ = mb.argVars
getOperatorField()
.invokeV(
operator.methodDesc.getName,
(operator.inputs.takeWhile(_.getInputUnit != OperatorInput.InputUnit.WHOLE)
.zipWithIndex.map {
case (_, i) => applySeq(buffersVar.push(), ldc(i))
}
++ operator.inputs.dropWhile(_.getInputUnit != OperatorInput.InputUnit.WHOLE)
.collect {
case input: OperatorInput if input.getInputUnit == OperatorInput.InputUnit.WHOLE =>
getViewField(input)
}
++ (0 until operator.outputs.size).map { i =>
applySeq(outputsVar.push(), ldc(i))
}
++ operator.arguments.map { argument =>
Option(argument.value).map { value =>
ldc(value)(ClassTag(argument.resolveClass), implicitly)
}.getOrElse {
pushNull(argument.resolveClass.asType)
}
}).zip(operator.methodDesc.asType.getArgumentTypes()).map {
case (s, t) => s.asType(t)
}: _*)
`return`()
}
}
}
private class ListLikeBufferClassBuilder(
dataModelType: Type,
spill: Boolean)(
implicit context: CompilerContext)
extends ClassBuilder(
Type.getType(
s"L${GeneratedClassPackageInternalName}/${context.flowId}/fragment/ListLikeBuffer$$${ListLikeBufferClassBuilder.nextId};"), // scalastyle:ignore
new ClassSignatureBuilder()
.newSuperclass {
_.newClassType(classOf[ListLikeBuffer[_]].asType) {
_.newTypeArgument(SignatureVisitor.INSTANCEOF, dataModelType)
}
},
classOf[ListLikeBuffer[_]].asType) {
override def defConstructors(ctorDef: ConstructorDef): Unit = {
ctorDef.newInit(Seq.empty) { implicit mb =>
val thisVar :: _ = mb.argVars
thisVar.push().invokeInit(
superType,
pushNew0(
if (spill) {
classOf[FileMapListBuffer[_]].asType
} else {
classOf[ArrayListBuffer[_]].asType
}).asType(classOf[ListBuffer[_]].asType))
}
}
override def defMethods(methodDef: MethodDef): Unit = {
super.defMethods(methodDef)
methodDef.newMethod("newDataModel", classOf[DataModel[_]].asType, Seq.empty) { implicit mb =>
val thisVar :: _ = mb.argVars
`return`(thisVar.push().invokeV("newDataModel", dataModelType))
}
methodDef.newMethod("newDataModel", dataModelType, Seq.empty) { implicit mb =>
`return`(pushNew0(dataModelType))
}
}
}
private object ListLikeBufferClassBuilder {
private[this] val curIds: mutable.Map[CompilerContext, AtomicLong] =
mutable.WeakHashMap.empty
def nextId(implicit context: CompilerContext): Long =
curIds.getOrElseUpdate(context, new AtomicLong(0L)).getAndIncrement()
private[this] val cache: mutable.Map[CompilerContext, mutable.Map[(Type, Boolean), Type]] =
mutable.WeakHashMap.empty
def getOrCompile(
dataModelType: Type, spill: Boolean)(
implicit context: CompilerContext): Type = {
cache.getOrElseUpdate(context, mutable.Map.empty)
.getOrElseUpdate(
(dataModelType, spill),
context.addClass(new ListLikeBufferClassBuilder(dataModelType, spill)))
}
}
|
asakusafw/asakusafw-spark
|
compiler/src/main/scala/com/asakusafw/spark/compiler/operator/user/CoGroupOperatorCompiler.scala
|
Scala
|
apache-2.0
| 9,740
|
import scala.reflect.macros.blackbox.Context
import language.experimental.macros
trait Tree
case object SomeTree extends Tree
object NewQuasiquotes {
implicit class QuasiquoteInterpolation(c: StringContext) {
object nq {
def unapply(t: Tree) = macro QuasiquoteMacros.unapplyImpl
}
}
}
|
nativelibs4java/Scalaxy
|
Experiments/Quasiquotes/src/main/scala/NewQuasiquotes.scala
|
Scala
|
bsd-3-clause
| 305
|
package io.finch.benchmarks.service
package argonaut
class ArgonautBenchmark extends UserServiceBenchmark(() => userService)
|
travisbrown/finch
|
benchmarks/src/main/scala/io/finch/benchmarks/service/argonaut/benchmark.scala
|
Scala
|
apache-2.0
| 126
|
package misc
import org.scalatest.{Matchers, FlatSpec}
class EightQueensSpec extends FlatSpec with Matchers{
it should "check Cols" in {
val eightQueens = new EightQueens(5)
eightQueens.checkCol(4,2) should be (true)
eightQueens.modifyQueenPosition(3,2,true)
eightQueens.checkCol(4,2) should be (false)
}
it should "check Cols Init " in {
val eightQueens = new EightQueens(5)
eightQueens.checkCol(4, 2) should be(true)
}
it should "check fwd diagonals" in {
val eightQueens = new EightQueens(5)
eightQueens.checkFwdDiagonal(4,2) should be (true)
eightQueens.modifyQueenPosition(2,0,true)
eightQueens.checkFwdDiagonal(4,2) should be (false)
}
it should "check backward diagonals" in {
val eightQueens = new EightQueens(5)
eightQueens.checkBwdDiagonal(4,2) should be (true)
eightQueens.modifyQueenPosition(2,4,true)
eightQueens.checkBwdDiagonal(4,2) should be (false)
}
it should "place the queens" in {
val eightQueens = new EightQueens(5)
eightQueens.placeQueenOnBoard(0)
}
}
|
sajit/skalad
|
scala/src/test/scala/misc/EightQueensSpec.scala
|
Scala
|
apache-2.0
| 1,072
|
package io.skysail.server.app
import akka.actor.ActorSystem
import akka.pattern.ask
import akka.util.Timeout
import io.skysail.api.ui.Client
import io.skysail.domain.app.Application
import io.skysail.server.actors.ApplicationsActor
import scala.concurrent.Future
import scala.concurrent.duration.DurationInt
class ApplicationService() {
implicit val timeout: Timeout = 3.seconds
var clients = List[Client]()
def getAllApplications(system: ActorSystem): Future[List[Application]] = {
val appsActor = BackendApplication.getApplicationsActor(system)
(appsActor ? ApplicationsActor.GetAllApplications()).mapTo[List[Application]]
}
}
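// Hedged usage sketch (added for illustration): consuming the Future returned
// by getAllApplications. The ActorSystem wiring and the println handler are
// assumptions; a real caller would complete an HTTP response instead.
object ApplicationServiceUsageSketch {
  import scala.concurrent.ExecutionContext.Implicits.global

  def listApplications(system: ActorSystem): Unit = {
    val service = new ApplicationService()
    // The service asks the ApplicationsActor and maps the untyped reply.
    service.getAllApplications(system).foreach { apps =>
      apps.foreach(app => println(app))
    }
  }
}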
|
evandor/skysail-server
|
skysail.server/src/io/skysail/server/app/ApplicationService.scala
|
Scala
|
apache-2.0
| 653
|
package scala.meta
package internal
package prettyprinters
import scala.meta.internal.{ast => impl}
import scala.meta.dialects.`package`.Scala211
private[meta] object toString {
def apply(tree: Tree) = {
val prettyprinter = TreeSyntax[Tree](Scala211)
val code = prettyprinter(tree).toString
tree match {
case _: impl.Quasi => code
case impl.Ctor.Primary(_, name, _) => s"def this$code"
case _ => code
}
}
}
|
beni55/scalameta
|
scalameta/trees/src/main/scala/scala/meta/internal/prettyprinters/ToString.scala
|
Scala
|
bsd-3-clause
| 447
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.utils._
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.protocol.{ApiKeys, Errors}
import org.apache.kafka.common.requests._
import org.junit.Assert._
import org.junit.Test
import collection.JavaConverters._
class StopReplicaRequestTest extends BaseRequestTest {
override val logDirCount = 2
override val numBrokers: Int = 1
val topic = "topic"
val partitionNum = 2
val tp0 = new TopicPartition(topic, 0)
val tp1 = new TopicPartition(topic, 1)
@Test
def testStopReplicaRequest(): Unit = {
createTopic(topic, partitionNum, 1)
TestUtils.generateAndProduceMessages(servers, topic, 10)
val server = servers.head
val offlineDir = server.logManager.getLog(tp1).get.dir.getParent
server.replicaManager.handleLogDirFailure(offlineDir, sendZkNotification = false)
for (i <- 1 to 2) {
val request1 = new StopReplicaRequest.Builder(1,
server.config.brokerId, server.replicaManager.controllerEpoch, server.kafkaController.brokerEpoch,
true, Set(tp0, tp1).asJava).build()
val response1 = connectAndSend(request1, ApiKeys.STOP_REPLICA, controllerSocketServer)
val partitionErrors1 = StopReplicaResponse.parse(response1, request1.version).responses()
assertEquals(Errors.NONE, partitionErrors1.get(tp0))
assertEquals(Errors.KAFKA_STORAGE_ERROR, partitionErrors1.get(tp1))
}
}
}
|
gf53520/kafka
|
core/src/test/scala/unit/kafka/server/StopReplicaRequestTest.scala
|
Scala
|
apache-2.0
| 2,244
|
package experiments
import org.junit.runner._
import org.specs2.mutable._
import org.specs2.runner._
import play.api.libs.iteratee._
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
@RunWith(classOf[JUnitRunner])
class IterateeSpec extends Specification {
"Enumerator" should {
"be used this way" in {
Await.result(
Enumerator(true, true, false, true, true) &>
Enumeratee.dropWhile(_ == true) &>
Enumeratee.take(1) |>>>
Iteratee.getChunks, 10.seconds
) mustEqual List(false)
}
}
}
|
lizepeng/app.io
|
test/experiments/IterateeSpec.scala
|
Scala
|
apache-2.0
| 621
|
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.recorder.util
private[recorder] trait Labelled { def label: String }
|
gatling/gatling
|
gatling-recorder/src/main/scala/io/gatling/recorder/util/Labelled.scala
|
Scala
|
apache-2.0
| 708
|
package rml.args.run
import scala.util.Try
import com.typesafe.scalalogging.{LazyLogging => Logging}
import rml.args.arg.FuncArg
import rml.args.config.ConfigAdjuster
import rml.args.config.FullConfig
import rml.args.exceptions.FunctionNotFoundException
import rml.args.register.FunctionRegister
import scala.util.Failure
object FunctionRunner extends Logging {
import logger._
/**
* Try to find a function that matches the FunctionArguments and run it
*/
def run(fullConfig: FullConfig): Try[Any] = {
debug("fullConfig: {}", fullConfig)
val leadingTokens = fullConfig.funcName
debug("leadingTokens: {}", leadingTokens.mkString(" "))
val functionName = FunctionRegister.findLongestMatching(leadingTokens) match {
case Some(k) => k
case None => return Failure(new FunctionNotFoundException(leadingTokens))
}
debug("functionName: {}", functionName.mkString(" "))
val function: FuncArg[_] = FunctionRegister.get(functionName)
debug("function: {}", function)
val adjustedConfig: FullConfig = ConfigAdjuster(fullConfig, function, functionName)
debug("adjustedConfig: {}", adjustedConfig)
val result = function(adjustedConfig)
trace("result: {}", result.toString)
result
}
}
|
rml/scala_args
|
src/main/scala/rml/args/run/FunctionRunner.scala
|
Scala
|
gpl-3.0
| 1,304
|
package bad.robot.temperature.rrd
import java.io.File
import org.rrd4j.DsType.GAUGE
import org.rrd4j.core.{DsDef, RrdDb, RrdDef}
import org.specs2.mutable.Specification
import org.specs2.specification.BeforeEach
import scala.Double._
import scala.util.{Failure, Success, Try}
import bad.robot.temperature.Files._
class RrdDbOpsTest extends Specification with BeforeEach {
sequential
def before = DataSources.updated = Set[String]()
val frequency = Seconds(30)
"Rrd file with no values" >> {
val file = File.createTempFile("test", ".rrd")
createRrdFile(createDefinition(file, "example"))
val database = new RrdDb(file)
database.hasValuesFor("example") must_== false
}
"Attempting to check values for unknown datasource" >> {
val file = File.createTempFile("test", ".rrd")
createRrdFile(createDefinition(file, "example"))
val database = new RrdDb(file)
database.hasValuesFor("doesnt_exist") must throwA(new IllegalArgumentException("Unknown datasource name: doesnt_exist"))
}
"A failed update doesn't have values" >> {
val file = File.createTempFile("test", ".rrd")
createRrdFile(createDefinition(file, "example"))
Try {
update(file, Seconds(5), 1.0, 2.0)
} match {
case Success(_) => ko("Should have got 'IllegalArgumentException: Invalid number of values specified (found 2, only 1 allowed'")
case Failure(_) => {
val database = new RrdDb(file)
database.hasValuesFor("example").aka("the 'hasValuesFor' result for the 'example' datasource") must_== false
}
}
}
"Partially populated archive passes the 'hasValuesFor' check" >> {
val file = File.createTempFile("test", ".rrd")
createRrdFile(createDefinition(file, "example"))
update(file, Seconds( 5), 1.0)
update(file, Seconds(35), 2.0)
update(file, Seconds(65), NaN)
val database = new RrdDb(file)
database.hasValuesFor("example").aka("the 'hasValuesFor' result for the 'example' datasource") must_== true
}
"Partially populated, multiple datasource archive passes the 'hasValuesFor' check" >> {
val file = File.createTempFile("test", ".rrd")
createRrdFile(createDefinition(file, "example-1", "example-2"))
update(file, Seconds( 5), 1.0, NaN)
update(file, Seconds(35), NaN, 2.0)
update(file, Seconds(60), NaN, NaN)
val database = new RrdDb(file)
database.hasValuesFor("example-1").aka("the 'hasValuesFor' result for the 'example-1' datasource") must_== true
database.hasValuesFor("example-2").aka("the 'hasValuesFor' result for the 'example-2' datasource") must_== true
}
"More complex example of partially populated, multiple datasource archive" >> {
val file = File.createTempFile("test", ".rrd")
createRrdFile(createDefinition(file, "example-1", "example-2", "example-3", "example-4"))
update(file, Seconds( 5), 1.0, NaN, NaN, NaN)
update(file, Seconds(35), NaN, NaN, 2.0, NaN)
update(file, Seconds(65), NaN, NaN, NaN, NaN)
update(file, Seconds(95), NaN, NaN, NaN, NaN)
val database = new RrdDb(file)
database.hasValuesFor("example-1").aka("the 'hasValuesFor' result for the 'example-1' datasource") must_== true
database.hasValuesFor("example-2").aka("the 'hasValuesFor' result for the 'example-2' datasource") must_== false // never set
database.hasValuesFor("example-3").aka("the 'hasValuesFor' result for the 'example-3' datasource") must_== true
database.hasValuesFor("example-4").aka("the 'hasValuesFor' result for the 'example-4' datasource") must_== false // never set
}
def createDefinition(file: File, datasources: String*): RrdDef = {
val definition = new RrdDef(file, Seconds(0), frequency)
datasources.foreach { name =>
definition.addDatasource(new DsDef(name, GAUGE, frequency, NaN, NaN))
}
definition.addArchive(Archive(aDay, frequency, frequency))
definition
}
  private def createRrdFile(definition: RrdDef): Unit = {
val database = new RrdDb(definition)
database.close()
}
private def update(file: File, time: Seconds, values: Double*) = {
val database = new RrdDb(file)
database.createSample().setValues(database, time, values:_*)
database.close()
}
}
|
tobyweston/temperature-machine
|
src/test/scala/bad/robot/temperature/rrd/RrdDbOpsTest.scala
|
Scala
|
apache-2.0
| 4,225
|
package com.example
import com.twitter.algebird._
case class Example(
x: Int,
y: Long)
object Foo {
def main(args: Array[String]) = {
println("Hello from Foo")
val m = (0 until 1000).map(x => x % 17 -> x).toMap
val r = MapAlgebra.sumByKey(m).toSeq.sorted
r.foreach{ case (x, y) =>
println(s"Entry: $x -> $y")
}
}
}
|
ianoc/ExampleScalaProject
|
example-core/src/main/scala/com/example/Example.scala
|
Scala
|
apache-2.0
| 357
|
package quizleague.web.names
trait ComponentNames {
val typeName:String
}
|
gumdrop/quizleague-maintain
|
js/src/main/scala/quizleague/web/names/ComponentNames.scala
|
Scala
|
mit
| 76
|
package scalanlp.classify
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import scalala.tensor.mutable.Counter
/**
*
* @author dlwh
*/
@RunWith(classOf[JUnitRunner])
class NaiveBayesTrainerTest extends ClassifierTrainerTestHarness {
  def trainer[L, T]: Classifier.Trainer[L, Counter[T, Double]] = new NaiveBayes.Trainer[L, T]
}
|
MLnick/scalanlp-core
|
learn/src/test/scala/scalanlp/classify/NaiveBayesTrainerTest.scala
|
Scala
|
apache-2.0
| 356
|
package com.twitter.scalding.bdd
import cascading.flow.FlowDef
import com.twitter.scalding._
import com.twitter.scalding.source.TypedText
import scala.collection.mutable.Buffer
import TDsl._
trait TBddDsl extends FieldConversions with TypedPipeOperationsConversions {
def Given[TypeIn](source: TypedTestSource[TypeIn]): TestCaseGiven1[TypeIn] = new TestCaseGiven1[TypeIn](source)
def GivenSources(sources: List[TypedTestSource[_]]): TestCaseGivenList = new TestCaseGivenList(sources)
abstract class TypedTestSource[T] {
def data: Iterable[T]
def asSource: Source =
IterableSource(data map { Tuple1(_) }, 'tuple)
def readFromSourceAsTyped(implicit flowDef: FlowDef, mode: Mode): TypedPipe[T] =
asSource.read.toTypedPipe[Tuple1[T]]('tuple) map { _._1 }
def addSourceDataToJobTest(jobTest: JobTest) = jobTest.source(asSource, data)
}
class SimpleTypedTestSource[T](val data: Iterable[T]) extends TypedTestSource[T] {
def addSourceToJob(jobTest: JobTest, source: Source): JobTest =
jobTest.source[T](source, data)
}
  implicit def fromSimpleTypeToTypedSource[T](data: Iterable[T]): SimpleTypedTestSource[T] =
new SimpleTypedTestSource(data)
case class TestCaseGiven1[TypeIn](source: TypedTestSource[TypeIn]) {
def And[TypeIn2](other: TypedTestSource[TypeIn2]) = TestCaseGiven2[TypeIn, TypeIn2](source, other)
def When[TypeOut: Manifest: TupleConverter: TupleSetter](op: OneTypedPipeOperation[TypeIn, TypeOut]): TestCaseWhen[TypeOut] = TestCaseWhen(List(source), op)
}
case class TestCaseGiven2[TypeIn1, TypeIn2](source: TypedTestSource[TypeIn1], other: TypedTestSource[TypeIn2]) {
def And[TypeIn3](third: TypedTestSource[TypeIn3]) = TestCaseGiven3(source, other, third)
def When[TypeOut: Manifest: TupleConverter: TupleSetter](op: TwoTypedPipesOperation[TypeIn1, TypeIn2, TypeOut]): TestCaseWhen[TypeOut] = TestCaseWhen(List(source, other), op)
}
case class TestCaseGiven3[TypeIn1, TypeIn2, TypeIn3](source: TypedTestSource[TypeIn1], other: TypedTestSource[TypeIn2], third: TypedTestSource[TypeIn3]) {
def And(next: TypedTestSource[_]) = TestCaseGivenList(List(source, other, third, next))
def When[TypeOut: Manifest: TupleConverter: TupleSetter](op: ThreeTypedPipesOperation[TypeIn1, TypeIn2, TypeIn3, TypeOut]): TestCaseWhen[TypeOut] = TestCaseWhen(List(source, other, third), op)
}
case class TestCaseGivenList(sources: List[TypedTestSource[_]]) {
def And(next: TypedTestSource[_]) = TestCaseGivenList((next :: sources.reverse).reverse)
def When[TypeOut: Manifest](op: ListOfTypedPipesOperations[TypeOut]): TestCaseWhen[TypeOut] = TestCaseWhen(sources, op)
}
case class TestCaseWhen[OutputType: Manifest](sources: List[TypedTestSource[_]], operation: TypedPipeOperation[OutputType]) {
def Then(assertion: Buffer[OutputType] => Unit): Unit = {
CompleteTestCase(sources, operation, assertion).run()
}
}
case class CompleteTestCase[OutputType: Manifest](sources: List[TypedTestSource[_]], operation: TypedPipeOperation[OutputType], assertion: Buffer[OutputType] => Unit) {
class DummyJob(args: Args) extends Job(args) {
val inputPipes: List[TypedPipe[_]] = sources.map(testSource => testSource.readFromSourceAsTyped)
val outputPipe = operation(inputPipes)
implicit val td: TypeDescriptor[OutputType] = new TypeDescriptor[OutputType] {
def converter = TupleConverter.singleConverter
def setter = TupleSetter.singleSetter
def fields = new cascading.tuple.Fields("item")
}
outputPipe.write(TypedText.tsv[OutputType]("output"))
}
def run(): Unit = {
val jobTest = JobTest(new DummyJob(_))
// Add Sources
sources foreach { _.addSourceDataToJobTest(jobTest) }
implicit val td: TypeDescriptor[OutputType] = new TypeDescriptor[OutputType] {
def converter = TupleConverter.singleConverter
def setter = TupleSetter.singleSetter
def fields = new cascading.tuple.Fields("item")
}
// Add Sink
jobTest.sink[OutputType](TypedText.tsv[OutputType]("output")) {
buffer: Buffer[OutputType] => assertion(buffer)
}
// Execute
jobTest.run.finish
}
}
}
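// Hedged usage sketch (added for illustration): how a test mixing in TBddDsl
// might chain Given/When/Then. The implicit conversion from the function in
// `When` to a OneTypedPipeOperation comes from TypedPipeOperationsConversions,
// which is not shown here, so its exact shape is an assumption.
class TBddDslUsageSketch extends TBddDsl {
  def wordLengthScenario(): Unit = {
    Given {
      List("apple", "fig", "banana")
    } When { words: TypedPipe[String] =>
      // Pair every word with its length.
      words.map(word => (word, word.length))
    } Then { buffer: Buffer[(String, Int)] =>
      assert(buffer.forall { case (word, len) => word.length == len })
    }
  }
}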
|
tglstory/scalding
|
scalding-core/src/main/scala/com/twitter/scalding/bdd/TBddDsl.scala
|
Scala
|
apache-2.0
| 4,215
|
package recommender
import breeze.collection.mutable.SparseArray
import org.apache.spark.mllib.clustering.KMeans
import org.apache.spark.rdd.RDD
abstract class ClusteredKnnRecommender(
    vectorsRDD: RDD[UserVector],
    numberOfClusters: Int,
    numberOfNeighbors: Int,
    distanceMetric: DistanceMetric = CosineDistance)
  extends RecommenderWithUserVectorRepresentation(vectorsRDD) with Serializable {
//def this(numberOfClusters: Int, numberOfNeighbors: Int, distanceMetric: DistanceMetric = CosineDistance) = this(UserSparseVector.convertRatingsFromHolderToUserSparseVectors(mainHolder.getDataHolder()), numberOfClusters, numberOfNeighbors, distanceMetric)
//def this(numberOfClusters: Int, numberOfNeighbors: Int, distanceMetric: DistanceMetric = CosineDistance) = this(UserSparseVector.classify(mainHolder.getDataHolder()), numberOfClusters, numberOfNeighbors, distanceMetric)
//Get centroids and clusters
val (centroids, arrayOfClustersRDDs) = clustering()
//One k-nn recommender per cluster
val KnnRecommenders = arrayOfClustersRDDs.map(v => new KnnRecommender(v, numberOfNeighbors, distanceMetric))
//"Recommender" for choosing nearest centroids
val centroidsRDD = startSpark.sc.parallelize(centroids)//.persist()
val nearestCentroidsRecommender = new KnnRecommender(centroidsRDD, numberOfClusters, distanceMetric)
/**
   * Groups vectors into clusters.
   * @return a sequence of RDDs, each containing the UserVectors of one cluster
   */
protected def clustering(): (Array[UserVector], Seq[RDD[UserVector]])
override def recommend(userVector: UserVector, numberOfRecommendedProducts: Int) = {
val nearestCentroids = nearestCentroidsRecommender.getNearestNeighbors(userVector)
val nearestCentroidsIDs = nearestCentroids.map(v => v.getUserID())
    println(arrayOfClustersRDDs.map(_.count).mkString(":"))
    println()
    // TODO: iterate over more of the nearest clusters
val nearestRecommender = KnnRecommenders(nearestCentroidsIDs(0))
nearestRecommender.recommend(userVector, numberOfRecommendedProducts)
}
}
/**
* ClusteredKnnRecommender using k-Means algorithm from MLlib for clustering
* @param vectorsRDD Vectors representing a set of users. Ratings of users are taken from the Recommender's dataHolder if this field is not specified.
* @param numberOfClusters Number of clusters
 * @param numberOfKMeansIterations Number of iterations of the k-Means algorithm
 * @param numberOfNeighbors Number of neighbors considered by the k-NN algorithm
* @param distanceMetric Metric which determines similarity between users in k-NN
*/
class KMeansClusteredKnnRecommender(vectorsRDD: RDD[UserVector], numberOfClusters: Int, numberOfKMeansIterations: Int, numberOfNeighbors: Int, distanceMetric: DistanceMetric = CosineDistance) extends ClusteredKnnRecommender(vectorsRDD, numberOfClusters, numberOfNeighbors, distanceMetric) with Serializable {
def this(numberOfClusters: Int=2, numberOfKMeansIterations: Int=20, numberOfNeighbors: Int=10, distanceMetric: DistanceMetric = CosineDistance) = this(UserSparseVector.convertRatingsFromHolderToUserSparseVectors(mainHolder.getDataHolder()), numberOfClusters, numberOfKMeansIterations, numberOfNeighbors, distanceMetric)
override def clustering(): (Array[UserVector], Seq[RDD[UserVector]]) = {
KMeansClustering.clustering(vectorsRDD, numberOfClusters, numberOfKMeansIterations)
}
override def recommendFromUserID(userID: Int, numberOfRecommendedProducts: Int = 10): Seq[(Int,String,Double)] = {
    val userVector = vectorsPairRDD.lookup(userID).head.head
println()
println("*******************************************************************************************************")
println( userVector)
println()
recommend(userVector, numberOfRecommendedProducts)
}
}
/**
* Clustering by MLlib's k-Means
*/
object KMeansClustering {
def clustering(vectorsRDD: RDD[UserVector], numberOfClusters: Int, numberOfKMeansIterations: Int): (Array[UserVector], Seq[RDD[UserVector]]) = {
val mllibVectorsRDD = vectorsRDD.map(v => v.toMLlibVector()).persist()
val model = KMeans.train(mllibVectorsRDD,numberOfClusters,numberOfKMeansIterations)
val predictedClusters = model.predict(mllibVectorsRDD)
val vectorsWithClusterNumbersRDD = predictedClusters.zip(vectorsRDD)
val centroidsMLlibVectors = model.clusterCenters
val centroidsUserVector: Array[UserVector] = centroidsMLlibVectors.zipWithIndex.map { case (v, i) =>
val data = v.toArray
val sorted = data.zipWithIndex.toList.sortBy(rat=>rat._2)
val index = sorted.map(rat => rat._2).toSeq.toArray
val values = sorted.map(rat => rat._1).toSeq.toArray
val sparseArray = new SparseArray[Double](index, values, index.length, index.length, 0)
new UserSparseVector(i, sparseArray)
}
val arrayOfClustersRDDsBeforeRepartition = (0 until numberOfClusters).map(i => vectorsWithClusterNumbersRDD.filter { case (j, cluster) => (i == j)}.map { case (j, vec) => vec})
val numberOfVectorsInOriginalRDD = vectorsRDD.count()
val numberOfOriginalPartitions = vectorsRDD.partitions.length
val seqOfClustersRDDs = arrayOfClustersRDDsBeforeRepartition.map(rdd => {
val ratioBetweenVectorsInThisClustersAndAllClusters = rdd.count().toDouble / numberOfVectorsInOriginalRDD
val numberOfPartitions = (numberOfOriginalPartitions * ratioBetweenVectorsInThisClustersAndAllClusters).ceil.toInt
rdd.repartition(numberOfPartitions).persist()
})
(centroidsUserVector, seqOfClustersRDDs)
}
}
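// Hedged usage sketch (added for illustration): building the clustered
// recommender from the holder-backed auxiliary constructor and asking for
// recommendations. The user id and parameter values are assumptions.
object ClusteredKnnRecommenderUsageSketch {
  def recommendForUser(userID: Int): Seq[(Int, String, Double)] = {
    val recommender = new KMeansClusteredKnnRecommender(
      numberOfClusters = 4,
      numberOfKMeansIterations = 20,
      numberOfNeighbors = 10)
    // Finds the user's nearest centroid, then runs k-NN inside that cluster.
    recommender.recommendFromUserID(userID, numberOfRecommendedProducts = 5)
  }
}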
|
litaoran/spray-sample
|
src/main/scala/recommender/ClusterRecommender.scala
|
Scala
|
mit
| 5,722
|
package scales.component
import scala.scalajs.js
import scala.scalajs.js.UndefOr
import org.scalajs.dom.Element
import scales.el.ExpressionContext
class ComponentContext[A <: Element](
val element: Element,
val parent: Option[ExpressionContext]) extends ExpressionContext {
require(element != null, "Missing argument 'element'.")
require(parent != null, "Missing argument 'parent'.")
def get[T](name: String): Option[T] = {
val value: UndefOr[Any] = element.asInstanceOf[js.Dynamic].selectDynamic(name)
value.toOption map {
case f: js.Function => f.bind(element).asInstanceOf[T]
case v => v.asInstanceOf[T]
}
}
}
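// Hedged usage sketch (added for illustration): resolving a property from a DOM
// element through the context. The document lookup and property name are
// assumptions; note that `get` binds any function value to the element first.
object ComponentContextUsageSketch {
  import org.scalajs.dom

  def readTitle(selector: String): Option[String] = {
    // querySelector may return null, in which case the require check fails.
    val element: Element = dom.document.querySelector(selector)
    val context = new ComponentContext[Element](element, parent = None)
    context.get[String]("title")
  }
}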
|
nightscape/scales
|
core/src/main/scala/scales/component/ComponentContext.scala
|
Scala
|
apache-2.0
| 654
|
package com.crackcell.jiezi.dict.loader.io
import java.io.{ByteArrayInputStream, InputStream}
import org.apache.spark.sql.SparkSession
/**
 * Converts an HDFS path into an input stream.
*
* @author Menglong TAN
*/
class HdfsToStream extends ToStream[String] {
private lazy val spark = SparkSession.builder().getOrCreate()
override def toStream(path: String): InputStream = {
val data = spark.read.textFile(path).collect().filter(_.nonEmpty).mkString("\\n")
new ByteArrayInputStream(data.getBytes())
}
}
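// Hedged usage sketch (added for illustration): loading a dictionary file from
// HDFS as a stream. The path is an assumption; a SparkSession must be
// obtainable, since HdfsToStream calls SparkSession.builder().getOrCreate().
object HdfsToStreamUsageSketch {
  def main(args: Array[String]): Unit = {
    val in: InputStream = new HdfsToStream().toStream("hdfs:///dicts/words.dict")
    try {
      // Hand the stream to a dictionary loader, or inspect it directly.
      println(s"available bytes: ${in.available()}")
    } finally {
      in.close()
    }
  }
}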
|
crackcell/jiezi
|
spark/src/main/scala/com/crackcell/jiezi/dict/loader/io/HdfsToStream.scala
|
Scala
|
apache-2.0
| 510
|
package scala.internal
/** A type for skolems that are generated during capture conversion. Capture conversion
* narrows the type of a tree whose type has wildcard arguments. A typical situation
* is a tree `t` of type `C[_ >: L <: U]` and an expected type `C[X]` where `X` is an
* instantiatable type variable. To be able to instantiate `X`, we cast the tree to type
* `X[$n.CAP]` where `$n` is a fresh skolem type with underlying type `TypeBox[L, U]`.
*/
final abstract class TypeBox[-L <: U, +U] {
type CAP >: L <: U
}
|
som-snytt/dotty
|
library/src/scala/internal/TypeBox.scala
|
Scala
|
apache-2.0
| 534
|
trait Foo[T]
object Foo:
given [T] as Foo[Tuple1[T]]
given [T, U] as Foo[(T, U)]
given [T, U, V] as Foo[(T, U, V)]
|
som-snytt/dotty
|
tests/pos/i6938.scala
|
Scala
|
apache-2.0
| 120
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.zookeeper
import java.util
import java.util.Locale
import java.util.concurrent.locks.{ReentrantLock, ReentrantReadWriteLock}
import java.util.concurrent._
import com.yammer.metrics.core.{Gauge, MetricName}
import kafka.metrics.KafkaMetricsGroup
import kafka.utils.CoreUtils.{inLock, inReadLock, inWriteLock}
import kafka.utils.{KafkaScheduler, Logging}
import org.apache.kafka.common.utils.Time
import org.apache.zookeeper.AsyncCallback._
import org.apache.zookeeper.KeeperException.Code
import org.apache.zookeeper.Watcher.Event.{EventType, KeeperState}
import org.apache.zookeeper.ZooKeeper.States
import org.apache.zookeeper.data.{ACL, Stat}
import org.apache.zookeeper._
import scala.collection.JavaConverters._
import scala.collection.mutable.Set
/**
* A ZooKeeper client that encourages pipelined requests.
*
* @param connectString comma separated host:port pairs, each corresponding to a zk server
* @param sessionTimeoutMs session timeout in milliseconds
* @param connectionTimeoutMs connection timeout in milliseconds
* @param maxInFlightRequests maximum number of unacknowledged requests the client will send before blocking.
* @param name name of the client instance
*/
class ZooKeeperClient(connectString: String,
sessionTimeoutMs: Int,
connectionTimeoutMs: Int,
maxInFlightRequests: Int,
time: Time,
metricGroup: String,
metricType: String,
name: Option[String]) extends Logging with KafkaMetricsGroup {
def this(connectString: String,
sessionTimeoutMs: Int,
connectionTimeoutMs: Int,
maxInFlightRequests: Int,
time: Time,
metricGroup: String,
metricType: String) = {
this(connectString, sessionTimeoutMs, connectionTimeoutMs, maxInFlightRequests, time, metricGroup, metricType, None)
}
this.logIdent = name match {
case Some(n) => s"[ZooKeeperClient $n] "
case _ => "[ZooKeeperClient] "
}
private val initializationLock = new ReentrantReadWriteLock()
private val isConnectedOrExpiredLock = new ReentrantLock()
private val isConnectedOrExpiredCondition = isConnectedOrExpiredLock.newCondition()
private val zNodeChangeHandlers = new ConcurrentHashMap[String, ZNodeChangeHandler]().asScala
private val zNodeChildChangeHandlers = new ConcurrentHashMap[String, ZNodeChildChangeHandler]().asScala
private val inFlightRequests = new Semaphore(maxInFlightRequests)
private val stateChangeHandlers = new ConcurrentHashMap[String, StateChangeHandler]().asScala
private[zookeeper] val expiryScheduler = new KafkaScheduler(threads = 1, "zk-session-expiry-handler")
private val metricNames = Set[String]()
// The state map has to be created before creating ZooKeeper since it's needed in the ZooKeeper callback.
private val stateToMeterMap = {
import KeeperState._
val stateToEventTypeMap = Map(
Disconnected -> "Disconnects",
SyncConnected -> "SyncConnects",
AuthFailed -> "AuthFailures",
ConnectedReadOnly -> "ReadOnlyConnects",
SaslAuthenticated -> "SaslAuthentications",
Expired -> "Expires"
)
stateToEventTypeMap.map { case (state, eventType) =>
val name = s"ZooKeeper${eventType}PerSec"
metricNames += name
state -> newMeter(name, eventType.toLowerCase(Locale.ROOT), TimeUnit.SECONDS)
}
}
info(s"Initializing a new session to $connectString.")
// Fail-fast if there's an error during construction (so don't call initialize, which retries forever)
@volatile private var zooKeeper = new ZooKeeper(connectString, sessionTimeoutMs, ZooKeeperClientWatcher)
newGauge("SessionState", new Gauge[String] {
override def value: String = Option(connectionState.toString).getOrElse("DISCONNECTED")
})
metricNames += "SessionState"
expiryScheduler.startup()
try waitUntilConnected(connectionTimeoutMs, TimeUnit.MILLISECONDS)
catch {
case e: Throwable =>
close()
throw e
}
override def metricName(name: String, metricTags: scala.collection.Map[String, String]): MetricName = {
explicitMetricName(metricGroup, metricType, name, metricTags)
}
/**
* Return the state of the ZooKeeper connection.
*/
def connectionState: States = zooKeeper.getState
/**
* Send a request and wait for its response. See handle(Seq[AsyncRequest]) for details.
*
* @param request a single request to send and wait on.
* @return an instance of the response with the specific type (e.g. CreateRequest -> CreateResponse).
*/
def handleRequest[Req <: AsyncRequest](request: Req): Req#Response = {
handleRequests(Seq(request)).head
}
/**
* Send a pipelined sequence of requests and wait for all of their responses.
*
* The watch flag on each outgoing request will be set if we've already registered a handler for the
* path associated with the request.
*
* @param requests a sequence of requests to send and wait on.
* @return the responses for the requests. If all requests have the same type, the responses will have the respective
* response type (e.g. Seq[CreateRequest] -> Seq[CreateResponse]). Otherwise, the most specific common supertype
* will be used (e.g. Seq[AsyncRequest] -> Seq[AsyncResponse]).
*/
def handleRequests[Req <: AsyncRequest](requests: Seq[Req]): Seq[Req#Response] = {
if (requests.isEmpty)
Seq.empty
else {
val countDownLatch = new CountDownLatch(requests.size)
val responseQueue = new ArrayBlockingQueue[Req#Response](requests.size)
requests.foreach { request =>
inFlightRequests.acquire()
try {
inReadLock(initializationLock) {
send(request) { response =>
responseQueue.add(response)
inFlightRequests.release()
countDownLatch.countDown()
}
}
} catch {
case e: Throwable =>
inFlightRequests.release()
throw e
}
}
countDownLatch.await()
responseQueue.asScala.toBuffer
}
}
// Visibility to override for testing
private[zookeeper] def send[Req <: AsyncRequest](request: Req)(processResponse: Req#Response => Unit): Unit = {
// Safe to cast as we always create a response of the right type
def callback(response: AsyncResponse): Unit = processResponse(response.asInstanceOf[Req#Response])
def responseMetadata(sendTimeMs: Long) = new ResponseMetadata(sendTimeMs, receivedTimeMs = time.hiResClockMs())
val sendTimeMs = time.hiResClockMs()
request match {
case ExistsRequest(path, ctx) =>
zooKeeper.exists(path, shouldWatch(request), new StatCallback {
override def processResult(rc: Int, path: String, ctx: Any, stat: Stat): Unit =
callback(ExistsResponse(Code.get(rc), path, Option(ctx), stat, responseMetadata(sendTimeMs)))
}, ctx.orNull)
case GetDataRequest(path, ctx) =>
zooKeeper.getData(path, shouldWatch(request), new DataCallback {
override def processResult(rc: Int, path: String, ctx: Any, data: Array[Byte], stat: Stat): Unit =
callback(GetDataResponse(Code.get(rc), path, Option(ctx), data, stat, responseMetadata(sendTimeMs)))
}, ctx.orNull)
case GetChildrenRequest(path, ctx) =>
zooKeeper.getChildren(path, shouldWatch(request), new Children2Callback {
override def processResult(rc: Int, path: String, ctx: Any, children: java.util.List[String], stat: Stat): Unit =
callback(GetChildrenResponse(Code.get(rc), path, Option(ctx),
Option(children).map(_.asScala).getOrElse(Seq.empty), stat, responseMetadata(sendTimeMs)))
}, ctx.orNull)
case CreateRequest(path, data, acl, createMode, ctx) =>
zooKeeper.create(path, data, acl.asJava, createMode, new StringCallback {
override def processResult(rc: Int, path: String, ctx: Any, name: String): Unit =
callback(CreateResponse(Code.get(rc), path, Option(ctx), name, responseMetadata(sendTimeMs)))
}, ctx.orNull)
case SetDataRequest(path, data, version, ctx) =>
zooKeeper.setData(path, data, version, new StatCallback {
override def processResult(rc: Int, path: String, ctx: Any, stat: Stat): Unit =
callback(SetDataResponse(Code.get(rc), path, Option(ctx), stat, responseMetadata(sendTimeMs)))
}, ctx.orNull)
case DeleteRequest(path, version, ctx) =>
zooKeeper.delete(path, version, new VoidCallback {
override def processResult(rc: Int, path: String, ctx: Any): Unit =
callback(DeleteResponse(Code.get(rc), path, Option(ctx), responseMetadata(sendTimeMs)))
}, ctx.orNull)
case GetAclRequest(path, ctx) =>
zooKeeper.getACL(path, null, new ACLCallback {
override def processResult(rc: Int, path: String, ctx: Any, acl: java.util.List[ACL], stat: Stat): Unit = {
callback(GetAclResponse(Code.get(rc), path, Option(ctx), Option(acl).map(_.asScala).getOrElse(Seq.empty),
stat, responseMetadata(sendTimeMs)))
}}, ctx.orNull)
case SetAclRequest(path, acl, version, ctx) =>
zooKeeper.setACL(path, acl.asJava, version, new StatCallback {
override def processResult(rc: Int, path: String, ctx: Any, stat: Stat): Unit =
callback(SetAclResponse(Code.get(rc), path, Option(ctx), stat, responseMetadata(sendTimeMs)))
}, ctx.orNull)
case MultiRequest(zkOps, ctx) =>
zooKeeper.multi(zkOps.map(_.toZookeeperOp).asJava, new MultiCallback {
override def processResult(rc: Int, path: String, ctx: Any, opResults: util.List[OpResult]): Unit = {
callback(MultiResponse(Code.get(rc), path, Option(ctx),
if (opResults == null)
null
else
zkOps.zip(opResults.asScala) map { case (zkOp, result) => ZkOpResult(zkOp, result) },
responseMetadata(sendTimeMs)))
}
}, ctx.orNull)
}
}
/**
   * Wait indefinitely until the underlying ZooKeeper client reaches the CONNECTED state.
* @throws ZooKeeperClientAuthFailedException if the authentication failed either before or while waiting for connection.
* @throws ZooKeeperClientExpiredException if the session expired either before or while waiting for connection.
*/
def waitUntilConnected(): Unit = inLock(isConnectedOrExpiredLock) {
waitUntilConnected(Long.MaxValue, TimeUnit.MILLISECONDS)
}
private def waitUntilConnected(timeout: Long, timeUnit: TimeUnit): Unit = {
info("Waiting until connected.")
var nanos = timeUnit.toNanos(timeout)
inLock(isConnectedOrExpiredLock) {
var state = connectionState
while (!state.isConnected && state.isAlive) {
if (nanos <= 0) {
throw new ZooKeeperClientTimeoutException(s"Timed out waiting for connection while in state: $state")
}
nanos = isConnectedOrExpiredCondition.awaitNanos(nanos)
state = connectionState
}
if (state == States.AUTH_FAILED) {
throw new ZooKeeperClientAuthFailedException("Auth failed either before or while waiting for connection")
} else if (state == States.CLOSED) {
throw new ZooKeeperClientExpiredException("Session expired either before or while waiting for connection")
}
}
info("Connected.")
}
// If this method is changed, the documentation for registerZNodeChangeHandler and/or registerZNodeChildChangeHandler
// may need to be updated.
private def shouldWatch(request: AsyncRequest): Boolean = request match {
case _: GetChildrenRequest => zNodeChildChangeHandlers.contains(request.path)
case _: ExistsRequest | _: GetDataRequest => zNodeChangeHandlers.contains(request.path)
case _ => throw new IllegalArgumentException(s"Request $request is not watchable")
}
/**
* Register the handler to ZooKeeperClient. This is just a local operation. This does not actually register a watcher.
*
* The watcher is only registered once the user calls handle(AsyncRequest) or handle(Seq[AsyncRequest])
* with either a GetDataRequest or ExistsRequest.
*
* NOTE: zookeeper only allows registration to a nonexistent znode with ExistsRequest.
*
* @param zNodeChangeHandler the handler to register
*/
def registerZNodeChangeHandler(zNodeChangeHandler: ZNodeChangeHandler): Unit = {
zNodeChangeHandlers.put(zNodeChangeHandler.path, zNodeChangeHandler)
}
/**
* Unregister the handler from ZooKeeperClient. This is just a local operation.
* @param path the path of the handler to unregister
*/
def unregisterZNodeChangeHandler(path: String): Unit = {
zNodeChangeHandlers.remove(path)
}
/**
* Register the handler to ZooKeeperClient. This is just a local operation. This does not actually register a watcher.
*
* The watcher is only registered once the user calls handle(AsyncRequest) or handle(Seq[AsyncRequest]) with a GetChildrenRequest.
*
* @param zNodeChildChangeHandler the handler to register
*/
def registerZNodeChildChangeHandler(zNodeChildChangeHandler: ZNodeChildChangeHandler): Unit = {
zNodeChildChangeHandlers.put(zNodeChildChangeHandler.path, zNodeChildChangeHandler)
}
/**
* Unregister the handler from ZooKeeperClient. This is just a local operation.
* @param path the path of the handler to unregister
*/
def unregisterZNodeChildChangeHandler(path: String): Unit = {
zNodeChildChangeHandlers.remove(path)
}
  /**
   * Register a state change handler. This is just a local operation.
   * @param stateChangeHandler the handler to register
   */
def registerStateChangeHandler(stateChangeHandler: StateChangeHandler): Unit = inReadLock(initializationLock) {
if (stateChangeHandler != null)
stateChangeHandlers.put(stateChangeHandler.name, stateChangeHandler)
}
  /**
   * Unregister a state change handler. This is just a local operation.
   * @param name the name of the handler to unregister
   */
def unregisterStateChangeHandler(name: String): Unit = inReadLock(initializationLock) {
stateChangeHandlers.remove(name)
}
def close(): Unit = {
info("Closing.")
// Shutdown scheduler outside of lock to avoid deadlock if scheduler
// is waiting for lock to process session expiry. Close expiry thread
// first to ensure that new clients are not created during close().
expiryScheduler.shutdown()
inWriteLock(initializationLock) {
zNodeChangeHandlers.clear()
zNodeChildChangeHandlers.clear()
stateChangeHandlers.clear()
zooKeeper.close()
metricNames.foreach(removeMetric(_))
}
info("Closed.")
}
def sessionId: Long = inReadLock(initializationLock) {
zooKeeper.getSessionId
}
// Only for testing
private[kafka] def currentZooKeeper: ZooKeeper = inReadLock(initializationLock) {
zooKeeper
}
private def reinitialize(): Unit = {
// Initialization callbacks are invoked outside of the lock to avoid deadlock potential since their completion
// may require additional Zookeeper requests, which will block to acquire the initialization lock
stateChangeHandlers.values.foreach(callBeforeInitializingSession _)
inWriteLock(initializationLock) {
if (!connectionState.isAlive) {
zooKeeper.close()
info(s"Initializing a new session to $connectString.")
// retry forever until ZooKeeper can be instantiated
var connected = false
while (!connected) {
try {
zooKeeper = new ZooKeeper(connectString, sessionTimeoutMs, ZooKeeperClientWatcher)
connected = true
} catch {
case e: Exception =>
info("Error when recreating ZooKeeper, retrying after a short sleep", e)
Thread.sleep(1000)
}
}
}
}
stateChangeHandlers.values.foreach(callAfterInitializingSession _)
}
/**
* Close the zookeeper client to force session reinitialization. This is visible for testing only.
*/
private[zookeeper] def forceReinitialize(): Unit = {
zooKeeper.close()
reinitialize()
}
private def callBeforeInitializingSession(handler: StateChangeHandler): Unit = {
try {
handler.beforeInitializingSession()
} catch {
case t: Throwable =>
error(s"Uncaught error in handler ${handler.name}", t)
}
}
private def callAfterInitializingSession(handler: StateChangeHandler): Unit = {
try {
handler.afterInitializingSession()
} catch {
case t: Throwable =>
error(s"Uncaught error in handler ${handler.name}", t)
}
}
// Visibility for testing
private[zookeeper] def scheduleSessionExpiryHandler(): Unit = {
expiryScheduler.scheduleOnce("zk-session-expired", () => {
info("Session expired.")
reinitialize()
})
}
// package level visibility for testing only
private[zookeeper] object ZooKeeperClientWatcher extends Watcher {
override def process(event: WatchedEvent): Unit = {
debug(s"Received event: $event")
Option(event.getPath) match {
case None =>
val state = event.getState
stateToMeterMap.get(state).foreach(_.mark())
inLock(isConnectedOrExpiredLock) {
isConnectedOrExpiredCondition.signalAll()
}
if (state == KeeperState.AuthFailed) {
error("Auth failed.")
stateChangeHandlers.values.foreach(_.onAuthFailure())
} else if (state == KeeperState.Expired) {
scheduleSessionExpiryHandler()
}
case Some(path) =>
(event.getType: @unchecked) match {
case EventType.NodeChildrenChanged => zNodeChildChangeHandlers.get(path).foreach(_.handleChildChange())
case EventType.NodeCreated => zNodeChangeHandlers.get(path).foreach(_.handleCreation())
case EventType.NodeDeleted => zNodeChangeHandlers.get(path).foreach(_.handleDeletion())
case EventType.NodeDataChanged => zNodeChangeHandlers.get(path).foreach(_.handleDataChange())
}
}
}
}
}
trait StateChangeHandler {
val name: String
def beforeInitializingSession(): Unit = {}
def afterInitializingSession(): Unit = {}
def onAuthFailure(): Unit = {}
}
trait ZNodeChangeHandler {
val path: String
def handleCreation(): Unit = {}
def handleDeletion(): Unit = {}
def handleDataChange(): Unit = {}
}
trait ZNodeChildChangeHandler {
val path: String
def handleChildChange(): Unit = {}
}
// Thin wrapper for zookeeper.Op
sealed trait ZkOp {
def toZookeeperOp: Op
}
case class CreateOp(path: String, data: Array[Byte], acl: Seq[ACL], createMode: CreateMode) extends ZkOp {
override def toZookeeperOp: Op = Op.create(path, data, acl.asJava, createMode)
}
case class DeleteOp(path: String, version: Int) extends ZkOp {
override def toZookeeperOp: Op = Op.delete(path, version)
}
case class SetDataOp(path: String, data: Array[Byte], version: Int) extends ZkOp {
override def toZookeeperOp: Op = Op.setData(path, data, version)
}
case class CheckOp(path: String, version: Int) extends ZkOp {
override def toZookeeperOp: Op = Op.check(path, version)
}
case class ZkOpResult(zkOp: ZkOp, rawOpResult: OpResult)
sealed trait AsyncRequest {
/**
* This type member allows us to define methods that take requests and return responses with the correct types.
* See ``ZooKeeperClient.handleRequests`` for example.
*/
type Response <: AsyncResponse
def path: String
def ctx: Option[Any]
}
case class CreateRequest(path: String, data: Array[Byte], acl: Seq[ACL], createMode: CreateMode,
ctx: Option[Any] = None) extends AsyncRequest {
type Response = CreateResponse
}
case class DeleteRequest(path: String, version: Int, ctx: Option[Any] = None) extends AsyncRequest {
type Response = DeleteResponse
}
case class ExistsRequest(path: String, ctx: Option[Any] = None) extends AsyncRequest {
type Response = ExistsResponse
}
case class GetDataRequest(path: String, ctx: Option[Any] = None) extends AsyncRequest {
type Response = GetDataResponse
}
case class SetDataRequest(path: String, data: Array[Byte], version: Int, ctx: Option[Any] = None) extends AsyncRequest {
type Response = SetDataResponse
}
case class GetAclRequest(path: String, ctx: Option[Any] = None) extends AsyncRequest {
type Response = GetAclResponse
}
case class SetAclRequest(path: String, acl: Seq[ACL], version: Int, ctx: Option[Any] = None) extends AsyncRequest {
type Response = SetAclResponse
}
case class GetChildrenRequest(path: String, ctx: Option[Any] = None) extends AsyncRequest {
type Response = GetChildrenResponse
}
case class MultiRequest(zkOps: Seq[ZkOp], ctx: Option[Any] = None) extends AsyncRequest {
type Response = MultiResponse
override def path: String = null
}
sealed abstract class AsyncResponse {
def resultCode: Code
def path: String
def ctx: Option[Any]
/** Return None if the result code is OK and KeeperException otherwise. */
def resultException: Option[KeeperException] =
if (resultCode == Code.OK) None else Some(KeeperException.create(resultCode, path))
/**
* Throw KeeperException if the result code is not OK.
*/
def maybeThrow(): Unit = {
if (resultCode != Code.OK)
throw KeeperException.create(resultCode, path)
}
def metadata: ResponseMetadata
}
case class ResponseMetadata(sendTimeMs: Long, receivedTimeMs: Long) {
def responseTimeMs: Long = receivedTimeMs - sendTimeMs
}
case class CreateResponse(resultCode: Code, path: String, ctx: Option[Any], name: String,
metadata: ResponseMetadata) extends AsyncResponse
case class DeleteResponse(resultCode: Code, path: String, ctx: Option[Any],
metadata: ResponseMetadata) extends AsyncResponse
case class ExistsResponse(resultCode: Code, path: String, ctx: Option[Any], stat: Stat,
metadata: ResponseMetadata) extends AsyncResponse
case class GetDataResponse(resultCode: Code, path: String, ctx: Option[Any], data: Array[Byte], stat: Stat,
metadata: ResponseMetadata) extends AsyncResponse
case class SetDataResponse(resultCode: Code, path: String, ctx: Option[Any], stat: Stat,
metadata: ResponseMetadata) extends AsyncResponse
case class GetAclResponse(resultCode: Code, path: String, ctx: Option[Any], acl: Seq[ACL], stat: Stat,
metadata: ResponseMetadata) extends AsyncResponse
case class SetAclResponse(resultCode: Code, path: String, ctx: Option[Any], stat: Stat,
metadata: ResponseMetadata) extends AsyncResponse
case class GetChildrenResponse(resultCode: Code, path: String, ctx: Option[Any], children: Seq[String], stat: Stat,
metadata: ResponseMetadata) extends AsyncResponse
case class MultiResponse(resultCode: Code, path: String, ctx: Option[Any], zkOpResults: Seq[ZkOpResult],
metadata: ResponseMetadata) extends AsyncResponse
class ZooKeeperClientException(message: String) extends RuntimeException(message)
class ZooKeeperClientExpiredException(message: String) extends ZooKeeperClientException(message)
class ZooKeeperClientAuthFailedException(message: String) extends ZooKeeperClientException(message)
class ZooKeeperClientTimeoutException(message: String) extends ZooKeeperClientException(message)
|
KevinLiLu/kafka
|
core/src/main/scala/kafka/zookeeper/ZooKeeperClient.scala
|
Scala
|
apache-2.0
| 24,229
|
package org.jetbrains.plugins.scala.lang.typeInference
import org.jetbrains.plugins.scala.{LatestScalaVersions, ScalaVersion}
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
import org.jetbrains.plugins.scala.settings.ScalaProjectSettings
class Scala3ExtensionsTest extends ScalaLightCodeInsightFixtureTestAdapter {
override def supportedIn(version: ScalaVersion): Boolean =
version >= LatestScalaVersions.Scala_3_0
override def setUp(): Unit = {
super.setUp()
ScalaProjectSettings.getInstance(getProject).setCompilerHighlightingScala3(false)
}
def testSimpleExtension(): Unit = checkTextHasNoErrors(
"""
|object A {
| case class Circle(x: Double, y: Double, radius: Double)
|
| extension (c: Circle)
| def circumference: Double = c.radius * math.Pi * 2
|
| val c: Circle = ???
| c.circumference
|}
|""".stripMargin
)
def testSimpleDesugaredInvocation(): Unit = checkTextHasNoErrors(
"""
|object A {
| case class Circle(x: Double, y: Double, radius: Double)
|
| extension (c: Circle)
| def circumference: Double = c.radius * math.Pi * 2
|
| val c: Circle = ???
| circumference(c)
|}
|""".stripMargin
)
//@TODO: right-associative?
def testOperators(): Unit = checkTextHasNoErrors(
"""
|object A {
| extension (x: String)
| def < (y: String): Boolean = true
|
| "123" < "4235"
|}
|""".stripMargin
)
def testGenericExtension(): Unit = checkTextHasNoErrors(
"""
|object A {
| extension [T](xs: List[T])
| def second: T = ???
|
| val xs: List[Int] = ???
| val x: Int = xs.second
|}
|""".stripMargin
)
def testCollectiveExtension(): Unit = checkTextHasNoErrors(
"""
|object A {
| extension (ss: Seq[String])
| def longestStrings: Seq[String] = ???
| def longestString: String = ???
|
| val xs: Seq[String] = ???
| val longest: Seq[String] = xs.longestStrings
| val singleLongest: String = xs.longestString
|}
|""".stripMargin
)
def testTwoTypeArgumentSectionsOnInvocation(): Unit = checkTextHasNoErrors(
"""
|object A {
| extension [A](x: Int) { def method[B](y: Int) = () }
| method[Int](1)[Long](2)
|}
|""".stripMargin
)
def testPriorityOfVisibleExtensionOverVisibleConversion(): Unit = checkTextHasNoErrors(
"""
|object A {
| extension (x: Int) { def foo: Int = 123 }
| implicit class IntOps(val x: Int) { def foo: Int = 123 }
|
| 123.foo
|}
|""".stripMargin
)
def testExtensionFromGivenInLexicalScope(): Unit = checkTextHasNoErrors(
"""
|object A {
| trait F
| given F with {
| extension (x: Int) { def foo: Int = 123 }
| }
|
| 123.foo
|}
|""".stripMargin
)
def testExtensionFromImplicitScope(): Unit = checkTextHasNoErrors(
"""
|trait List[T]
|object List {
| extension [T, U](xs: List[T])(using t: Ordering[U])
| def foo(t: U): Int = ???
|}
|
|object A {
| given Ordering[String] = ???
| val xs: List[Int] = ???
| val y: Int = xs.foo("123")
|}
|""".stripMargin
)
def testExtensionFromGivenInImplicitScope(): Unit = checkTextHasNoErrors(
"""
|trait List[T]
|object List {
| given Ordering[List[Int]] with {
| def compare(xs: List[Int], ys: List[Int]): Int = 1
|
| extension [T, U](xs: List[T])(using t: Ordering[U])
| def foo(t: U): U = ???
| }
|}
|
|object A {
| trait F
| given Ordering[F] = ???
| val xs: List[Int] = ???
| val f: F = ???
| val y: F = xs.foo(f)
|}
|""".stripMargin
)
//actually compiles, but probably should not
def testAmbiguousExtensionAndConversion(): Unit = checkHasErrorAroundCaret(
s"""
|object A {
| trait F
| given F with {
| extension (x: Int) { def foo: Int = 123 }
| }
|
| implicit class IntOps(val x: Int) { def foo: Int = 123 }
| 123.fo${CARET}o
|}
|""".stripMargin
)
def testAmbiguousExtensionAndConversion2(): Unit = checkHasErrorAroundCaret(
s"""
|object A {
| trait F
| given F with {
| extension (x: Int) { def foo: Int = 123 }
| }
|
| class IntOps(val x: Int) { def foo: Int = 123 }
|
| given Conversion[Int, IntOps] = new IntOps(_)
|
| 123.fo${CARET}o
|}
|""".stripMargin
)
def testAmbiguousExtensionAndConversionImplicitScope(): Unit = checkHasErrorAroundCaret(
s"""
|trait List[T]
|object List {
| extension [T](xs: List[T])
| def foo(u: String): Int = ???
|
| implicit class ListOps[T](xs: List[T]) {
| def foo(t: String): Int = 123
| }
|}
|
|object A {
| val xs: List[Int] = ???
| xs.fo${CARET}o("123")
|}
|""".stripMargin
)
//
// def testAmbiguousExtensionsWithExpectedType(): Unit = {
// checkTextHasNoErrors(
// """
// |object B:
// | trait F
// | given F with {
// | extension (x: Int) { def foo: Int = 123 }
// | }
// |
// | trait G
// | given G with {
// | extension (x: Int) { def foo: String = "123" }
// | }
// |
// | val s: Int = 123.foo
// |""".stripMargin
// )
// }
//
// def testAmbiguousImplicitClassesWithExpectedType(): Unit = {
// checkTextHasNoErrors(
// """
// |object B {
// | implicit class IntOps1(val x: Int) { def fooz: Int = 123 }
// | implicit class IntOps2(val x: Int) { def fooz: String = "123" }
// |
// | val s: String = 123.fooz
// |}
// |""".stripMargin
// )
// }
def testAmbiguousExtensionWithExpectedTypeAndTypeArgs(): Unit = checkHasErrorAroundCaret(
s"""
|object B {
| trait F
| given F with {
| extension (x: Int) { def foo[X]: X = ??? }
| }
|
| trait G
| given G with {
| extension (x: Int) { def foo[Y]: String = "123" }
| }
|
| val s: Int = 123.f${CARET}oo[Int]
|}""".stripMargin
)
def testAmbiguousExtensionWithExpectedTypeAndArgs(): Unit = checkHasErrorAroundCaret(
s"""
|object B {
| trait F
| given F with {
| extension (x: Int) { def foo(i: Int): Int = ??? }
| }
|
| trait G
| given G with {
| extension (x: Int) { def foo(i: Int): String = "123" }
| }
|
| val s: Int = 123.fo${CARET}o(1)
|}""".stripMargin
)
def testResolveFromInsideExtension(): Unit = checkTextHasNoErrors(
"""
|object A {
| extension (s: String)
| def position(ch: Char, n: Int): Int =
| if n < s.length && s(n) != ch then position(ch, n + 1)
| else n
|
| extension [T](x: T)
| def f: Int = g
| def g: Int = 123
|}
|""".stripMargin
)
def testExtensionFromContextBound(): Unit = checkTextHasNoErrors(
"""
|object A {
| trait Functor[F[_]] {
| extension [A, B](fa: F[A]) def map(f: A => B): F[B]
| }
|
| def foo[F[_]: Functor](fi: F[Int], toS: Int => String): F[String] = fi.map(toS)
|}
|""".stripMargin
)
def testExtensionFromTypeClassInstance(): Unit = checkTextHasNoErrors(
"""
|trait Ord[A] {
| extension (xs: A) def foo: Int = 123
|}
|
|trait List[T]
|object List {
| implicit def ordList(implicit ord: Ord[Int]): Ord[List[Int]] = new Ord[List[Int]] {}
|}
|
|object A {
| implicit val ordInt: Ord[Int] = new Ord[Int] {}
|
| val xs: List[Int] = new List[Int] {}
| println(xs.foo)
|}
|""".stripMargin
)
def testExtensionFromTypeClassInstanceNeg(): Unit = checkHasErrorAroundCaret(
s"""
|trait Ord[A] {
| extension (xs: A) def foo: Int = 123
|}
|
|trait List[T]
|object List {
| implicit def ordList(implicit ord: Ord[Int]): Ord[List[Int]] = new Ord[List[Int]] {}
|}
|
|object A {
| val xs: List[Int] = new List[Int] {}
| println(xs.f${CARET}oo)
|}
|""".stripMargin
)
def testExtensionFromGiven(): Unit = checkTextHasNoErrors(
"""
|object A {
| trait Monad[F[_]] {
| extension[A,B](fa: F[A])
| def flatMap(f: A => F[B]):F[B]
| }
|
| given optionMonad: Monad[Option] with
| def pure[A](a: A) = Some(a)
| extension[A,B](fa: Option[A])
| def flatMap(f: A => Option[B]) = {
| fa match {
| case Some(a) =>
| f(a)
| case None =>
| None
| }
| }
|
| Option(123).flatMap(x => Option(x + 1))
|}
|""".stripMargin
)
}
|
JetBrains/intellij-scala
|
scala/scala-impl/test/org/jetbrains/plugins/scala/lang/typeInference/Scala3ExtensionsTest.scala
|
Scala
|
apache-2.0
| 9,346
|
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testing.adapter
import java.io.{File, IOException}
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file._
import java.nio.file.attribute.BasicFileAttributes
import sbt.testing.{Framework, TaskDef}
import org.scalajs.jsenv.{Input, UnsupportedInputException}
import org.scalajs.jsenv.JSUtils.escapeJS
import org.scalajs.testing.common._
/** Template for the HTML runner. */
object HTMLRunnerBuilder {
@deprecated("Use write instead", "1.2.0")
def writeToFile(output: File, title: String, input: Seq[Input],
frameworkImplClassNames: List[List[String]],
taskDefs: List[TaskDef]): Unit = {
val outputPath = output.toPath()
val artifactsDir =
Files.createTempDirectory(outputPath.getParent(), ".html-artifacts")
sys.addShutdownHook {
Files.walkFileTree(artifactsDir, new SimpleFileVisitor[Path] {
override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
Files.delete(file)
FileVisitResult.CONTINUE
}
override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = {
Files.delete(dir)
FileVisitResult.CONTINUE
}
})
}
write(outputPath, artifactsDir, title, input, frameworkImplClassNames, taskDefs)
}
def write(output: Path, artifactsDir: Path, title: String, input: Seq[Input],
frameworkImplClassNames: List[List[String]],
taskDefs: List[TaskDef]): Unit = {
val absoluteArtifacts = artifactsDir.toAbsolutePath()
val outputDir = output.toAbsolutePath().normalize().getParent()
try {
outputDir.relativize(absoluteArtifacts)
} catch {
case e: IllegalArgumentException =>
throw new IllegalArgumentException(
"cannot relativize `artifactsDir` with respect to `output`", e)
}
def artifactPath(name: String): (String, Path) = {
val path = absoluteArtifacts.resolve(name)
val relPath = outputDir.relativize(path)
(joinRelPath(relPath), path)
}
def scriptTag(index: Int, tpe: String, content: Path) = {
val src = {
try {
joinRelPath(outputDir.relativize(content))
} catch {
case _: IllegalArgumentException =>
// Cannot relativize this content.
val (src, target) = artifactPath(f"input$index-${content.getFileName()}")
Files.copy(content, target, StandardCopyOption.REPLACE_EXISTING)
src
}
}
s"""<script defer type="$tpe" src="${htmlEscaped(src)}"></script>"""
}
val loadJSTags = input.zipWithIndex.map {
case (Input.Script(script), i) => scriptTag(i, "text/javascript", script)
case (Input.ESModule(module), i) => scriptTag(i, "module", module)
case _ =>
throw new UnsupportedInputException(
s"Unsupported input for the generation of an HTML runner: $input")
}
val bridgeModeStr = {
val tests = new IsolatedTestSet(frameworkImplClassNames, taskDefs)
val mode = TestBridgeMode.HTMLRunner(tests)
Serializer.serialize[TestBridgeMode](mode)
}
val cssHref = {
val name = "test-runner.css"
val (href, target) = artifactPath(name)
val in = getClass.getResourceAsStream(name)
try Files.copy(in, target, StandardCopyOption.REPLACE_EXISTING)
finally in.close()
href
}
val htmlContent = s"""
<!DOCTYPE html>
<html>
<head>
<title>${htmlEscaped(title)}</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<link rel="stylesheet" type="text/css" href="${htmlEscaped(cssHref)}" />
<script type="text/javascript">
var __ScalaJSTestBridgeMode = "${escapeJS(bridgeModeStr)}";
</script>
        ${loadJSTags.mkString("\n")}
</head>
<body></body>
</html>
"""
Files.write(output, java.util.Arrays.asList(htmlContent), UTF_8)
}
  private def htmlEscaped(str: String): String = str.flatMap {
    case '<' => "&lt;"
    case '>' => "&gt;"
    case '"' => "&quot;"
    case '&' => "&amp;"
    case c => c.toString()
  }
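  // For example (illustrative): htmlEscaped("""a < "b" & c""") returns
  // "a &lt; &quot;b&quot; &amp; c".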
/* Necessary on Windows to not have backslashes in URIs.
*
* <parts>.map(_.toString()).mkString("/")
*/
private def joinRelPath(p: Path): String = {
require(p.getRoot() == null)
val partsIter = p.iterator()
val result = new StringBuilder()
while (partsIter.hasNext()) {
result.append(partsIter.next())
if (partsIter.hasNext())
result.append('/')
}
result.toString()
}
}
|
scala-js/scala-js
|
test-adapter/src/main/scala/org/scalajs/testing/adapter/HTMLRunnerBuilder.scala
|
Scala
|
apache-2.0
| 4,886
|
package org.awong.beyond
import org.awong.AbstractWordSpec
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class BeyondSpec extends AbstractWordSpec {
"This" should {
"load 1423 pairs" in {
BeyondData.rs1423.nPairs should be (1423)
}
}
"Lamport clock" should {
"Increment" in {
var clock = LamportClock()
clock.value should be (1)
clock = clock.increment
clock.value should be (2)
}
"Merge" in {
var thisClock = LamportClock(10)
val thatClock = LamportClock(9)
thisClock = thisClock.merge(thatClock)
thisClock.value should be (11)
thisClock = thisClock.merge(LamportClock(20))
thisClock.value should be (21)
}
}
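  // A minimal sketch (assumption: the real LamportClock lives elsewhere in this
  // repo) of the behaviour these assertions pin down; merge takes the max of
  // both clocks plus one tick:
  //
  //   case class LamportClock(value: Int = 1) {
  //     def increment: LamportClock = copy(value = value + 1)
  //     def merge(that: LamportClock): LamportClock =
  //       copy(value = math.max(value, that.value) + 1)
  //   }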
def incrementNode[N](clock: VectorClock[N], node: N, times: Int): VectorClock[N] = {
var newClock = clock
(1 to times).foreach { _ =>
newClock = newClock.increment(node)
}
newClock
}
"Vector clock" should {
"Increment" in {
val nodes = 1::2::Nil
var clock = VectorClock(nodes)
clock.value(1) should be (Some(1))
clock = clock.increment(1)
clock.value(1) should be (Some(2))
}
"Merge" in {
val nodes = 1::2::Nil
var thisClock = VectorClock(1::2::Nil)
thisClock = thisClock.increment(1)
thisClock = incrementNode(thisClock, 2, 3)
thisClock.value(1) should be (Some(2))
thisClock.value(2) should be (Some(4))
var thatClock = VectorClock(2::3::Nil)
thatClock = incrementNode(thatClock, 2, 2)
thatClock = incrementNode(thatClock, 3, 2)
thatClock.value(2) should be (Some(3))
thatClock.value(3) should be (Some(3))
thisClock = thisClock.merge(thatClock)
thisClock.value(1) should be (Some(2))
thisClock.value(2) should be (Some(4))
thisClock.value(3) should be (Some(3))
thisClock = thisClock.merge(VectorClock(4::Nil))
thisClock.value(1) should be (Some(2))
thisClock.value(2) should be (Some(4))
thisClock.value(3) should be (Some(3))
thisClock.value(4) should be (Some(1))
}
}
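  // A matching sketch for VectorClock (again an assumption about the real class):
  // each known node starts at 1, increment bumps one node, and merge is a
  // pointwise max over the union of nodes:
  //
  //   case class VectorClock[N](clocks: Map[N, Int]) {
  //     def value(node: N): Option[Int] = clocks.get(node)
  //     def increment(node: N): VectorClock[N] =
  //       copy(clocks.updated(node, clocks.getOrElse(node, 0) + 1))
  //     def merge(that: VectorClock[N]): VectorClock[N] =
  //       VectorClock((clocks.keySet ++ that.clocks.keySet).map { n =>
  //         n -> math.max(clocks.getOrElse(n, 0), that.clocks.getOrElse(n, 0))
  //       }.toMap)
  //   }
  //   object VectorClock {
  //     def apply[N](nodes: Seq[N]): VectorClock[N] = VectorClock(nodes.map(_ -> 1).toMap)
  //   }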
}
|
alanktwong/algorithms-scala
|
beyond/src/test/scala/org/awong/beyond/BeyondSpec.scala
|
Scala
|
mit
| 2,004
|
package ml.dmlc.xgboost4j.scala.spark.mleap
import java.nio.file.Files
import ml.combust.bundle.BundleContext
import ml.combust.bundle.dsl.{Model, NodeShape, Value}
import ml.combust.bundle.op.OpModel
import ml.dmlc.xgboost4j.scala.spark.XGBoostRegressionModel
import ml.dmlc.xgboost4j.scala.{XGBoost => SXGBoost}
import org.apache.spark.ml.bundle._
import org.apache.spark.ml.linalg.Vector
import resource.managed
/**
* Created by hollinwilkins on 9/16/17.
*/
class XGBoostRegressionModelOp extends SimpleSparkOp[XGBoostRegressionModel] {
/** Type class for the underlying model.
*/
override val Model: OpModel[SparkBundleContext, XGBoostRegressionModel] = new OpModel[SparkBundleContext, XGBoostRegressionModel] {
override val klazz: Class[XGBoostRegressionModel] = classOf[XGBoostRegressionModel]
override def opName: String = "xgboost.regression"
override def store(model: Model, obj: XGBoostRegressionModel)
(implicit context: BundleContext[SparkBundleContext]): Model = {
assert(context.context.dataset.isDefined, BundleHelper.sampleDataframeMessage(klazz))
Files.write(context.file("xgboost.model"), obj._booster.toByteArray)
val numFeatures = context.context.dataset.get.select(obj.getFeaturesCol).first.getAs[Vector](0).size
model.withValue("num_features", Value.int(numFeatures)).
withValue("tree_limit", Value.int(obj.getOrDefault(obj.treeLimit))).
withValue("missing", Value.float(obj.getOrDefault(obj.missing))).
withValue("infer_batch_size", Value.int(obj.getOrDefault(obj.inferBatchSize))).
withValue("use_external_memory", Value.boolean(obj.getOrDefault(obj.useExternalMemory))).
withValue("allow_non_zero_for_missing", Value.boolean(obj.getOrDefault(obj.allowNonZeroForMissing)))
}
override def load(model: Model)
(implicit context: BundleContext[SparkBundleContext]): XGBoostRegressionModel = {
val booster = (for(in <- managed(Files.newInputStream(context.file("xgboost.model")))) yield {
SXGBoost.loadModel(in)
}).tried.get
val xgb = new XGBoostRegressionModel("", booster).
setTreeLimit(model.value("tree_limit").getInt)
model.getValue("missing").map(o => xgb.setMissing(o.getFloat))
model.getValue("allow_non_zero_for_missing").map(o => xgb.setAllowNonZeroForMissing(o.getBoolean))
model.getValue("infer_batch_size").map(o => xgb.setInferBatchSize(o.getInt))
model.getValue("use_external_memory").map(o => xgb.set(xgb.useExternalMemory, o.getBoolean))
xgb
}
}
override def sparkLoad(uid: String,
shape: NodeShape,
model: XGBoostRegressionModel): XGBoostRegressionModel = {
val xgb = new XGBoostRegressionModel(uid, model._booster)
if(model.isSet(model.missing)) xgb.setMissing(model.getOrDefault(model.missing))
if(model.isSet(model.allowNonZeroForMissing)) xgb.setAllowNonZeroForMissing(model.getOrDefault(model.allowNonZeroForMissing))
if(model.isSet(model.inferBatchSize)) xgb.setInferBatchSize(model.getOrDefault(model.inferBatchSize))
if(model.isSet(model.treeLimit)) xgb.setTreeLimit(model.getOrDefault(model.treeLimit))
if(model.isSet(model.useExternalMemory)) xgb.set(xgb.useExternalMemory, model.getOrDefault(model.useExternalMemory))
xgb
}
override def sparkInputs(obj: XGBoostRegressionModel): Seq[ParamSpec] = {
Seq("features" -> obj.featuresCol)
}
override def sparkOutputs(obj: XGBoostRegressionModel): Seq[SimpleParamSpec] = {
Seq("prediction" -> obj.predictionCol,
"leaf_prediction" -> obj.leafPredictionCol,
"contrib_prediction" -> obj.contribPredictionCol)
}
}
|
combust/mleap
|
mleap-xgboost-spark/src/main/scala/ml/dmlc/xgboost4j/scala/spark/mleap/XGBoostRegressionModelOp.scala
|
Scala
|
apache-2.0
| 3,733
|
package fpoo
object Chapter01 {
def second[A](list: List[A]): A = list(1)
def third[A](list: List[A]): A = list(2)
def third2[A](list: List[A]): A = list.tail.tail.head
def addSquares[T](list: List[T])(implicit n: Numeric[T]): T = list.map( x => n.times(x, x) ).sum
def bizarreFactorial(n: Int): Int = n match {
case x if x < 0 => throw new IllegalArgumentException("Factorial only works for positive integers")
case 0 => 1
case _ => 1 to n product
}
implicit class Ops[A](val seq: Seq[A]) extends AnyVal {
def interleave(that: Seq[A]): Seq[A] = {
(seq, that).zipped flatMap { Seq(_, _) }
}
def prefixOf(that: Seq[A]): Boolean = {
(that take seq.length) == seq
}
def tails1: Seq[Seq[A]] = seq.tails.toSeq
def tails2: Seq[Seq[A]] = {
def tailn(s: Seq[A]): Seq[Seq[A]] = s match {
case Seq() => Seq(s)
case _ => s +: tailn(s.tail)
}
tailn(seq)
}
def tails3: Seq[Seq[A]] = {
val seqs = Seq.fill(seq.length + 1)(seq)
val nToDrop = 0 to seq.length
(seqs, nToDrop).zipped map (_ drop _)
}
}
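  // Illustrative REPL checks (not part of the exercises):
  //   Seq(1, 3, 5).interleave(Seq(2, 4, 6))  // Seq(1, 2, 3, 4, 5, 6)
  //   Seq(1, 2).prefixOf(Seq(1, 2, 3))       // true
  //   Seq(1, 2, 3).tails1                    // Seq(Seq(1, 2, 3), Seq(2, 3), Seq(3), Seq())
  //   bizarreFactorial(4)                    // 24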
}
|
bradleyscollins/fpoo-scala
|
src/main/scala/fpoo/Chapter01.scala
|
Scala
|
gpl-2.0
| 1,118
|
/*
Copyright 2015 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.serialization
import java.io.{InputStream, OutputStream}
import scala.util.{Failure, Success}
import scala.util.control.NonFatal
import JavaStreamEnrichments._
object StringOrderedSerialization {
final def binaryIntCompare(
leftSize: Int,
seekingLeft: InputStream,
rightSize: Int,
seekingRight: InputStream
): Int = {
/*
* This algorithm only works if count in {0, 1, 2, 3}. Since we only
* call it that way below it is safe.
*/
@inline
def compareBytes(count: Int): Int =
if ((count & 2) == 2) {
// there are 2 or 3 bytes to read
val cmp = Integer.compare(seekingLeft.readUnsignedShort, seekingRight.readUnsignedShort)
if (cmp != 0) cmp
else if (count == 3) Integer.compare(seekingLeft.readUnsignedByte, seekingRight.readUnsignedByte)
else 0
} else {
// there are 0 or 1 bytes to read
if (count == 0) 0
else Integer.compare(seekingLeft.readUnsignedByte, seekingRight.readUnsignedByte)
}
/**
* Now we start by comparing blocks of ints, then 0 - 3 bytes
*/
val toCheck = math.min(leftSize, rightSize)
val ints = toCheck / 4
var counter = ints
var ic = 0
while ((counter > 0) && (ic == 0)) {
// Unsigned compare of ints is cheaper than longs, because we can do it
// by upcasting to Long
ic = UnsignedComparisons.unsignedIntCompare(seekingLeft.readInt, seekingRight.readInt)
counter = counter - 1
}
if (ic != 0) ic
else {
val bc = compareBytes(toCheck - 4 * ints)
if (bc != 0) bc
else {
// the size is the fallback when the prefixes match:
Integer.compare(leftSize, rightSize)
}
}
}
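  // Worked example (illustrative): comparing the UTF-8 bytes of "abcde" (5 bytes)
  // with "abcdef" (6 bytes): toCheck = 5, so one 4-byte block ("abcd") is compared
  // unsigned, then compareBytes(1) compares 'e' with 'e'; the shared prefix ties,
  // so the fallback Integer.compare(5, 6) decides and the shorter string sorts first.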
}
class StringOrderedSerialization extends OrderedSerialization[String] {
import StringOrderedSerialization._
override def hash(s: String) = s.hashCode
override def compare(a: String, b: String) = a.compareTo(b)
override def read(in: InputStream) = try {
val byteString = new Array[Byte](in.readPosVarInt)
in.readFully(byteString)
Success(new String(byteString, "UTF-8"))
} catch { case NonFatal(e) => Failure(e) }
override def write(b: OutputStream, s: String) = try {
val bytes = s.getBytes("UTF-8")
b.writePosVarInt(bytes.length)
b.writeBytes(bytes)
Serialization.successUnit
} catch { case NonFatal(e) => Failure(e) }
override def compareBinary(lhs: InputStream, rhs: InputStream) = try {
val leftSize = lhs.readPosVarInt
val rightSize = rhs.readPosVarInt
val seekingLeft = PositionInputStream(lhs)
val seekingRight = PositionInputStream(rhs)
val leftStart = seekingLeft.position
val rightStart = seekingRight.position
val res =
OrderedSerialization.resultFrom(binaryIntCompare(leftSize, seekingLeft, rightSize, seekingRight))
seekingLeft.seekToPosition(leftStart + leftSize)
seekingRight.seekToPosition(rightStart + rightSize)
res
} catch {
case NonFatal(e) => OrderedSerialization.CompareFailure(e)
}
/**
* generally there is no way to see how big a utf-8 string is without serializing. We could scan looking for
* all ascii characters, but it's hard to see if we'd get the balance right.
*/
override def staticSize = None
override def dynamicSize(s: String) = None
}
|
twitter/scalding
|
scalding-serialization/src/main/scala/com/twitter/scalding/serialization/StringOrderedSerialization.scala
|
Scala
|
apache-2.0
| 3,920
|
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.commons.util
import io.gatling.BaseSpec
class CircularIteratorSpec extends BaseSpec {
"CircularIterator" should "work fine with non empty Iterable with threadsafe on" in {
val rr = CircularIterator(IndexedSeq(1, 2, 3), threadSafe = true)
rr.next() shouldBe 1
rr.next() shouldBe 2
rr.next() shouldBe 3
rr.next() shouldBe 1
rr.next() shouldBe 2
rr.next() shouldBe 3
}
it should "work fine with non empty Iterable with threadsafe off" in {
val rr = CircularIterator(IndexedSeq(1, 2, 3), threadSafe = false)
rr.next() shouldBe 1
rr.next() shouldBe 2
rr.next() shouldBe 3
rr.next() shouldBe 1
rr.next() shouldBe 2
rr.next() shouldBe 3
}
it should "always return the same value when iterating a single value Iterable" in {
val rr = CircularIterator(IndexedSeq(1), threadSafe = false)
rr.next() shouldBe 1
rr.next() shouldBe 1
rr.next() shouldBe 1
rr.next() shouldBe 1
rr.next() shouldBe 1
rr.next() shouldBe 1
}
it should "throw NoSuchElementException when iterating on an empty Iterable" in {
val rr = CircularIterator(IndexedSeq.empty[Int], threadSafe = false)
a[NoSuchElementException] should be thrownBy rr.next()
}
}
|
gatling/gatling
|
gatling-commons/src/test/scala/io/gatling/commons/util/CircularIteratorSpec.scala
|
Scala
|
apache-2.0
| 1,875
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.system.kafka_deprecated
import java.util.concurrent.CountDownLatch
import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger}
import kafka.api.TopicMetadata
import org.I0Itec.zkclient.ZkClient
import org.apache.samza.util.Clock
import org.apache.samza.util.TopicMetadataStore
import org.junit.Assert._
import org.junit.Before
import org.junit.Test
import kafka.common.ErrorMapping
import kafka.api.PartitionMetadata
import org.apache.kafka.common.protocol.Errors
class TestTopicMetadataCache {
class MockTime extends Clock {
var currentValue = 0
def currentTimeMillis: Long = currentValue
}
class MockTopicMetadataStore extends TopicMetadataStore {
var mockCache = Map(
"topic1" -> new TopicMetadata("topic1", List.empty, Errors.NONE),
"topic2" -> new TopicMetadata("topic2", List.empty, Errors.NONE))
var numberOfCalls: AtomicInteger = new AtomicInteger(0)
def getTopicInfo(topics: Set[String]) = {
var topicMetadata = Map[String, TopicMetadata]()
topics.foreach(topic => topicMetadata += topic -> mockCache(topic))
numberOfCalls.getAndIncrement
topicMetadata
}
def setErrorCode(topic: String, errorCode: Short) {
mockCache += topic -> new TopicMetadata(topic, List.empty, Errors.forCode(errorCode))
}
}
@Before def setup {
TopicMetadataCache.clear
}
@Test
def testBasicMetadataCacheFunctionality {
val mockStore = new MockTopicMetadataStore
val mockTime = new MockTime
// Retrieve a topic from the cache. Initially cache is empty and store is queried to get the data
mockStore.setErrorCode("topic1", 3)
var metadata = TopicMetadataCache.getTopicMetadata(Set("topic1"), "kafka", mockStore.getTopicInfo, 5, mockTime.currentTimeMillis)
assertEquals("topic1", metadata("topic1").topic)
assertEquals(3, metadata("topic1").error.code)
assertEquals(1, mockStore.numberOfCalls.get())
// Retrieve the same topic from the cache which has an error code. Ensure the store is called to refresh the cache
mockTime.currentValue = 5
mockStore.setErrorCode("topic1", 0)
metadata = TopicMetadataCache.getTopicMetadata(Set("topic1"), "kafka", mockStore.getTopicInfo, 5, mockTime.currentTimeMillis)
assertEquals("topic1", metadata("topic1").topic)
assertEquals(0, metadata("topic1").error.code)
assertEquals(2, mockStore.numberOfCalls.get())
// Retrieve the same topic from the cache with refresh rate greater than the last update. Ensure the store is not
// called
metadata = TopicMetadataCache.getTopicMetadata(Set("topic1"), "kafka", mockStore.getTopicInfo, 5, mockTime.currentTimeMillis)
assertEquals("topic1", metadata("topic1").topic)
assertEquals(0, metadata("topic1").error.code)
assertEquals(2, mockStore.numberOfCalls.get())
// Ensure that refresh happens when refresh rate is less than the last update. Ensure the store is called
mockTime.currentValue = 11
metadata = TopicMetadataCache.getTopicMetadata(Set("topic1"), "kafka", mockStore.getTopicInfo, 5, mockTime.currentTimeMillis)
assertEquals("topic1", metadata("topic1").topic)
assertEquals(0, metadata("topic1").error.code)
assertEquals(3, mockStore.numberOfCalls.get())
}
@Test
def testMultiThreadedInteractionForTopicMetadataCache {
val mockStore = new MockTopicMetadataStore
val mockTime = new MockTime
val waitForThreadStart = new CountDownLatch(3)
val numAssertionSuccess = new AtomicBoolean(true)
// Add topic to the cache from multiple threads and ensure the store is called only once
val threads = new Array[Thread](3)
mockTime.currentValue = 17
for (i <- 0 until 3) {
threads(i) = new Thread(new Runnable {
def run {
waitForThreadStart.countDown()
waitForThreadStart.await()
val metadata = TopicMetadataCache.getTopicMetadata(Set("topic1"), "kafka", mockStore.getTopicInfo, 5, mockTime.currentTimeMillis)
numAssertionSuccess.compareAndSet(true, metadata("topic1").topic.equals("topic1"))
numAssertionSuccess.compareAndSet(true, metadata("topic1").error.code == 0)
}
})
threads(i).start()
}
for (i <- 0 until 3) {
threads(i).join
}
assertTrue(numAssertionSuccess.get())
assertEquals(1, mockStore.numberOfCalls.get())
}
@Test
def testBadErrorCodes {
val partitionMetadataBad = new PartitionMetadata(0, None, Seq(), error = Errors.LEADER_NOT_AVAILABLE)
val partitionMetadataGood = new PartitionMetadata(0, None, Seq(), error = Errors.NONE)
assertTrue(TopicMetadataCache.hasBadErrorCode(new TopicMetadata("test", List.empty, Errors.REQUEST_TIMED_OUT)))
assertTrue(TopicMetadataCache.hasBadErrorCode(new TopicMetadata("test", List(partitionMetadataBad), Errors.NONE)))
assertFalse(TopicMetadataCache.hasBadErrorCode(new TopicMetadata("test", List.empty, Errors.NONE)))
assertFalse(TopicMetadataCache.hasBadErrorCode(new TopicMetadata("test", List(partitionMetadataGood), Errors.NONE)))
}
}
|
bharathkk/samza
|
samza-kafka/src/test/scala/org/apache/samza/system/kafka_deprecated/TestTopicMetadataCache.scala
|
Scala
|
apache-2.0
| 5,882
|
package com.forged
import akka.actor.{ActorSystem, Props}
import akka.pattern.ask
import akka.event.Logging
import akka.io.IO
import akka.util.Timeout
import com.forged.server.ServerActor
import spray.can.Http
import scala.concurrent.duration._
/**
* Created by visitor15 on 11/28/15.
*/
object Boot extends App {
implicit val system = ActorSystem("site")
val log = Logging(system, getClass)
implicit val timeout = Timeout(5.seconds)
val service = system.actorOf(Props(new ServerActor()), "root-actor")
// start a new HTTP server on port 8080 with our service actor as the handler
IO(Http) ? Http.Bind(service, interface = "192.168.1.2", port = 8080)
}
|
Visitor15/webPresence-scala
|
src/main/scala/com/forged/Boot.scala
|
Scala
|
mit
| 671
|
package controllers.c5
import play.api.mvc._
/**
* Created by trydofor on 7/10/15.
* @see https://playframework.com/documentation/2.4.x/ScalaJsonInception
*/
class S5ScalaJsonInception extends Controller {
val a0 = Action {
Ok(
"""
5.5.JSON Macro Inception
      all the code works well
just ctrl-c and ctrl-v
""")
}
object boring {
import play.api.libs.json._
import play.api.libs.functional.syntax._
case class Person(name: String, age: Int, lovesChocolate: Boolean)
implicit val personReads = (
      (__ \ 'name).read[String] and
      (__ \ 'age).read[Int] and
      (__ \ 'lovesChocolate).read[Boolean]
)(Person)
}
object injectionRw{
import play.api.libs.json._
case class Person(name: String, age: Int)
object Person{
implicit val personWrites = Json.writes[Person]
implicit val personReads = Json.reads[Person]
}
    val json = Json.parse("""{"name":"trydofor","age":36}""")
val person = Person("trydofor",36)
val v2 = Json.fromJson[Person](json).get
val v3 = Json.toJson(person)
assert(person == v2)
assert(json == v3)
}
object injectionFormat{
import play.api.libs.json._
case class Person(name: String, age: Int)
object Person{
implicit val personFmt = Json.format[Person]
}
    val json = Json.parse("""{"name":"trydofor","age":36}""")
val person = Person("trydofor",36)
val v2 = Json.fromJson[Person](json).get
val v3 = Json.toJson(person)
assert(person == v2)
assert(json == v3)
}
}
|
moilioncircle/playframework-2.4.x-scala
|
app/controllers/c5/S5ScalaJsonInception.scala
|
Scala
|
apache-2.0
| 1,579
|
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.path
import org.scalatest.DoNotDiscover
import org.scalatest.freespec
import org.scalatest.funspec
class ExampleBaseClass
@DoNotDiscover
protected class ExampleFreeSpecLike extends ExampleBaseClass with freespec.PathAnyFreeSpecLike {
//SCALATESTJS,NATIVE-ONLY override def newInstance: freespec.PathAnyFreeSpecLike = new ExampleFreeSpecLike
}
@DoNotDiscover
protected class ExampleFunSpecLike extends ExampleBaseClass with funspec.PathAnyFunSpecLike {
//SCALATESTJS,NATIVE-ONLY override def newInstance: funspec.PathAnyFunSpecLike = new ExampleFunSpecLike
}
|
scalatest/scalatest
|
jvm/common-test/src/main/scala/org/scalatest/path/ExampleLikeSpecs.scala
|
Scala
|
apache-2.0
| 1,188
|
package com.seremis.geninfusion.util
import java.awt.image.BufferedImage
import javax.imageio.ImageIO
import com.seremis.geninfusion.api.model.ITexturedRect
import net.minecraft.util.ResourceLocation
import scala.collection.mutable.HashMap
object TextureHelper {
val imageCache: HashMap[ResourceLocation, BufferedImage] = HashMap()
/**
* Get BufferedImage from disk, location specified by ResourceLocation.
*
* @param location The location the image is stored at.
* @return The image found at the specified location.
*/
def getBufferedImage(location: ResourceLocation): BufferedImage = {
if(!imageCache.contains(location)) {
try {
imageCache += (location -> ImageIO.read(getClass.getResourceAsStream("/assets/" + location.getResourceDomain + "/" + location.getResourcePath)))
} catch {
case e: Exception => e.printStackTrace()
}
}
imageCache.get(location).get
}
/**
* Creates a new BufferedImage with the texture of every ITexturedRect in rects. The destination coordinates
 * of the textured rects need to be populated.
* @param rects The rectangles that need to be stitched to a BufferedImage.
* @param textureSize A tuple with the (width, height) of the returned BufferedImage.
* @return A new BufferedImage with the textures of every ITexturedRect in rects.
*/
def stitchTexturedRects(rects: Array[ITexturedRect], textureSize: (Int, Int)): BufferedImage = {
val result = new BufferedImage(textureSize._1, textureSize._2, BufferedImage.TYPE_INT_ARGB)
val graphics = result.getGraphics
for(rect <- rects) {
val srcX1 = rect.getSrcX
val srcY1 = rect.getSrcY
val srcX2 = rect.getSrcX + rect.getWidth
val srcY2 = rect.getSrcY + rect.getHeight
val destX1 = rect.getDestX
val destY1 = rect.getDestY
val destX2 = rect.getDestX + rect.getWidth
val destY2 = rect.getDestY + rect.getHeight
graphics.drawImage(rect.getTexture, destX1, destY1, destX2, destY2, srcX1, srcY1, srcX2, srcY2, null)
}
graphics.dispose()
result
}
/**
* Rounds i up to the nearest power of two.
* @param i A number.
 * @return The smallest power of two that is greater than or equal to i.
*/
def wrapPow2(i: Int): Int = {
var n = i
n -= 1
n |= n >> 1
n |= n >> 2
n |= n >> 4
n |= n >> 8
n |= n >> 16
n += 1
n
}
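  // Illustrative values: wrapPow2(1) == 1, wrapPow2(33) == 64, wrapPow2(64) == 64.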
/**
* Populates the destination coordinates of the ITexturedRectangles in rects.
* @param rects An array of ITexturedRect. Their destination coordinates will be set.
* @param maxWidth The maximum width of the texture. Defaults to 64 px.
* @return The required texture size (rounded up to the nearest power of 2) to fit all rectangles.
*/
def populateTexturedRectsDestination(rects: Array[ITexturedRect], maxWidth: Int = 64): (Int, Int) = {
var x = 0
var y = 0
var maxY = 0
for(rect <- rects) {
if(x + rect.getWidth > maxWidth) {
x = 0
y = maxY
}
if(y + rect.getHeight > maxY) {
maxY = y + rect.getHeight
}
rect.setDestX(x)
rect.setDestY(y)
x = x + rect.getWidth
y = y + rect.getHeight
}
(maxWidth, wrapPow2(y))
}
}
|
Seremis/Genetic-Infusion
|
src/main/scala/com/seremis/geninfusion/util/TextureHelper.scala
|
Scala
|
gpl-3.0
| 3,583
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.producer
import org.apache.kafka.common.config.ConfigException
import org.scalatest.TestFailedException
import org.scalatest.junit.JUnit3Suite
import kafka.consumer.SimpleConsumer
import kafka.message.Message
import kafka.server.{KafkaConfig, KafkaRequestHandler, KafkaServer}
import kafka.zk.ZooKeeperTestHarness
import org.apache.log4j.{Level, Logger}
import org.junit.Test
import kafka.utils._
import java.util
import kafka.admin.AdminUtils
import util.Properties
import kafka.api.FetchRequestBuilder
import org.junit.Assert.assertTrue
import org.junit.Assert.assertFalse
import org.junit.Assert.assertEquals
import kafka.common.{ErrorMapping, FailedToSendMessageException}
import kafka.serializer.StringEncoder
class ProducerTest extends JUnit3Suite with ZooKeeperTestHarness with Logging{
private val brokerId1 = 0
private val brokerId2 = 1
private val ports = TestUtils.choosePorts(2)
private val (port1, port2) = (ports(0), ports(1))
private var server1: KafkaServer = null
private var server2: KafkaServer = null
private var consumer1: SimpleConsumer = null
private var consumer2: SimpleConsumer = null
private val requestHandlerLogger = Logger.getLogger(classOf[KafkaRequestHandler])
private var servers = List.empty[KafkaServer]
private val props1 = TestUtils.createBrokerConfig(brokerId1, port1, false)
props1.put("num.partitions", "4")
private val config1 = KafkaConfig.fromProps(props1)
private val props2 = TestUtils.createBrokerConfig(brokerId2, port2, false)
props2.put("num.partitions", "4")
private val config2 = KafkaConfig.fromProps(props2)
override def setUp() {
super.setUp()
// set up 2 brokers with 4 partitions each
server1 = TestUtils.createServer(config1)
server2 = TestUtils.createServer(config2)
servers = List(server1,server2)
val props = new Properties()
props.put("host", "localhost")
props.put("port", port1.toString)
consumer1 = new SimpleConsumer("localhost", port1, 1000000, 64*1024, "")
consumer2 = new SimpleConsumer("localhost", port2, 100, 64*1024, "")
// temporarily set request handler logger to a higher level
requestHandlerLogger.setLevel(Level.FATAL)
}
override def tearDown() {
    // restore the request handler logger to its original level
requestHandlerLogger.setLevel(Level.ERROR)
if (consumer1 != null)
consumer1.close()
if (consumer2 != null)
consumer2.close()
server1.shutdown
server2.shutdown
Utils.rm(server1.config.logDirs)
Utils.rm(server2.config.logDirs)
super.tearDown()
}
@Test
def testUpdateBrokerPartitionInfo() {
val topic = "new-topic"
TestUtils.createTopic(zkClient, topic, numPartitions = 1, replicationFactor = 2, servers = servers, acl=null)
val props = new Properties()
// no need to retry since the send will always fail
props.put("message.send.max.retries", "0")
val producer1 = TestUtils.createProducer[String, String](
brokerList = "localhost:80,localhost:81",
encoder = classOf[StringEncoder].getName,
keyEncoder = classOf[StringEncoder].getName,
producerProps = props)
try{
producer1.send(new KeyedMessage[String, String](topic, "test", "test1"))
fail("Test should fail because the broker list provided are not valid")
} catch {
case e: FailedToSendMessageException => // this is expected
case oe: Throwable => fail("fails with exception", oe)
} finally {
producer1.close()
}
val producer2 = TestUtils.createProducer[String, String](
brokerList = "localhost:80," + TestUtils.getBrokerListStrFromConfigs(Seq(config1)),
encoder = classOf[StringEncoder].getName,
keyEncoder = classOf[StringEncoder].getName)
try{
producer2.send(new KeyedMessage[String, String](topic, "test", "test1"))
} catch {
case e: Throwable => fail("Should succeed sending the message", e)
} finally {
producer2.close()
}
val producer3 = TestUtils.createProducer[String, String](
brokerList = TestUtils.getBrokerListStrFromConfigs(Seq(config1, config2)),
encoder = classOf[StringEncoder].getName,
keyEncoder = classOf[StringEncoder].getName)
try{
producer3.send(new KeyedMessage[String, String](topic, "test", "test1"))
} catch {
case e: Throwable => fail("Should succeed sending the message", e)
} finally {
producer3.close()
}
}
@Test
def testSendToNewTopic() {
val props1 = new util.Properties()
props1.put("request.required.acks", "-1")
val topic = "new-topic"
// create topic with 1 partition and await leadership
TestUtils.createTopic(zkClient, topic, numPartitions = 1, replicationFactor = 2, servers = servers, acl=null)
val producer1 = TestUtils.createProducer[String, String](
brokerList = TestUtils.getBrokerListStrFromConfigs(Seq(config1, config2)),
encoder = classOf[StringEncoder].getName,
keyEncoder = classOf[StringEncoder].getName,
partitioner = classOf[StaticPartitioner].getName,
producerProps = props1)
// Available partition ids should be 0.
producer1.send(new KeyedMessage[String, String](topic, "test", "test1"))
producer1.send(new KeyedMessage[String, String](topic, "test", "test2"))
// get the leader
val leaderOpt = ZkUtils.getLeaderForPartition(zkClient, topic, 0)
assertTrue("Leader for topic new-topic partition 0 should exist", leaderOpt.isDefined)
val leader = leaderOpt.get
val messageSet = if(leader == server1.config.brokerId) {
val response1 = consumer1.fetch(new FetchRequestBuilder().addFetch(topic, 0, 0, 10000).build())
response1.messageSet("new-topic", 0).iterator.toBuffer
}else {
val response2 = consumer2.fetch(new FetchRequestBuilder().addFetch(topic, 0, 0, 10000).build())
response2.messageSet("new-topic", 0).iterator.toBuffer
}
assertEquals("Should have fetched 2 messages", 2, messageSet.size)
assertEquals(new Message(bytes = "test1".getBytes, key = "test".getBytes), messageSet(0).message)
assertEquals(new Message(bytes = "test2".getBytes, key = "test".getBytes), messageSet(1).message)
producer1.close()
val props2 = new util.Properties()
props2.put("request.required.acks", "3")
// no need to retry since the send will always fail
props2.put("message.send.max.retries", "0")
try {
val producer2 = TestUtils.createProducer[String, String](
brokerList = TestUtils.getBrokerListStrFromConfigs(Seq(config1, config2)),
encoder = classOf[StringEncoder].getName,
keyEncoder = classOf[StringEncoder].getName,
partitioner = classOf[StaticPartitioner].getName,
producerProps = props2)
producer2.close
fail("we don't support request.required.acks greater than 1")
}
catch {
case iae: IllegalArgumentException => // this is expected
case e: Throwable => fail("Not expected", e)
}
}
@Test
def testSendWithDeadBroker() {
val props = new Properties()
props.put("request.required.acks", "1")
// No need to retry since the topic will be created beforehand and normal send will succeed on the first try.
// Reducing the retries will save the time on the subsequent failure test.
props.put("message.send.max.retries", "0")
val topic = "new-topic"
// create topic
TestUtils.createTopic(zkClient, topic, partitionReplicaAssignment = Map(0->Seq(0), 1->Seq(0), 2->Seq(0), 3->Seq(0)),
servers = servers, acl=null)
val producer = TestUtils.createProducer[String, String](
brokerList = TestUtils.getBrokerListStrFromConfigs(Seq(config1, config2)),
encoder = classOf[StringEncoder].getName,
keyEncoder = classOf[StringEncoder].getName,
partitioner = classOf[StaticPartitioner].getName,
producerProps = props)
try {
// Available partition ids should be 0, 1, 2 and 3, all lead and hosted only
// on broker 0
producer.send(new KeyedMessage[String, String](topic, "test", "test1"))
} catch {
case e: Throwable => fail("Unexpected exception: " + e)
}
// kill the broker
server1.shutdown
server1.awaitShutdown()
try {
// These sends should fail since there are no available brokers
producer.send(new KeyedMessage[String, String](topic, "test", "test1"))
fail("Should fail since no leader exists for the partition.")
} catch {
case e : TestFailedException => throw e // catch and re-throw the failure message
case e2: Throwable => // otherwise success
}
// restart server 1
server1.startup()
TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0)
TestUtils.waitUntilMetadataIsPropagated(servers, topic, 0)
try {
// cross check if broker 1 got the messages
val response1 = consumer1.fetch(new FetchRequestBuilder().addFetch(topic, 0, 0, 10000).build())
val messageSet1 = response1.messageSet(topic, 0).iterator
assertTrue("Message set should have 1 message", messageSet1.hasNext)
assertEquals(new Message(bytes = "test1".getBytes, key = "test".getBytes), messageSet1.next.message)
assertFalse("Message set should have another message", messageSet1.hasNext)
} catch {
case e: Exception => fail("Not expected", e)
}
producer.close
}
@Test
def testAsyncSendCanCorrectlyFailWithTimeout() {
val timeoutMs = 500
val props = new Properties()
props.put("request.timeout.ms", String.valueOf(timeoutMs))
props.put("request.required.acks", "1")
props.put("message.send.max.retries", "0")
props.put("client.id","ProducerTest-testAsyncSendCanCorrectlyFailWithTimeout")
val producer = TestUtils.createProducer[String, String](
brokerList = TestUtils.getBrokerListStrFromConfigs(Seq(config1, config2)),
encoder = classOf[StringEncoder].getName,
keyEncoder = classOf[StringEncoder].getName,
partitioner = classOf[StaticPartitioner].getName,
producerProps = props)
val topic = "new-topic"
// create topics in ZK
TestUtils.createTopic(zkClient, topic, partitionReplicaAssignment = Map(0->Seq(0,1)), servers = servers, acl=null)
// do a simple test to make sure plumbing is okay
try {
// this message should be assigned to partition 0 whose leader is on broker 0
producer.send(new KeyedMessage[String, String](topic, "test", "test"))
// cross check if brokers got the messages
val response1 = consumer1.fetch(new FetchRequestBuilder().addFetch(topic, 0, 0, 10000).build())
val messageSet1 = response1.messageSet("new-topic", 0).iterator
assertTrue("Message set should have 1 message", messageSet1.hasNext)
assertEquals(new Message("test".getBytes), messageSet1.next.message)
} catch {
      case e: Throwable => producer.close(); fail("Not expected", e)
}
// stop IO threads and request handling, but leave networking operational
// any requests should be accepted and queue up, but not handled
server1.requestHandlerPool.shutdown()
val t1 = SystemTime.milliseconds
try {
// this message should be assigned to partition 0 whose leader is on broker 0, but
// broker 0 will not response within timeoutMs millis.
producer.send(new KeyedMessage[String, String](topic, "test", "test"))
} catch {
case e: FailedToSendMessageException => /* success */
case e: Exception => fail("Not expected", e)
} finally {
producer.close()
}
val t2 = SystemTime.milliseconds
// make sure we don't wait fewer than timeoutMs
assertTrue((t2-t1) >= timeoutMs)
}
@Test
def testSendNullMessage() {
val producer = TestUtils.createProducer[String, String](
brokerList = TestUtils.getBrokerListStrFromConfigs(Seq(config1, config2)),
encoder = classOf[StringEncoder].getName,
keyEncoder = classOf[StringEncoder].getName,
partitioner = classOf[StaticPartitioner].getName)
try {
// create topic
AdminUtils.createTopic(zkClient, "new-topic", 2, 1, acl=null)
TestUtils.waitUntilTrue(() =>
AdminUtils.fetchTopicMetadataFromZk("new-topic", zkClient).errorCode != ErrorMapping.UnknownTopicOrPartitionCode,
"Topic new-topic not created after timeout",
waitTime = zookeeper.tickTime)
TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, "new-topic", 0)
producer.send(new KeyedMessage[String, String]("new-topic", "key", null))
} finally {
producer.close()
}
}
}
|
roadboy/KafkaACL
|
core/src/test/scala/unit/kafka/producer/ProducerTest.scala
|
Scala
|
apache-2.0
| 13,436
|
/*
* -╥⌐⌐⌐⌐ -⌐⌐⌐⌐-
 *      ≡╢░░░░⌐\░░░φ     ╓╝░░░░⌐░░░░╪╕
* ╣╬░░` `░░░╢┘ φ▒╣╬╝╜ ░░╢╣Q
* ║╣╬░⌐ ` ╤▒▒▒Å` ║╢╬╣
* ╚╣╬░⌐ ╔▒▒▒▒`«╕ ╢╢╣▒
* ╫╬░░╖ .░ ╙╨╨ ╣╣╬░φ ╓φ░╢╢Å
* ╙╢░░░░⌐"░░░╜ ╙Å░░░░⌐░░░░╝`
* ``˚¬ ⌐ ˚˚⌐´
*
* Copyright © 2016 Flipkart.com
*/
package com.flipkart.connekt.receptors.directives
import akka.http.scaladsl.model.HttpHeader
import akka.http.scaladsl.server._
import akka.http.scaladsl.server.directives.{BasicDirectives, RouteDirectives}
import com.flipkart.connekt.commons.cache.{DistributedCacheManager, DistributedCacheType}
import com.flipkart.connekt.commons.core.Wrappers.Try_#
import com.flipkart.connekt.commons.metrics.Instrumented
import com.flipkart.metrics.Timed
import scala.util.{Success, Try}
case class IdempotentRequestFailedRejection(requestId: String) extends Rejection
trait IdempotentDirectives extends HeaderDirectives with Instrumented {
val X_REQUEST_ID = "x-request-id"
def idempotentRequest(appName: String): Directive0 = {
BasicDirectives.extract[Seq[HttpHeader]](_.request.headers) flatMap { headers =>
getHeader(X_REQUEST_ID, headers) match {
case Some(reqId) if reqId.nonEmpty =>
get(appName, reqId) match {
case Success(isIdempotentReq) if !isIdempotentReq =>
add(appName, reqId)
BasicDirectives.pass
case _ =>
RouteDirectives.reject(IdempotentRequestFailedRejection(reqId))
}
case _ =>
BasicDirectives.pass
}
}
}
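  // Hypothetical usage inside a Route (the inner route is illustrative): a
  // request replaying an already-seen x-request-id header is rejected with
  // IdempotentRequestFailedRejection, which a rejection handler can map to a 4xx:
  //
  //   val route = idempotentRequest("myApp") {
  //     complete("processed at most once per x-request-id")
  //   }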
@Timed("add")
private def add(appName: String, requestId: String): Try[Boolean] = Try_#(message = "IdempotentDirectives.add Failed") {
DistributedCacheManager.getCache(DistributedCacheType.IdempotentCheck).put[Boolean](cacheKey(appName, requestId), true)
}
@Timed("get")
private def get(appName: String, requestId: String): Try[Boolean] = Try_#(message = "IdempotentDirectives.get Failed") {
DistributedCacheManager.getCache(DistributedCacheType.IdempotentCheck).get[Boolean](cacheKey(appName, requestId)).getOrElse(false)
}
private def cacheKey(appName: String, requestId: String): String = appName.toLowerCase + "_" + requestId
}
|
Flipkart/connekt
|
receptors/src/main/scala/com/flipkart/connekt/receptors/directives/IdempotentDirectives.scala
|
Scala
|
mit
| 2,524
|
package dotty.tools
package backend
package jvm
import scala.language.unsafeNulls
import scala.annotation.switch
import scala.collection.mutable.SortedMap
import scala.tools.asm
import scala.tools.asm.{Handle, Opcodes}
import BCodeHelpers.InvokeStyle
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.core.Constants._
import dotty.tools.dotc.core.Flags.{Label => LabelFlag, _}
import dotty.tools.dotc.core.Types._
import dotty.tools.dotc.core.StdNames.{nme, str}
import dotty.tools.dotc.core.Symbols._
import dotty.tools.dotc.transform.Erasure
import dotty.tools.dotc.transform.SymUtils._
import dotty.tools.dotc.util.Spans._
import dotty.tools.dotc.core.Contexts._
import dotty.tools.dotc.core.Phases._
import dotty.tools.dotc.report
/*
*
* @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
* @version 1.0
*
*/
trait BCodeBodyBuilder extends BCodeSkelBuilder {
// import global._
// import definitions._
import tpd._
import int.{_, given}
import DottyBackendInterface.symExtensions
import bTypes._
import coreBTypes._
import BCodeBodyBuilder._
protected val primitives: DottyPrimitives
/*
* Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions.
*/
abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) {
import Primitives.TestOp
/* ---------------- helper utils for generating methods and code ---------------- */
def emit(opc: Int): Unit = { mnode.visitInsn(opc) }
def emitZeroOf(tk: BType): Unit = {
tk match {
case BOOL => bc.boolconst(false)
case BYTE |
SHORT |
CHAR |
INT => bc.iconst(0)
case LONG => bc.lconst(0)
case FLOAT => bc.fconst(0)
case DOUBLE => bc.dconst(0)
case UNIT => ()
case _ => emit(asm.Opcodes.ACONST_NULL)
}
}
/*
* Emits code that adds nothing to the operand stack.
* Two main cases: `tree` is an assignment,
* otherwise an `adapt()` to UNIT is performed if needed.
*/
def genStat(tree: Tree): Unit = {
lineNumber(tree)
tree match {
case Assign(lhs @ DesugaredSelect(qual, _), rhs) =>
val isStatic = lhs.symbol.isStaticMember
if (!isStatic) { genLoadQualifier(lhs) }
genLoad(rhs, symInfoTK(lhs.symbol))
lineNumber(tree)
// receiverClass is used in the bytecode to access the field. using sym.owner may lead to IllegalAccessError
val receiverClass = qual.tpe.typeSymbol
fieldStore(lhs.symbol, receiverClass)
case Assign(lhs, rhs) =>
val s = lhs.symbol
val Local(tk, _, idx, _) = locals.getOrMakeLocal(s)
rhs match {
case Apply(Select(larg: Ident, nme.ADD), Literal(x) :: Nil)
if larg.symbol == s && tk.isIntSizedType && x.isShortRange =>
lineNumber(tree)
bc.iinc(idx, x.intValue)
case Apply(Select(larg: Ident, nme.SUB), Literal(x) :: Nil)
if larg.symbol == s && tk.isIntSizedType && Constant(-x.intValue).isShortRange =>
lineNumber(tree)
bc.iinc(idx, -x.intValue)
case _ =>
genLoad(rhs, tk)
lineNumber(tree)
bc.store(idx, tk)
}
case _ =>
genLoad(tree, UNIT)
}
}
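    // Illustrative effect of the peephole above: for an int-sized local `i`,
    // `i = i + 1` matches the Apply(Select(larg, nme.ADD), Literal(x) :: Nil)
    // case and emits a single `iinc idx, 1` instead of load/const/add/store.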
def genThrow(expr: Tree): BType = {
val thrownKind = tpeTK(expr)
      // `throw null` is valid although scala.Null (as defined in src/library-aux) isn't a subtype of Throwable.
      // Similarly for scala.Nothing (again, as defined in src/library-aux).
assert(thrownKind.isNullType || thrownKind.isNothingType || thrownKind.asClassBType.isSubtypeOf(ThrowableReference))
genLoad(expr, thrownKind)
lineNumber(expr)
emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level.
RT_NOTHING // always returns the same, the invoker should know :)
}
/* Generate code for primitive arithmetic operations. */
def genArithmeticOp(tree: Tree, code: Int): BType = tree match{
case Apply(fun @ DesugaredSelect(larg, _), args) =>
var resKind = tpeTK(larg)
assert(resKind.isNumericType || (resKind == BOOL),
s"$resKind is not a numeric or boolean type [operation: ${fun.symbol}]")
import ScalaPrimitivesOps._
args match {
// unary operation
case Nil =>
genLoad(larg, resKind)
code match {
case POS => () // nothing
case NEG => bc.neg(resKind)
case NOT => bc.genPrimitiveArithmetic(Primitives.NOT, resKind)
case _ => abort(s"Unknown unary operation: ${fun.symbol.showFullName} code: $code")
}
// binary operation
case rarg :: Nil =>
val isShift = isShiftOp(code)
resKind = tpeTK(larg).maxType(if (isShift) INT else tpeTK(rarg))
if (isShift || isBitwiseOp(code)) {
assert(resKind.isIntegralType || (resKind == BOOL),
s"$resKind incompatible with arithmetic modulo operation.")
}
genLoad(larg, resKind)
genLoad(rarg, if (isShift) INT else resKind)
(code: @switch) match {
case ADD => bc add resKind
case SUB => bc sub resKind
case MUL => bc mul resKind
case DIV => bc div resKind
case MOD => bc rem resKind
case OR | XOR | AND => bc.genPrimitiveLogical(code, resKind)
case LSL | LSR | ASR => bc.genPrimitiveShift(code, resKind)
case _ => abort(s"Unknown primitive: ${fun.symbol}[$code]")
}
case _ =>
abort(s"Too many arguments for primitive function: $tree")
}
lineNumber(tree)
resKind
}
/* Generate primitive array operations. */
    def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = tree match {
case Apply(DesugaredSelect(arrayObj, _), args) =>
import ScalaPrimitivesOps._
val k = tpeTK(arrayObj)
genLoad(arrayObj, k)
val elementType = typeOfArrayOp.getOrElse[bTypes.BType](code, abort(s"Unknown operation on arrays: $tree code: $code"))
var generatedType = expectedType
if (isArrayGet(code)) {
// load argument on stack
          assert(args.length == 1, s"Too many arguments for array get operation: $tree")
genLoad(args.head, INT)
generatedType = k.asArrayBType.componentType
bc.aload(elementType)
}
else if (isArraySet(code)) {
val List(a1, a2) = args
genLoad(a1, INT)
genLoad(a2)
generatedType = UNIT
bc.astore(elementType)
} else {
generatedType = INT
emit(asm.Opcodes.ARRAYLENGTH)
}
lineNumber(tree)
generatedType
}
    def genLoadIf(tree: If, expectedType: BType): BType = tree match {
case If(condp, thenp, elsep) =>
val success = new asm.Label
val failure = new asm.Label
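        // Shape of the emitted code (sketch): genCond jumps to `success` or `failure`;
        // when an else branch exists, the then-branch ends with a goto past it.
        //   <cond> -> success: <thenp>; goto postIf
        //          -> failure: <elsep>
        //   postIf: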
val hasElse = !elsep.isEmpty && (elsep match {
case Literal(value) if value.tag == UnitTag => false
case _ => true
})
val postIf = if (hasElse) new asm.Label else failure
genCond(condp, success, failure, targetIfNoJump = success)
markProgramPoint(success)
val thenKind = tpeTK(thenp)
val elseKind = if (!hasElse) UNIT else tpeTK(elsep)
def hasUnitBranch = (thenKind == UNIT || elseKind == UNIT) && expectedType == UNIT
val resKind = if (hasUnitBranch) UNIT else tpeTK(tree)
genLoad(thenp, resKind)
if (hasElse) { bc goTo postIf }
markProgramPoint(failure)
if (hasElse) {
genLoad(elsep, resKind)
markProgramPoint(postIf)
}
resKind
}
def genPrimitiveOp(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match {
case Apply(fun @ DesugaredSelect(receiver, _), _) =>
val sym = tree.symbol
val code = primitives.getPrimitive(tree, receiver.tpe)
import ScalaPrimitivesOps._
if (isArithmeticOp(code)) genArithmeticOp(tree, code)
else if (code == CONCAT) genStringConcat(tree)
else if (code == HASH) genScalaHash(receiver)
else if (isArrayOp(code)) genArrayOp(tree, code, expectedType)
else if (isLogicalOp(code) || isComparisonOp(code)) {
val success, failure, after = new asm.Label
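          // Materialize the Boolean result of the test as a stack value, e.g. (sketch)
          // `a < b` becomes: conditional jump; success: push true, goto after;
          // failure: push false; after: ...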
genCond(tree, success, failure, targetIfNoJump = success)
// success block
markProgramPoint(success)
bc boolconst true
bc goTo after
// failure block
markProgramPoint(failure)
bc boolconst false
// after
markProgramPoint(after)
BOOL
}
else if (isCoercion(code)) {
genLoad(receiver)
lineNumber(tree)
genCoercion(code)
coercionTo(code)
}
else abort(
s"Primitive operation not handled yet: ${sym.showFullName}(${fun.symbol.name}) at: ${tree.span}"
)
}
def genLoad(tree: Tree): Unit = {
genLoad(tree, tpeTK(tree))
}
/* Generate code for trees that produce values on the stack */
def genLoad(tree: Tree, expectedType: BType): Unit = {
var generatedType = expectedType
lineNumber(tree)
tree match {
case ValDef(nme.THIS, _, _) =>
report.debuglog("skipping trivial assign to _$this: " + tree)
case tree@ValDef(_, _, _) =>
val sym = tree.symbol
/* most of the time, !locals.contains(sym), unless the current activation of genLoad() is being called
while duplicating a finalizer that contains this ValDef. */
val loc = locals.getOrMakeLocal(sym)
val Local(tk, _, idx, isSynth) = loc
if (tree.rhs == tpd.EmptyTree) { emitZeroOf(tk) }
else { genLoad(tree.rhs, tk) }
bc.store(idx, tk)
val localVarStart = currProgramPoint()
if (!isSynth) { // there are case <synthetic> ValDef's emitted by patmat
varsInScope ::= (sym -> localVarStart)
}
generatedType = UNIT
case t @ If(_, _, _) =>
generatedType = genLoadIf(t, expectedType)
case t @ Labeled(_, _) =>
generatedType = genLabeled(t)
case r: Return =>
genReturn(r)
generatedType = expectedType
case t @ WhileDo(_, _) =>
generatedType = genWhileDo(t)
case t @ Try(_, _, _) =>
generatedType = genLoadTry(t)
case t: Apply if t.fun.symbol eq defn.throwMethod =>
generatedType = genThrow(t.args.head)
case New(tpt) =>
abort(s"Unexpected New(${tpt.tpe.showSummary()}/$tpt) reached GenBCode.\\n" +
" Call was genLoad" + ((tree, expectedType)))
case t @ Closure(env, call, tpt) =>
val functionalInterface: Symbol =
if !tpt.isEmpty then tpt.tpe.classSymbol
else t.tpe.classSymbol
val (fun, args) = call match {
case Apply(fun, args) => (fun, args)
case t @ DesugaredSelect(_, _) => (t, Nil) // TODO: use Select
case t @ Ident(_) => (t, Nil)
}
if (!fun.symbol.isStaticMember) {
// load receiver of non-static implementation of lambda
            // darkdimius: I haven't found where in the spec the `this` reference should go,
            // but I was able to derive it by reading
            // AbstractValidatingLambdaMetafactory.validateMetafactoryArgs
val DesugaredSelect(prefix, _) = fun
genLoad(prefix)
}
genLoadArguments(env, fun.symbol.info.firstParamTypes map toTypeKind)
generatedType = genInvokeDynamicLambda(NoSymbol, fun.symbol, env.size, functionalInterface)
case app @ Apply(_, _) =>
generatedType = genApply(app, expectedType)
case This(qual) =>
val symIsModuleClass = tree.symbol.is(ModuleClass)
assert(tree.symbol == claszSymbol || symIsModuleClass,
s"Trying to access the this of another class: tree.symbol = ${tree.symbol}, class symbol = $claszSymbol compilation unit: $cunit")
if (symIsModuleClass && tree.symbol != claszSymbol) {
generatedType = genLoadModule(tree)
}
else {
mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
// When compiling Array.scala, the constructor invokes `Array.this.super.<init>`. The expectedType
// is `[Object` (computed by typeToBType, the type of This(Array) is `Array[T]`). If we would set
// the generatedType to `Array` below, the call to adapt at the end would fail. The situation is
// similar for primitives (`I` vs `Int`).
if (tree.symbol != defn.ArrayClass && !tree.symbol.isPrimitiveValueClass) {
generatedType = classBTypeFromSymbol(claszSymbol)
}
}
case DesugaredSelect(Ident(nme.EMPTY_PACKAGE), module) =>
assert(tree.symbol.is(Module), s"Selection of non-module from empty package: $tree sym: ${tree.symbol} at: ${tree.span}")
genLoadModule(tree)
case DesugaredSelect(qualifier, _) =>
val sym = tree.symbol
generatedType = symInfoTK(sym)
val qualSafeToElide = tpd.isIdempotentExpr(qualifier)
def genLoadQualUnlessElidable(): Unit = { if (!qualSafeToElide) { genLoadQualifier(tree) } }
// receiverClass is used in the bytecode to access the field. using sym.owner may lead to IllegalAccessError
def receiverClass = qualifier.tpe.typeSymbol
if (sym.is(Module)) {
genLoadQualUnlessElidable()
genLoadModule(tree)
} else if (sym.isStaticMember) {
genLoadQualUnlessElidable()
fieldLoad(sym, receiverClass)
} else {
genLoadQualifier(tree)
fieldLoad(sym, receiverClass)
}
case t @ Ident(name) =>
val sym = tree.symbol
val tk = symInfoTK(sym)
generatedType = tk
val desugared = cachedDesugarIdent(t)
desugared match {
case None =>
if (!sym.is(Package)) {
if (sym.is(Module)) genLoadModule(sym)
else locals.load(sym)
}
case Some(t) =>
genLoad(t, generatedType)
}
case Literal(value) =>
if (value.tag != UnitTag) (value.tag, expectedType) match {
case (IntTag, LONG ) => bc.lconst(value.longValue); generatedType = LONG
case (FloatTag, DOUBLE) => bc.dconst(value.doubleValue); generatedType = DOUBLE
case (NullTag, _ ) => bc.emit(asm.Opcodes.ACONST_NULL); generatedType = RT_NULL
case _ => genConstant(value); generatedType = tpeTK(tree)
}
case blck @ Block(stats, expr) =>
          if (stats.isEmpty)
genLoad(expr, expectedType)
else genBlock(blck, expectedType)
case Typed(Super(_, _), _) => genLoad(tpd.This(claszSymbol.asClass), expectedType)
case Typed(expr, _) => genLoad(expr, expectedType)
case Assign(_, _) =>
generatedType = UNIT
genStat(tree)
case av @ ArrayValue(_, _) =>
generatedType = genArrayValue(av)
case mtch @ Match(_, _) =>
generatedType = genMatch(mtch)
case tpd.EmptyTree => if (expectedType != UNIT) { emitZeroOf(expectedType) }
case t: TypeApply => // dotty specific
generatedType = genTypeApply(t)
case _ => abort(s"Unexpected tree in genLoad: $tree/${tree.getClass} at: ${tree.span}")
}
// emit conversion
if (generatedType != expectedType) {
adapt(generatedType, expectedType)
}
} // end of GenBCode.genLoad()
// ---------------- field load and store ----------------
/*
* must-single-thread
*/
def fieldLoad( field: Symbol, hostClass: Symbol = null): Unit = fieldOp(field, isLoad = true, hostClass)
/*
* must-single-thread
*/
def fieldStore(field: Symbol, hostClass: Symbol = null): Unit = fieldOp(field, isLoad = false, hostClass)
/*
* must-single-thread
*/
private def fieldOp(field: Symbol, isLoad: Boolean, specificReceiver: Symbol): Unit = {
val useSpecificReceiver = specificReceiver != null && !field.isScalaStatic
val owner = internalName(if (useSpecificReceiver) specificReceiver else field.owner)
val fieldJName = field.javaSimpleName
val fieldDescr = symInfoTK(field).descriptor
val isStatic = field.isStaticMember
val opc =
if (isLoad) { if (isStatic) asm.Opcodes.GETSTATIC else asm.Opcodes.GETFIELD }
else { if (isStatic) asm.Opcodes.PUTSTATIC else asm.Opcodes.PUTFIELD }
mnode.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
}
// ---------------- emitting constant values ----------------
/*
* For ClazzTag:
* must-single-thread
* Otherwise it's safe to call from multiple threads.
*/
def genConstant(const: Constant): Unit = {
(const.tag/*: @switch*/) match {
case BooleanTag => bc.boolconst(const.booleanValue)
case ByteTag => bc.iconst(const.byteValue)
case ShortTag => bc.iconst(const.shortValue)
case CharTag => bc.iconst(const.charValue)
case IntTag => bc.iconst(const.intValue)
case LongTag => bc.lconst(const.longValue)
case FloatTag => bc.fconst(const.floatValue)
case DoubleTag => bc.dconst(const.doubleValue)
case UnitTag => ()
case StringTag =>
assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
mnode.visitLdcInsn(const.stringValue) // `stringValue` special-cases null, but not for a const with StringTag
case NullTag => emit(asm.Opcodes.ACONST_NULL)
case ClazzTag =>
val tp = toTypeKind(const.typeValue)
if tp.isPrimitive then
val boxedClass = boxedClassOfPrimitive(tp.asPrimitiveBType)
mnode.visitFieldInsn(
asm.Opcodes.GETSTATIC,
boxedClass.internalName,
"TYPE", // field name
jlClassRef.descriptor
)
else
mnode.visitLdcInsn(tp.toASMType)
case _ => abort(s"Unknown constant value: $const")
}
}
private def genLabeled(tree: Labeled): BType = tree match {
case Labeled(bind, expr) =>
val resKind = tpeTK(tree)
genLoad(expr, resKind)
markProgramPoint(programPoint(bind.symbol))
resKind
}
private def genReturn(r: Return): Unit = {
val expr: Tree = r.expr
val fromSym: Symbol = if (r.from.symbol.is(LabelFlag)) r.from.symbol else NoSymbol
if (NoSymbol == fromSym) {
// return from enclosing method
val returnedKind = tpeTK(expr)
genLoad(expr, returnedKind)
adapt(returnedKind, returnType)
val saveReturnValue = (returnType != UNIT)
lineNumber(r)
cleanups match {
case Nil =>
            // not an assertion: !shouldEmitCleanup (at least not yet; pendingCleanups() may still have to run and reset `shouldEmitCleanup`).
bc emitRETURN returnType
case nextCleanup :: rest =>
if (saveReturnValue) {
// regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted.
if (earlyReturnVar == null) {
earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar", expr.tpe, expr.span)
}
locals.store(earlyReturnVar)
}
bc goTo nextCleanup
shouldEmitCleanup = true
}
} else {
// return from labeled
assert(fromSym.is(LabelFlag), fromSym)
assert(!fromSym.is(Method), fromSym)
/* TODO At the moment, we disregard cleanups, because by construction we don't have return-from-labels
* that cross cleanup boundaries. However, in theory such crossings are valid, so we should take care
* of them.
*/
val resultKind = toTypeKind(fromSym.info)
genLoad(expr, resultKind)
lineNumber(r)
bc goTo programPoint(fromSym)
}
} // end of genReturn()
    def genWhileDo(tree: WhileDo): BType = tree match {
case WhileDo(cond, body) =>
val isInfinite = cond == tpd.EmptyTree
val loop = new asm.Label
markProgramPoint(loop)
if (isInfinite) {
genLoad(body, UNIT)
bc goTo loop
RT_NOTHING
} else {
        // `do body while (cond)` desugars to `while ({ body; cond }) ()`, so a
        // unit-literal loop body signals the do..while shape; hence we inspect `body`.
        val hasBody = body match {
          case Literal(value) if value.tag == UnitTag => false
          case _ => true
        }
if (hasBody) {
val success = new asm.Label
val failure = new asm.Label
genCond(cond, success, failure, targetIfNoJump = success)
markProgramPoint(success)
genLoad(body, UNIT)
bc goTo loop
markProgramPoint(failure)
} else {
// this is the shape of do..while loops, so do something smart about them
val failure = new asm.Label
genCond(cond, loop, failure, targetIfNoJump = failure)
markProgramPoint(failure)
}
UNIT
}
}
def genTypeApply(t: TypeApply): BType = (t: @unchecked) match {
case TypeApply(fun@DesugaredSelect(obj, _), targs) =>
val sym = fun.symbol
val cast =
if (sym == defn.Any_isInstanceOf) false
else if (sym == defn.Any_asInstanceOf) true
else abort(s"Unexpected type application $fun[sym: ${sym.showFullName}] in: $t")
val l = tpeTK(obj)
val r = tpeTK(targs.head)
genLoadQualifier(fun)
// TODO @lry make pattern match
if (l.isPrimitive && r.isPrimitive)
genConversion(l, r, cast)
else if (l.isPrimitive) {
bc drop l
if (cast) {
mnode.visitTypeInsn(asm.Opcodes.NEW, classCastExceptionReference.internalName)
bc dup ObjectReference
emit(asm.Opcodes.ATHROW)
} else {
bc boolconst false
}
}
else if (r.isPrimitive && cast) {
abort(s"Erasure should have added an unboxing operation to prevent this cast. Tree: $t")
}
else if (r.isPrimitive) {
bc isInstance boxedClassOfPrimitive(r.asPrimitiveBType)
}
else {
assert(r.isRef, r) // ensure that it's not a method
genCast(r.asRefBType, cast)
}
if (cast) r else BOOL
} // end of genTypeApply()
private def mkArrayConstructorCall(arr: ArrayBType, app: Apply, args: List[Tree]) = {
val dims = arr.dimension
var elemKind = arr.elementType
val argsSize = args.length
if (argsSize > dims) {
report.error(s"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)", ctx.source.atSpan(app.span))
}
if (argsSize < dims) {
/* In one step:
* elemKind = new BType(BType.ARRAY, arr.off + argsSize, arr.len - argsSize)
       * however the above does not enter a TypeName for each nested array in chrs.
*/
for (i <- args.length until dims) elemKind = ArrayBType(elemKind)
}
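      // Example (sketch): `new Array[Array[Int]](3)` has dims = 2 but only one size
      // argument, so elemKind becomes the array type `[I` and a single ANEWARRAY of
      // `[I` is emitted for the outer dimension.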
genLoadArguments(args, List.fill(args.size)(INT))
(argsSize /*: @switch*/) match {
case 1 => bc newarray elemKind
case _ =>
val descr = ("[" * argsSize) + elemKind.descriptor // denotes the same as: arrayN(elemKind, argsSize).descriptor
mnode.visitMultiANewArrayInsn(descr, argsSize)
}
}
private def genApply(app: Apply, expectedType: BType): BType = {
var generatedType = expectedType
lineNumber(app)
app match {
case Apply(_, args) if app.symbol eq defn.newArrayMethod =>
val List(elemClaz, Literal(c: Constant), ArrayValue(_, dims)) = args
generatedType = toTypeKind(c.typeValue)
mkArrayConstructorCall(generatedType.asArrayBType, app, dims)
case Apply(t :TypeApply, _) =>
generatedType =
if (t.symbol ne defn.Object_synchronized) genTypeApply(t)
else genSynchronized(app, expectedType)
case Apply(fun @ DesugaredSelect(Super(superQual, _), _), args) =>
// 'super' call: Note: since constructors are supposed to
// return an instance of what they construct, we have to take
// special care. On JVM they are 'void', and Scala forbids (syntactically)
// to call super constructors explicitly and/or use their 'returned' value.
        // Therefore, we can ignore this fact and generate code that leaves nothing
// on the stack (contrary to what the type in the AST says).
// scala/bug#10290: qual can be `this.$outer()` (not just `this`), so we call genLoad (not just ALOAD_0)
genLoad(superQual)
genLoadArguments(args, paramTKs(app))
generatedType = genCallMethod(fun.symbol, InvokeStyle.Super, app.span)
// 'new' constructor call: Note: since constructors are
// thought to return an instance of what they construct,
// we have to 'simulate' it by DUPlicating the freshly created
// instance (on JVM, <init> methods return VOID).
case Apply(fun @ DesugaredSelect(New(tpt), nme.CONSTRUCTOR), args) =>
val ctor = fun.symbol
assert(ctor.isClassConstructor, s"'new' call to non-constructor: ${ctor.name}")
generatedType = toTypeKind(tpt.tpe)
assert(generatedType.isRef, s"Non reference type cannot be instantiated: $generatedType")
generatedType match {
case arr: ArrayBType =>
mkArrayConstructorCall(arr, app, args)
case rt: ClassBType =>
assert(classBTypeFromSymbol(ctor.owner) == rt, s"Symbol ${ctor.owner.showFullName} is different from $rt")
mnode.visitTypeInsn(asm.Opcodes.NEW, rt.internalName)
bc dup generatedType
genLoadArguments(args, paramTKs(app))
genCallMethod(ctor, InvokeStyle.Special, app.span)
case _ =>
abort(s"Cannot instantiate $tpt of kind: $generatedType")
}
case Apply(fun, List(expr)) if Erasure.Boxing.isBox(fun.symbol) && fun.symbol.denot.owner != defn.UnitModuleClass =>
val nativeKind = tpeTK(expr)
genLoad(expr, nativeKind)
val MethodNameAndType(mname, methodType) = asmBoxTo(nativeKind)
bc.invokestatic(BoxesRunTime.internalName, mname, methodType.descriptor, itf = false)
generatedType = boxResultType(fun.symbol) // was toTypeKind(fun.symbol.tpe.resultType)
case Apply(fun, List(expr)) if Erasure.Boxing.isUnbox(fun.symbol) && fun.symbol.denot.owner != defn.UnitModuleClass =>
genLoad(expr)
val boxType = unboxResultType(fun.symbol) // was toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
generatedType = boxType
val MethodNameAndType(mname, methodType) = asmUnboxTo(boxType)
bc.invokestatic(BoxesRunTime.internalName, mname, methodType.descriptor, itf = false)
case app @ Apply(fun, args) =>
val sym = fun.symbol
if (isPrimitive(fun)) { // primitive method call
generatedType = genPrimitiveOp(app, expectedType)
} else { // normal method call
val invokeStyle =
if (sym.isStaticMember) InvokeStyle.Static
else if (sym.is(Private) || sym.isClassConstructor) InvokeStyle.Special
else if (app.hasAttachment(BCodeHelpers.UseInvokeSpecial)) InvokeStyle.Special
else InvokeStyle.Virtual
if (invokeStyle.hasInstance) genLoadQualifier(fun)
genLoadArguments(args, paramTKs(app))
val DesugaredSelect(qual, name) = fun // fun is a Select, also checked in genLoadQualifier
val isArrayClone = name == nme.clone_ && qual.tpe.widen.isInstanceOf[JavaArrayType]
if (isArrayClone) {
// Special-case Array.clone, introduced in 36ef60e. The goal is to generate this call
// as "[I.clone" instead of "java/lang/Object.clone". This is consistent with javac.
// Arrays have a public method `clone` (jls 10.7).
//
// The JVMS is not explicit about this, but that receiver type can be an array type
// descriptor (instead of a class internal name):
            //   invokevirtual #2; //Method "[I".clone:()Ljava/lang/Object;
//
// Note that using `Object.clone()` would work as well, but only because the JVM
// relaxes protected access specifically if the receiver is an array:
// http://hg.openjdk.java.net/jdk8/jdk8/hotspot/file/87ee5ee27509/src/share/vm/interpreter/linkResolver.cpp#l439
// Example: `class C { override def clone(): Object = "hi" }`
// Emitting `def f(c: C) = c.clone()` as `Object.clone()` gives a VerifyError.
val target: String = tpeTK(qual).asRefBType.classOrArrayType
val methodBType = asmMethodType(sym)
bc.invokevirtual(target, sym.javaSimpleName, methodBType.descriptor)
generatedType = methodBType.returnType
} else {
val receiverClass = if (!invokeStyle.isVirtual) null else {
              // receiverClass is used in the bytecode as the method receiver. Using sym.owner
// may lead to IllegalAccessErrors, see 9954eaf / aladdin bug 455.
val qualSym = qual.tpe.typeSymbol
if (qualSym == defn.ArrayClass) {
// For invocations like `Array(1).hashCode` or `.wait()`, use Object as receiver
// in the bytecode. Using the array descriptor (like we do for clone above) seems
// to work as well, but it seems safer not to change this. Javac also uses Object.
// Note that array apply/update/length are handled by isPrimitive (above).
assert(sym.owner == defn.ObjectClass, s"unexpected array call: $app")
defn.ObjectClass
} else qualSym
}
generatedType = genCallMethod(sym, invokeStyle, app.span, receiverClass)
}
}
}
generatedType
} // end of genApply()
private def genArrayValue(av: tpd.JavaSeqLiteral): BType = {
val ArrayValue(tpt, elems) = av
lineNumber(av)
genArray(elems, tpt)
}
private def genArray(elems: List[Tree], elemType: Type): BType = {
val elmKind = toTypeKind(elemType)
val generatedType = ArrayBType(elmKind)
bc iconst elems.length
bc newarray elmKind
var i = 0
var rest = elems
while (!rest.isEmpty) {
bc dup generatedType
bc iconst i
genLoad(rest.head, elmKind)
bc astore elmKind
rest = rest.tail
i = i + 1
}
generatedType
}
/* A Match node contains one or more case clauses, each case clause lists one or more
* Int/String values to use as keys, and a code block. The exception is the "default" case
* clause which doesn't list any key (there is exactly one of these per match).
*/
private def genMatch(tree: Match): BType = tree match {
case Match(selector, cases) =>
lineNumber(tree)
val generatedType = tpeTK(tree)
val postMatch = new asm.Label
// Only two possible selector types exist in `Match` trees at this point: Int and String
if (tpeTK(selector) == INT) {
/* On a first pass over the case clauses, we flatten the keys and their
* targets (the latter represented with asm.Labels). That representation
* allows JCodeMethodV to emit a lookupswitch or a tableswitch.
*
* On a second pass, we emit the switch blocks, one for each different target.
*/
var flatKeys: List[Int] = Nil
var targets: List[asm.Label] = Nil
var default: asm.Label = null
var switchBlocks: List[(asm.Label, Tree)] = Nil
genLoad(selector, INT)
// collect switch blocks and their keys, but don't emit yet any switch-block.
for (caze @ CaseDef(pat, guard, body) <- cases) {
assert(guard == tpd.EmptyTree, guard)
val switchBlockPoint = new asm.Label
switchBlocks ::= (switchBlockPoint, body)
pat match {
case Literal(value) =>
flatKeys ::= value.intValue
targets ::= switchBlockPoint
case Ident(nme.WILDCARD) =>
assert(default == null, s"multiple default targets in a Match node, at ${tree.span}")
default = switchBlockPoint
case Alternative(alts) =>
alts foreach {
case Literal(value) =>
flatKeys ::= value.intValue
targets ::= switchBlockPoint
case _ =>
abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}")
}
case _ =>
abort(s"Invalid pattern in Match node: $tree at: ${tree.span}")
}
}
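            // emitSWITCH chooses between TABLESWITCH and LOOKUPSWITCH based on how
            // dense the keys are (MIN_SWITCH_DENSITY), much like javac does.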
bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY)
// emit switch-blocks.
for (sb <- switchBlocks.reverse) {
val (caseLabel, caseBody) = sb
markProgramPoint(caseLabel)
genLoad(caseBody, generatedType)
bc goTo postMatch
}
} else {
/* Since the JVM doesn't have a way to switch on a string, we switch
* on the `hashCode` of the string then do an `equals` check (with a
             * possible second set of jumps if blocks can be reached from multiple
* string alternatives).
*
* This mirrors the way that Java compiles `switch` on Strings.
*/
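            /* Sketch of the lowering (hypothetical input, for illustration):
             *   s match { case "a" => 1; case "b" => 2; case _ => 0 }
             * becomes roughly
             *   (if (s == null) 0 else s.hashCode) match {
             *     case 97 => if ("a" == s) <case "a"> else <default>
             *     case 98 => if ("b" == s) <case "b"> else <default>
             *     case _  => <default>
             *   }
             */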
var default: asm.Label = null
var indirectBlocks: List[(asm.Label, Tree)] = Nil
// Cases grouped by their hashCode
val casesByHash = SortedMap.empty[Int, List[(String, Either[asm.Label, Tree])]]
var caseFallback: Tree = null
for (caze @ CaseDef(pat, guard, body) <- cases) {
assert(guard == tpd.EmptyTree, guard)
pat match {
case Literal(value) =>
val strValue = value.stringValue
casesByHash.updateWith(strValue.##) { existingCasesOpt =>
val newCase = (strValue, Right(body))
Some(newCase :: existingCasesOpt.getOrElse(Nil))
}
case Ident(nme.WILDCARD) =>
assert(default == null, s"multiple default targets in a Match node, at ${tree.span}")
default = new asm.Label
indirectBlocks ::= (default, body)
case Alternative(alts) =>
// We need an extra basic block since multiple strings can lead to this code
val indirectCaseGroupLabel = new asm.Label
indirectBlocks ::= (indirectCaseGroupLabel, body)
alts foreach {
case Literal(value) =>
val strValue = value.stringValue
casesByHash.updateWith(strValue.##) { existingCasesOpt =>
val newCase = (strValue, Left(indirectCaseGroupLabel))
Some(newCase :: existingCasesOpt.getOrElse(Nil))
}
case _ =>
abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}")
}
case _ =>
abort(s"Invalid pattern in Match node: $tree at: ${tree.span}")
}
}
// Organize the hashCode options into switch cases
var flatKeys: List[Int] = Nil
var targets: List[asm.Label] = Nil
var hashBlocks: List[(asm.Label, List[(String, Either[asm.Label, Tree])])] = Nil
for ((hashValue, hashCases) <- casesByHash) {
val switchBlockPoint = new asm.Label
hashBlocks ::= (switchBlockPoint, hashCases)
flatKeys ::= hashValue
targets ::= switchBlockPoint
}
            // Push the hashCode of the string (or `0` if it is `null`) onto the stack and switch on it
genLoadIf(
If(
tree.selector.select(defn.Any_==).appliedTo(nullLiteral),
Literal(Constant(0)),
tree.selector.select(defn.Any_hashCode).appliedToNone
),
INT
)
bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY)
// emit blocks for each hash case
for ((hashLabel, caseAlternatives) <- hashBlocks.reverse) {
markProgramPoint(hashLabel)
for ((caseString, indirectLblOrBody) <- caseAlternatives) {
val comparison = if (caseString == null) defn.Any_== else defn.Any_equals
                val condp = Literal(Constant(caseString)).select(comparison).appliedTo(tree.selector)
val keepGoing = new asm.Label
indirectLblOrBody match {
case Left(jump) =>
genCond(condp, jump, keepGoing, targetIfNoJump = keepGoing)
case Right(caseBody) =>
val thisCaseMatches = new asm.Label
genCond(condp, thisCaseMatches, keepGoing, targetIfNoJump = thisCaseMatches)
markProgramPoint(thisCaseMatches)
genLoad(caseBody, generatedType)
bc goTo postMatch
}
markProgramPoint(keepGoing)
}
bc goTo default
}
// emit blocks for common patterns
for ((caseLabel, caseBody) <- indirectBlocks.reverse) {
markProgramPoint(caseLabel)
genLoad(caseBody, generatedType)
bc goTo postMatch
}
}
markProgramPoint(postMatch)
generatedType
}
def genBlock(tree: Block, expectedType: BType) = tree match {
case Block(stats, expr) =>
val savedScope = varsInScope
varsInScope = Nil
stats foreach genStat
genLoad(expr, expectedType)
val end = currProgramPoint()
if (emitVars) {
// add entries to LocalVariableTable JVM attribute
for ((sym, start) <- varsInScope.reverse) {
emitLocalVarScope(sym, start, end)
}
}
varsInScope = savedScope
}
def adapt(from: BType, to: BType): Unit = {
if (!from.conformsTo(to)) {
to match {
case UNIT => bc drop from
case _ => bc.emitT2T(from, to)
}
} else if (from.isNothingType) {
      /* There are two possibilities for from.isNothingType: emitting a "throw e" expression and
* loading a (phantom) value of type Nothing.
*
* The Nothing type in Scala's type system does not exist in the JVM. In bytecode, Nothing
* is mapped to scala.runtime.Nothing$. To the JVM, a call to Predef.??? looks like it would
* return an object of type Nothing$. We need to do something with that phantom object on
* the stack. "Phantom" because it never exists: such methods always throw, but the JVM does
* not know that.
*
* Note: The two verifiers (old: type inference, new: type checking) have different
* requirements. Very briefly:
*
* Old (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.2.1): at
* each program point, no matter what branches were taken to get there
* - Stack is same size and has same typed values
* - Local and stack values need to have consistent types
* - In practice, the old verifier seems to ignore unreachable code and accept any
* instructions after an ATHROW. For example, there can be another ATHROW (without
* loading another throwable first).
*
* New (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1)
* - Requires consistent stack map frames. GenBCode generates stack frames if -target:jvm-1.6
* or higher.
* - In practice: the ASM library computes stack map frames for us (ClassWriter). Emitting
* correct frames after an ATHROW is probably complex, so ASM uses the following strategy:
* - Every time when generating an ATHROW, a new basic block is started.
* - During classfile writing, such basic blocks are found to be dead: no branches go there
* - Eliminating dead code would probably require complex shifts in the output byte buffer
       * - But there's an easy solution: replace all code in the dead block with
       *   `nop; nop; ... nop; athrow`, making sure the bytecode size stays the same
       * - The corresponding stack frame can be easily generated: on entering the dead block,
* the frame requires a single Throwable on the stack.
* - Since there are no branches to the dead block, the frame requirements are never violated.
*
* To summarize the above: it does matter what we emit after an ATHROW.
*
* NOW: if we end up here because we emitted a load of a (phantom) value of type Nothing$,
* there was no ATHROW emitted. So, we have to make the verifier happy and do something
* with that value. Since Nothing$ extends Throwable, the easiest is to just emit an ATHROW.
*
* If we ended up here because we generated a "throw e" expression, we know the last
* emitted instruction was an ATHROW. As explained above, it is OK to emit a second ATHROW,
* the verifiers will be happy.
*/
if (lastInsn.getOpcode != asm.Opcodes.ATHROW)
emit(asm.Opcodes.ATHROW)
} else if (from.isNullType) {
/* After loading an expression of type `scala.runtime.Null$`, introduce POP; ACONST_NULL.
* This is required to pass the verifier: in Scala's type system, Null conforms to any
* reference type. In bytecode, the type Null is represented by scala.runtime.Null$, which
* is not a subtype of all reference types. Example:
*
* def nl: Null = null // in bytecode, nl has return type scala.runtime.Null$
* val a: String = nl // OK for Scala but not for the JVM, scala.runtime.Null$ does not conform to String
*
* In order to fix the above problem, the value returned by nl is dropped and ACONST_NULL is
* inserted instead - after all, an expression of type scala.runtime.Null$ can only be null.
*/
if (lastInsn.getOpcode != asm.Opcodes.ACONST_NULL) {
bc drop from
emit(asm.Opcodes.ACONST_NULL)
}
}
else (from, to) match {
case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG)
case _ => ()
}
}
    /* Emit code to load the qualifier of `tree` on top of the stack. */
def genLoadQualifier(tree: Tree): Unit = {
lineNumber(tree)
tree match {
case DesugaredSelect(qualifier, _) => genLoad(qualifier)
case t: Ident => // dotty specific
cachedDesugarIdent(t) match {
case Some(sel) => genLoadQualifier(sel)
case None =>
assert(t.symbol.owner == this.claszSymbol)
}
case _ => abort(s"Unknown qualifier $tree")
}
}
def genLoadArguments(args: List[Tree], btpes: List[BType]): Unit =
args match
case arg :: args1 =>
btpes match
case btpe :: btpes1 =>
genLoad(arg, btpe)
genLoadArguments(args1, btpes1)
case _ =>
case _ =>
def genLoadModule(tree: Tree): BType = {
val module = (
if (!tree.symbol.is(PackageClass)) tree.symbol
else tree.symbol.info.member(nme.PACKAGE).symbol match {
case NoSymbol => abort(s"SI-5604: Cannot use package as value: $tree")
case s => abort(s"SI-5604: found package class where package object expected: $tree")
}
)
lineNumber(tree)
genLoadModule(module)
symInfoTK(module)
}
def genLoadModule(module: Symbol): Unit = {
def inStaticMethod = methSymbol != null && methSymbol.isStaticMember
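      // Inside the module class itself we can reuse `this` instead of reading the static
      // MODULE$ field, e.g. (sketch) a self-reference in `object O { def f = O }` emits
      // ALOAD 0. `readResolve` and static methods must still go through GETSTATIC.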
if (claszSymbol == module.moduleClass && jMethodName != "readResolve" && !inStaticMethod) {
mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
} else {
val mbt = symInfoTK(module).asClassBType
mnode.visitFieldInsn(
asm.Opcodes.GETSTATIC,
mbt.internalName /* + "$" */ ,
str.MODULE_INSTANCE_FIELD,
mbt.descriptor // for nostalgics: toTypeKind(module.tpe).descriptor
)
}
}
def genConversion(from: BType, to: BType, cast: Boolean): Unit = {
if (cast) { bc.emitT2T(from, to) }
else {
bc drop from
bc boolconst (from == to)
}
}
def genCast(to: RefBType, cast: Boolean): Unit = {
if (cast) { bc checkCast to }
else { bc isInstance to }
}
/* Is the given symbol a primitive operation? */
def isPrimitive(fun: Tree): Boolean = {
primitives.isPrimitive(fun)
}
/* Generate coercion denoted by "code" */
def genCoercion(code: Int): Unit = {
import ScalaPrimitivesOps._
(code: @switch) match {
case B2B | S2S | C2C | I2I | L2L | F2F | D2D => ()
case _ =>
val from = coercionFrom(code)
val to = coercionTo(code)
bc.emitT2T(from, to)
}
}
/* Generate string concatenation
*
* On JDK 8: create and append using `StringBuilder`
* On JDK 9+: use `invokedynamic` with `StringConcatFactory`
*/
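    // Example (editor's sketch): for `"a" + x + "b"` the two strategies emit roughly
    //   JDK 8:  new StringBuilder().append("a").append(x).append("b").toString()
    //   JDK 9+: an invokedynamic call bootstrapped by StringConcatFactory.makeConcatWithConstants,
    //           whose recipe is "a" + TagArg + "b", TagArg marking the hole for `x`.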
def genStringConcat(tree: Tree): BType = {
lineNumber(tree)
liftStringConcat(tree) match {
// Optimization for expressions of the form "" + x
case List(Literal(Constant("")), arg) =>
genLoad(arg, ObjectReference)
genCallMethod(defn.String_valueOf_Object, InvokeStyle.Static)
case concatenations =>
val concatArguments = concatenations.view
.filter {
case Literal(Constant("")) => false // empty strings are no-ops in concatenation
case _ => true
}
.map {
case Apply(boxOp, value :: Nil) if Erasure.Boxing.isBox(boxOp.symbol) && boxOp.symbol.denot.owner != defn.UnitModuleClass =>
// Eliminate boxing of primitive values. Boxing is introduced by erasure because
// there's only a single synthetic `+` method "added" to the string class.
value
case other => other
}
.toList
          // `StringConcatFactory` was only added in JDK 9, so use `StringBuilder` for lower classfile versions
if (classfileVersion < asm.Opcodes.V9) {
// Estimate capacity needed for the string builder
val approxBuilderSize = concatArguments.view.map {
case Literal(Constant(s: String)) => s.length
case Literal(c @ Constant(_)) if c.isNonUnitAnyVal => String.valueOf(c).length
case _ => 0
}.sum
bc.genNewStringBuilder(approxBuilderSize)
for (elem <- concatArguments) {
val elemType = tpeTK(elem)
genLoad(elem, elemType)
bc.genStringBuilderAppend(elemType)
}
bc.genStringBuilderEnd
} else {
/* `StringConcatFactory#makeConcatWithConstants` accepts max 200 argument slots. If
* the string concatenation is longer (unlikely), we spill into multiple calls
*/
val MaxIndySlots = 200
val TagArg = '\\u0001' // indicates a hole (in the recipe string) for an argument
val TagConst = '\\u0002' // indicates a hole (in the recipe string) for a constant
val recipe = new StringBuilder()
val argTypes = Seq.newBuilder[asm.Type]
val constVals = Seq.newBuilder[String]
var totalArgSlots = 0
          var countConcats = 1 // i.e. 1 + how many times we spilled
for (elem <- concatArguments) {
val tpe = tpeTK(elem)
val elemSlots = tpe.size
// Unlikely spill case
if (totalArgSlots + elemSlots >= MaxIndySlots) {
bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result())
countConcats += 1
totalArgSlots = 0
recipe.setLength(0)
argTypes.clear()
constVals.clear()
}
elem match {
case Literal(Constant(s: String)) =>
if (s.contains(TagArg) || s.contains(TagConst)) {
totalArgSlots += elemSlots
recipe.append(TagConst)
constVals += s
} else {
recipe.append(s)
}
case other =>
totalArgSlots += elemSlots
recipe.append(TagArg)
val tpe = tpeTK(elem)
argTypes += tpe.toASMType
genLoad(elem, tpe)
}
}
bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result())
// If we spilled, generate one final concat
if (countConcats > 1) {
bc.genIndyStringConcat(
TagArg.toString * countConcats,
Seq.fill(countConcats)(StringRef.toASMType),
Seq.empty
)
}
}
}
StringRef
}
/**
* Generate a method invocation. If `specificReceiver != null`, it is used as receiver in the
* invocation instruction, otherwise `method.owner`. A specific receiver class is needed to
* prevent an IllegalAccessError, (aladdin bug 455).
*/
def genCallMethod(method: Symbol, style: InvokeStyle, pos: Span = NoSpan, specificReceiver: Symbol = null): BType = {
val methodOwner = method.owner
// the class used in the invocation's method descriptor in the classfile
val receiverClass = {
if (specificReceiver != null)
assert(style.isVirtual || specificReceiver == methodOwner, s"specificReceiver can only be specified for virtual calls. $method - $specificReceiver")
val useSpecificReceiver = specificReceiver != null && !defn.isBottomClass(specificReceiver) && !method.isScalaStatic
val receiver = if (useSpecificReceiver) specificReceiver else methodOwner
// workaround for a JVM bug: https://bugs.openjdk.java.net/browse/JDK-8154587
// when an interface method overrides a member of Object (note that all interfaces implicitly
// have superclass Object), the receiver needs to be the interface declaring the override (and
// not a sub-interface that inherits it). example:
// trait T { override def clone(): Object = "" }
// trait U extends T
// class C extends U
// class D { def f(u: U) = u.clone() }
// The invocation `u.clone()` needs `T` as a receiver:
// - using Object is illegal, as Object.clone is protected
      // - using U results in a `NoSuchMethodError: U.clone`. This is the JVM bug.
// Note that a mixin forwarder is generated, so the correct method is executed in the end:
// class C { override def clone(): Object = super[T].clone() }
val isTraitMethodOverridingObjectMember = {
receiver != methodOwner && // fast path - the boolean is used to pick either of these two, if they are the same it does not matter
style.isVirtual &&
isEmittedInterface(receiver) &&
defn.ObjectType.decl(method.name).symbol.exists && { // fast path - compute overrideChain on the next line only if necessary
val syms = method.allOverriddenSymbols.toList
!syms.isEmpty && syms.last.owner == defn.ObjectClass
}
}
if (isTraitMethodOverridingObjectMember) methodOwner else receiver
}
      receiverClass.info // ensure the type is up to date; erasure may add lateINTERFACE to traits
val receiverName = internalName(receiverClass)
val jname = method.javaSimpleName
val bmType = asmMethodType(method)
val mdescr = bmType.descriptor
val isInterface = isEmittedInterface(receiverClass)
import InvokeStyle._
if (style == Super) {
if (isInterface && !method.is(JavaDefined)) {
val args = new Array[BType](bmType.argumentTypes.length + 1)
val ownerBType = toTypeKind(method.owner.info)
bmType.argumentTypes.copyToArray(args, 1)
val staticDesc = MethodBType(ownerBType :: bmType.argumentTypes, bmType.returnType).descriptor
val staticName = traitSuperAccessorName(method)
bc.invokestatic(receiverName, staticName, staticDesc, isInterface)
} else {
bc.invokespecial(receiverName, jname, mdescr, isInterface)
}
} else {
val opc = style match {
case Static => Opcodes.INVOKESTATIC
case Special => Opcodes.INVOKESPECIAL
case Virtual => if (isInterface) Opcodes.INVOKEINTERFACE else Opcodes.INVOKEVIRTUAL
}
bc.emitInvoke(opc, receiverName, jname, mdescr, isInterface)
}
bmType.returnType
} // end of genCallMethod()
/* Generate the scala ## method. */
def genScalaHash(tree: Tree): BType = {
genLoad(tree, ObjectReference)
genCallMethod(NoSymbol, InvokeStyle.Static) // used to dispatch ## on primitives to ScalaRuntime.hash. Should be implemented by a miniphase
}
/*
* Returns a list of trees that each should be concatenated, from left to right.
* It turns a chained call like "a".+("b").+("c") into a list of arguments.
*/
def liftStringConcat(tree: Tree): List[Tree] = tree match {
case tree @ Apply(fun @ DesugaredSelect(larg, method), rarg) =>
if (isPrimitive(fun) &&
primitives.getPrimitive(tree, larg.tpe) == ScalaPrimitivesOps.CONCAT)
liftStringConcat(larg) ::: rarg
else
tree :: Nil
case _ =>
tree :: Nil
}
/* Emit code to compare the two top-most stack values using the 'op' operator. */
private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = {
if (targetIfNoJump == success) genCJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated)
else {
if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
bc.emitIF_ICMP(op, success)
} else if (tk.isRef) { // REFERENCE(_) | ARRAY(_)
bc.emitIF_ACMP(op, success)
} else {
import Primitives._
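          // FCMPG pushes 1 when either operand is NaN, FCMPL pushes -1. Pick the
          // variant that makes the comparison fail on NaN, so that e.g. both
          // `NaN < 0.0` and `NaN > 0.0` evaluate to false, as the spec requires.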
def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE
(tk: @unchecked) match {
case LONG => emit(asm.Opcodes.LCMP)
case FLOAT => emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL)
case DOUBLE => emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL)
}
bc.emitIF(op, success)
}
if (targetIfNoJump != failure) bc goTo failure
}
}
/* Emits code to compare (and consume) stack-top and zero using the 'op' operator */
private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = {
import Primitives._
if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated)
else {
if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
bc.emitIF(op, success)
} else if (tk.isRef) { // REFERENCE(_) | ARRAY(_)
(op: @unchecked) match { // references are only compared with EQ and NE
case EQ => bc emitIFNULL success
case NE => bc emitIFNONNULL success
}
} else {
def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE
(tk: @unchecked) match {
case LONG =>
emit(asm.Opcodes.LCONST_0)
emit(asm.Opcodes.LCMP)
case FLOAT =>
emit(asm.Opcodes.FCONST_0)
emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL)
case DOUBLE =>
emit(asm.Opcodes.DCONST_0)
emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL)
}
bc.emitIF(op, success)
}
if (targetIfNoJump != failure) bc goTo failure
}
}
def testOpForPrimitive(primitiveCode: Int) = (primitiveCode: @switch) match {
case ScalaPrimitivesOps.ID => Primitives.EQ
case ScalaPrimitivesOps.NI => Primitives.NE
case ScalaPrimitivesOps.EQ => Primitives.EQ
case ScalaPrimitivesOps.NE => Primitives.NE
case ScalaPrimitivesOps.LT => Primitives.LT
case ScalaPrimitivesOps.LE => Primitives.LE
case ScalaPrimitivesOps.GT => Primitives.GT
case ScalaPrimitivesOps.GE => Primitives.GE
}
/*
* Generate code for conditional expressions.
* The jump targets success/failure of the test are `then-target` and `else-target` resp.
*/
private def genCond(tree: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = {
def genComparisonOp(l: Tree, r: Tree, code: Int): Unit = {
val op = testOpForPrimitive(code)
def isNull(t: Tree): Boolean = t match {
case Literal(Constant(null)) => true
case _ => false
}
def ifOneIsNull(l: Tree, r: Tree): Tree = if (isNull(l)) r else if (isNull(r)) l else null
val nonNullSide = if (ScalaPrimitivesOps.isReferenceEqualityOp(code)) ifOneIsNull(l, r) else null
if (nonNullSide != null) {
// special-case reference (in)equality test for null (null eq x, x eq null)
genLoad(nonNullSide, ObjectReference)
genCZJUMP(success, failure, op, ObjectReference, targetIfNoJump)
} else {
val tk = tpeTK(l).maxType(tpeTK(r))
genLoad(l, tk)
genLoad(r, tk)
genCJUMP(success, failure, op, tk, targetIfNoJump)
}
}
def loadAndTestBoolean() = {
genLoad(tree, BOOL)
genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump)
}
lineNumber(tree)
tree match {
case tree @ Apply(fun, args) if primitives.isPrimitive(fun.symbol) =>
import ScalaPrimitivesOps.{ ZNOT, ZAND, ZOR, EQ }
// lhs and rhs of test
lazy val DesugaredSelect(lhs, _) = fun
val rhs = if (args.isEmpty) tpd.EmptyTree else args.head // args.isEmpty only for ZNOT
def genZandOrZor(and: Boolean): Unit = {
          // reaching "keepGoing" indicates the rhs should be evaluated too (i.e. not short-circuited).
val keepGoing = new asm.Label
if (and) genCond(lhs, keepGoing, failure, targetIfNoJump = keepGoing)
else genCond(lhs, success, keepGoing, targetIfNoJump = keepGoing)
markProgramPoint(keepGoing)
genCond(rhs, success, failure, targetIfNoJump)
}
primitives.getPrimitive(fun.symbol) match {
case ZNOT => genCond(lhs, failure, success, targetIfNoJump)
case ZAND => genZandOrZor(and = true)
case ZOR => genZandOrZor(and = false)
case code =>
if (ScalaPrimitivesOps.isUniversalEqualityOp(code) && tpeTK(lhs).isClass) {
// rewrite `==` to null tests and `equals`. not needed for arrays (`equals` is reference equality).
if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure, targetIfNoJump)
else genEqEqPrimitive(lhs, rhs, failure, success, targetIfNoJump)
} else if (ScalaPrimitivesOps.isComparisonOp(code)) {
genComparisonOp(lhs, rhs, code)
} else
loadAndTestBoolean()
}
case _ => loadAndTestBoolean()
}
} // end of genCond()
/*
     * Generate the "==" code for object references. It is the equivalent of
* if (l eq null) r eq null else l.equals(r);
*
* @param l left-hand-side of the '=='
* @param r right-hand-side of the '=='
*/
def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = {
/* True if the equality comparison is between values that require the use of the rich equality
* comparator (scala.runtime.Comparator.equals). This is the case when either side of the
* comparison might have a run-time type subtype of java.lang.Number or java.lang.Character.
       * When it is statically known that both sides have the same final type (a subtype of
       * Number or Character), the rich equality is not needed: their own equals method will do.
*/
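      /* Example (editor's note): `(1: Any) == (1L: Any)` must be true at run time, so
       * mixed boxed numeric comparisons have to go through BoxesRunTime rather than
       * java.lang.Integer.equals. */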
val mustUseAnyComparator: Boolean = {
val areSameFinals = l.tpe.typeSymbol.is(Final) && r.tpe.typeSymbol.is(Final) && (l.tpe =:= r.tpe)
// todo: remove
def isMaybeBoxed(sym: Symbol): Boolean = {
(sym == defn.ObjectClass) ||
(sym == defn.JavaSerializableClass) ||
(sym == defn.ComparableClass) ||
(sym derivesFrom defn.BoxedNumberClass) ||
(sym derivesFrom defn.BoxedCharClass) ||
(sym derivesFrom defn.BoxedBooleanClass)
}
!areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol)
}
def isNull(t: Tree): Boolean = t match {
case Literal(Constant(null)) => true
case _ => false
}
def isNonNullExpr(t: Tree): Boolean = t.isInstanceOf[Literal] || ((t.symbol ne null) && t.symbol.is(Module))
if (mustUseAnyComparator) {
val equalsMethod: Symbol = {
if (l.tpe <:< defn.BoxedNumberClass.info) {
if (r.tpe <:< defn.BoxedNumberClass.info) defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum)
else if (r.tpe <:< defn.BoxedCharClass.info) NoSymbol // ctx.requiredMethod(BoxesRunTimeTypeRef, nme.equalsNumChar) // this method is private
else defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject)
} else defn.BoxesRunTimeModule_externalEquals
}
genLoad(l, ObjectReference)
genLoad(r, ObjectReference)
genCallMethod(equalsMethod, InvokeStyle.Static)
genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump)
}
else {
if (isNull(l)) {
// null == expr -> expr eq null
genLoad(r, ObjectReference)
genCZJUMP(success, failure, Primitives.EQ, ObjectReference, targetIfNoJump)
} else if (isNull(r)) {
// expr == null -> expr eq null
genLoad(l, ObjectReference)
genCZJUMP(success, failure, Primitives.EQ, ObjectReference, targetIfNoJump)
} else if (isNonNullExpr(l)) {
// SI-7852 Avoid null check if L is statically non-null.
genLoad(l, ObjectReference)
genLoad(r, ObjectReference)
genCallMethod(defn.Any_equals, InvokeStyle.Virtual)
genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump)
} else {
// l == r -> if (l eq null) r eq null else l.equals(r)
val eqEqTempLocal = locals.makeLocal(ObjectReference, nme.EQEQ_LOCAL_VAR.mangledString, defn.ObjectType, r.span)
val lNull = new asm.Label
val lNonNull = new asm.Label
genLoad(l, ObjectReference)
genLoad(r, ObjectReference)
locals.store(eqEqTempLocal)
bc dup ObjectReference
genCZJUMP(lNull, lNonNull, Primitives.EQ, ObjectReference, targetIfNoJump = lNull)
markProgramPoint(lNull)
bc drop ObjectReference
locals.load(eqEqTempLocal)
genCZJUMP(success, failure, Primitives.EQ, ObjectReference, targetIfNoJump = lNonNull)
markProgramPoint(lNonNull)
locals.load(eqEqTempLocal)
genCallMethod(defn.Any_equals, InvokeStyle.Virtual)
genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump)
}
}
}
def genSynchronized(tree: Apply, expectedType: BType): BType
def genLoadTry(tree: Try): BType
def genInvokeDynamicLambda(ctor: Symbol, lambdaTarget: Symbol, environmentSize: Int, functionalInterface: Symbol): BType = {
import java.lang.invoke.LambdaMetafactory.{FLAG_BRIDGES, FLAG_SERIALIZABLE}
report.debuglog(s"Using invokedynamic rather than `new ${ctor.owner}`")
val generatedType = classBTypeFromSymbol(functionalInterface)
// Lambdas should be serializable if they implement a SAM that extends Serializable or if they
// implement a scala.Function* class.
val isSerializable = functionalInterface.isSerializable || defn.isFunctionClass(functionalInterface)
val isInterface = isEmittedInterface(lambdaTarget.owner)
val invokeStyle =
if (lambdaTarget.isStaticMember) asm.Opcodes.H_INVOKESTATIC
else if (lambdaTarget.is(Private) || lambdaTarget.isClassConstructor) asm.Opcodes.H_INVOKESPECIAL
else if (isInterface) asm.Opcodes.H_INVOKEINTERFACE
else asm.Opcodes.H_INVOKEVIRTUAL
val targetHandle =
new asm.Handle(invokeStyle,
classBTypeFromSymbol(lambdaTarget.owner).internalName,
lambdaTarget.javaSimpleName,
asmMethodType(lambdaTarget).descriptor,
/* itf = */ isInterface)
val (a,b) = lambdaTarget.info.firstParamTypes.splitAt(environmentSize)
var (capturedParamsTypes, lambdaParamTypes) = (a,b)
if (invokeStyle != asm.Opcodes.H_INVOKESTATIC) capturedParamsTypes = lambdaTarget.owner.info :: capturedParamsTypes
// Requires https://github.com/scala/scala-java8-compat on the runtime classpath
val returnUnit = lambdaTarget.info.resultType.typeSymbol == defn.UnitClass
val functionalInterfaceDesc: String = generatedType.descriptor
      val desc = capturedParamsTypes.map(tpe => toTypeKind(tpe)).mkString("(", "", ")") + functionalInterfaceDesc
// TODO specialization
val instantiatedMethodType = new MethodBType(lambdaParamTypes.map(p => toTypeKind(p)), toTypeKind(lambdaTarget.info.resultType)).toASMType
val samMethod = atPhase(erasurePhase) {
val samMethods = toDenot(functionalInterface).info.possibleSamMethods.toList
samMethods match {
case x :: Nil => x.symbol
case Nil => abort(s"${functionalInterface.show} is not a functional interface. It doesn't have abstract methods")
case xs => abort(s"${functionalInterface.show} is not a functional interface. " +
s"It has the following abstract methods: ${xs.map(_.name).mkString(", ")}")
}
}
val methodName = samMethod.javaSimpleName
val samMethodType = asmMethodType(samMethod).toASMType
// scala/bug#10334: make sure that a lambda object for `T => U` has a method `apply(T)U`, not only the `(Object)Object`
      // version. Using the lambda as a structural type `{def apply(t: T): U}` causes a reflective lookup for this method.
val needsGenericBridge = samMethodType != instantiatedMethodType
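      // e.g. (sketch) for `(s: String) => s.length`, the erased SAM type is
      // `apply(Object)Object` while the instantiated type is `(String)int`, so the
      // specific `apply` must be bridged for structural/reflective lookups to find it.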
val bridgeMethods = atPhase(erasurePhase){
samMethod.allOverriddenSymbols.toList
}
val overriddenMethodTypes = bridgeMethods.map(b => asmMethodType(b).toASMType)
// any methods which `samMethod` overrides need bridges made for them
// this is done automatically during erasure for classes we generate, but LMF needs to have them explicitly mentioned
// so we have to compute them at this relatively late point.
val bridgeTypes = (
if (needsGenericBridge)
instantiatedMethodType +: overriddenMethodTypes
else
overriddenMethodTypes
).distinct.filterNot(_ == samMethodType)
val needsBridges = bridgeTypes.nonEmpty
def flagIf(b: Boolean, flag: Int): Int = if (b) flag else 0
val flags = flagIf(isSerializable, FLAG_SERIALIZABLE) | flagIf(needsBridges, FLAG_BRIDGES)
val bsmArgs0 = Seq(samMethodType, targetHandle, instantiatedMethodType)
val bsmArgs1 = if (flags != 0) Seq(Int.box(flags)) else Seq.empty
val bsmArgs2 = if needsBridges then bridgeTypes.length +: bridgeTypes else Seq.empty
val bsmArgs = bsmArgs0 ++ bsmArgs1 ++ bsmArgs2
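      // Argument layout (editor's note): plain metafactory takes exactly
      // (samMethodType, implMethod, instantiatedMethodType), while altMetafactory
      // takes a flat argument list [samMethodType, implMethod, instantiatedMethodType,
      // flags, bridgeCount, bridgeTypes*] when FLAG_BRIDGES is set.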
val metafactory =
if (flags != 0)
lambdaMetaFactoryAltMetafactoryHandle // altMetafactory required to be able to pass the flags and additional arguments if needed
else
lambdaMetaFactoryMetafactoryHandle
bc.jmethod.visitInvokeDynamicInsn(methodName, desc, metafactory, bsmArgs: _*)
generatedType
}
}
/** Does this symbol actually correspond to an interface that will be emitted?
* In the backend, this should be preferred over `isInterface` because it
* also returns true for the symbols of the fake companion objects we
* create for Java-defined classes as well as for Java annotations
* which we represent as classes.
*/
private def isEmittedInterface(sym: Symbol): Boolean = sym.isInterface ||
sym.is(JavaDefined) && (toDenot(sym).isAnnotation || sym.is(ModuleClass) && (sym.companionClass.is(PureInterface)) || sym.companionClass.is(Trait))
}
object BCodeBodyBuilder {
val lambdaMetaFactoryMetafactoryHandle = new Handle(
Opcodes.H_INVOKESTATIC,
"java/lang/invoke/LambdaMetafactory",
"metafactory",
"(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;",
/* itf = */ false)
val lambdaMetaFactoryAltMetafactoryHandle = new Handle(
Opcodes.H_INVOKESTATIC,
"java/lang/invoke/LambdaMetafactory",
"altMetafactory",
"(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;",
/* itf = */ false)
val lambdaDeserializeBootstrapHandle = new Handle(
Opcodes.H_INVOKESTATIC,
"scala/runtime/LambdaDeserialize",
"bootstrap",
"(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/invoke/MethodHandle;)Ljava/lang/invoke/CallSite;",
/* itf = */ false)
}
|
dotty-staging/dotty
|
compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala
|
Scala
|
apache-2.0
| 71,740
|
package com.pragmaxim.scalajs.bindings.mbostock
import scala.scalajs.js
import scala.scalajs.js.native
@native
trait Geo extends js.Object {
def path(): Path = js.native
def circle(): Circle = js.native
def area(feature: js.Any): Double = js.native
def bounds(feature: js.Any): js.Array[js.Array[Double]] = js.native
def centroid(feature: js.Any): js.Array[Double] = js.native
def distance(a: js.Array[Double], b: js.Array[Double]): Double = js.native
def interpolate(a: js.Array[Double], b: js.Array[Double]): js.Function1[Double, js.Array[Double]] = js.native
def length(feature: js.Any): Double = js.native
def projection(raw: RawProjection): Projection = js.native
def projectionMutator(rawFactory: RawProjection): ProjectionMutator = js.native
def albers(): Projection = js.native
def albersUsa(): Projection = js.native
var azimuthalEqualArea: js.Any = js.native
var azimuthalEquidistant: js.Any = js.native
var conicConformal: js.Any = js.native
var conicEquidistant: js.Any = js.native
var conicEqualArea: js.Any = js.native
var equirectangular: js.Any = js.native
var gnomonic: js.Any = js.native
var mercator: js.Any = js.native
var orthographic: js.Any = js.native
var stereographic: js.Any = js.native
var transverseMercator: js.Any = js.native
def stream(`object`: GeoJSON, listener: Stream): Unit = js.native
def graticule(): Graticule = js.native
def greatArc(): GreatArc = js.native
def rotation(rotation: js.Array[Double]): Rotation = js.native
}
@native
trait Path extends js.Object {
def apply(feature: js.Any, index: js.Any = js.native): String = js.native
def projection(): Projection = js.native
def projection(projection: Projection): Path = js.native
def context(): String = js.native
def context(context: Context): Path = js.native
def area(feature: js.Any): js.Dynamic = js.native
def centroid(feature: js.Any): js.Dynamic = js.native
def bounds(feature: js.Any): js.Dynamic = js.native
def pointRadius(): Double = js.native
def pointRadius(radius: Double): Path = js.native
}
@native
trait Context extends js.Object {
def beginPath(): js.Dynamic = js.native
def moveTo(x: Double, y: Double): js.Dynamic = js.native
def lineTo(x: Double, y: Double): js.Dynamic = js.native
def arc(x: Double, y: Double, radius: Double, startAngle: Double, endAngle: Double): js.Dynamic = js.native
def closePath(): js.Dynamic = js.native
}
@native
trait Circle extends js.Object {
def apply(args: js.Any*): GeoJSON = js.native
def origin(): js.Array[Double] = js.native
def origin(origin: js.Array[Double]): Circle = js.native
def angle(): Double = js.native
def angle(angle: Double): Circle = js.native
def precision(): Double = js.native
def precision(precision: Double): Circle = js.native
}
@native
trait Graticule extends js.Object {
def apply(): GeoJSON = js.native
def lines(): js.Array[GeoJSON] = js.native
def outline(): GeoJSON = js.native
def extent(): js.Array[js.Array[Double]] = js.native
def extent(extent: js.Array[js.Array[Double]]): Graticule = js.native
def minorExtent(): js.Array[js.Array[Double]] = js.native
def minorExtent(extent: js.Array[js.Array[Double]]): Graticule = js.native
def majorExtent(): js.Array[js.Array[Double]] = js.native
def majorExtent(extent: js.Array[js.Array[Double]]): Graticule = js.native
def step(): js.Array[js.Array[Double]] = js.native
def step(extent: js.Array[js.Array[Double]]): Graticule = js.native
def minorStep(): js.Array[js.Array[Double]] = js.native
def minorStep(extent: js.Array[js.Array[Double]]): Graticule = js.native
def majorStep(): js.Array[js.Array[Double]] = js.native
def majorStep(extent: js.Array[js.Array[Double]]): Graticule = js.native
def precision(): Double = js.native
def precision(precision: Double): Graticule = js.native
}
@native
trait GreatArc extends js.Object {
def apply(): GeoJSON = js.native
def distance(): Double = js.native
def source(): js.Dynamic = js.native
def source(source: js.Any): GreatArc = js.native
def target(): js.Dynamic = js.native
def target(target: js.Any): GreatArc = js.native
def precision(): Double = js.native
def precision(precision: Double): GreatArc = js.native
}
@native
trait GeoJSON extends js.Object {
var coordinates: js.Array[js.Array[Double]] = js.native
var `type`: String = js.native
}
@native
trait RawProjection extends js.Object {
def apply(lambda: Double, phi: Double): js.Array[Double] = js.native
def invert(x: Double, y: Double): js.Array[Double] = js.native
}
@native
trait Projection extends js.Object {
def apply(coordinates: js.Array[Double]): js.Array[Double] = js.native
def invert(point: js.Array[Double]): js.Array[Double] = js.native
def rotate(): js.Array[Double] = js.native
def rotate(rotation: js.Array[Double]): Projection = js.native
def center(): js.Array[Double] = js.native
def center(location: js.Array[Double]): Projection = js.native
def parallels(): js.Array[Double] = js.native
def parallels(location: js.Array[Double]): Projection = js.native
def translate(): js.Array[Double] = js.native
def translate(point: js.Array[Double]): Projection = js.native
def scale(): Double = js.native
def scale(scale: Double): Projection = js.native
def clipAngle(): Double = js.native
def clipAngle(angle: Double): Projection = js.native
def clipExtent(): js.Array[js.Array[Double]] = js.native
def clipExtent(extent: js.Array[js.Array[Double]]): Projection = js.native
def precision(): Double = js.native
def precision(precision: Double): Projection = js.native
def stream(listener: Stream = js.native): Stream = js.native
}
@native
trait Stream extends js.Object {
def point(x: Double, y: Double, z: Double = js.native): Unit = js.native
def lineStart(): Unit = js.native
def lineEnd(): Unit = js.native
def polygonStart(): Unit = js.native
def polygonEnd(): Unit = js.native
def sphere(): Unit = js.native
}
@native
trait Rotation extends js.Array[js.Any] {
def apply(location: js.Array[Double]): Rotation = js.native
def invert(location: js.Array[Double]): Rotation = js.native
}
@native
trait ProjectionMutator extends js.Object {
def apply(lambda: Double, phi: Double): Projection = js.native
}
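// Hypothetical usage sketch (not part of the original bindings): assuming a
// d3 facade value exposing a `geo: Geo` member, a path generator can be
// configured with one of the projections declared above.
object GeoUsageSketch {
  def usPath(geo: Geo): Path =
    geo.path().projection(geo.albersUsa())
}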
|
viagraphs/scalajs-bindings
|
d3/src/main/scala/com/pragmaxim/scalajs/bindings/mbostock/Geo.scala
|
Scala
|
mit
| 6,281
|
package net.fehmicansaglam.bson.reader
import java.nio.ByteBuffer
import java.nio.ByteOrder._
import akka.util.ByteString
import net.fehmicansaglam.bson.BsonDocument
import net.fehmicansaglam.bson.element.BsonElement
import scala.collection.mutable.ArrayBuffer
import scala.util.control.Breaks._
case class BsonDocumentReader(buffer: ByteBuffer) extends Reader[BsonDocument] {
buffer.order(LITTLE_ENDIAN)
private val elements: ArrayBuffer[Option[BsonElement]] = new ArrayBuffer[Option[BsonElement]]
def readElement(code: Byte): Option[BsonElement] = code match {
case 0x01 => BsonDoubleReader(buffer).read
case 0x02 => BsonStringReader(buffer).read
case 0x03 => BsonObjectReader(buffer).read
case 0x04 => BsonArrayReader(buffer).read
case 0x05 => BsonBinaryReader(buffer).read
case 0x07 => BsonObjectIdReader(buffer).read
case 0x08 => BsonBooleanReader(buffer).read
case 0x09 => BsonDateTimeReader(buffer).read
case 0x0A => BsonNullReader(buffer).read
case 0x10 => BsonIntegerReader(buffer).read
case 0x11 => BsonTimestampReader(buffer).read
case 0x12 => BsonLongReader(buffer).read
}
override def read: Option[BsonDocument] = {
val size = buffer.getInt()
breakable {
while (buffer.hasRemaining) {
val code = buffer.get()
if (code != 0x00) {
elements += readElement(code)
} else {
break
}
}
}
Some(BsonDocument(elements.flatten: _*))
}
}
object BsonDocumentReader {
def apply(array: Array[Byte]): BsonDocumentReader = BsonDocumentReader(ByteBuffer.wrap(array))
def apply(buffer: ByteString): BsonDocumentReader = BsonDocumentReader(buffer.asByteBuffer)
}
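// Hypothetical usage sketch: decode a raw byte array into a BsonDocument.
// `bytes` is assumed to contain one complete little-endian BSON document.
object BsonDocumentReaderSketch {
  def decode(bytes: Array[Byte]): Option[BsonDocument] =
    BsonDocumentReader(bytes).read
}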
|
danielwegener/tepkin
|
bson/src/main/scala/net/fehmicansaglam/bson/reader/BsonDocumentReader.scala
|
Scala
|
apache-2.0
| 1,712
|
package io.cumulus.stream.storage
import java.io.OutputStream
import java.security.MessageDigest
import java.time.LocalDateTime
import java.util.UUID
import scala.concurrent.ExecutionContext
import akka.NotUsed
import akka.stream._
import akka.stream.scaladsl.{Broadcast, Flow, GraphDSL, ZipWith}
import akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler}
import akka.util.ByteString
import io.cumulus.Logging
import io.cumulus.stream.storage.StorageObjectWriter.ObjectWriterState
import io.cumulus.stream.utils.{Counter, DigestCalculator}
import io.cumulus.utils.Base64
import io.cumulus.persistence.storage.{StorageEngine, StorageObject}
/**
 * Object writer (Flow of `ByteString` to `StorageObject`) which writes the content of the stream to a newly
 * created storage object using the provided storage engine.
* @param storageEngine The storage engine to use.
*/
class StorageObjectWriter(storageEngine: StorageEngine)(implicit ec: ExecutionContext) extends GraphStage[FlowShape[ByteString, StorageObject]] with Logging {
private val in: Inlet[ByteString] = Inlet[ByteString]("ObjectWriter.in")
private val out: Outlet[StorageObject] = Outlet[StorageObject]("ObjectWriter.out")
override val shape: FlowShape[ByteString, StorageObject] = FlowShape.of(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
// The storage engine used
private implicit val engine: StorageEngine = storageEngine
// The current state
@SuppressWarnings(Array("org.wartremover.warts.Var"))
private var state = ObjectWriterState.empty
setHandler(out, new OutHandler {
override def onPull(): Unit = {
if (!isClosed(in))
pull(in)
}
})
setHandler(in, new InHandler {
override def onPush(): Unit = {
write(grab(in))
}
override def onUpstreamFinish(): Unit = {
// Close the writer and add to the ready list
state.output.close()
val hash = Base64.encode(state.hashDigest.digest)
state = state.copy(
storageObject = state.storageObject.copy(
hash = hash,
size = state.written,
storageHash = hash, // By default, assume that the object is written directly
storageSize = state.written
)
)
logger.debug(s"Object ${state.storageObject.id} created into ${storageEngine.name} v${storageEngine.version}")
if (isAvailable(out)) {
emitStorageObject()
completeStage()
}
}
})
/**
* Write a buffer to a file source.
* @param buffer The buffer to write.
*/
private def write(buffer: ByteString): Unit = {
// Write
state.output.write(buffer.toArray)
// Update state
state.hashDigest.update(buffer.toArray)
state = state.copy(written = state.written + buffer.length)
// Need more to read
pull(in)
}
/**
* Emit the file source. The source is emitted once the file is fully written.
*/
private def emitStorageObject(): Unit = {
push(out, state.storageObject)
}
}
}
object StorageObjectWriter {
private case class ObjectWriterState(
written: Int,
output: OutputStream,
storageObject: StorageObject,
hashDigest: MessageDigest
)
private object ObjectWriterState {
def empty(implicit storageEngine: StorageEngine, ec: ExecutionContext): ObjectWriterState = {
val storageObject = StorageObject(
id = UUID.randomUUID(),
size = 0,
hash = "",
storageSize = 0,
storageHash = "",
cipher = None,
compression = None,
storageEngine = storageEngine.name,
storageEngineVersion = storageEngine.version,
storageEngineReference = storageEngine.reference,
creation = LocalDateTime.now
)
// TODO handle failure ?
val output = storageEngine.writeObject(storageObject.id)
ObjectWriterState(0, output, storageObject, MessageDigest.getInstance("SHA1"))
}
}
/**
   * Naïve version of the object writer, which assumes the stream is not altered beforehand. This stage will
   * compute the size and the hash of the byte stream, and assumes that these values are representative of the
   * byte source.
*
* @param storageEngine The storage engine to use.
* @see [[io.cumulus.stream.storage.StorageObjectWriter StorageObjectWriter]]
*/
def writer(
storageEngine: StorageEngine
)(implicit ec: ExecutionContext): Flow[ByteString, StorageObject, NotUsed] =
Flow[ByteString].via(new StorageObjectWriter(storageEngine))
/**
* Object writer (Flow of `ByteString` to `StorageObject`) which will write to a newly created storage object the
* content of the stream using the provided storage engine. An optional transformation to apply to the byte stream
* before writing can be provided (i.e. for compression or encryption).
* <br/><br/>
* This helper will correctly set the size and hash before and after the transformation (`storageSize` and
* `storageHash`).
* <br/><br/>
   * This flow is meant to be used with substreams, allowing multiple chunks to be uploaded without ending the stream.
*
* @param storageEngine The storage engine to use.
   * @param transformation The transformation to perform.
* @see [[io.cumulus.stream.storage.StorageObjectWriter StorageObjectWriter]]
*/
def writer(
storageEngine: StorageEngine,
transformation: Flow[ByteString, ByteString, NotUsed]
)(implicit ec: ExecutionContext): Flow[ByteString, StorageObject, NotUsed] = {
// Will write the byte stream using the provided storage engine, and return the storage object
val objectWriter = new StorageObjectWriter(storageEngine)
// Will compute the hash (SHA1) of a byte stream
val hash = DigestCalculator.sha1
// Will compute the total size of a byte stream
val size = Counter.apply
val graph = GraphDSL.create() { implicit builder =>
val broadcast = builder.add(Broadcast[ByteString](3))
val zip = builder.add(ZipWith[StorageObject, Long, String, StorageObject] {
case (storageObject, objectSize, objectSha1) =>
storageObject.copy(
hash = objectSha1,
size = objectSize
)
})
import GraphDSL.Implicits._
// Compute the hash and size of the object while writing it
broadcast ~> transformation ~> objectWriter ~> zip.in0
broadcast ~> size ~> zip.in1
broadcast ~> hash ~> zip.in2
FlowShape(broadcast.in, zip.out)
}
// Return the graph
Flow[ByteString]
.via(graph)
}
}
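// Hypothetical usage sketch (not part of the original file): write an incoming
// byte stream as a gzip-compressed storage object. `Compression.gzip` is the
// standard Akka Streams compression flow; the resulting StorageObject then
// carries both the clear hash/size and the compressed storageHash/storageSize.
object StorageObjectWriterSketch {
  import akka.stream.scaladsl.Compression

  def gzipWriter(engine: StorageEngine)(implicit ec: ExecutionContext): Flow[ByteString, StorageObject, NotUsed] =
    StorageObjectWriter.writer(engine, Compression.gzip)
}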
|
Cumulus-Cloud/cumulus
|
server/cumulus-core/src/main/scala/io/cumulus/stream/storage/StorageObjectWriter.scala
|
Scala
|
mit
| 6,793
|
package com.lucidchart.relate
import org.postgresql.util.PGInterval
import scala.concurrent.duration.FiniteDuration
import java.sql.PreparedStatement
package object postgres {
implicit class PGIntervalParameter(value: PGInterval) extends SingleParameter {
protected def set(stmt: PreparedStatement, i: Int) = stmt.setObject(i, value)
}
implicit class FiniteDurationParameter(value: FiniteDuration)
extends PGIntervalParameter(new PGInterval(0, 0, 0, 0, 0, value.toSeconds))
}
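// Hypothetical usage sketch (not part of the original file): with the
// implicits above in scope, a FiniteDuration can be handed to relate wherever
// a SingleParameter is expected, and is bound as a Postgres interval.
// `SingleParameter` is assumed to live in the enclosing relate package.
object PostgresParameterSketch {
  import scala.concurrent.duration._
  import postgres._

  val interval: SingleParameter = 90.minutes
}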
|
pauldraper/relate
|
postgres/src/main/scala/com/lucidchart/relate/postgres/package.scala
|
Scala
|
apache-2.0
| 497
|
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package services
import com.google.inject.Inject
import com.google.inject.name.Named
import javax.inject.Singleton
import play.api.mvc.RequestHeader
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.play.audit.AuditExtensions.auditHeaderCarrier
import uk.gov.hmrc.play.audit.EventKeys
import uk.gov.hmrc.play.audit.http.connector.AuditConnector
import uk.gov.hmrc.play.audit.model.{ Audit, DataEvent, EventTypes }
@Singleton
class AuditService @Inject() (@Named("appName") val appName: String, auditConnector: AuditConnector) {
private[services] val auditFacade: Audit = new Audit(appName, auditConnector)
def logEvent(eventName: String)(implicit hc: HeaderCarrier, rh: RequestHeader): Unit =
auditFacade.sendDataEvent(
DataEvent(appName, EventTypes.Succeeded, tags = hc.toAuditTags(eventName, rh.path))
)
def logEvent(eventName: String, detail: Map[String, String])(implicit hc: HeaderCarrier, rh: RequestHeader): Unit =
auditFacade.sendDataEvent(
DataEvent(appName, EventTypes.Succeeded, tags = hc.toAuditTags(eventName, rh.path), detail = detail)
)
def logEventNoRequest(eventName: String, detail: Map[String, String]): Unit =
auditFacade.sendDataEvent(
DataEvent(appName, EventTypes.Succeeded, tags = Map(EventKeys.TransactionName -> eventName), detail = detail)
)
}
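// Hypothetical usage sketch (not part of the original file): record a success
// event from a background job, where no RequestHeader is available.
object AuditServiceSketch {
  def recordImport(audit: AuditService, fileName: String): Unit =
    audit.logEventNoRequest("FileImported", Map("fileName" -> fileName))
}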
|
hmrc/fset-faststream
|
app/services/AuditService.scala
|
Scala
|
apache-2.0
| 1,938
|
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.hbase.tools.stats
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.hbase.data.HBaseDataStore
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand
import org.locationtech.geomesa.tools.stats.{StatsBoundsCommand, StatsBoundsParams}
import org.locationtech.geomesa.tools.{CatalogParam, RequiredTypeNameParam}
class HBaseStatsBoundsCommand extends StatsBoundsCommand[HBaseDataStore] with HBaseDataStoreCommand {
override val params = new HBaseStatsBoundsParams
}
@Parameters(commandDescription = "View or calculate bounds on attributes in a GeoMesa feature type")
class HBaseStatsBoundsParams extends StatsBoundsParams with CatalogParam with RequiredTypeNameParam
|
ronq/geomesa
|
geomesa-hbase/geomesa-hbase-tools/src/main/scala/org/locationtech/geomesa/hbase/tools/stats/HBaseStatsBoundsCommand.scala
|
Scala
|
apache-2.0
| 1,198
|
package com.wavesplatform.matcher.model
import com.wavesplatform.matcher.model.Events.{OrderCanceled, OrderExecuted}
import com.wavesplatform.matcher.model.LimitOrder.OrderStatus
import com.wavesplatform.matcher.util.{Cache, TTLCache}
import scorex.transaction.AssetAcc
import scorex.transaction.assets.exchange.Order
import scorex.transaction.state.database.state._
import scala.collection.mutable
import scala.concurrent.duration._
trait OrderHistory {
val assetsToSpend = mutable.Map.empty[Address, Long]
val ordersRemainingAmount: Cache[String, (Long, Long)] =
TTLCache[String, (Long, Long)]((Order.MaxLiveTime + 3600*1000).millis)
val openOrdersCount = mutable.Map.empty[Address, Int]
def initOrdersCache(m: Map[String, (Long, Long)]) = {
ordersRemainingAmount.clear()
m.foreach(v => ordersRemainingAmount.set(v._1, v._2))
}
def recoverFromOrderBook(ob: OrderBook): Unit = {
ob.bids.foreach(_._2.foreach(addAssetsToSpend))
ob.asks.foreach(_._2.foreach(addAssetsToSpend))
}
private def incCount(address: Address) = openOrdersCount(address) = openOrdersCount.getOrElse(address, 0) + 1
private def decCount(address: Address) = openOrdersCount(address) = openOrdersCount.getOrElse(address, 0) - 1
private def addAssetsToSpend(lo: LimitOrder) = {
val order = lo.order
val assetAcc = AssetAcc(order.sender, order.spendAssetId)
val feeAssetAcc = AssetAcc(order.sender, None)
assetsToSpend(assetAcc.key) = assetsToSpend.getOrElse(assetAcc.key, 0L) + lo.getSpendAmount
assetsToSpend(feeAssetAcc.key) = assetsToSpend.getOrElse(feeAssetAcc.key, 0L) + lo.feeAmount
incCount(order.sender.address)
}
private def updateRemaining(orderId: String, d: (Long, Long)) = {
    val prev = ordersRemainingAmount.get(orderId).getOrElse((0L, 0L))
    ordersRemainingAmount.set(orderId, (prev._1 + d._1, prev._2 + d._2))
}
def didOrderAccepted(lo: LimitOrder): Unit = {
addAssetsToSpend(lo)
updateRemaining(lo.order.idStr, (lo.amount, 0L))
}
def reduceSpendAssets(limitOrder: LimitOrder) = {
def reduce(key: Address, value: Long) =
if (assetsToSpend.contains(key)) {
val newVal = assetsToSpend(key) - value
if (newVal > 0) assetsToSpend += (key -> newVal)
else assetsToSpend -= key
}
val order = limitOrder.order
val assetAcc = AssetAcc(order.sender, order.spendAssetId)
val feeAssetAcc = AssetAcc(order.sender, None)
reduce(assetAcc.key, limitOrder.getSpendAmount)
reduce(feeAssetAcc.key, limitOrder.feeAmount)
}
def didOrderExecuted(e: OrderExecuted): Unit = {
reduceSpendAssets(e.counterExecuted)
updateRemaining(e.submitted.order.idStr, (e.executedAmount, e.executedAmount))
updateRemaining(e.counter.order.idStr, (0L, e.executedAmount))
if (e.isCounterFilled) decCount(e.counterExecuted.order.sender.address)
}
def didOrderCanceled(orderCanceled: OrderCanceled): Unit = {
val o = orderCanceled.limitOrder.order
updateRemaining(o.idStr, (-o.amount, 0L))
reduceSpendAssets(orderCanceled.limitOrder)
decCount(o.sender.address)
}
def getOrderStatus(id: String): OrderStatus = {
if (!ordersRemainingAmount.contains(id))
LimitOrder.NotFound
else {
val (full, filled) = ordersRemainingAmount.get(id).get
if (full == 0) LimitOrder.Cancelled(filled)
else if (filled == 0) LimitOrder.Accepted
else if (filled < full) LimitOrder.PartiallyFilled(filled)
else LimitOrder.Filled
}
}
}
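// Hypothetical sketch (not part of the original file) of the status logic in
// getOrderStatus: seeding the cache with (full, filled) pairs yields the
// statuses derived above.
object OrderHistorySketch {
  val history = new OrderHistory {}
  history.initOrdersCache(Map(
    "accepted" -> (100L, 0L),  // nothing filled yet -> Accepted
    "partial" -> (100L, 40L),  // partially filled   -> PartiallyFilled(40)
    "filled" -> (100L, 100L)   // completely filled  -> Filled
  ))
  val status = history.getOrderStatus("partial")
}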
|
B83YPoj/Waves
|
src/main/scala/com/wavesplatform/matcher/model/OrderHistory.scala
|
Scala
|
apache-2.0
| 3,515
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.shuffle.unsafe
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.Matchers
import org.apache.spark._
import org.apache.spark.serializer.{JavaSerializer, KryoSerializer, Serializer}
/**
* Tests for the fallback logic in UnsafeShuffleManager. Actual tests of shuffling data are
* performed in other suites.
*/
class UnsafeShuffleManagerSuite extends SparkFunSuite with Matchers {
import UnsafeShuffleManager.canUseUnsafeShuffle
private class RuntimeExceptionAnswer extends Answer[Object] {
override def answer(invocation: InvocationOnMock): Object = {
throw new RuntimeException("Called non-stubbed method, " + invocation.getMethod.getName)
}
}
private def shuffleDep(
partitioner: Partitioner,
serializer: Option[Serializer],
keyOrdering: Option[Ordering[Any]],
aggregator: Option[Aggregator[Any, Any, Any]],
mapSideCombine: Boolean): ShuffleDependency[Any, Any, Any] = {
val dep = mock(classOf[ShuffleDependency[Any, Any, Any]], new RuntimeExceptionAnswer())
doReturn(0).when(dep).shuffleId
doReturn(partitioner).when(dep).partitioner
doReturn(serializer).when(dep).serializer
doReturn(keyOrdering).when(dep).keyOrdering
doReturn(aggregator).when(dep).aggregator
doReturn(mapSideCombine).when(dep).mapSideCombine
dep
}
test("supported shuffle dependencies") {
val kryo = Some(new KryoSerializer(new SparkConf()))
assert(canUseUnsafeShuffle(shuffleDep(
partitioner = new HashPartitioner(2),
serializer = kryo,
keyOrdering = None,
aggregator = None,
mapSideCombine = false
)))
val rangePartitioner = mock(classOf[RangePartitioner[Any, Any]])
when(rangePartitioner.numPartitions).thenReturn(2)
assert(canUseUnsafeShuffle(shuffleDep(
partitioner = rangePartitioner,
serializer = kryo,
keyOrdering = None,
aggregator = None,
mapSideCombine = false
)))
}
test("unsupported shuffle dependencies") {
val kryo = Some(new KryoSerializer(new SparkConf()))
val java = Some(new JavaSerializer(new SparkConf()))
// We only support serializers that support object relocation
assert(!canUseUnsafeShuffle(shuffleDep(
partitioner = new HashPartitioner(2),
serializer = java,
keyOrdering = None,
aggregator = None,
mapSideCombine = false
)))
// We do not support shuffles with more than 16 million output partitions
assert(!canUseUnsafeShuffle(shuffleDep(
partitioner = new HashPartitioner(UnsafeShuffleManager.MAX_SHUFFLE_OUTPUT_PARTITIONS + 1),
serializer = kryo,
keyOrdering = None,
aggregator = None,
mapSideCombine = false
)))
// We do not support shuffles that perform any kind of aggregation or sorting of keys
assert(!canUseUnsafeShuffle(shuffleDep(
partitioner = new HashPartitioner(2),
serializer = kryo,
keyOrdering = Some(mock(classOf[Ordering[Any]])),
aggregator = None,
mapSideCombine = false
)))
assert(!canUseUnsafeShuffle(shuffleDep(
partitioner = new HashPartitioner(2),
serializer = kryo,
keyOrdering = None,
aggregator = Some(mock(classOf[Aggregator[Any, Any, Any]])),
mapSideCombine = false
)))
    // Nor do we support shuffles that both sort and aggregate keys with map-side combine
assert(!canUseUnsafeShuffle(shuffleDep(
partitioner = new HashPartitioner(2),
serializer = kryo,
keyOrdering = Some(mock(classOf[Ordering[Any]])),
aggregator = Some(mock(classOf[Aggregator[Any, Any, Any]])),
mapSideCombine = true
)))
}
}
|
andrewor14/iolap
|
core/src/test/scala/org/apache/spark/shuffle/unsafe/UnsafeShuffleManagerSuite.scala
|
Scala
|
apache-2.0
| 4,543
|
package db
import com.bryzek.apidoc.api.v0.models.UserForm
import lib.Role
import org.scalatest.{FunSpec, Matchers}
import org.junit.Assert._
import java.util.UUID
class EmailVerificationsDaoSpec extends FunSpec with Matchers with util.TestApplication {
def emailVerificationConfirmationsDao = play.api.Play.current.injector.instanceOf[db.EmailVerificationConfirmationsDao]
it("create") {
val user = Util.createRandomUser()
val verification = emailVerificationsDao.create(Util.createdBy, user, user.email)
verification.userGuid should be(user.guid)
verification.email should be(user.email)
}
it("upsert") {
val user = Util.createRandomUser()
val verification1 = emailVerificationsDao.upsert(Util.createdBy, user, user.email)
val verification2 = emailVerificationsDao.upsert(Util.createdBy, user, user.email)
verification2.guid should be(verification1.guid)
emailVerificationsDao.softDelete(Util.createdBy, verification1)
val verification3 = emailVerificationsDao.upsert(Util.createdBy, user, user.email)
verification3.guid should not be(verification1.guid)
val verificationWithDifferentEmail = emailVerificationsDao.upsert(Util.createdBy, user, "other-" + user.email)
verificationWithDifferentEmail.guid should not be(verification3.guid)
}
it("isExpired") {
val user = Util.createRandomUser()
val verification = emailVerificationsDao.create(Util.createdBy, user, user.email)
emailVerificationsDao.isExpired(verification) should be(false)
}
it("confirm") {
val user = Util.createRandomUser()
val verification = emailVerificationsDao.create(Util.createdBy, user, user.email)
emailVerificationConfirmationsDao.findAll(emailVerificationGuid = Some(verification.guid)) should be(Seq.empty)
emailVerificationsDao.confirm(None, verification)
emailVerificationConfirmationsDao.findAll(emailVerificationGuid = Some(verification.guid)).map(_.emailVerificationGuid) should be(Seq(verification.guid))
}
it("findByGuid") {
val user = Util.createRandomUser()
val verification = emailVerificationsDao.create(Util.createdBy, user, user.email)
emailVerificationsDao.findByGuid(verification.guid).map(_.userGuid) should be(Some(user.guid))
emailVerificationsDao.findByGuid(UUID.randomUUID) should be(None)
}
it("findByToken") {
val user = Util.createRandomUser()
val verification = emailVerificationsDao.create(Util.createdBy, user, user.email)
emailVerificationsDao.findByToken(verification.token).map(_.userGuid) should be(Some(user.guid))
emailVerificationsDao.findByToken(UUID.randomUUID.toString) should be(None)
}
it("findAll") {
val user1 = Util.createRandomUser()
val verification1 = emailVerificationsDao.create(Util.createdBy, user1, user1.email)
val user2 = Util.createRandomUser()
val verification2 = emailVerificationsDao.create(Util.createdBy, user2, user2.email)
emailVerificationsDao.findAll(userGuid = Some(user1.guid)).map(_.userGuid).distinct should be(Seq(user1.guid))
emailVerificationsDao.findAll(userGuid = Some(user2.guid)).map(_.userGuid).distinct should be(Seq(user2.guid))
emailVerificationsDao.findAll(userGuid = Some(UUID.randomUUID)).map(_.userGuid).distinct should be(Seq.empty)
emailVerificationsDao.findAll(isExpired = Some(false), userGuid = Some(user1.guid)).map(_.userGuid).distinct should be(Seq(user1.guid))
emailVerificationsDao.findAll(isExpired = Some(true), userGuid = Some(user1.guid)).map(_.userGuid).distinct should be(Seq.empty)
emailVerificationsDao.findAll(email = Some(user1.email)).map(_.userGuid).distinct should be(Seq(user1.guid))
emailVerificationsDao.findAll(email = Some(user1.email.toUpperCase)).map(_.userGuid).distinct should be(Seq(user1.guid))
emailVerificationsDao.findAll(email = Some(user2.email)).map(_.userGuid).distinct should be(Seq(user2.guid))
emailVerificationsDao.findAll(email = Some(UUID.randomUUID.toString)).map(_.userGuid).distinct should be(Seq.empty)
emailVerificationsDao.findAll(guid = Some(verification1.guid)).map(_.userGuid).distinct should be(Seq(user1.guid))
emailVerificationsDao.findAll(guid = Some(verification2.guid)).map(_.userGuid).distinct should be(Seq(user2.guid))
emailVerificationsDao.findAll(guid = Some(UUID.randomUUID)).map(_.userGuid).distinct should be(Seq.empty)
emailVerificationsDao.findAll(token = Some(verification1.token)).map(_.userGuid).distinct should be(Seq(user1.guid))
emailVerificationsDao.findAll(token = Some(verification2.token)).map(_.userGuid).distinct should be(Seq(user2.guid))
emailVerificationsDao.findAll(token = Some("bad")).map(_.userGuid).distinct should be(Seq.empty)
}
describe("membership requests") {
it("confirm auto approves pending membership requests based on org email domain") {
val org = Util.createOrganization()
val domain = UUID.randomUUID.toString + ".com"
organizationDomainsDao.create(Util.createdBy, org, domain)
val prefix = "test-user-" + UUID.randomUUID.toString
val user = usersDao.create(UserForm(
email = prefix + "@" + domain,
password = "testing"
))
val nonMatchingUser = usersDao.create(UserForm(
email = prefix + "@other." + domain,
password = "testing"
))
usersDao.processUserCreated(user.guid)
usersDao.processUserCreated(nonMatchingUser.guid)
membershipsDao.isUserMember(user, org) should be(false)
membershipsDao.isUserMember(nonMatchingUser, org) should be(false)
val verification = emailVerificationsDao.upsert(Util.createdBy, user, user.email)
emailVerificationsDao.confirm(Some(Util.createdBy), verification)
membershipsDao.isUserMember(user, org) should be(true)
membershipsDao.isUserMember(nonMatchingUser, org) should be(false)
}
}
}
|
Seanstoppable/apidoc
|
api/test/db/EmailVerificationsDaoSpec.scala
|
Scala
|
mit
| 5,878
|
package screact
import scala.language.implicitConversions
import scutil.lang.*
object Cell {
implicit def asSignal[T](it:Cell[T]):Signal[T] = it.signal
}
/** A Cell is a source for a Signal and can trigger an update cycle in the Engine */
trait Cell[T] extends AutoCloseable { outer =>
val signal:Signal[T]
def set(value:T):Unit
final def modify(func:T=>T):Unit = {
set(func(signal.current))
}
final def xmap[S](bijection:Bijection[S,T]):Cell[S] = new Cell[S] {
val signal = outer.signal map bijection.set
def set(it:S):Unit = { outer set (bijection get it) }
override def close():Unit = { signal.close() }
}
final def view[U](lens:Lens[T,U]):Cell[U] = new Cell[U] {
val signal = outer.signal map lens.get
def set(it:U):Unit = { outer modify (lens set it) }
override def close():Unit = { signal.close() }
}
def close():Unit = {
signal.close()
}
}
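// Hypothetical usage sketch (assumes a concrete Cell[Int] obtained elsewhere,
// e.g. from the engine): update the cell and read the value back through its
// signal.
object CellUsageSketch {
	def bump(counter:Cell[Int]):Int = {
		counter set 0
		counter modify (_ + 1)
		counter.signal.current
	}
}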
|
ritschwumm/screact
|
src/main/scala/screact/Cell.scala
|
Scala
|
bsd-2-clause
| 885
|
/* Title: Pure/General/pretty.scala
Author: Makarius
Generic pretty printing module.
*/
package isabelle
object Pretty
{
/* spaces */
val space = " "
private val static_spaces = space * 4000
def spaces(k: Int): String =
{
require(k >= 0)
if (k < static_spaces.length) static_spaces.substring(0, k)
else space * k
}
/* text metric -- standardized to width of space */
abstract class Metric
{
val unit: Double
def apply(s: String): Double
}
object Metric_Default extends Metric
{
val unit = 1.0
def apply(s: String): Double = s.length.toDouble
}
/* markup trees with physical blocks and breaks */
def block(body: XML.Body): XML.Tree = Block(2, body)
object Block
{
def apply(i: Int, body: XML.Body): XML.Tree =
XML.Elem(Markup.Block(i), body)
def unapply(tree: XML.Tree): Option[(Int, XML.Body)] =
tree match {
case XML.Elem(Markup.Block(i), body) => Some((i, body))
case _ => None
}
}
object Break
{
def apply(w: Int): XML.Tree =
XML.Elem(Markup.Break(w), List(XML.Text(spaces(w))))
def unapply(tree: XML.Tree): Option[Int] =
tree match {
case XML.Elem(Markup.Break(w), _) => Some(w)
case _ => None
}
}
val FBreak = XML.Text("\\n")
def item(body: XML.Body): XML.Tree =
Block(2, XML.elem(Markup.BULLET, List(XML.Text(space))) :: XML.Text(space) :: body)
val Separator = List(XML.elem(Markup.SEPARATOR, List(XML.Text(space))), FBreak)
def separate(ts: List[XML.Tree]): XML.Body = Library.separate(Separator, ts.map(List(_))).flatten
/* standard form */
def standard_form(body: XML.Body): XML.Body =
body flatMap {
case XML.Wrapped_Elem(markup, body1, body2) =>
List(XML.Wrapped_Elem(markup, body1, standard_form(body2)))
case XML.Elem(markup, body) =>
if (markup.name == Markup.ITEM) List(item(standard_form(body)))
else List(XML.Elem(markup, standard_form(body)))
case XML.Text(text) => Library.separate(FBreak, split_lines(text).map(XML.Text))
}
/* formatted output */
private val margin_default = 76.0
def formatted(input: XML.Body, margin: Double = margin_default,
metric: Metric = Metric_Default): XML.Body =
{
sealed case class Text(tx: XML.Body = Nil, pos: Double = 0.0, nl: Int = 0)
{
def newline: Text = copy(tx = FBreak :: tx, pos = 0.0, nl = nl + 1)
def string(s: String): Text = copy(tx = XML.Text(s) :: tx, pos = pos + metric(s))
def blanks(wd: Int): Text = string(spaces(wd))
def content: XML.Body = tx.reverse
}
val breakgain = margin / 20
val emergencypos = (margin / 2).round.toInt
def content_length(tree: XML.Tree): Double =
XML.traverse_text(List(tree))(0.0)(_ + metric(_))
def breakdist(trees: XML.Body, after: Double): Double =
trees match {
case Break(_) :: _ => 0.0
case FBreak :: _ => 0.0
case t :: ts => content_length(t) + breakdist(ts, after)
case Nil => after
}
def forcenext(trees: XML.Body): XML.Body =
trees match {
case Nil => Nil
case FBreak :: _ => trees
case Break(_) :: ts => FBreak :: ts
case t :: ts => t :: forcenext(ts)
}
def format(trees: XML.Body, blockin: Int, after: Double, text: Text): Text =
trees match {
case Nil => text
case Block(indent, body) :: ts =>
val pos1 = (text.pos + indent).ceil.toInt
val pos2 = pos1 % emergencypos
val blockin1 =
if (pos1 < emergencypos) pos1
else pos2
val btext = format(body, blockin1, breakdist(ts, after), text)
val ts1 = if (text.nl < btext.nl) forcenext(ts) else ts
format(ts1, blockin, after, btext)
case Break(wd) :: ts =>
if (text.pos + wd <= ((margin - breakdist(ts, after)) max (blockin + breakgain)))
format(ts, blockin, after, text.blanks(wd))
else format(ts, blockin, after, text.newline.blanks(blockin))
case FBreak :: ts => format(ts, blockin, after, text.newline.blanks(blockin))
case XML.Wrapped_Elem(markup, body1, body2) :: ts =>
val btext = format(body2, blockin, breakdist(ts, after), text.copy(tx = Nil))
val ts1 = if (text.nl < btext.nl) forcenext(ts) else ts
val btext1 = btext.copy(tx = XML.Wrapped_Elem(markup, body1, btext.content) :: text.tx)
format(ts1, blockin, after, btext1)
case XML.Elem(markup, body) :: ts =>
val btext = format(body, blockin, breakdist(ts, after), text.copy(tx = Nil))
val ts1 = if (text.nl < btext.nl) forcenext(ts) else ts
val btext1 = btext.copy(tx = XML.Elem(markup, btext.content) :: text.tx)
format(ts1, blockin, after, btext1)
case XML.Text(s) :: ts => format(ts, blockin, after, text.string(s))
}
format(standard_form(input), 0, 0.0, Text()).content
}
def string_of(input: XML.Body, margin: Double = margin_default,
metric: Metric = Metric_Default): String =
XML.content(formatted(input, margin, metric))
/* unformatted output */
def unformatted(input: XML.Body): XML.Body =
{
def fmt(tree: XML.Tree): XML.Body =
tree match {
case Block(_, body) => body.flatMap(fmt)
case Break(wd) => List(XML.Text(spaces(wd)))
case FBreak => List(XML.Text(space))
case XML.Wrapped_Elem(markup, body1, body2) =>
List(XML.Wrapped_Elem(markup, body1, body2.flatMap(fmt)))
case XML.Elem(markup, body) => List(XML.Elem(markup, body.flatMap(fmt)))
case XML.Text(_) => List(tree)
}
standard_form(input).flatMap(fmt)
}
def str_of(input: XML.Body): String = XML.content(unformatted(input))
}
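/* hypothetical usage sketch */

// Assumed example only (not part of the original file): a block of two words
// separated by a breakable space; at a narrow margin the Break is rendered as
// a newline, at the default margin it stays a single space.
object Pretty_Example
{
  def demo(): String =
    Pretty.string_of(
      List(Pretty.block(List(XML.Text("hello"), Pretty.Break(1), XML.Text("world")))),
      margin = 12.0)
}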
|
MerelyAPseudonym/isabelle
|
src/Pure/General/pretty.scala
|
Scala
|
bsd-3-clause
| 5,819
|
// Copyright 2015 Ricardo Gladwell.
// Licensed under the GNU Lesser General Public License.
// See the README.md file for more information.
package microtesia
import org.specs2.mutable.Specification
import scala.util.{Failure, Success, Try}
object TrySequenceSpec extends Specification {
"Sequence of Try should" >> {
"sequence a successful Seq[Try] into Try[Seq]" >> {
Seq(Success(1), Success(2), Success(3)).sequence must_== Success(Seq(1,2,3))
}
"sequence a Seq[Try] containing a failure into a failure" >> {
val e = new Exception
Seq(Success(1), Failure(e), Success(3)).sequence must_== Failure(e)
}
"sequence a Seq[Try] containing multiple failures into the last failure" >> {
val first = new Exception
val last = new Exception
Seq(Failure(first), Success(1), Failure(last)).sequence must_== Failure(last)
}
"traverse a successful Seq[Try[Seq[B]] into Try[Seq[B]]" >> {
val seq: Seq[Try[Seq[Int]]] = Seq(Success(Seq(1,2)), Success(Seq(3)), Success(Seq(4,5)))
seq.traverse(Nil: Seq[Int]){ _ ++ _ } must_== Success(Seq(1,2,3,4,5))
}
}
}
|
rgladwell/microtesia
|
src/test/scala/microtesia/TrySequenceSpec.scala
|
Scala
|
lgpl-3.0
| 1,138
|
package $basePackageName$;
import io.skysail.core.akka.actors._
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
class ApplicationService { //(implicit val executionContext: ExecutionContext) {
//var applications = List[Application]()
def createApplication(app: Application): Future[Option[String]] = ???
def getApplication(id: String): Future[Option[Application]] = ???
def getApplications(): Seq[Application] = List(Application("hi"), Application("there"))
//def updateQuestion(id: String, update: QuestionUpdate): Future[Option[Question]] = ...
//def deleteQuestion(id: String): Future[Unit] = ...
}
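// Hypothetical sketch (assumptions: Application is a case class whose single
// String field can act as an id): the stubs above could be backed by the
// static list, e.g.
//
//   override def getApplication(id: String): Future[Option[Application]] =
//     Future.successful(getApplications().find(_ == Application(id)))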
|
evandor/skysail-core
|
skysail.template.app/resources/template/$srcDir$/$basePackageDir$/services.scala
|
Scala
|
apache-2.0
| 647
|
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import collection.immutable.TreeSet
import org.scalatest.events._
/* Uncomment after remove type aliases in org.scalatest package object
import org.scalatest.exceptions.TestFailedException
import org.scalatest.exceptions.TestPendingException
*/
class DeprecatedMethodFormSuiteSpec extends FunSpec with PrivateMethodTester with SharedHelpers {
describe("The simpleNameForTest method") {
it("should return the correct test simple name with or without Informer") {
val simpleNameForTest = PrivateMethod[String]('simpleNameForTest)
assert((Suite invokePrivate simpleNameForTest("testThis")) === "testThis")
assert((Suite invokePrivate simpleNameForTest("testThis(Informer)")) === "testThis")
assert((Suite invokePrivate simpleNameForTest("test(Informer)")) === "test")
assert((Suite invokePrivate simpleNameForTest("test")) === "test")
}
}
describe("A Suite") {
/*
it("should send InfoProvided events with aboutAPendingTest set to true and aboutACanceledTest set to false for info " +
"calls made from a test that is pending") {
val a = new Suite {
def testSomething(info: Informer) {
info("two integers")
info("one is subracted from the other")
info("the result is the difference between the two numbers")
pending
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testPending = rep.testPendingEventsReceived
assert(testPending.size === 1)
val recordedEvents = testPending(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && ip.aboutAPendingTest.get)
assert(ip.aboutACanceledTest.isDefined && !ip.aboutACanceledTest.get)
}
}
it("should send InfoProvided events with aboutAPendingTest and aboutACanceledTest set to false for info " +
"calls made from a test that is not pending or canceled") {
val a = new Suite {
def testSomething(info: Informer) {
info("two integers")
info("one is subracted from the other")
info("the result is the difference between the two numbers")
assert(1 + 1 === 2)
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testSucceeded = rep.testSucceededEventsReceived
assert(testSucceeded.size === 1)
val recordedEvents = testSucceeded(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && !ip.aboutAPendingTest.get)
assert(ip.aboutACanceledTest.isDefined && !ip.aboutACanceledTest.get)
}
}
it("should send InfoProvided events with aboutAPendingTest set to false and aboutACanceledTest set to true for info " +
"calls made from a test that is canceled") {
val a = new Suite {
def testSomething(info: Informer) {
info("two integers")
info("one is subracted from the other")
info("the result is the difference between the two numbers")
cancel()
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testCanceled = rep.testCanceledEventsReceived
assert(testCanceled.size === 1)
val recordedEvents = testCanceled(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && !ip.aboutAPendingTest.get)
assert(ip.aboutACanceledTest.isDefined && ip.aboutACanceledTest.get)
}
}
*/
it("should return the test names in alphabetical order from testNames") {
val a = new Suite {
def testThis() {}
def testThat() {}
}
assertResult(List("testThat", "testThis")) {
a.testNames.iterator.toList
}
val b = new Suite {}
assertResult(List[String]()) {
b.testNames.iterator.toList
}
val c = new Suite {
def testThat() {}
def testThis() {}
}
assertResult(List("testThat", "testThis")) {
c.testNames.iterator.toList
}
}
it("should return the proper testNames for test methods whether or not they take an Informer") {
val a = new Suite {
def testThis() = ()
def testThat(info: Informer) = ()
}
assert(a.testNames === TreeSet("testThat(Informer)", "testThis"))
val b = new Suite {}
assert(b.testNames === TreeSet[String]())
}
class TestWasCalledSuite extends Suite {
var theTestThisCalled = false
var theTestThatCalled = false
def testThis() {
theTestThisCalled = true
}
def testThat() {
theTestThatCalled = true
}
}
it("should execute all tests when run is called with testName None") {
val b = new TestWasCalledSuite
b.run(None, Args(SilentReporter))
assert(b.theTestThisCalled)
assert(b.theTestThatCalled)
}
it("should execute one test when run is called with a defined testName") {
val a = new TestWasCalledSuite
a.run(Some("testThis"), Args(SilentReporter))
assert(a.theTestThisCalled)
assert(!a.theTestThatCalled)
}
it("should report as ignored, ant not run, tests marked ignored") {
val a = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
def testThis() {
theTestThisCalled = true
}
def testThat(info: Informer) {
theTestThatCalled = true
}
}
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
val b = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
@Ignore
def testThis() {
theTestThisCalled = true
}
def testThat(info: Informer) {
theTestThatCalled = true
}
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB))
assert(repB.testIgnoredReceived)
assert(repB.lastEvent.isDefined)
assert(repB.lastEvent.get.testName endsWith "testThis")
assert(!b.theTestThisCalled)
assert(b.theTestThatCalled)
val c = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
def testThis() {
theTestThisCalled = true
}
@Ignore
def testThat(info: Informer) {
theTestThatCalled = true
}
}
val repC = new TestIgnoredTrackingReporter
c.run(None, Args(repC))
assert(repC.testIgnoredReceived)
assert(repC.lastEvent.isDefined)
assert(repC.lastEvent.get.testName endsWith "testThat(Informer)", repC.lastEvent.get.testName)
assert(c.theTestThisCalled)
assert(!c.theTestThatCalled)
val d = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
@Ignore
def testThis() {
theTestThisCalled = true
}
@Ignore
def testThat(info: Informer) {
theTestThatCalled = true
}
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD))
assert(repD.testIgnoredReceived)
assert(repD.lastEvent.isDefined)
assert(repD.lastEvent.get.testName endsWith "testThis") // last because run alphabetically
assert(!d.theTestThisCalled)
assert(!d.theTestThatCalled)
}
it("should ignore a test marked as ignored if run is invoked with that testName") {
val e = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
@Ignore
def testThis() {
theTestThisCalled = true
}
def testThat(info: Informer) {
theTestThatCalled = true
}
}
val repE = new TestIgnoredTrackingReporter
e.run(Some("testThis"), Args(repE))
assert(repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(!e.theTestThatCalled)
}
it("should exclude a test with a tag included in the tagsToExclude set even if run is invoked with that testName") {
val e = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
def testThat(info: Informer) {
theTestThatCalled = true
}
}
val repE = new TestIgnoredTrackingReporter
e.run(Some("testThis"), Args(repE, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(!e.theTestThatCalled)
}
it("should throw IllegalArgumentException if run is passed a testName that does not exist") {
val suite = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
def testThis() {
theTestThisCalled = true
}
def testThat(info: Informer) {
theTestThatCalled = true
}
}
intercept[IllegalArgumentException] {
        // Here, a testName is passed that does not match any test method in the suite
suite.run(Some("misspelled"), Args(SilentReporter))
}
}
it("should run only those tests selected by the tags to include and exclude sets") {
// Nothing is excluded
val a = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
def testThat(info: Informer) {
theTestThatCalled = true
}
}
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
// SlowAsMolasses is included, one test should be excluded
val b = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
def testThat(info: Informer) {
theTestThatCalled = true
}
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repB.testIgnoredReceived)
assert(b.theTestThisCalled)
assert(!b.theTestThatCalled)
// SlowAsMolasses is included, and both tests should be included
val c = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
@SlowAsMolasses
def testThat(info: Informer) {
theTestThatCalled = true
}
}
val repC = new TestIgnoredTrackingReporter
      c.run(None, Args(repC, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repC.testIgnoredReceived)
assert(c.theTestThisCalled)
assert(c.theTestThatCalled)
      // SlowAsMolasses is included; both tests should be included but one ignored
val d = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
@Ignore
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
@SlowAsMolasses
def testThat(info: Informer) {
theTestThatCalled = true
}
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repD.testIgnoredReceived)
assert(!d.theTestThisCalled)
assert(d.theTestThatCalled)
// SlowAsMolasses included, FastAsLight excluded
val e = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@FastAsLight
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
@SlowAsMolasses
def testThat(info: Informer) {
theTestThatCalled = true
}
def testTheOther(info: Informer) {
theTestTheOtherCalled = true
}
}
val repE = new TestIgnoredTrackingReporter
e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(e.theTestThatCalled)
assert(!e.theTestTheOtherCalled)
// An Ignored test that was both included and excluded should not generate a TestIgnored event
val f = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@Ignore
@FastAsLight
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
@SlowAsMolasses
def testThat(info: Informer) {
theTestThatCalled = true
}
def testTheOther(info: Informer) {
theTestTheOtherCalled = true
}
}
val repF = new TestIgnoredTrackingReporter
f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repF.testIgnoredReceived)
assert(!f.theTestThisCalled)
assert(f.theTestThatCalled)
assert(!f.theTestTheOtherCalled)
// An Ignored test that was not included should not generate a TestIgnored event
val g = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@FastAsLight
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
@SlowAsMolasses
def testThat(info: Informer) {
theTestThatCalled = true
}
@Ignore
def testTheOther(info: Informer) {
theTestTheOtherCalled = true
}
}
val repG = new TestIgnoredTrackingReporter
g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repG.testIgnoredReceived)
assert(!g.theTestThisCalled)
assert(g.theTestThatCalled)
assert(!g.theTestTheOtherCalled)
// No tagsToInclude set, FastAsLight excluded
val h = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@FastAsLight
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
@SlowAsMolasses
def testThat(info: Informer) {
theTestThatCalled = true
}
def testTheOther(info: Informer) {
theTestTheOtherCalled = true
}
}
val repH = new TestIgnoredTrackingReporter
h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repH.testIgnoredReceived)
assert(!h.theTestThisCalled)
assert(h.theTestThatCalled)
assert(h.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded
val i = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@FastAsLight
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
@SlowAsMolasses
def testThat(info: Informer) {
theTestThatCalled = true
}
def testTheOther(info: Informer) {
theTestTheOtherCalled = true
}
}
val repI = new TestIgnoredTrackingReporter
i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!i.theTestThisCalled)
assert(!i.theTestThatCalled)
assert(i.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
val j = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@Ignore
@FastAsLight
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
@Ignore
@SlowAsMolasses
def testThat(info: Informer) {
theTestThatCalled = true
}
def testTheOther(info: Informer) {
theTestTheOtherCalled = true
}
}
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
      assert(!repJ.testIgnoredReceived)
assert(!j.theTestThisCalled)
assert(!j.theTestThatCalled)
assert(j.theTestTheOtherCalled)
// Same as previous, except Ignore specifically mentioned in excludes set
val k = new Suite {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
@Ignore
@FastAsLight
@SlowAsMolasses
def testThis() {
theTestThisCalled = true
}
@Ignore
@SlowAsMolasses
def testThat(info: Informer) {
theTestThatCalled = true
}
@Ignore
def testTheOther(info: Informer) {
theTestTheOtherCalled = true
}
}
val repK = new TestIgnoredTrackingReporter
k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repK.testIgnoredReceived)
assert(!k.theTestThisCalled)
assert(!k.theTestThatCalled)
assert(!k.theTestTheOtherCalled)
}
it("should return the correct test count from its expectedTestCount method") {
val a = new Suite {
def testThis() = ()
def testThat(info: Informer) = ()
}
assert(a.expectedTestCount(Filter()) === 2)
val b = new Suite {
@Ignore
def testThis() = ()
def testThat(info: Informer) = ()
}
assert(b.expectedTestCount(Filter()) === 1)
val c = new Suite {
@FastAsLight
def testThis() = ()
def testThat(info: Informer) = ()
}
assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) === 1)
val d = new Suite {
@FastAsLight
@SlowAsMolasses
def testThis() = ()
@SlowAsMolasses
def testThat(info: Informer) = ()
def testTheOtherThing(info: Informer) = ()
}
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 1)
assert(d.expectedTestCount(Filter()) === 3)
val e = new Suite {
@FastAsLight
@SlowAsMolasses
def testThis() = ()
@SlowAsMolasses
def testThat(info: Informer) = ()
@Ignore
def testTheOtherThing(info: Informer) = ()
}
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 0)
assert(e.expectedTestCount(Filter()) === 2)
val f = new Suites(a, b, c, d, e)
assert(f.expectedTestCount(Filter()) === 10)
}
it("should generate a TestPending message when the test body is (pending)") {
val a = new Suite {
def testDoThis() {
pending
}
def testDoThat() {
assert(2 + 2 === 4)
}
def testDoSomethingElse() {
assert(2 + 2 === 4)
pending
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testPendingEventsReceived
assert(tp.size === 2)
}
it("should generate a TestCanceled message when the test body includes a cancel call") {
val a = new Suite {
def testDoThis() {
cancel()
}
def testDoThat() {
assert(2 + 2 === 4)
}
def testDoSomethingElse() {
assert(2 + 2 === 4)
cancel()
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testCanceledEventsReceived
assert(tp.size === 2)
}
it("should generate a TestCanceled message when the test body includes a failed assume call") {
val a = new Suite {
def testDoThis() {
assume(1 === 2)
}
def testDoThat() {
assert(2 + 2 === 4)
}
def testDoSomethingElse() {
assert(2 + 2 === 4)
assume(3 === 4)
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testCanceledEventsReceived
assert(tp.size === 2)
}
it("should generate a test failure if a Throwable, or an Error other than direct Error subtypes " +
"known in JDK 1.5, excluding AssertionError") {
val a = new Suite {
def testThrowsAssertionError() {
throw new AssertionError
}
def testThrowsPlainOldError() {
throw new Error
}
def testThrowsThrowable() {
throw new Throwable
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tf = rep.testFailedEventsReceived
assert(tf.size === 3)
}
it("should propagate out Errors that are direct subtypes of Error in JDK 1.5, other than " +
"AssertionError, causing Suites and Runs to abort.") {
val a = new Suite {
def testThrowsAssertionError() {
throw new OutOfMemoryError
}
}
intercept[OutOfMemoryError] {
a.run(None, Args(SilentReporter))
}
}
it("should invoke withFixture from runTest for no-arg test method") {
val a = new Suite {
var withFixtureWasInvoked = false
var testWasInvoked = false
override def withFixture(test: NoArgTest): Outcome = {
withFixtureWasInvoked = true
super.withFixture(test)
}
def testSomething() {
testWasInvoked = true
}
}
a.run(None, Args(SilentReporter))
assert(a.withFixtureWasInvoked)
assert(a.testWasInvoked)
}
it("should invoke withFixture from runTest for a test method that takes an Informer") {
val a = new Suite {
var withFixtureWasInvoked = false
var testWasInvoked = false
override def withFixture(test: NoArgTest): Outcome = {
withFixtureWasInvoked = true
super.withFixture(test)
}
def testSomething(info: Informer) {
testWasInvoked = true
}
}
a.run(None, Args(SilentReporter))
assert(a.withFixtureWasInvoked)
assert(a.testWasInvoked)
}
it("should pass the correct test name in the NoArgTest passed to withFixture") {
val a = new Suite {
var correctTestNameWasPassed = false
override def withFixture(test: NoArgTest): Outcome = {
correctTestNameWasPassed = test.name == "testSomething(Informer)"
super.withFixture(test)
}
def testSomething(info: Informer) {}
}
a.run(None, Args(SilentReporter))
assert(a.correctTestNameWasPassed)
}
it("should pass the correct config map in the NoArgTest passed to withFixture") {
val a = new Suite {
var correctConfigMapWasPassed = false
override def withFixture(test: NoArgTest): Outcome = {
correctConfigMapWasPassed = (test.configMap == ConfigMap("hi" -> 7))
super.withFixture(test)
}
def testSomething(info: Informer) {}
}
a.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap("hi" -> 7), None, new Tracker(), Set.empty))
assert(a.correctConfigMapWasPassed)
}
describe("(when its pendingUntilFixed method is invoked)") {
it("should throw TestPendingException if the code block throws an exception") {
intercept[TestPendingException] {
pendingUntilFixed {
assert(1 + 1 === 3)
}
}
}
it("should throw TestFailedException if the code block doesn't throw an exception") {
intercept[TestFailedException] {
pendingUntilFixed {
assert(1 + 2 === 3)
}
}
}
}
it("should, when a test methods takes an Informer and writes to it, report the info in test completion event") {
val msg = "hi there dude"
class MySuite extends Suite {
def testWithInformer(info: Informer) {
info(msg)
}
}
val myRep = new EventRecordingReporter
new MySuite().run(None, Args(myRep))
val testStarting = myRep.testStartingEventsReceived
assert(testStarting.size === 1)
val testSucceeded = myRep.testSucceededEventsReceived
assert(testSucceeded.size === 1)
assert(testSucceeded(0).recordedEvents.size === 1)
val ip: InfoProvided = testSucceeded(0).recordedEvents(0).asInstanceOf[InfoProvided]
assert(msg === ip.message)
}
}
describe("the stopper") {
it("should stop nested suites from being executed") {
class SuiteA extends Suite {
        var executed = false
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
class SuiteB extends Suite {
        var executed = false
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
class SuiteC extends Suite {
        var executed = false
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
class SuiteD extends Suite {
        var executed = false
override def run(testName: Option[String], args: Args): Status = {
executed = true
val status = super.run(testName, args)
args.stopper.requestStop()
status
}
}
class SuiteE extends Suite {
        var executed = false
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
class SuiteF extends Suite {
        var executed = false
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
class SuiteG extends Suite {
        var executed = false
override def run(testName: Option[String], args: Args): Status = {
executed = true
super.run(testName, args)
}
}
val a = new SuiteA
val b = new SuiteB
val c = new SuiteC
val d = new SuiteD
val e = new SuiteE
val f = new SuiteF
val g = new SuiteG
class IgnoreStopRequestStopper extends Stopper {
def stopRequested: Boolean = false
def requestStop() {}
def reset() {}
}
val x = Suites(a, b, c, d, e, f, g)
x.run(None, Args(SilentReporter, new IgnoreStopRequestStopper))
assert(a.executed)
assert(b.executed)
assert(c.executed)
assert(d.executed)
assert(e.executed)
assert(f.executed)
assert(g.executed)
val h = new SuiteA
val i = new SuiteB
val j = new SuiteC
val k = new SuiteD
val l = new SuiteE
val m = new SuiteF
val n = new SuiteG
val y = Suites(h, i, j, k, l, m, n)
y.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap.empty, None, new Tracker, Set.empty))
      assert(h.executed)
assert(i.executed)
assert(j.executed)
assert(k.executed)
assert(!l.executed)
assert(!m.executed)
assert(!n.executed)
}
it("should stop tests from being executed") {
class MySuite extends Suite {
var testsExecutedCount = 0
def test1() {
testsExecutedCount += 1
}
def test2() {
testsExecutedCount += 1
}
def test3() {
testsExecutedCount += 1
}
def test4() {
testsExecutedCount += 1
}
def test5() {
testsExecutedCount += 1
}
def test6() {
testsExecutedCount += 1
}
def test7() {
testsExecutedCount += 1
}
}
val x = new MySuite
x.run(None, Args(SilentReporter))
assert(x.testsExecutedCount === 7)
val myStopper = Stopper.default
class MyStoppingSuite extends Suite {
var testsExecutedCount = 0
def test1() {
testsExecutedCount += 1
}
def test2() {
testsExecutedCount += 1
}
def test3() {
testsExecutedCount += 1
}
def test4() {
testsExecutedCount += 1
myStopper.requestStop()
}
def test5() {
testsExecutedCount += 1
}
def test6() {
testsExecutedCount += 1
}
def test7() {
testsExecutedCount += 1
}
}
val y = new MyStoppingSuite
y.run(None, Args(SilentReporter, myStopper, Filter(), ConfigMap.empty, None, new Tracker, Set.empty))
assert(y.testsExecutedCount === 4)
}
}
describe("A Suite's execute method") {
it("should throw NPE if passed null for configMap") {
class MySuite extends Suite
intercept[NullPointerException] {
(new MySuite).execute(configMap = null)
}
}
it("should throw IAE if a testName is passed that does not exist on the suite") {
class MySuite extends Suite
intercept[IllegalArgumentException] {
(new MySuite).execute(testName = "fred")
}
}
}
}
|
svn2github/scalatest
|
src/test/scala/org/scalatest/DeprecatedMethodFormSuiteSpec.scala
|
Scala
|
apache-2.0
| 32,176
|
package com.arcusys.learn.quiz.storage.impl.liferay
import com.arcusys.learn.storage.impl.KeyedEntityStorage
import com.arcusys.learn.persistence.liferay.service.{ LFQuizQuestCatLocalServiceUtil, LFQuizQuestionLocalServiceUtil }
import com.arcusys.learn.persistence.liferay.model.LFQuizQuestCat
import com.arcusys.valamis.quiz.model.QuizQuestionCategory
import scala.collection.JavaConverters._
import com.arcusys.learn.storage.impl.liferay.LiferayCommon._
import com.arcusys.learn.liferay.constants.QueryUtilHelper._
/**
* User: dkudinov
* Date: 15.3.2013
*/
trait LFQuizQuestionCategoryStorageImpl extends KeyedEntityStorage[QuizQuestionCategory] {
protected def doRenew() {
LFQuizQuestCatLocalServiceUtil.removeAll()
}
def getOne(parameters: (String, Any)*) = throw new UnsupportedOperationException
def getAll(parameters: (String, Any)*) = {
parameters match {
case Seq(("quizID", quizId: Int), ("parentID", parentId: Int)) =>
val parentIdForSearch = if (parentId == -1) nullInteger else new Integer(parentId)
LFQuizQuestCatLocalServiceUtil.findByQuizIdAndParentId(quizId, parentIdForSearch).asScala.map {
extract
}.sortBy(_.arrangementIndex)
case Seq(("parentID", parentId: Int)) =>
throw new UnsupportedOperationException("Quiz ID should be declared!")
case _ => LFQuizQuestCatLocalServiceUtil.getLFQuizQuestCats(ALL_POS, ALL_POS).asScala.map(extract).sortBy(_.arrangementIndex)
}
}
def create(parameters: (String, Any)*) {
throw new UnsupportedOperationException
}
def execute(sqlKey: String, parameters: (String, Any)*) {
throw new UnsupportedOperationException
}
def getAll(sqlKey: String, parameters: (String, Any)*) = throw new UnsupportedOperationException
def getOne(sqlKey: String, parameters: (String, Any)*) = throw new UnsupportedOperationException
def modify(sqlKey: String, parameters: (String, Any)*) {
throw new UnsupportedOperationException
}
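  // Recursively deletes a category: first its questions, then its child
  // categories (depth-first), and finally the category row itself.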
def delete(parameters: (String, Any)*) {
idParam(parameters: _*).foreach(id => {
val category = LFQuizQuestCatLocalServiceUtil.getLFQuizQuestCat(id)
if (category != null) {
val quizID = category.getQuizId
val questions = LFQuizQuestionLocalServiceUtil.findByQuizAndCategory(quizID, category.getId.toInt).asScala
questions.foreach(q => LFQuizQuestionLocalServiceUtil.deleteLFQuizQuestion(q.getId))
val children = LFQuizQuestCatLocalServiceUtil.findByQuizIdAndParentId(quizID, id).asScala
children.foreach(c => delete("id" -> c.getId.toInt))
}
LFQuizQuestCatLocalServiceUtil.deleteLFQuizQuestCat(id)
})
}
def modify(parameters: (String, Any)*) {
idParam(parameters: _*).flatMap {
getLFEntityById
}.foreach {
lfEntity => doUpdateEntity(null, lfEntity, LFQuizQuestCatLocalServiceUtil.updateLFQuizQuestCat, parameters: _*)
}
}
private def getLFEntityById(id: Int) = Option(LFQuizQuestCatLocalServiceUtil.getLFQuizQuestCat(id))
def createAndGetID(entity: QuizQuestionCategory, parameters: (String, Any)*) = {
doCreate(entity, parameters: _*).getId.toInt
}
private def doCreate(entity: QuizQuestionCategory, parameters: (String, Any)*) = {
doUpdateEntity(entity, LFQuizQuestCatLocalServiceUtil.createLFQuizQuestionCategory(), LFQuizQuestCatLocalServiceUtil.addLFQuizQuestCat, parameters: _*)
}
private def doUpdateEntity(entity: QuizQuestionCategory, lfEntity: LFQuizQuestCat,
update: (LFQuizQuestCat) => LFQuizQuestCat,
parameters: (String, Any)*): LFQuizQuestCat = {
(entity, parameters) match {
case (entity: QuizQuestionCategory, params: Seq[(String, Any)]) =>
//title: String, description: String, quizID: Int, parentID: Option[Int]
lfEntity.setTitle(entity.title)
lfEntity.setDescription(entity.description)
lfEntity.setQuizId(entity.quizID)
params.foreach {
case ("parentID", parentId: Option[Int]) => lfEntity.setParentId(parentId)
case ("arrangementIndex", arrangementIndex: Int) => lfEntity.setArrangementIndex(arrangementIndex)
}
update(lfEntity)
case (null, params: Seq[(String, Any)]) =>
params.foreach {
case ("id", _) => () // skip
case ("title", title: String) => lfEntity.setTitle(title)
case ("description", description: String) => lfEntity.setDescription(description)
case ("parentID", parentId: Option[Int]) =>
if (parentId.isDefined) {
lfEntity.setParentId(parentId.get)
} else {
lfEntity.setParentId(nullInteger)
}
case ("arrangementIndex", arrangementIndex: Int) => lfEntity.setArrangementIndex(arrangementIndex)
}
update(lfEntity)
}
}
def createAndGetID(parameters: (String, Any)*) = throw new UnsupportedOperationException
def getByID(id: Int, parameters: (String, Any)*) = getLFEntityById(id) map {
extract
}
def create(entity: QuizQuestionCategory, parameters: (String, Any)*) {
throw new UnsupportedOperationException
}
def modify(entity: QuizQuestionCategory, parameters: (String, Any)*) {
parameters match {
case Seq(("parentID", parentID: Option[Int])) =>
val lfEntity = LFQuizQuestCatLocalServiceUtil.getLFQuizQuestCat(entity.id)
if (parentID.isDefined)
lfEntity.setParentId(parentID.get)
else
lfEntity.setParentId(null)
LFQuizQuestCatLocalServiceUtil.updateLFQuizQuestCat(lfEntity)
case _ => None
}
}
def extract(lfentity: LFQuizQuestCat) = QuizQuestionCategory(
lfentity.getId.toInt,
lfentity.getTitle,
lfentity.getDescription,
lfentity.getQuizId.toInt,
Option(lfentity.getParentId).map(_.toInt),
lfentity.getArrangementIndex
)
// for some reason, compiler gives an error in Maven build if this function is moved to package object
def idParam(parameters: (String, Any)*): Option[Int] = {
parameters find {
_._1 == "id"
} map {
_._2.asInstanceOf[Int]
}
}
}
|
ViLPy/Valamis
|
learn-persistence-liferay-wrapper/src/main/scala/com/arcusys/learn/quiz/storage/impl/liferay/LFQuizQuestionCategoryStorageImpl.scala
|
Scala
|
lgpl-3.0
| 6,152
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst
import java.util.regex.{Matcher, Pattern}
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.{ArrayBuffer, LinkedHashSet, ListBuffer, Map}
import scala.language.implicitConversions
import scala.util.matching.Regex
import org.apache.hadoop.hive.ql.lib.Node
import org.apache.hadoop.hive.ql.parse._
import org.apache.spark.sql.catalyst.trees.CurrentOrigin
import org.apache.spark.sql.execution.command._
import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.metadata.datatype.DataType
import org.apache.carbondata.core.metadata.schema.PartitionInfo
import org.apache.carbondata.core.metadata.schema.partition.PartitionType
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema
import org.apache.carbondata.core.util.{CarbonUtil, DataTypeUtil}
import org.apache.carbondata.processing.constants.LoggerAction
import org.apache.carbondata.processing.newflow.sort.SortScopeOptions
import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
import org.apache.carbondata.spark.util.{CommonUtil, DataTypeConverterUtil}
/**
* TODO remove the duplicate code and add the common methods to common class.
* Parser for All Carbon DDL cases
*/
abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
protected val AGGREGATE = carbonKeyWord("AGGREGATE")
protected val AS = carbonKeyWord("AS")
protected val AGGREGATION = carbonKeyWord("AGGREGATION")
protected val ALL = carbonKeyWord("ALL")
protected val HIGH_CARDINALITY_DIMS = carbonKeyWord("NO_DICTIONARY")
protected val BEFORE = carbonKeyWord("BEFORE")
protected val BY = carbonKeyWord("BY")
protected val CARDINALITY = carbonKeyWord("CARDINALITY")
protected val CASCADE = carbonKeyWord("CASCADE")
protected val CLASS = carbonKeyWord("CLASS")
protected val CLEAN = carbonKeyWord("CLEAN")
protected val COLS = carbonKeyWord("COLS")
protected val COLUMNS = carbonKeyWord("COLUMNS")
protected val COMPACT = carbonKeyWord("COMPACT")
protected val CREATE = carbonKeyWord("CREATE")
protected val CUBE = carbonKeyWord("CUBE")
protected val CUBES = carbonKeyWord("CUBES")
protected val DATA = carbonKeyWord("DATA")
protected val DATABASE = carbonKeyWord("DATABASE")
protected val DATABASES = carbonKeyWord("DATABASES")
protected val DELETE = carbonKeyWord("DELETE")
protected val DELIMITER = carbonKeyWord("DELIMITER")
protected val DESCRIBE = carbonKeyWord("DESCRIBE")
protected val DESC = carbonKeyWord("DESC")
protected val DETAIL = carbonKeyWord("DETAIL")
protected val DIMENSIONS = carbonKeyWord("DIMENSIONS")
protected val DIMFOLDERPATH = carbonKeyWord("DIMFOLDERPATH")
protected val DROP = carbonKeyWord("DROP")
protected val ESCAPECHAR = carbonKeyWord("ESCAPECHAR")
protected val EXCLUDE = carbonKeyWord("EXCLUDE")
protected val EXPLAIN = carbonKeyWord("EXPLAIN")
protected val EXTENDED = carbonKeyWord("EXTENDED")
protected val FORMATTED = carbonKeyWord("FORMATTED")
protected val FACT = carbonKeyWord("FACT")
protected val FIELDS = carbonKeyWord("FIELDS")
protected val FILEHEADER = carbonKeyWord("FILEHEADER")
protected val SERIALIZATION_NULL_FORMAT = carbonKeyWord("SERIALIZATION_NULL_FORMAT")
protected val BAD_RECORDS_LOGGER_ENABLE = carbonKeyWord("BAD_RECORDS_LOGGER_ENABLE")
protected val BAD_RECORDS_ACTION = carbonKeyWord("BAD_RECORDS_ACTION")
protected val IS_EMPTY_DATA_BAD_RECORD = carbonKeyWord("IS_EMPTY_DATA_BAD_RECORD")
protected val IS_EMPTY_COMMA_DATA_BAD_RECORD = carbonKeyWord("IS_NULL_DATA_BAD_RECORD")
protected val FILES = carbonKeyWord("FILES")
protected val FROM = carbonKeyWord("FROM")
protected val HIERARCHIES = carbonKeyWord("HIERARCHIES")
protected val IN = carbonKeyWord("IN")
protected val INCLUDE = carbonKeyWord("INCLUDE")
protected val INPATH = carbonKeyWord("INPATH")
protected val INTO = carbonKeyWord("INTO")
protected val LEVELS = carbonKeyWord("LEVELS")
protected val LIKE = carbonKeyWord("LIKE")
protected val LOAD = carbonKeyWord("LOAD")
protected val LOCAL = carbonKeyWord("LOCAL")
protected val MAPPED = carbonKeyWord("MAPPED")
protected val MEASURES = carbonKeyWord("MEASURES")
protected val MULTILINE = carbonKeyWord("MULTILINE")
protected val COMPLEX_DELIMITER_LEVEL_1 = carbonKeyWord("COMPLEX_DELIMITER_LEVEL_1")
protected val COMPLEX_DELIMITER_LEVEL_2 = carbonKeyWord("COMPLEX_DELIMITER_LEVEL_2")
protected val OPTIONS = carbonKeyWord("OPTIONS")
protected val OUTPATH = carbonKeyWord("OUTPATH")
protected val OVERWRITE = carbonKeyWord("OVERWRITE")
protected val PARTITION_COUNT = carbonKeyWord("PARTITION_COUNT")
protected val PARTITIONDATA = carbonKeyWord("PARTITIONDATA")
protected val PARTITIONER = carbonKeyWord("PARTITIONER")
protected val QUOTECHAR = carbonKeyWord("QUOTECHAR")
protected val RELATION = carbonKeyWord("RELATION")
protected val SCHEMA = carbonKeyWord("SCHEMA")
protected val SCHEMAS = carbonKeyWord("SCHEMAS")
protected val SET = Keyword("SET")
protected val SHOW = carbonKeyWord("SHOW")
protected val TABLES = carbonKeyWord("TABLES")
protected val TABLE = carbonKeyWord("TABLE")
protected val TERMINATED = carbonKeyWord("TERMINATED")
protected val TYPE = carbonKeyWord("TYPE")
protected val UPDATE = carbonKeyWord("UPDATE")
protected val USE = carbonKeyWord("USE")
protected val WHERE = Keyword("WHERE")
protected val WITH = carbonKeyWord("WITH")
protected val AGGREGATETABLE = carbonKeyWord("AGGREGATETABLE")
protected val ABS = carbonKeyWord("abs")
protected val FOR = carbonKeyWord("FOR")
protected val SCRIPTS = carbonKeyWord("SCRIPTS")
protected val USING = carbonKeyWord("USING")
protected val LIMIT = carbonKeyWord("LIMIT")
protected val DEFAULTS = carbonKeyWord("DEFAULTS")
protected val ALTER = carbonKeyWord("ALTER")
protected val ADD = carbonKeyWord("ADD")
protected val IF = carbonKeyWord("IF")
protected val NOT = carbonKeyWord("NOT")
protected val EXISTS = carbonKeyWord("EXISTS")
protected val DIMENSION = carbonKeyWord("DIMENSION")
protected val STARTTIME = carbonKeyWord("STARTTIME")
protected val SEGMENTS = carbonKeyWord("SEGMENTS")
protected val SEGMENT = carbonKeyWord("SEGMENT")
protected val STRING = carbonKeyWord("STRING")
protected val INTEGER = carbonKeyWord("INTEGER")
protected val TIMESTAMP = carbonKeyWord("TIMESTAMP")
protected val DATE = carbonKeyWord("DATE")
protected val CHAR = carbonKeyWord("CHAR")
protected val VARCHAR = carbonKeyWord("VARCHAR")
protected val NUMERIC = carbonKeyWord("NUMERIC")
protected val DECIMAL = carbonKeyWord("DECIMAL")
protected val DOUBLE = carbonKeyWord("DOUBLE")
protected val FLOAT = carbonKeyWord("FLOAT")
protected val SHORT = carbonKeyWord("SHORT")
protected val INT = carbonKeyWord("INT")
protected val BIGINT = carbonKeyWord("BIGINT")
protected val ARRAY = carbonKeyWord("ARRAY")
protected val STRUCT = carbonKeyWord("STRUCT")
protected val SMALLINT = carbonKeyWord("SMALLINT")
protected val CHANGE = carbonKeyWord("CHANGE")
protected val TBLPROPERTIES = carbonKeyWord("TBLPROPERTIES")
  protected val doubleQuotedString = "\"([^\"]+)\"".r
protected val singleQuotedString = "'([^']+)'".r
protected val newReservedWords =
this.getClass
.getMethods
.filter(_.getReturnType == classOf[Keyword])
.map(_.invoke(this).asInstanceOf[Keyword].str)
override val lexical = {
val sqllex = new SqlLexical()
sqllex.initialize(newReservedWords)
sqllex
}
import lexical.Identifier
implicit def regexToParser(regex: Regex): Parser[String] = {
acceptMatch(
s"identifier matching regex ${ regex }",
{ case Identifier(str) if regex.unapplySeq(str).isDefined => str }
)
}
/**
   * This will convert a keyword into a case-insensitive regular expression.
*
* @param keys
* @return
*/
private def carbonKeyWord(keys: String) = {
("(?i)" + keys).r
}
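  // e.g. carbonKeyWord("TABLE") yields "(?i)TABLE".r, which the implicit
  // regexToParser above turns into a case-insensitive keyword parser.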
protected val escapedIdentifier = "`([^`]+)`".r
private def reorderDimensions(dims: Seq[Field]): Seq[Field] = {
var complexDimensions: Seq[Field] = Seq()
var dimensions: Seq[Field] = Seq()
dims.foreach { dimension =>
dimension.dataType.getOrElse("NIL") match {
case "Array" => complexDimensions = complexDimensions :+ dimension
case "Struct" => complexDimensions = complexDimensions :+ dimension
case _ => dimensions = dimensions :+ dimension
}
}
dimensions ++ complexDimensions
}
def getScaleAndPrecision(dataType: String): (Int, Int) = {
    val m: Matcher = Pattern.compile("^decimal\\(([^)]+)\\)").matcher(dataType)
m.find()
val matchedString: String = m.group(1)
val scaleAndPrecision = matchedString.split(",")
(Integer.parseInt(scaleAndPrecision(0).trim), Integer.parseInt(scaleAndPrecision(1).trim))
}
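  // e.g. getScaleAndPrecision("decimal(10,2)") returns (10, 2); despite the method
  // name, the tuple is (precision, scale). The input must match "decimal(p,s)".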
/**
   * This will prepare the TableModel from the parsed tree details.
*
* @param ifNotExistPresent
* @param dbName
* @param tableName
* @param fields
* @param partitionCols
* @param tableProperties
* @return
*/
def prepareTableModel(ifNotExistPresent: Boolean, dbName: Option[String]
, tableName: String, fields: Seq[Field],
partitionCols: Seq[PartitionerField],
tableProperties: mutable.Map[String, String],
bucketFields: Option[BucketFields], isAlterFlow: Boolean = false): TableModel = {
fields.zipWithIndex.foreach { case (field, index) =>
field.schemaOrdinal = index
}
val (dims, msrs, noDictionaryDims, sortKeyDims) = extractDimAndMsrFields(
fields, tableProperties)
if (dims.isEmpty && !isAlterFlow) {
throw new MalformedCarbonCommandException(
s"Table ${dbName.getOrElse(CarbonCommonConstants.DATABASE_DEFAULT_NAME)}.$tableName " +
"can not be created without key columns. Please use DICTIONARY_INCLUDE or " +
"DICTIONARY_EXCLUDE to set at least one key column " +
"if all specified columns are numeric types")
}
// column properties
val colProps = extractColumnProperties(fields, tableProperties)
// get column groups configuration from table properties.
val groupCols: Seq[String] = updateColumnGroupsInField(tableProperties,
noDictionaryDims, msrs, dims)
// get no inverted index columns from table properties.
val noInvertedIdxCols = extractNoInvertedIndexColumns(fields, tableProperties)
// get partitionInfo
val partitionInfo = getPartitionInfo(partitionCols, tableProperties)
// validate the tableBlockSize from table properties
CommonUtil.validateTableBlockSize(tableProperties)
TableModel(
ifNotExistPresent,
dbName.getOrElse(CarbonCommonConstants.DATABASE_DEFAULT_NAME),
dbName,
tableName,
tableProperties,
reorderDimensions(dims.map(f => normalizeType(f)).map(f => addParent(f))),
msrs.map(f => normalizeType(f)),
Option(sortKeyDims),
Option(noDictionaryDims),
Option(noInvertedIdxCols),
groupCols,
Some(colProps),
bucketFields: Option[BucketFields],
partitionInfo)
}
/**
* Extract the column groups configuration from table properties.
   * Based on this, row groups of fields will be determined.
*
* @param tableProperties
* @return
*/
protected def updateColumnGroupsInField(tableProperties: mutable.Map[String, String],
noDictionaryDims: Seq[String],
msrs: Seq[Field],
dims: Seq[Field]): Seq[String] = {
if (tableProperties.get(CarbonCommonConstants.COLUMN_GROUPS).isDefined) {
var splittedColGrps: Seq[String] = Seq[String]()
val nonSplitCols: String = tableProperties.get(CarbonCommonConstants.COLUMN_GROUPS).get
      // row groups are specified in table properties like -> "(col1,col2),(col3,col4)"
      // first split the value by parentheses, so the above becomes 2 strings:
      // [col1,col2] [col3,col4]
      val m: Matcher = Pattern.compile("\\(([^)]+)\\)").matcher(nonSplitCols)
while (m.find()) {
val oneGroup: String = m.group(1)
CommonUtil.validateColumnGroup(oneGroup, noDictionaryDims, msrs, splittedColGrps, dims)
val arrangedColGrp = rearrangedColumnGroup(oneGroup, dims)
splittedColGrps :+= arrangedColGrp
}
      // This will be handled further on.
CommonUtil.arrangeColGrpsInSchemaOrder(splittedColGrps, dims)
} else {
null
}
}
def rearrangedColumnGroup(colGroup: String, dims: Seq[Field]): String = {
    // if the columns in a column group are not in schema order then arrange them in schema order
var colGrpFieldIndx: Seq[Int] = Seq[Int]()
colGroup.split(',').map(_.trim).foreach { x =>
dims.zipWithIndex.foreach { dim =>
if (dim._1.column.equalsIgnoreCase(x)) {
colGrpFieldIndx :+= dim._2
}
}
}
// sort it
colGrpFieldIndx = colGrpFieldIndx.sorted
    // check that the columns in the column group are contiguous in schema order
if (!checkIfInSequence(colGrpFieldIndx)) {
throw new MalformedCarbonCommandException("Invalid column group:" + colGroup)
}
def checkIfInSequence(colGrpFieldIndx: Seq[Int]): Boolean = {
for (i <- 0 until (colGrpFieldIndx.length - 1)) {
if ((colGrpFieldIndx(i + 1) - colGrpFieldIndx(i)) != 1) {
        throw new MalformedCarbonCommandException(
          "Invalid column group, columns in a group should be contiguous as per schema.")
}
}
true
}
val colGrpNames: StringBuilder = StringBuilder.newBuilder
for (i <- colGrpFieldIndx.indices) {
colGrpNames.append(dims(colGrpFieldIndx(i)).column)
if (i < (colGrpFieldIndx.length - 1)) {
colGrpNames.append(",")
}
}
colGrpNames.toString()
}
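  // e.g. with dims (a, b, c) the group "c,b" is reordered to "b,c", while a
  // non-contiguous group such as "a,c" raises MalformedCarbonCommandException.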
/**
* @param partitionCols
* @param tableProperties
*/
protected def getPartitionInfo(partitionCols: Seq[PartitionerField],
tableProperties: Map[String, String]): Option[PartitionInfo] = {
if (partitionCols.isEmpty) {
None
} else {
var partitionType: String = ""
var numPartitions = 0
var rangeInfo = List[String]()
var listInfo = ListBuffer[List[String]]()
var templist = ListBuffer[String]()
if (tableProperties.get(CarbonCommonConstants.PARTITION_TYPE).isDefined) {
partitionType = tableProperties.get(CarbonCommonConstants.PARTITION_TYPE).get
}
if (tableProperties.get(CarbonCommonConstants.NUM_PARTITIONS).isDefined) {
numPartitions = tableProperties.get(CarbonCommonConstants.NUM_PARTITIONS).get
.toInt
}
if (tableProperties.get(CarbonCommonConstants.RANGE_INFO).isDefined) {
rangeInfo = tableProperties.get(CarbonCommonConstants.RANGE_INFO).get.split(",")
.map(_.trim()).toList
}
if (tableProperties.get(CarbonCommonConstants.LIST_INFO).isDefined) {
val arr = tableProperties.get(CarbonCommonConstants.LIST_INFO).get.split(",")
.map(_.trim())
        val iter = arr.iterator
        var insideGroup = false
        while (iter.hasNext) {
          val value = iter.next()
          val opensGroup = value.startsWith("(")
          val closesGroup = value.endsWith(")")
          templist += value.replace("(", "").replace(")", "").trim()
          if (opensGroup) {
            insideGroup = true
          }
          // a ")" ends the current group; a bare value outside any group
          // forms a single-element list of its own
          if (closesGroup || !insideGroup) {
            listInfo += templist.toList
            templist.clear()
            insideGroup = false
          }
        }
}
val cols : ArrayBuffer[ColumnSchema] = new ArrayBuffer[ColumnSchema]()
partitionCols.foreach(partition_col => {
val columnSchema = new ColumnSchema
columnSchema.setDataType(DataTypeConverterUtil.
convertToCarbonType(partition_col.dataType.get))
columnSchema.setColumnName(partition_col.partitionColumn)
cols += columnSchema
})
var partitionInfo : PartitionInfo = null
partitionType.toUpperCase() match {
case "HASH" => partitionInfo = new PartitionInfo(cols.asJava, PartitionType.HASH)
partitionInfo.setNumPartitions(numPartitions)
case "RANGE" => partitionInfo = new PartitionInfo(cols.asJava, PartitionType.RANGE)
partitionInfo.setRangeInfo(rangeInfo.asJava)
case "LIST" => partitionInfo = new PartitionInfo(cols.asJava, PartitionType.LIST)
partitionInfo.setListInfo(listInfo.map(_.asJava).toList.asJava)
}
Some(partitionInfo)
}
}
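  // Illustrative (assuming the usual property keys behind CarbonCommonConstants):
  // PARTITIONED BY (id INT) with TBLPROPERTIES('PARTITION_TYPE'='HASH',
  // 'NUM_PARTITIONS'='3') yields a PartitionInfo over [id] with 3 hash partitions.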
protected def extractColumnProperties(fields: Seq[Field], tableProperties: Map[String, String]):
java.util.Map[String, java.util.List[ColumnProperty]] = {
val colPropMap = new java.util.HashMap[String, java.util.List[ColumnProperty]]()
fields.foreach { field =>
if (field.children.isDefined && field.children.get != null) {
fillAllChildrenColumnProperty(field.column, field.children, tableProperties, colPropMap)
} else {
fillColumnProperty(None, field.column, tableProperties, colPropMap)
}
}
colPropMap
}
protected def fillAllChildrenColumnProperty(parent: String, fieldChildren: Option[List[Field]],
tableProperties: Map[String, String],
colPropMap: java.util.HashMap[String, java.util.List[ColumnProperty]]) {
fieldChildren.foreach(fields => {
fields.foreach(field => {
fillColumnProperty(Some(parent), field.column, tableProperties, colPropMap)
}
)
}
)
}
protected def fillColumnProperty(parentColumnName: Option[String],
columnName: String,
tableProperties: Map[String, String],
colPropMap: java.util.HashMap[String, java.util.List[ColumnProperty]]) {
val (tblPropKey, colProKey) = getKey(parentColumnName, columnName)
val colProps = CommonUtil.getColumnProperties(tblPropKey, tableProperties)
if (colProps.isDefined) {
colPropMap.put(colProKey, colProps.get)
}
}
def getKey(parentColumnName: Option[String],
columnName: String): (String, String) = {
if (parentColumnName.isDefined) {
if (columnName == "val") {
(parentColumnName.get, parentColumnName.get + "." + columnName)
} else {
(parentColumnName.get + "." + columnName, parentColumnName.get + "." + columnName)
}
} else {
(columnName, columnName)
}
}
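  // e.g. getKey(Some("addr"), "city") == ("addr.city", "addr.city"); the special
  // child name "val" (an array element) keeps the bare parent name as the
  // table-property key.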
/**
   * This will extract the no-inverted-index columns.
* By default all dimensions use inverted index.
*
* @param fields
* @param tableProperties
* @return
*/
protected def extractNoInvertedIndexColumns(fields: Seq[Field],
tableProperties: Map[String, String]): Seq[String] = {
// check whether the column name is in fields
var noInvertedIdxColsProps: Array[String] = Array[String]()
var noInvertedIdxCols: Seq[String] = Seq[String]()
if (tableProperties.get(CarbonCommonConstants.NO_INVERTED_INDEX).isDefined) {
noInvertedIdxColsProps =
tableProperties.get(CarbonCommonConstants.NO_INVERTED_INDEX).get.split(',').map(_.trim)
noInvertedIdxColsProps.foreach { noInvertedIdxColProp =>
if (!fields.exists(x => x.column.equalsIgnoreCase(noInvertedIdxColProp))) {
val errormsg = "NO_INVERTED_INDEX column: " + noInvertedIdxColProp +
" does not exist in table. Please check create table statement."
throw new MalformedCarbonCommandException(errormsg)
}
}
}
    // de-duplicate the columns
val distinctCols = noInvertedIdxColsProps.toSet
// extract the no inverted index columns
fields.foreach(field => {
if (distinctCols.exists(x => x.equalsIgnoreCase(field.column))) {
noInvertedIdxCols :+= field.column
}
}
)
noInvertedIdxCols
}
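  // e.g. TBLPROPERTIES('NO_INVERTED_INDEX'='col1,col2') returns Seq("col1", "col2"),
  // provided both columns exist in the create-table field list.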
/**
* This will extract the Dimensions and NoDictionary Dimensions fields.
* By default all string cols are dimensions.
*
* @param fields
* @param tableProperties
* @return
*/
protected def extractDimAndMsrFields(fields: Seq[Field],
tableProperties: Map[String, String]): (Seq[Field], Seq[Field], Seq[String], Seq[String]) = {
var dimFields: LinkedHashSet[Field] = LinkedHashSet[Field]()
var msrFields: Seq[Field] = Seq[Field]()
var dictExcludeCols: Array[String] = Array[String]()
var noDictionaryDims: Seq[String] = Seq[String]()
var dictIncludeCols: Seq[String] = Seq[String]()
// All columns in sortkey should be there in create table cols
val sortKeyOption = tableProperties.get(CarbonCommonConstants.SORT_COLUMNS)
var sortKeyDimsTmp: Seq[String] = Seq[String]()
val sortKeyString: String = if (sortKeyOption.isDefined) {
      CarbonUtil.unquoteChar(sortKeyOption.get).trim
} else {
""
}
if (!sortKeyString.isEmpty) {
val sortKey = sortKeyString.split(',').map(_.trim)
sortKey.foreach { column =>
if (!fields.exists(x => x.column.equalsIgnoreCase(column))) {
val errormsg = "sort_columns: " + column +
" does not exist in table. Please check create table statement."
throw new MalformedCarbonCommandException(errormsg)
} else {
val dataType = fields.find(x =>
x.column.equalsIgnoreCase(column)).get.dataType.get
if (isComplexDimDictionaryExclude(dataType)) {
val errormsg = "sort_columns is unsupported for complex datatype column: " + column
throw new MalformedCarbonCommandException(errormsg)
}
}
}
sortKey.foreach { dimension =>
if (!sortKeyDimsTmp.exists(dimension.equalsIgnoreCase(_))) {
fields.foreach { field =>
if (field.column.equalsIgnoreCase(dimension)) {
sortKeyDimsTmp :+= field.column
}
}
}
}
}
// All excluded cols should be there in create table cols
if (tableProperties.get(CarbonCommonConstants.DICTIONARY_EXCLUDE).isDefined) {
dictExcludeCols =
tableProperties.get(CarbonCommonConstants.DICTIONARY_EXCLUDE).get.split(',').map(_.trim)
dictExcludeCols
.foreach { dictExcludeCol =>
if (!fields.exists(x => x.column.equalsIgnoreCase(dictExcludeCol))) {
val errormsg = "DICTIONARY_EXCLUDE column: " + dictExcludeCol +
" does not exist in table. Please check create table statement."
throw new MalformedCarbonCommandException(errormsg)
} else {
val dataType = fields.find(x =>
x.column.equalsIgnoreCase(dictExcludeCol)).get.dataType.get
if (isComplexDimDictionaryExclude(dataType)) {
val errormsg = "DICTIONARY_EXCLUDE is unsupported for complex datatype column: " +
dictExcludeCol
throw new MalformedCarbonCommandException(errormsg)
} else if (!isDataTypeSupportedForDictionary_Exclude(dataType)) {
val errorMsg = "DICTIONARY_EXCLUDE is unsupported for " + dataType.toLowerCase() +
" data type column: " + dictExcludeCol
throw new MalformedCarbonCommandException(errorMsg)
}
}
}
}
// All included cols should be there in create table cols
if (tableProperties.get(CarbonCommonConstants.DICTIONARY_INCLUDE).isDefined) {
dictIncludeCols =
tableProperties(CarbonCommonConstants.DICTIONARY_INCLUDE).split(",").map(_.trim)
dictIncludeCols.foreach { distIncludeCol =>
if (!fields.exists(x => x.column.equalsIgnoreCase(distIncludeCol.trim))) {
val errormsg = "DICTIONARY_INCLUDE column: " + distIncludeCol.trim +
" does not exist in table. Please check create table statement."
throw new MalformedCarbonCommandException(errormsg)
}
}
}
// include cols should not contain exclude cols
dictExcludeCols.foreach { dicExcludeCol =>
if (dictIncludeCols.exists(x => x.equalsIgnoreCase(dicExcludeCol))) {
val errormsg = "DICTIONARY_EXCLUDE can not contain the same column: " + dicExcludeCol +
" with DICTIONARY_INCLUDE. Please check create table statement."
throw new MalformedCarbonCommandException(errormsg)
}
}
// by default consider all String cols as dims and if any dictionary exclude is present then
// add it to noDictionaryDims list. consider all dictionary excludes/include cols as dims
fields.foreach { field =>
if (dictExcludeCols.toSeq.exists(x => x.equalsIgnoreCase(field.column))) {
val dataType = DataTypeUtil.getDataType(field.dataType.get.toUpperCase())
if (dataType != DataType.TIMESTAMP && dataType != DataType.DATE) {
noDictionaryDims :+= field.column
}
dimFields += field
} else if (dictIncludeCols.exists(x => x.equalsIgnoreCase(field.column))) {
dimFields += field
} else if (isDetectAsDimentionDatatype(field.dataType.get)) {
dimFields += field
} else if (sortKeyDimsTmp.exists(x => x.equalsIgnoreCase(field.column))) {
noDictionaryDims :+= field.column
dimFields += field
} else {
msrFields :+= field
}
}
var sortKeyDims = sortKeyDimsTmp
if (sortKeyOption.isEmpty) {
      // if SORT_COLUMNS was not defined, add all dimensions to SORT_COLUMNS.
dimFields.foreach { field =>
if (!isComplexDimDictionaryExclude(field.dataType.get)) {
sortKeyDims :+= field.column
}
}
}
if (sortKeyDims.isEmpty) {
// no SORT_COLUMNS
tableProperties.put(CarbonCommonConstants.SORT_COLUMNS, "")
} else {
tableProperties.put(CarbonCommonConstants.SORT_COLUMNS, sortKeyDims.mkString(","))
}
(dimFields.toSeq, msrFields, noDictionaryDims, sortKeyDims)
}
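  // In short: DICTIONARY_EXCLUDE columns become no-dictionary dimensions (except
  // timestamp/date), DICTIONARY_INCLUDE and string-like columns become dimensions,
  // SORT_COLUMNS-only columns become no-dictionary dimensions, and everything else
  // is a measure; without SORT_COLUMNS all non-complex dimensions are sort columns.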
/**
* It fills non string dimensions in dimFields
*/
def fillNonStringDimension(dictIncludeCols: Seq[String],
field: Field, dimFields: LinkedHashSet[Field]) {
var dictInclude = false
if (dictIncludeCols.nonEmpty) {
dictIncludeCols.foreach(dictIncludeCol =>
if (field.column.equalsIgnoreCase(dictIncludeCol)) {
dictInclude = true
})
}
if (dictInclude) {
dimFields += field
}
}
/**
   * detect dimension data type
*
* @param dimensionDatatype
*/
def isDetectAsDimentionDatatype(dimensionDatatype: String): Boolean = {
val dimensionType = Array("string", "array", "struct", "timestamp", "date", "char")
dimensionType.exists(x => dimensionDatatype.toLowerCase.contains(x))
}
/**
* detects whether complex dimension is part of dictionary_exclude
*/
def isComplexDimDictionaryExclude(dimensionDataType: String): Boolean = {
val dimensionType = Array("array", "struct")
dimensionType.exists(x => x.equalsIgnoreCase(dimensionDataType))
}
/**
* detects whether datatype is part of dictionary_exclude
*/
def isDataTypeSupportedForDictionary_Exclude(columnDataType: String): Boolean = {
val dataTypes = Array("string")
dataTypes.exists(x => x.equalsIgnoreCase(columnDataType))
}
/**
* Extract the DbName and table name.
*
* @param tableNameParts
* @return
*/
protected def extractDbNameTableName(tableNameParts: Node): (Option[String], String) = {
val (db, tableName) =
tableNameParts.getChildren.asScala.map {
case Token(part, Nil) => cleanIdentifier(part)
} match {
case Seq(tableOnly) => (None, tableOnly)
case Seq(databaseName, table) => (Some(convertDbNameToLowerCase(databaseName)), table)
}
(db, tableName)
}
/**
* This method will convert the database name to lower case
*
* @param dbName
* @return String
*/
protected def convertDbNameToLowerCase(dbName: String) = {
dbName.toLowerCase
}
/**
* This method will convert the database name to lower case
*
* @param dbName
* @return Option of String
*/
protected def convertDbNameToLowerCase(dbName: Option[String]): Option[String] = {
dbName match {
case Some(databaseName) => Some(convertDbNameToLowerCase(databaseName))
case None => dbName
}
}
protected def cleanIdentifier(ident: String): String = {
ident match {
case escapedIdentifier(i) => i
case plainIdent => plainIdent
}
}
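  // e.g. cleanIdentifier("`my col`") == "my col"; unescaped identifiers pass through.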
protected def getClauses(clauseNames: Seq[String], nodeList: Seq[ASTNode]): Seq[Option[Node]] = {
var remainingNodes = nodeList
val clauses = clauseNames.map { clauseName =>
val (matches, nonMatches) = remainingNodes.partition(_.getText.toUpperCase == clauseName)
remainingNodes = nonMatches ++ (if (matches.nonEmpty) {
matches.tail
} else {
Nil
})
matches.headOption
}
if (remainingNodes.nonEmpty) {
      sys.error(
        s"""Unhandled clauses:
           |You are likely trying to use an unsupported carbon feature.""".stripMargin)
}
clauses
}
object Token {
/** @return matches of the form (tokenName, children). */
def unapply(t: Any): Option[(String, Seq[ASTNode])] = {
t match {
case t: ASTNode =>
CurrentOrigin.setPosition(t.getLine, t.getCharPositionInLine)
Some((t.getText,
Option(t.getChildren).map(_.asScala.toList).getOrElse(Nil).asInstanceOf[Seq[ASTNode]]))
case _ => None
}
}
}
/**
* Extract the table properties token
*
* @param node
* @return
*/
protected def getProperties(node: Node): Seq[(String, String)] = {
node match {
case Token("TOK_TABLEPROPLIST", list) =>
list.map {
case Token("TOK_TABLEPROPERTY", Token(key, Nil) :: Token(value, Nil) :: Nil) =>
unquoteString(key) -> unquoteString(value)
}
}
}
protected def unquoteString(str: String) = {
str match {
case singleQuotedString(s) => s.toLowerCase()
case doubleQuotedString(s) => s.toLowerCase()
case other => other
}
}
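  // e.g. unquoteString("'ABC'") == "abc": quoted values are lower-cased,
  // anything unquoted is returned unchanged.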
protected def validateOptions(optionList: Option[List[(String, String)]]): Unit = {
// validate with all supported options
val options = optionList.get.groupBy(x => x._1)
val supportedOptions = Seq("DELIMITER", "QUOTECHAR", "FILEHEADER", "ESCAPECHAR", "MULTILINE",
"COMPLEX_DELIMITER_LEVEL_1", "COMPLEX_DELIMITER_LEVEL_2", "COLUMNDICT",
"SERIALIZATION_NULL_FORMAT", "BAD_RECORDS_LOGGER_ENABLE", "BAD_RECORDS_ACTION",
"ALL_DICTIONARY_PATH", "MAXCOLUMNS", "COMMENTCHAR", "DATEFORMAT",
"SINGLE_PASS", "IS_EMPTY_DATA_BAD_RECORD", "SORT_SCOPE", "BATCH_SORT_SIZE_INMB"
)
var isSupported = true
val invalidOptions = StringBuilder.newBuilder
options.foreach(value => {
if (!supportedOptions.exists(x => x.equalsIgnoreCase(value._1))) {
isSupported = false
invalidOptions.append(value._1)
}
}
)
if (!isSupported) {
val errorMessage = "Error: Invalid option(s): " + invalidOptions.toString()
throw new MalformedCarbonCommandException(errorMessage)
}
// COLUMNDICT and ALL_DICTIONARY_PATH can not be used together.
if (options.exists(_._1.equalsIgnoreCase("COLUMNDICT")) &&
options.exists(_._1.equalsIgnoreCase("ALL_DICTIONARY_PATH"))) {
val errorMessage = "Error: COLUMNDICT and ALL_DICTIONARY_PATH can not be used together" +
" in options"
throw new MalformedCarbonCommandException(errorMessage)
}
if (options.exists(_._1.equalsIgnoreCase("MAXCOLUMNS"))) {
val maxColumns: String = options.get("maxcolumns").get.head._2
try {
maxColumns.toInt
} catch {
case ex: NumberFormatException =>
throw new MalformedCarbonCommandException(
"option MAXCOLUMNS can only contain integer values")
}
}
if (options.exists(_._1.equalsIgnoreCase("BAD_RECORDS_ACTION"))) {
val optionValue: String = options.get("bad_records_action").get.head._2
try {
LoggerAction.valueOf(optionValue.toUpperCase)
}
catch {
case e: IllegalArgumentException =>
throw new MalformedCarbonCommandException(
"option BAD_RECORDS_ACTION can have only either FORCE or IGNORE or REDIRECT")
}
}
if (options.exists(_._1.equalsIgnoreCase("IS_EMPTY_DATA_BAD_RECORD"))) {
val optionValue: String = options.get("is_empty_data_bad_record").get.head._2
if (!("true".equalsIgnoreCase(optionValue) || "false".equalsIgnoreCase(optionValue))) {
        throw new MalformedCarbonCommandException(
          "option IS_EMPTY_DATA_BAD_RECORD can only be either true or false")
}
}
if (options.exists(_._1.equalsIgnoreCase("SORT_SCOPE"))) {
val optionValue: String = options.get("sort_scope").get.head._2
if (!SortScopeOptions.isValidSortOption(optionValue)) {
        throw new MalformedCarbonCommandException(
          "option SORT_SCOPE can only be either BATCH_SORT or LOCAL_SORT or GLOBAL_SORT")
}
}
// check for duplicate options
val duplicateOptions = options filter {
case (_, optionlist) => optionlist.size > 1
}
val duplicates = StringBuilder.newBuilder
if (duplicateOptions.nonEmpty) {
duplicateOptions.foreach(x => {
duplicates.append(x._1)
}
)
val errorMessage = "Error: Duplicate option(s): " + duplicates.toString()
throw new MalformedCarbonCommandException(errorMessage)
}
}
protected lazy val dbTableIdentifier: Parser[Seq[String]] =
(ident <~ ".").? ~ ident ^^ {
case databaseName ~ tableName =>
if (databaseName.isDefined) {
Seq(databaseName.get, tableName)
} else {
Seq(tableName)
}
}
protected lazy val loadOptions: Parser[(String, String)] =
(stringLit <~ "=") ~ stringLit ^^ {
case opt ~ optvalue => (opt.trim.toLowerCase(), optvalue)
case _ => ("", "")
}
protected lazy val valueOptions: Parser[(Int, Int)] =
(numericLit <~ ",") ~ numericLit ^^ {
case opt ~ optvalue => (opt.toInt, optvalue.toInt)
case _ => (0, 0)
}
protected lazy val columnOptions: Parser[(String, String)] =
(stringLit <~ ",") ~ stringLit ^^ {
case opt ~ optvalue => (opt, optvalue)
case _ =>
throw new MalformedCarbonCommandException(s"value cannot be empty")
}
protected lazy val dimCol: Parser[Field] = anyFieldDef
protected lazy val primitiveTypes =
STRING ^^^ "string" | INTEGER ^^^ "integer" |
TIMESTAMP ^^^ "timestamp" | NUMERIC ^^^ "numeric" |
BIGINT ^^^ "bigint" | (SHORT | SMALLINT) ^^^ "smallint" |
INT ^^^ "int" | DOUBLE ^^^ "double" | FLOAT ^^^ "double" | decimalType |
DATE ^^^ "date" | charType
/**
   * Matching the char(n)/varchar(n) data type and returning the same.
*/
private lazy val charType =
(CHAR | VARCHAR ) ~ ("(" ~>numericLit <~ ")") ^^ {
case char ~ digit =>
s"$char($digit)"
}
/**
* Matching the decimal(10,0) data type and returning the same.
*/
private lazy val decimalType =
DECIMAL ~ (("(" ~> numericLit <~ ",") ~ (numericLit <~ ")")).? ^^ {
case decimal ~ precisionAndScale => if (precisionAndScale.isDefined) {
s"$decimal(${ precisionAndScale.get._1 }, ${ precisionAndScale.get._2 })"
} else {
s"$decimal(10,0)"
}
}
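  // e.g. DECIMAL alone parses as "decimal(10,0)", while DECIMAL(12,4) keeps its
  // declared precision and scale.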
protected lazy val nestedType: Parser[Field] = structFieldType | arrayFieldType |
primitiveFieldType
lazy val anyFieldDef: Parser[Field] =
(ident | stringLit) ~ (":".? ~> nestedType) ~ (IN ~> (ident | stringLit)).? ^^ {
case e1 ~ e2 ~ e3 =>
Field(e1, e2.dataType, Some(e1), e2.children, null, e3)
}
protected lazy val primitiveFieldType: Parser[Field] =
primitiveTypes ^^ {
case e1 =>
Field("unknown", Some(e1), Some("unknown"), Some(null))
}
protected lazy val arrayFieldType: Parser[Field] =
((ARRAY ^^^ "array") ~> "<" ~> nestedType <~ ">") ^^ {
case e1 =>
Field("unknown", Some("array"), Some("unknown"),
Some(List(Field("val", e1.dataType, Some("val"),
e1.children))))
}
protected lazy val structFieldType: Parser[Field] =
((STRUCT ^^^ "struct") ~> "<" ~> repsep(anyFieldDef, ",") <~ ">") ^^ {
case e1 =>
Field("unknown", Some("struct"), Some("unknown"), Some(e1))
}
protected lazy val measureCol: Parser[Field] =
(ident | stringLit) ~ (INTEGER ^^^ "integer" | NUMERIC ^^^ "numeric" | SHORT ^^^ "smallint" |
BIGINT ^^^ "bigint" | DECIMAL ^^^ "decimal").? ~
(AS ~> (ident | stringLit)).? ~ (IN ~> (ident | stringLit)).? ^^ {
case e1 ~ e2 ~ e3 ~ e4 => Field(e1, e2, e3, Some(null))
}
private def normalizeType(field: Field): Field = {
val dataType = field.dataType.getOrElse("NIL")
dataType match {
case "string" =>
Field(field.column, Some("String"), field.name, Some(null), field.parent,
field.storeType, field.schemaOrdinal, field.precision, field.scale, field.rawSchema
)
case "smallint" =>
Field(field.column, Some("SmallInt"), field.name, Some(null),
field.parent, field.storeType, field.schemaOrdinal,
field.precision, field.scale, field.rawSchema)
case "integer" | "int" =>
Field(field.column, Some("Integer"), field.name, Some(null),
field.parent, field.storeType, field.schemaOrdinal,
field.precision, field.scale, field.rawSchema)
case "long" => Field(field.column, Some("Long"), field.name, Some(null), field.parent,
field.storeType, field.schemaOrdinal, field.precision, field.scale, field.rawSchema
)
case "double" => Field(field.column, Some("Double"), field.name, Some(null), field.parent,
field.storeType, field.schemaOrdinal, field.precision, field.scale, field.rawSchema
)
case "float" => Field(field.column, Some("Double"), field.name, Some(null), field.parent,
field.storeType, field.schemaOrdinal, field.precision, field.scale, field.rawSchema
)
case "timestamp" =>
Field(field.column, Some("Timestamp"), field.name, Some(null),
field.parent, field.storeType, field.schemaOrdinal,
field.precision, field.scale, field.rawSchema)
case "numeric" => Field(field.column, Some("Numeric"), field.name, Some(null), field.parent,
field.storeType, field.schemaOrdinal, field.precision, field.scale, field.rawSchema
)
case "array" =>
Field(field.column, Some("Array"), field.name,
field.children.map(f => f.map(normalizeType(_))),
field.parent, field.storeType, field.schemaOrdinal,
field.precision, field.scale, field.rawSchema)
case "struct" =>
Field(field.column, Some("Struct"), field.name,
field.children.map(f => f.map(normalizeType(_))),
field.parent, field.storeType, field.schemaOrdinal,
field.precision, field.scale, field.rawSchema)
case "bigint" => Field(field.column, Some("BigInt"), field.name, Some(null), field.parent,
field.storeType, field.schemaOrdinal, field.precision, field.scale, field.rawSchema
)
case "decimal" => Field(field.column, Some("Decimal"), field.name, Some(null), field.parent,
field.storeType, field.schemaOrdinal, field.precision, field.scale, field.rawSchema
)
// checking if the nested data type contains the child type as decimal(10,0),
// if it is present then extracting the precision and scale. resetting the data type
// with Decimal.
case _ if dataType.startsWith("decimal") =>
val (precision, scale) = getScaleAndPrecision(dataType)
Field(field.column,
Some("Decimal"),
field.name,
Some(null),
field.parent,
field.storeType, field.schemaOrdinal, precision,
scale,
field.rawSchema
)
case _ =>
field
}
}
private def addParent(field: Field): Field = {
field.dataType.getOrElse("NIL") match {
case "Array" => Field(field.column, Some("Array"), field.name,
field.children.map(f => f.map(appendParentForEachChild(_, field.column))), field.parent,
field.storeType, field.schemaOrdinal, rawSchema = field.rawSchema)
case "Struct" => Field(field.column, Some("Struct"), field.name,
field.children.map(f => f.map(appendParentForEachChild(_, field.column))), field.parent,
field.storeType, field.schemaOrdinal, rawSchema = field.rawSchema)
case _ => field
}
}
private def appendParentForEachChild(field: Field, parentName: String): Field = {
field.dataType.getOrElse("NIL") match {
case "String" => Field(parentName + "." + field.column, Some("String"),
Some(parentName + "." + field.name.getOrElse(None)), Some(null), parentName)
case "SmallInt" => Field(parentName + "." + field.column, Some("SmallInt"),
Some(parentName + "." + field.name.getOrElse(None)), Some(null), parentName)
case "Integer" => Field(parentName + "." + field.column, Some("Integer"),
Some(parentName + "." + field.name.getOrElse(None)), Some(null), parentName)
case "Long" => Field(parentName + "." + field.column, Some("Long"),
Some(parentName + "." + field.name.getOrElse(None)), Some(null), parentName)
case "Double" => Field(parentName + "." + field.column, Some("Double"),
Some(parentName + "." + field.name.getOrElse(None)), Some(null), parentName)
case "Float" => Field(parentName + "." + field.column, Some("Double"),
Some(parentName + "." + field.name.getOrElse(None)), Some(null), parentName)
case "Timestamp" => Field(parentName + "." + field.column, Some("Timestamp"),
Some(parentName + "." + field.name.getOrElse(None)), Some(null), parentName)
case "Numeric" => Field(parentName + "." + field.column, Some("Numeric"),
Some(parentName + "." + field.name.getOrElse(None)), Some(null), parentName)
case "Array" => Field(parentName + "." + field.column, Some("Array"),
Some(parentName + "." + field.name.getOrElse(None)),
field.children
.map(f => f.map(appendParentForEachChild(_, parentName + "." + field.column))),
parentName)
case "Struct" => Field(parentName + "." + field.column, Some("Struct"),
Some(parentName + "." + field.name.getOrElse(None)),
field.children
.map(f => f.map(appendParentForEachChild(_, parentName + "." + field.column))),
parentName)
case "BigInt" => Field(parentName + "." + field.column, Some("BigInt"),
Some(parentName + "." + field.name.getOrElse(None)), Some(null), parentName)
case "Decimal" => Field(parentName + "." + field.column, Some("Decimal"),
Some(parentName + "." + field.name.getOrElse(None)), Some(null), parentName,
field.storeType, field.schemaOrdinal, field.precision, field.scale)
case _ => field
}
}
protected lazy val segmentId: Parser[String] =
numericLit ^^ { u => u } |
elem("decimal", p => {
p.getClass.getSimpleName.equals("FloatLit") ||
p.getClass.getSimpleName.equals("DecimalLit")
}) ^^ (_.chars)
/**
* This method will parse the given data type and validate against the allowed data types
*
* @param dataType
* @param values
* @return
*/
protected def parseDataType(dataType: String, values: Option[List[(Int, Int)]]): DataTypeInfo = {
dataType match {
case "bigint" | "long" =>
if (values.isDefined) {
throw new MalformedCarbonCommandException("Invalid data type")
}
DataTypeInfo(dataType)
case "decimal" =>
var precision: Int = 0
var scale: Int = 0
if (values.isDefined) {
precision = values.get(0)._1
scale = values.get(0)._2
} else {
throw new MalformedCarbonCommandException("Decimal format provided is invalid")
}
// precision should be > 0 and <= 38 and scale should be >= 0 and <= 38
if (precision < 1 || precision > 38) {
throw new MalformedCarbonCommandException("Invalid value for precision")
} else if (scale < 0 || scale > 38) {
throw new MalformedCarbonCommandException("Invalid value for scale")
}
DataTypeInfo("decimal", precision, scale)
case _ =>
throw new MalformedCarbonCommandException("Data type provided is invalid.")
}
}
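  // e.g. parseDataType("decimal", Some(List((12, 4)))) yields DataTypeInfo("decimal", 12, 4),
  // whereas "bigint"/"long" must come without a (precision, scale) pair.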
}
|
ksimar/incubator-carbondata
|
integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
|
Scala
|
apache-2.0
| 45,547
|
package picasso.analysis
import picasso.math._
import picasso.utils.{LogCritical, LogError, LogWarning, LogNotice, LogInfo, LogDebug, Logger, Misc, Config}
trait KarpMillerTree extends CoveringSet {
self : WSTS with WADL =>
sealed abstract class KMTree {
/** Returns the state/limit contained on that node */
def apply(): S
    /** Checks whether the tree covers some state.
     *  Since the tree is cut when nodes are subsumed,
     *  this method only makes sense when applied at the root. */
    def covers(state: S): Boolean = ordering.lteq(state, this()) || (children exists (_ covers state))
def ancestors: Seq[KMTree]
    /** Returns the ancestors (including this node) whose states are strictly smaller than s */
def ancestorSmaller(s: S): Seq[KMTree] = {
this.ancestorSmallerQuick(s, Set.empty[KMTree])
//ancestors.drop(1).par.filter(t => ordering.lt(t(), s)).seq
}
def ancestorSmallerQuick(s: S, toInclude: Set[KMTree]): Seq[KMTree]
/** List of children (modified inplace) */
protected var _children: List[KMNode] = Nil
def children = _children
def addChildren(c: KMNode) = _children = c :: _children
def pathCovering(s: S): Option[List[KMTree]] = {
if (ordering.lteq(s, this())) Some(List(this))
else ((None: Option[List[KMTree]]) /: children) ( (acc, child) => acc orElse (child.pathCovering(s) map (this::_)))
}
def extractCover: DownwardClosedSet[S] = (DownwardClosedSet(apply()) /: children)( (acc, c) => acc ++ c.extractCover )
def size: Int = (1 /: children)((acc,ch) => acc + ch.size)
    //covering edges: points to the node that made the exploration of this tree stop
var subsumed: Option[KMTree] = None
}
case class KMRoot(state: S) extends KMTree {
override def toString = "KMRoot("+state+")"
def apply() = state
def ancestors: Seq[KMTree] = Seq(this)
def ancestorSmallerQuick(s: S, toInclude: Set[KMTree]): Seq[KMTree] = {
if (toInclude(this) || ordering.lt(state, s)) Seq(this)
else Seq[KMTree]()
}
}
case class KMNode(parent: KMTree, by: T, state: S, acceleratedFrom: List[KMTree]) extends KMTree {
override def toString = "KMNode("+state+")"
def replaceParent(np: KMTree): KMNode = {
val newThis = KMNode(np, by, state, acceleratedFrom)
for (c <- children) newThis.addChildren(c.replaceParent(newThis))
newThis
}
def apply() = state
def ancestors: Seq[KMTree] = this +: parent.ancestors
def ancestorSmallerQuick(s: S, toInclude: Set[KMTree]): Seq[KMTree] = {
if (toInclude(this)) this +: parent.ancestorSmallerQuick(s, toInclude ++ acceleratedFrom)
else if (ordering.lt(state, s)) this +: parent.ancestorSmallerQuick(s, toInclude)
else parent.ancestorSmallerQuick(s, toInclude)
}
}
object TreePrinter {
private def add(s: StringBuilder, t: KMTree, indent: Int): Unit = {
      s append (" " * indent)
      s append t
      s append "\n"
t.children foreach (add(s,_,indent+2))
}
def print(t: KMTree) = {
val string = new StringBuilder()
add(string, t, 0)
string.toString
}
/* arg 1: the tree
* arg 2: the graph ID (starting by cluster_)
* arg 3: the prefix
* returns a subgraph
*/
type TreeToGV = (KMTree, String, String) => scala.text.Document
private def giveIDs(t: KMTree, prefix: String): Map[KMTree, String] = {
      val children = t.children.zipWithIndex.map{ case (c, i) =>
giveIDs(c, prefix + "_" + i)
}
(Map[KMTree, String]() /: children)(_ ++ _) + (t -> prefix)
}
private def makeEdges(t: KMTree, directory: Map[KMTree, String]): Seq[(String, String)] = {
val myId = directory(t)
t.children.flatMap( c => (myId, directory(c)) +: makeEdges(c, directory))
}
private def makeCoveringEdges(t: KMTree, directory: Map[KMTree, String]): Seq[(String, String)] = {
val childrenEdges = t.children.flatMap( c => makeCoveringEdges(c, directory))
t.subsumed match {
case Some(s) =>
(directory(t), directory(s)) +: childrenEdges
case None =>
childrenEdges
}
}
def toGraphviz(t: KMTree, nodeToGraph: TreeToGV): scala.text.Document = {
import scala.text.Document._
//step 1: assigns ids to the nodes in the tree
val withIDs = giveIDs(t, "cluster")
//step 2: make the subgraphs
val gv = for ( (tree, id) <- withIDs) yield nodeToGraph(tree, id, id + "__")
val oneDocG = gv.reduceRight(_ :/: _)
//step 3: the edges between clusters
val edges1 = makeEdges(t, withIDs).map{ case (a, b) => a :: " -> " :: b :: text(";") }
      val edges2 = makeCoveringEdges(t, withIDs).map{ case (a, b) => a :: " -> " :: b :: text("[color=\"#0000aa\"];") }
val oneDocE = (edges1 ++ edges2).foldRight(empty: scala.text.Document)(_ :/: _)
      //step 4: the whole graph
"digraph KMTree {" :/: nest(4, empty :/: oneDocG :/: oneDocE) :/: text("}")
}
def printGraphviz(t: KMTree, nodeToGraph: TreeToGV) = {
Misc.docToString(toGraphviz(t, nodeToGraph))
}
}
//logging part
final val logThresold = 10000
protected var time = java.lang.System.currentTimeMillis
protected var ticks = 0
protected def logIteration(tree: KMTree, current: KMTree, cover: DownwardClosedSet[S]) {
ticks += 1
val newTime = java.lang.System.currentTimeMillis
if (newTime - time > logThresold) {
      Logger("Analysis", LogInfo, "KMTree size " + tree.size +
                                  ",\tcover has size " + cover.size +
                                  ",\t current branch depth " + current.ancestors.size +
                                  ",\t ticks " + ticks)
Logger("Analysis", LogDebug, "Current cover is " + cover)
time = newTime
}
}
protected def expBackoff[A](seq: Seq[A]): Seq[A] = {
//Console.println("expBackoff: " + seq.size)
var count = 2
val buffer = scala.collection.mutable.Buffer.empty[A]
var idx = 0
while (idx < seq.size) {
buffer += seq(idx)
idx += 1 + scala.util.Random.nextInt(count)
count = count * 2
}
buffer
}
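  // e.g. from a long ancestor list this keeps element 0 and then jumps ahead by a
  // random amount whose bound doubles each step, so only O(log n) elements survive.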
protected def wideningPolicy(current: KMTree, t: T, s: S): KMNode = {
val acceleratedFrom = current.ancestorSmaller(s)
val reducedSeq = expBackoff(acceleratedFrom)
val s2 = (s /: reducedSeq)( (bigger, smaller) => widening(smaller(), bigger))
KMNode(current, t, s2, acceleratedFrom.toList)
}
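  // The successor state is widened (accelerated) against each strictly smaller
  // ancestor; expBackoff samples that ancestor list to keep the widening cost bounded.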
protected def oneStepPost(current: KMTree): scala.collection.GenSeq[(T, S)] = {
val possible = transitions.filter(_ isDefinedAt current()).par
val successors = possible.flatMap( t => t(current()).map(t -> _)).par
//Logger("Analysis", LogInfo, "#successors: " + successors.size)
if (Config.KM_fullTree) {
successors
} else {
//at that point keep only the greatest successors
val successors2 = DownwardClosedSet(successors.map(_._2).seq:_*).basis.toSeq
successors2.map(b => successors.find(_._2 == b).get)
}
}
  //TODO smarter search policy
  //When the depth of the tree increases, the search becomes very slow.
  //A periodic restart might help (keep the current cover, but drop the trees).
final val restartThresold = 600000
protected var sinceRestart = java.lang.System.currentTimeMillis
protected def start = sinceRestart = java.lang.System.currentTimeMillis
protected def checkRestart: Boolean = {
val newTime = java.lang.System.currentTimeMillis
if (newTime - sinceRestart > restartThresold) {
Logger("Analysis", LogInfo, "KMTree restarting.")
sinceRestart = newTime
true
} else {
false
}
}
  //TODO the termination of this algorithm is not guaranteed (but it should behave better in practice);
  //to guarantee termination, the restartThresold would have to be progressively increased.
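  /* Builds one Karp-Miller tree per element of the initial cover, with
   * periodic restarts. Returns the computed downward-closed cover together
   * with the roots of the constructed trees.
   */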
def buildTreeWithRestart(initCover: DownwardClosedSet[S]): (DownwardClosedSet[S], Seq[KMTree]) = {
val startTime = java.lang.System.currentTimeMillis
val roots = initCover.seq.toSeq.map(initState => KMRoot(initState))
var cover = initCover
val coverMap = scala.collection.mutable.HashMap[S, KMTree]()
val stack = scala.collection.mutable.Stack[KMTree]()
var cleanUpCounter = 0
val cleanUpThreshold = 1000
def periodicCleanUp {
cleanUpCounter += 1
if (cleanUpCounter > cleanUpThreshold) {
cleanUpCounter = 0
val unNeededKeys = coverMap.keys.filterNot(k => cover.basis.contains(k))
coverMap --= unNeededKeys
}
}
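    /* Drains the work stack: every pending node whose state is not yet
     * covered becomes a fresh root and is rebuilt with buildFromRoot;
     * restartMap remembers the original node so that the freshly grown
     * subtree can be grafted back onto the original tree afterwards.
     */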
def restart {
//fold over the tree and collect the parts to process:
val restartMap = scala.collection.mutable.HashMap[KMRoot, KMTree]()
val restartStub = scala.collection.mutable.Buffer[KMRoot]()
while (!stack.isEmpty) {
val current = stack.pop()
if (!cover(current())) {
current match {
case r @ KMRoot(_) => restartStub += r
case n @ KMNode(_, _, s, _) =>
val r = KMRoot(s)
restartStub += r
restartMap += (r -> n)
}
}
}
for (stub <- restartStub) {
//build from Root in restartStub
buildFromRoot(stub)
//glue back to the original tree
for (original <- restartMap.get(stub); c <- stub.children) {
original.addChildren(c.replaceParent(original))
}
}
}
def buildFromRoot(root: KMRoot, forceRoot: Boolean = false) {
Logger("Analysis", LogDebug, "starting from " + root())
assert(stack.isEmpty)
stack.push(root)
start
while (!stack.isEmpty) {
if (checkRestart) {
restart
} else {
//like the normal buildTree
val current = stack.pop()
logIteration(root, current, cover)
periodicCleanUp
cover.elementCovering(current()) match {
          case Some(elt) if !forceRoot || current != root => //covered: subsume, unless it is the forced root, which must still take its transitions
val by = coverMap(elt)
current.subsumed = Some(by)
case _ =>
cover = cover + current()
coverMap += (current() -> current)
val successors = oneStepPost(current).par
val nodes = successors.map { case (t, s) => wideningPolicy(current, t, s) }
//do this sequentially to avoid data races + use library sorting
val sortedNodes = current match {
case KMRoot(_) => nodes.seq
case KMNode(_, by, _, acceleratedFrom) =>
val scoredNodes = nodes.map( n => n -> transitionsAffinity(by, n.by) )
scoredNodes.seq.sortWith( (n1, n2) => n1._2 > n2._2 ).map(_._1) //TODO what about acceleration
}
sortedNodes.foreach( n => {
current.addChildren(n)
stack.push(n)
})
}
}
}
}
for (root <- roots) buildFromRoot(root, true)
val endTime = java.lang.System.currentTimeMillis
Logger("Analysis", LogInfo, "KMTree computed in " + ((endTime - startTime)/1000F) + " sec (cover of size "+cover.size+", K-M tree of size " + (0 /: roots)(_ + _.size) + ").")
Logger("Analysis", LogDebug, "KMTree are\\n" + roots.map(TreePrinter.print(_)).mkString("\\n"))
Logger("Analysis", LogInfo, "Checking fixed-point.")
if (checkFixedPoint(cover)) {
Logger("Analysis", LogInfo, "Fixed-point checked.")
} else {
Logger("Analysis", LogError, "Fixed-point checking failed.")
}
(cover, roots)
}
def buildTreeWithRestart(initState: S): (DownwardClosedSet[S], KMTree) = {
val (cover, trees) = buildTreeWithRestart(DownwardClosedSet(initState))
assert(trees.size == 1)
(cover, trees.head)
}
////////////////////////////////////////
// Getting a flat trace from the tree //
////////////////////////////////////////
private def toTrace(nodes: List[KMTree]): TransfiniteTrace[S,T] = {
    //TODO can the list have nested accelerations? how to flatten them?
    //The KMTree should terminate only on flattable systems, so flattening must be possible.
    //Outermost-to-innermost ordering is needed for WSTSs that are not strongly compatible.
    //(1) identify each loop by the pair (start, end)
    //(2) unfolding (to preserve strictness) depends on how much the outer loops will consume ...
    //This version does not handle nested loops.
val rawLoops = nodes flatMap (n => n match {
case KMNode(_, _, _, acceleratedFrom) => List((n, acceleratedFrom))
case _ => Nil
})
//raw loops are already ordered by loop ending.
val nodesArray = Array(nodes:_*)
def findIndex(n: KMTree) = nodesArray indexWhere (_ == n)
def findConfig(i: Int) = nodesArray(i)()
def path(from: Int, until: Int): Seq[T] = nodesArray.slice(from+1, until+1) map {
case KMNode(_, by, _, _) => by
case _ => sys.error("KMTree: root in a path")
}
val rawIndexedLoops = rawLoops map { case (end, starts) => (findIndex(end), starts map findIndex) }
    val paths = rawIndexedLoops map { case (end, starts) =>
      val loopPaths = starts map (path(_, end))
      (end, loopPaths map (p => Accelerate(p.toList)))
    }
    val finalPath = nodesArray.zipWithIndex flatMap {
      case (KMNode(_, by, _, acceleratedFrom), idx) =>
        Normal(List(by)) :: (paths find (_._1 == idx) map (_._2) getOrElse Nil)
      case (KMRoot(_), idx) => Nil
    }
val init = nodesArray(0)()
val emptyTrace: TransfiniteTrace[S,T] = TransfiniteTrace.empty(DownwardClosedSet(init))
val revTrace = ( emptyTrace /: finalPath)( (trace, w) => trace.prepend(postCover(trace.start, w), w) )
revTrace.reverse
}
def forwardCoveringWithTrace(initState: S, targetState: S): Option[TransfiniteTrace[S,T]] = {
//TODO stop early
val (_, tree) = buildTreeWithRestart(initState)
tree.pathCovering(targetState) map toTrace
}
def forwardCovering(initState: S, targetState: S): Boolean = {
    //TODO replace by forwardCoveringWithTrace(initState, targetState).isDefined
val (cover, tree) = buildTreeWithRestart(initState)
//tree.covers(targetState)
cover(targetState)
}
def computeTree(initState: S) = {
buildTreeWithRestart(initState)
}
def computeCover(initCover: DownwardClosedSet[S]) = {
val (cover, trees) = buildTreeWithRestart(initCover)
//tree.extractCover
cover
}
}
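
// Hypothetical usage sketch (assumes a concrete analysis object mixing in
// this trait for some WSTS, with `init: S` an initial state and `bad: S`
// a state whose coverability we want to decide):
//   val (cover, tree) = analysis.computeTree(init)
//   if (analysis.forwardCovering(init, bad))
//     analysis.forwardCoveringWithTrace(init, bad) foreach println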
|
dzufferey/picasso
|
core/src/main/scala/picasso/analysis/KarpMillerTree.scala
|
Scala
|
bsd-2-clause
| 14,503
|
package org.archive.archivespark.sparkling
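/** Snapshot/restore wrapper around a mutable property. The intent (inferred
  * from the name and from Serializable, not stated in the source) is to ship
  * driver-side state with a Spark closure: `save()` captures the current
  * value through the by-name getter, the value travels with the serialized
  * instance, and `restore()` pushes it back through the setter remotely.
  *
  * Hypothetical usage:
  * {{{
  * var level = "INFO"
  * val prop = new SparklingDistributedProp(level, v => level = v.toString)
  * prop.save() // driver side
  * // ... instance is serialized into a task closure ...
  * prop.restore() // executor side
  * }}}
  */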
class SparklingDistributedProp (get: => Any, set: Any => Unit) extends Serializable {
private var value: Any = _
def save(): Unit = value = get
def restore(): Unit = set(value)
}
|
helgeho/ArchiveSpark
|
src/main/scala/org/archive/archivespark/sparkling/SparklingDistributedProp.scala
|
Scala
|
mit
| 229
|
/*
* Copyright 2013 websudos ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.websudos.phantom.dsl.query
import org.joda.time.DateTime
import org.scalatest.{FlatSpec, Matchers}
import com.websudos.phantom.Implicits._
import com.websudos.phantom.tables.{Recipes, Recipe}
import com.websudos.util.testing._
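/* These are compile-time tests: each illegal query is written as a string
 * literal and checked with ScalaTest's `shouldNot compile` matcher (legal
 * ones with `should compile`), so the batch restrictions below are enforced
 * by the type system rather than discovered at runtime.
 */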
class BatchRestrictionTest extends FlatSpec with Matchers {
val s = Recipes
val b = BatchStatement
val d = new DateTime
it should "not allow using Select queries in a batch" in {
"BatchStatement().add(Primitives.select)" shouldNot compile
}
it should "not allow using a primary key in a conditional clause" in {
"""Recipes.update.where(_.url eqs "someUrl").modify(_.name setTo "test").onlyIf(_.id eqs secondary)""" shouldNot compile
}
it should "not allow using SelectWhere queries in a batch" in {
"BatchStatement().add(Primitives.select.where(_.pkey eqs gen[String]))" shouldNot compile
}
it should "not allow using Truncate queries in a batch" in {
"BatchStatement().add(Primitives.truncate)" shouldNot compile
}
it should "not allow using Create queries in a batch" in {
"BatchStatement().add(Primitives.create)" shouldNot compile
}
it should "allow setting a timestamp on a Batch query" in {
val url = gen[String]
"BatchStatement().timestamp(gen[DateTime].getMillis).add(Recipes.update.where(_.url eqs url).modify(_.description setTo Some(url)).timestamp(gen[DateTime].getMillis))" should compile
}
it should "allow setting a timestamp on an Update query" in {
val url = gen[String]
"Recipes.update.where(_.url eqs url).modify(_.description setTo Some(url)).timestamp(gen[DateTime].getMillis)" should compile
}
it should "allow setting a timestamp on a Compare-and-Set Update query" in {
val url = gen[String]
"Recipes.update.where(_.url eqs url).modify(_.description setTo Some(url)).onlyIf(_.description eqs Some(url)).timestamp(gen[DateTime].getMillis)" should compile
}
it should "allow using a timestamp on an Insert query" in {
val sample = gen[Recipe]
"Recipes.insert.value(_.url, sample.url).value(_.description, sample.description).timestamp(gen[DateTime].getMillis)" should compile
}
}
|
nosheenzaza/phantom-data-centric
|
phantom-dsl/src/test/scala/com/websudos/phantom/dsl/query/BatchRestrictionTest.scala
|
Scala
|
gpl-2.0
| 2,745
|