| code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5–1M) |
|---|---|---|---|---|---|
package com.bostontechnologies.quickfixs
sealed trait FixVersion
trait Fix50 extends FixVersion
trait Fix44 extends FixVersion
object FixVersion {
implicit object Fix50 extends Fix50
implicit object Fix44 extends Fix44
}
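// Hedged example (added for illustration; not part of the original file): the
// implicit objects above let an API demand a FIX version as implicit evidence.
// "beginString" and its return values are assumptions for illustration only.
object FixVersionExample {
def beginString[V <: FixVersion](implicit version: V): String = version match {
case _: Fix50 => "FIXT.1.1"
case _: Fix44 => "FIX.4.4"
}
val fix50: String = beginString[Fix50] // resolves FixVersion.Fix50 implicitly
}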
|
Forexware/quickfixs
|
src/main/scala/com/bostontechnologies/quickfixs/FixVersion.scala
|
Scala
|
apache-2.0
| 231
|
package cafesat
package api
import Formulas._
import asts.fol.Trees.{Or, And, freshPropositionalVariable}
import scala.language.implicitConversions
/** Contains helper functions for building [[Formulas.Formula Formula]].
*
* The basic building blocks of formulas are true/false literals and propositional
* variables. You get a [[Formulas.PropVar PropVar]] with [[FormulaBuilder.propVar propVar]] and
* [[FormulaBuilder.bool2formula bool2formula]] provides an implicit conversion from `Boolean`
* to [[Formulas.True True]]/[[Formulas.False False]].
*
* Once you have a [[Formulas.Formula Formula]] instance, you can combine it using its methods
* matching regular boolean operators. You can also use n-ary combinators
* [[FormulaBuilder.or or]] and [[FormulaBuilder.and and]].
* {{{
* import Formulas._
* import FormulaBuilder._
*
* val a: PropVar = propVar("a")
* val b: PropVar = propVar("b")
* val f: Formula = and(a, (!a || b), (!b || false))
* }}}
*/
object FormulaBuilder {
/** Builds a disjunction of formulas.
*
* @param fs sequence of formulas to be combined
* @return a new Formula instance that represents the disjunction of the input formulas
*/
def or(fs: Formula*): Formula = new Formula(Or(fs.map(_.formula): _*))
/** Builds a conjunction of formulas.
*
* @param fs sequence of formulas to be combined
* @return a new Formula instance that represents the conjunction of the input formulas
*/
def and(fs: Formula*): Formula = new Formula(And(fs.map(_.formula): _*))
/** Returns the formula representation of a `Boolean`.
*
* @param b a Boolean literal to wrap into a formula
* @return [[Formulas.True]] if input is `true` and [[Formulas.False]] if input is `false`
*/
implicit def bool2formula(b: Boolean): Formula = if(b) True else False
implicit def boolList2formulaList(bs: List[Boolean]): List[Formula] = bs.map(b => if(b) True else False)
/** Returns the formula representation of a propositional variable.
*
* The propositional variable is '''always fresh''' and unique, so there is no need to
* use a different name for each invocation. Keep a reference to the returned
* instance if you want to refer to it later in your code.
*
* @param prefix a String prefix to the name of that variable
* @return A fresh and unique propositional variable with a name starting with `prefix`
*/
def propVar(prefix: String = "P"): PropVar = new PropVar(freshPropositionalVariable(prefix))
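// Hedged example (added for illustration; not part of the original file):
// propVar is always fresh, so the same prefix yields distinct variables.
// "freshnessExample" is a hypothetical helper name.
def freshnessExample(): Boolean = {
val p1 = propVar("p")
val p2 = propVar("p")
p1 != p2 // expected: true, since each call returns a fresh variable
}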
}
|
regb/cafesat
|
src/main/scala/cafesat/api/FormulaBuilder.scala
|
Scala
|
mit
| 2,550
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kudu.spark.kudu
import java.math.BigDecimal
import java.util.Date
import scala.collection.JavaConverters._
import scala.collection.immutable.IndexedSeq
import org.apache.spark.SparkConf
import org.apache.kudu.ColumnSchema.ColumnSchemaBuilder
import org.apache.kudu.ColumnTypeAttributes.ColumnTypeAttributesBuilder
import org.apache.kudu.client.CreateTableOptions
import org.apache.kudu.client.KuduClient
import org.apache.kudu.client.KuduTable
import org.apache.kudu.Schema
import org.apache.kudu.Type
import org.apache.kudu.test.KuduTestHarness
import org.apache.kudu.util.DecimalUtil
import org.apache.spark.sql.SparkSession
import org.junit.After
import org.junit.Before
import org.junit.Rule
import org.scalatest.junit.JUnitSuite
import scala.annotation.meta.getter
trait KuduTestSuite extends JUnitSuite {
var ss: SparkSession = _
var kuduClient: KuduClient = _
var table: KuduTable = _
var kuduContext: KuduContext = _
val tableName: String = "test"
val simpleTableName: String = "simple-test"
lazy val schema: Schema = {
val columns = List(
new ColumnSchemaBuilder("key", Type.INT32).key(true).build(),
new ColumnSchemaBuilder("c1_i", Type.INT32).build(),
new ColumnSchemaBuilder("c2_s", Type.STRING).nullable(true).build(),
new ColumnSchemaBuilder("c3_double", Type.DOUBLE).build(),
new ColumnSchemaBuilder("c4_long", Type.INT64).build(),
new ColumnSchemaBuilder("c5_bool", Type.BOOL).build(),
new ColumnSchemaBuilder("c6_short", Type.INT16).build(),
new ColumnSchemaBuilder("c7_float", Type.FLOAT).build(),
new ColumnSchemaBuilder("c8_binary", Type.BINARY).build(),
new ColumnSchemaBuilder("c9_unixtime_micros", Type.UNIXTIME_MICROS)
.build(),
new ColumnSchemaBuilder("c10_byte", Type.INT8).build(),
new ColumnSchemaBuilder("c11_decimal32", Type.DECIMAL)
.typeAttributes(
new ColumnTypeAttributesBuilder()
.precision(DecimalUtil.MAX_DECIMAL32_PRECISION)
.build()
)
.build(),
new ColumnSchemaBuilder("c12_decimal64", Type.DECIMAL)
.typeAttributes(
new ColumnTypeAttributesBuilder()
.precision(DecimalUtil.MAX_DECIMAL64_PRECISION)
.build()
)
.build(),
new ColumnSchemaBuilder("c13_decimal128", Type.DECIMAL)
.typeAttributes(
new ColumnTypeAttributesBuilder()
.precision(DecimalUtil.MAX_DECIMAL128_PRECISION)
.build()
)
.build()
)
new Schema(columns.asJava)
}
lazy val simpleSchema: Schema = {
val columns = List(
new ColumnSchemaBuilder("key", Type.INT32).key(true).build(),
new ColumnSchemaBuilder("val", Type.STRING).nullable(true).build()).asJava
new Schema(columns)
}
val tableOptions: CreateTableOptions = {
val bottom = schema.newPartialRow() // Unbounded.
val middle = schema.newPartialRow()
middle.addInt("key", 50)
val top = schema.newPartialRow() // Unbounded.
new CreateTableOptions()
.setRangePartitionColumns(List("key").asJava)
.addRangePartition(bottom, middle)
.addRangePartition(middle, top)
.setNumReplicas(1)
}
val appID: String = new Date().toString + math
.floor(math.random * 10E4)
.toLong
.toString
val conf: SparkConf = new SparkConf()
.setMaster("local[*]")
.setAppName("test")
.set("spark.ui.enabled", "false")
.set("spark.app.id", appID)
// Ensure the annotation is applied to the getter and not the field
// or else JUnit will complain that the Rule must be public.
@(Rule @getter)
val harness = new KuduTestHarness()
@Before
def setUpBase(): Unit = {
ss = SparkSession.builder().config(conf).getOrCreate()
kuduContext = new KuduContext(harness.getMasterAddressesAsString, ss.sparkContext)
// Spark tests should use the client from the kuduContext.
kuduClient = kuduContext.syncClient
table = kuduClient.createTable(tableName, schema, tableOptions)
val simpleTableOptions = new CreateTableOptions()
.setRangePartitionColumns(List("key").asJava)
.setNumReplicas(1)
kuduClient.createTable(simpleTableName, simpleSchema, simpleTableOptions)
}
@After
def tearDownBase(): Unit = {
if (ss != null) ss.stop()
KuduClientCache.clearCacheForTests()
}
def deleteRow(key: Int): Unit = {
val kuduSession = kuduClient.newSession()
val delete = table.newDelete()
delete.getRow.addInt(0, key)
kuduSession.apply(delete)
}
def insertRows(
targetTable: KuduTable,
rowCount: Int,
startIndex: Int = 0): IndexedSeq[(Int, Int, String, Long)] = {
val kuduSession = kuduClient.newSession()
val rows = Range(startIndex, rowCount + startIndex).map { i =>
val insert = targetTable.newInsert
val row = insert.getRow
row.addInt(0, i)
row.addInt(1, i)
row.addDouble(3, i.toDouble)
row.addLong(4, i.toLong)
row.addBoolean(5, i % 2 == 1)
row.addShort(6, i.toShort)
row.addFloat(7, i.toFloat)
row.addBinary(8, s"bytes $i".getBytes())
val ts = System.currentTimeMillis() * 1000
row.addLong(9, ts)
row.addByte(10, i.toByte)
row.addDecimal(11, BigDecimal.valueOf(i))
row.addDecimal(12, BigDecimal.valueOf(i))
row.addDecimal(13, BigDecimal.valueOf(i))
// Sprinkling some nulls so that queries see them.
val s = if (i % 2 == 0) {
row.addString(2, i.toString)
i.toString
} else {
row.setNull(2)
null
}
kuduSession.apply(insert)
(i, i, s, ts)
}
rows
}
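// Hedged usage sketch (added for illustration; not part of the original file):
// concrete tests mix in this trait and insert rows into the pre-created table.
def exampleUsage(): Unit = {
val rows = insertRows(table, rowCount = 10)
assert(rows.size == 10) // insertRows returns one tuple per inserted row
}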
}
|
InspurUSA/kudu
|
java/kudu-spark/src/test/scala/org/apache/kudu/spark/kudu/KuduTestSuite.scala
|
Scala
|
apache-2.0
| 6,470
|
package databench.util
import grizzled.slf4j.{ Logging => GrizzledLogging }
import grizzled.slf4j.Logger
trait Logging {
@transient
val logger = Logger(getClass)
protected def trace(s: String) =
logger.trace(s)
protected def debug(s: String) =
logger.debug(s)
protected def info(s: String) =
logger.info(s)
protected def warn(s: String) =
logger.warn(s)
protected def error(s: String) =
logger.error(s)
protected def logTrace[A](id: => String)(f: => A): A =
logLevel(id, (s: String) => trace(s))(f)
protected def logDebug[A](id: => String)(f: => A): A =
logLevel(id, (s: String) => debug(s))(f)
protected def logInfo[A](id: => String)(f: => A): A =
logLevel(id, (s: String) => info(s))(f)
protected def logWarn[A](id: => String)(f: => A): A =
logLevel(id, (s: String) => warn(s))(f)
protected def logError[A](id: => String)(f: => A): A =
logLevel(id, (s: String) => error(s))(f)
private[this] def logLevel[A](id: => String, level: (String) => Unit)(f: => A): A = {
level(id)
f
}
}
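// Hedged usage sketch (added for illustration; not part of the original file):
// the log* combinators log the id at the given level, then evaluate the block.
object LoggingExample extends Logging {
def run(): Int = logInfo("running expensive step") {
42 // "running expensive step" is logged at info level before evaluation
}
}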
|
databench/databench
|
databench-runner/src/main/scala/databench/util/Logging.scala
|
Scala
|
lgpl-2.1
| 1,147
|
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.repository.xml
import scala.Option.option2Iterable
import org.eclipse.jgit.lib.ObjectId
import org.eclipse.jgit.revwalk.RevTag
import com.normation.cfclerk.services.GitRepositoryProvider
import com.normation.cfclerk.services.GitRevisionProvider
import com.normation.rudder.domain.policies.GroupTarget
import com.normation.rudder.domain.policies.RuleTargetInfo
import com.normation.rudder.repository._
import com.normation.rudder.services.marshalling.NodeGroupCategoryUnserialisation
import com.normation.rudder.services.marshalling.NodeGroupUnserialisation
import com.normation.utils.Control._
import com.normation.utils.UuidRegex
import com.normation.utils.XmlUtils
import com.normation.rudder.migration.XmlEntityMigration
import net.liftweb.common.Box
class GitParseGroupLibrary(
categoryUnserialiser: NodeGroupCategoryUnserialisation
, groupUnserialiser : NodeGroupUnserialisation
, repo : GitRepositoryProvider
, xmlMigration : XmlEntityMigration
, libRootDirectory : String //directory path relative to the git root
, categoryFileName : String = "category.xml"
) extends ParseGroupLibrary {
def getArchive(archiveId:GitCommitId) = {
for {
treeId <- GitFindUtils.findRevTreeFromRevString(repo.db, archiveId.value)
archive <- getArchiveForRevTreeId(treeId)
} yield {
archive
}
}
private[this] def getArchiveForRevTreeId(revTreeId:ObjectId) = {
val root = {
val p = libRootDirectory.trim
if(p.size == 0) ""
else if(p.endsWith("/")) p
else p + "/"
}
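// e.g. (illustration, assumed input): libRootDirectory = "groups" yields
// root = "groups/", so categoryPath below becomes "groups/category.xml"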
//// BE CAREFUL: GIT DOES NOT LIST DIRECTORIES
val paths = GitFindUtils.listFiles(repo.db, revTreeId, List(root.substring(0, root.size-1)), Nil)
//directoryPath must end with "/"
def recParseDirectory(directoryPath:String) : Box[NodeGroupCategoryContent] = {
val categoryPath = directoryPath + categoryFileName
// that's the directory of a NodeGroupCategory.
// ignore files other than NodeGroup (UUID.xml) and directories
// don't forget to recurse sub-categories
for {
xml <- GitFindUtils.getFileContent(repo.db, revTreeId, categoryPath){ inputStream =>
XmlUtils.parseXml(inputStream, Some(categoryPath)) ?~! "Error when parsing file '%s' as a category".format(categoryPath)
}
categoryXml <- xmlMigration.getUpToDateXml(xml)
category <- categoryUnserialiser.unserialise(categoryXml) ?~! "Error when unserializing category for file '%s'".format(categoryPath)
groupFiles = {
paths.filter { p =>
p.size > directoryPath.size &&
p.startsWith(directoryPath) &&
p.endsWith(".xml") &&
UuidRegex.isValid(p.substring(directoryPath.size,p.size - 4))
}
}
groups <- sequence(groupFiles.toSeq) { groupPath =>
for {
xml2 <- GitFindUtils.getFileContent(repo.db, revTreeId, groupPath){ inputStream =>
XmlUtils.parseXml(inputStream, Some(groupPath)) ?~! "Error when parsing file '%s' as a group".format(groupPath)
}
groupXml <- xmlMigration.getUpToDateXml(xml2)
group <- groupUnserialiser.unserialise(groupXml) ?~! "Error when unserializing group for file '%s'".format(groupPath)
} yield {
group
}
}
subDirs = {
//we only want to keep paths that are non-empty directories with a categoryFileName in them
paths.flatMap { p =>
if(p.size > directoryPath.size && p.startsWith(directoryPath)) {
val split = p.substring(directoryPath.size).split("/")
if(split.size == 2 && (split(1) == categoryFileName) ) {
Some(directoryPath + split(0) + "/")
} else None
} else None
}
}
subCats <- sequence(subDirs.toSeq) { dir =>
recParseDirectory(dir)
}
} yield {
val s = subCats.toSet
val g = groups.toSet
val cat = category.copy(
children = s.map { _.category.id }.toList
, items = g.map { x =>
RuleTargetInfo(
target = GroupTarget(x.id)
, name = x.name
, description = x.description
, isEnabled = x.isEnabled
, isSystem = x.isSystem
)
}.toList
)
NodeGroupCategoryContent(cat, s, g)
}
}
recParseDirectory(root)
}
}
|
armeniaca/rudder
|
rudder-core/src/main/scala/com/normation/rudder/repository/xml/GitParseGroupLibrary.scala
|
Scala
|
gpl-3.0
| 6,856
|
package io.rout.routing
import com.twitter.finagle.{Filter, Service}
import com.twitter.finagle.http.{Request, Response}
import io.rout.ReqRead
case class FilterRequestToService[X](filter: Filter[Request, Response, X, Response] => RequestToService) {
def apply(x: Filter[Request,Response,X,Response]): RequestToService = filter(x)
def apply(x: ReqRead[X]): RequestToService = filter(x.toFilter)
}
case class RequestToService(request: Request => Boolean, service: Service[Request, Response]) extends Routable { self =>
val routes = Seq(self)
def filter(filter: Filter[Request,Response,Request,Response]) =
copy(service = filter andThen service)
def withFilter(filter: Filter[Request,Response,Request,Response]) =
copy(service = filter andThen service)
}
|
teodimoff/rOut
|
routing/src/io/rout/routing/RequestComposition.scala
|
Scala
|
apache-2.0
| 776
|
package info.hupel.isabelle
import scala.concurrent.{Future, Promise}
import scalaz.concurrent.Task
/**
* Tools for setting up an [[Setup Isabelle installation]] and managing
* [[Resources Isabelle sources]] and [[Resolver environments]] at runtime.
* Most functions in this package have some effect on the local file system and
* may download content from the Internet.
*/
package object setup {
implicit class TaskOps[T](task: Task[T]) {
def toScalaFuture: Future[T] = {
val promise = Promise[T]
task.unsafePerformAsync { res =>
res.fold(promise.failure, promise.success)
()
}
promise.future
}
}
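// Hedged usage sketch (added for illustration; not part of the original file):
// "exampleToScalaFuture" is a hypothetical helper name.
def exampleToScalaFuture: Future[Int] =
Task.delay(1 + 1).toScalaFuture // runs the Task and completes the Future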
}
|
larsrh/libisabelle
|
modules/setup/src/main/scala/package.scala
|
Scala
|
apache-2.0
| 662
|
package com.rikmuld.camping.features.blocks.campfire.cook
import com.rikmuld.camping.CampingMod
import com.rikmuld.camping.Definitions.Kit
import com.rikmuld.camping.common.inventory.{SlotItem, SlotItemMeta}
import com.rikmuld.camping.features.blocks.campfire.cook.equipment.CookingEquipment
import com.rikmuld.corerm.gui.container.ContainerSimple
import com.rikmuld.corerm.utils.WorldUtils
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.init.Items
import net.minecraft.inventory.IInventory
import net.minecraft.item.ItemStack
class ContainerCampfireCook(player: EntityPlayer, tile: IInventory) extends ContainerSimple[TileEntityCampfireCook](player) {
private var slots: Seq[SlotCooking] = _
override def playerInvY: Int =
106
override def addInventorySlots(): Unit = {
val thisPlayer = player
val instance = this
addSlotToContainer(new SlotItem(tile, 0, 80, 84, Items.COAL))
addSlotToContainer(new SlotItemMeta(tile, 1, 150, 9, CampingMod.OBJ.kit, Vector(Kit.SPIT, Kit.GRILL, Kit.PAN)) with SlotEquipment {
val player: EntityPlayer = thisPlayer
val container: ContainerCampfireCook = instance
})
slots = for (i <- 0 until 10)
yield new SlotCooking(tile, i + 2, 0, 0)
slots.foreach(slot => addSlotToContainer(slot))
setSlots()
}
override def initIInventory: TileEntityCampfireCook =
tile.asInstanceOf[TileEntityCampfireCook]
override def mergeToInventory(stack: ItemStack, original: ItemStack, index: Int): Boolean =
if (stack.getItem == Items.COAL) {
mergeItemStack(stack, 0, 1, false)
} else if (stack.getItem == CampingMod.OBJ.kit && stack.getItemDamage != Kit.USELESS && stack.getItemDamage != Kit.EMPTY) {
mergeItemStack(stack, 1, 2, false)
} else if(getIInventory.getEquipment.fold(false)(_.canCook(stack))) {
mergeItemStack(stack, 2, 2 + getIInventory.getEquipment.get.getMaxCookingSlot, false)
} else false
def getID: String =
tile.getName
def setSlots(): Unit =
for (i <- 0 until 10)
initializeSlot(i, slots(i), getIInventory.getEquipment)
def initializeSlot(index: Int, slot: SlotCooking, equipment: Option[CookingEquipment]): Unit = {
if (slot.active && !slot.getStack.isEmpty) {
WorldUtils.dropItemInWorld(getIInventory.getWorld, slot.getStack, getIInventory.getPos)
getIInventory.setInventorySlotContents(index + 2, ItemStack.EMPTY)
}
if (equipment.isEmpty)
slot.deActivate()
else if (index < equipment.get.getMaxCookingSlot) {
val position = equipment.get.getSlotPosition(index)
slot.activate(position._1, position._2, equipment.get, getIInventory)
}
}
}
|
Rikmuld/MC-Camping
|
scala/com/rikmuld/camping/features/blocks/campfire/cook/ContainerCampfireCook.scala
|
Scala
|
gpl-3.0
| 2,686
|
package skinny.util
import org.scalatest._
class TimeLoggingSpec extends FlatSpec with Matchers with TimeLogging {
behavior of "TimeLogging"
it should "work" in {
val result = warnElapsedTime(10) {
Thread.sleep(100)
"AAA"
}
result should equal("AAA")
}
}
|
holycattle/skinny-framework
|
common/src/test/scala/skinny/util/TimeLoggingSpec.scala
|
Scala
|
mit
| 290
|
package edu.scalanus.ir
object IrTreePrettyPrinter {
private val INDENT = " "
def apply(root: IrNode): String = {
val sb = new StringBuilder
apply(root, 0, sb)
sb.toString()
}
private def apply(node: IrNode, indent: Int, sb: StringBuilder): Unit = {
sb.append(INDENT * indent).append(node).append('\n')
node.childrenNodes.foreach {
apply(_, indent + 1, sb)
}
}
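// Illustrative output (assumption; node names are hypothetical): each node
// prints on its own line, indented two spaces per depth level, e.g.
//   IrProgram
//     IrStmt
//       IrLiteral(1)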
}
|
mkaput/scalanus
|
src/main/scala/edu/scalanus/ir/IrTreePrettyPrinter.scala
|
Scala
|
mit
| 409
|
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js Test Suite       **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013-2015, LAMP/EPFL  **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/      **
** /____/\___/_/ |_/____/_/ | |__/ /____/                              **
**                          |/____/                                    **
\*                                                                      */
package org.scalajs.testsuite.javalib
import scala.language.implicitConversions
import scala.collection.JavaConversions._
import scala.collection.mutable
import org.scalajs.jasminetest.JasmineTest
import scala.scalajs.runtime.UndefinedBehaviorError
import java.util.PriorityQueue
import java.util.Comparator
object PriorityQueueTest extends JasmineTest {
describe("java.util.PriorityQueue") {
it("should store and remove ordered integers") {
val pq = new PriorityQueue[Int]()
expect(pq.size()).toEqual(0)
expect(pq.add(111)).toBeTruthy
expect(pq.size()).toEqual(1)
expect(pq.add(222)).toBeTruthy
expect(pq.size()).toEqual(2)
expect(pq.poll()).toEqual(111)
expect(pq.size()).toEqual(1)
expect(pq.poll()).toEqual(222)
expect(pq.add(222)).toBeTruthy
expect(pq.add(222)).toBeTruthy
expect(pq.remove(222)).toBeTruthy
expect(pq.remove(222)).toBeTruthy
expect(pq.remove(222)).toBeFalsy
}
it("should store and remove ordered strings") {
val pq = new PriorityQueue[String]()
expect(pq.size()).toEqual(0)
expect(pq.add("aaa")).toBeTruthy
expect(pq.size()).toEqual(1)
expect(pq.add("bbb")).toBeTruthy
expect(pq.size()).toEqual(2)
expect(pq.poll()).toEqual("aaa")
expect(pq.size()).toEqual(1)
expect(pq.poll()).toEqual("bbb")
expect(pq.add("bbb")).toBeTruthy
expect(pq.add("bbb")).toBeTruthy
expect(pq.remove("bbb")).toBeTruthy
expect(pq.remove("bbb")).toBeTruthy
expect(pq.remove("bbb")).toBeFalsy
expect(pq.poll()).toBeNull
}
it("should store objects with custom comparables") {
case class Rect(x: Int, y: Int)
val areaComp = new Comparator[Rect] {
def compare(a: Rect, b: Rect): Int = (a.x*a.y) - (b.x*b.y)
}
val pq = new PriorityQueue[Rect](11, areaComp)
expect(pq.add(Rect(1,2))).toBeTruthy
expect(pq.add(Rect(2,3))).toBeTruthy
expect(pq.add(Rect(1,3))).toBeTruthy
val first = pq.poll()
expect(first.x).toEqual(1)
expect(first.y).toEqual(2)
expect(pq.remove(first)).toBeFalsy
val second = pq.peek()
expect(second.x).toEqual(1)
expect(second.y).toEqual(3)
expect(pq.remove(second)).toBeTruthy
expect(pq.remove(second)).toBeFalsy
val third = pq.peek()
expect(third.x).toEqual(2)
expect(third.y).toEqual(3)
expect(pq.remove(third)).toBeTruthy
expect(pq.remove(third)).toBeFalsy
expect(pq.isEmpty).toBeTruthy
expect(pq.peek() eq null).toBeTruthy
expect(pq.poll() eq null).toBeTruthy
}
it("should store ordered Double even in corner cases") {
val pq = new PriorityQueue[Double]()
expect(pq.add(1.0)).toBeTruthy
expect(pq.add(+0.0)).toBeTruthy
expect(pq.add(-0.0)).toBeTruthy
expect(pq.add(Double.NaN)).toBeTruthy
expect(pq.poll.equals(-0.0)).toBeTruthy
expect(pq.poll.equals(+0.0)).toBeTruthy
expect(pq.poll.equals(1.0)).toBeTruthy
expect(pq.peek.isNaN).toBeTruthy
expect(pq.remove(Double.NaN)).toBeTruthy
expect(pq.isEmpty).toBeTruthy
}
it("could be instantiated with a prepopulated Collection") {
val l = asJavaCollection(Set(1, 5, 2, 3, 4))
val pq = new PriorityQueue[Int](l)
expect(pq.size()).toEqual(5)
for (i <- 1 to 5) {
expect(pq.poll()).toEqual(i)
}
expect(pq.isEmpty).toBeTruthy
}
it("could be instantiated with a prepopulated PriorityQueue") {
val l = asJavaCollection(Set(1, 5, 2, 3, 4))
val pq1 = new PriorityQueue[Int](l)
val pq2 = new PriorityQueue[Int](pq1)
expect(pq1.size()).toEqual(5)
expect(pq2.size()).toEqual(5)
for (i <- 1 to 5) {
expect(pq1.poll()).toEqual(pq2.poll())
}
expect(pq1.isEmpty).toBeTruthy
expect(pq2.isEmpty).toBeTruthy
}
it("could be instantiated with a prepopulated SortedSet") {
val l = asJavaCollection(Set(1, 5, 2, 3, 4))
val ss = new java.util.concurrent.ConcurrentSkipListSet[Int](l)
val pq1 = new PriorityQueue[Int](l)
val pq2 = new PriorityQueue[Int](ss)
expect(pq1.size()).toEqual(5)
expect(pq2.size()).toEqual(5)
for (i <- 1 to 5) {
expect(pq1.poll()).toEqual(pq2.poll())
}
expect(pq1.isEmpty).toBeTruthy
expect(pq2.isEmpty).toBeTruthy
}
it("should be cleared in a single operation") {
val l = asJavaCollection(Set(1, 5, 2, 3, 4))
val pq = new PriorityQueue[Int](l)
expect(pq.size()).toEqual(5)
pq.clear()
expect(pq.size()).toEqual(0)
}
it("should add multiple elemnt in one operation") {
val l = asJavaCollection(Set(1, 5, 2, 3, 4))
val pq = new PriorityQueue[Int]()
expect(pq.size()).toEqual(0)
pq.addAll(l)
expect(pq.size()).toEqual(5)
pq.add(6)
expect(pq.size()).toEqual(6)
}
it("should check contained values even in double corner cases") {
val pq = new PriorityQueue[Double]()
expect(pq.add(11111.0)).toBeTruthy
expect(pq.size()).toEqual(1)
expect(pq.contains(11111.0)).toBeTruthy
expect(pq.iterator.next()).toEqual(11111.0)
expect(pq.add(Double.NaN)).toBeTruthy
expect(pq.size()).toEqual(2)
expect(pq.contains(Double.NaN)).toBeTruthy
expect(pq.contains(+0.0)).toBeFalsy
expect(pq.contains(-0.0)).toBeFalsy
expect(pq.remove(Double.NaN)).toBeTruthy
expect(pq.add(+0.0)).toBeTruthy
expect(pq.size()).toEqual(2)
expect(pq.contains(Double.NaN)).toBeFalsy
expect(pq.contains(+0.0)).toBeTruthy
expect(pq.contains(-0.0)).toBeFalsy
expect(pq.remove(+0.0)).toBeTruthy
expect(pq.add(-0.0)).toBeTruthy
expect(pq.size()).toEqual(2)
expect(pq.contains(Double.NaN)).toBeFalsy
expect(pq.contains(+0.0)).toBeFalsy
expect(pq.contains(-0.0)).toBeTruthy
expect(pq.add(+0.0)).toBeTruthy
expect(pq.add(Double.NaN)).toBeTruthy
expect(pq.contains(Double.NaN)).toBeTruthy
expect(pq.contains(+0.0)).toBeTruthy
expect(pq.contains(-0.0)).toBeTruthy
}
it("should retrieve the first(ordered) element") {
val pqInt = new PriorityQueue[Int]()
expect(pqInt.add(1000)).toBeTruthy
expect(pqInt.add(10)).toBeTruthy
expect(pqInt.poll()).toEqual(10)
val pqString = new PriorityQueue[String]()
expect(pqString.add("pluto")).toBeTruthy
expect(pqString.add("pippo")).toBeTruthy
expect(pqString.poll()).toEqual("pippo")
val pqDouble = new PriorityQueue[Double]()
expect(pqDouble.add(+10000.987)).toBeTruthy
expect(pqDouble.add(-0.987)).toBeTruthy
expect(pqDouble.poll()).toEqual(-0.987)
}
}
}
|
doron123/scala-js
|
test-suite/src/test/scala/org/scalajs/testsuite/javalib/PriorityQueueTest.scala
|
Scala
|
bsd-3-clause
| 7,337
|
package org.bitcoins.core.hd
/** The address types covered by BIP44, BIP49 and BIP84 */
sealed abstract class AddressType
object AddressType {
/** Uses BIP84 address derivation, gives bech32 address (`bc1...`) */
final case object SegWit extends AddressType
/** Uses BIP49 address derivation, gives SegWit addresses wrapped
* in P2SH addresses (`3...`)
*/
final case object NestedSegWit extends AddressType
/** Uses BIP44 address derivation (`1...`) */
final case object Legacy extends AddressType
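// Hedged sketch (added for illustration; not part of the original file):
// map each address type to its BIP derivation purpose number.
def purpose(tpe: AddressType): Int = tpe match {
case SegWit       => 84
case NestedSegWit => 49
case Legacy       => 44
}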
}
|
bitcoin-s/bitcoin-s-core
|
core/src/main/scala/org/bitcoins/core/hd/AddressType.scala
|
Scala
|
mit
| 525
|
package com.twitter.scalding
import com.stripe.dagon.{Dag, FunctionK, Literal, Memoize, PartialRule, Rule}
import com.twitter.scalding.ExecutionOptimizationRules.ZipMap.{MapLeft, MapRight}
import com.twitter.scalding.typed.functions.ComposedFunctions.ComposedMapFn
import com.twitter.scalding.typed.functions.{ComposedFunctions, Identity, Swap}
import scala.annotation.tailrec
import scala.concurrent.{ExecutionContext => ConcurrentExecutionContext, Future}
object ExecutionOptimizationRules {
type LiteralExecution[T] = Literal[Execution, T]
/**
* Since our Execution is covariant but the Literal is not, this is actually safe in this
* context, though not in general.
*/
def widen[T](l: LiteralExecution[_ <: T]): LiteralExecution[T] =
// to prove this is safe, see that if you have
// LiteralExecution[_ <: T] we can call .evaluate to get
// Execution[_ <: T] which due to covariance is
// Execution[T], and then using toLiteral we can get
// LiteralExecution[T]
//
// that would be wasteful to apply since the final
// result is identity.
l.asInstanceOf[LiteralExecution[T]]
def toLiteral: FunctionK[Execution, LiteralExecution] =
Memoize.functionK[Execution, LiteralExecution](
new Memoize.RecursiveK[Execution, LiteralExecution] {
override def toFunction[A] = {
case (e @ Execution.ReaderExecution, _) =>
Literal.Const(e)
case (e: Execution.FutureConst[a], _) =>
Literal.Const(e)
case (e: Execution.UniqueIdExecution[a], _) =>
Literal.Const(e)
case (e: Execution.FlowDefExecution, _) =>
Literal.Const(e)
case (e: Execution.WriteExecution[a], _) =>
Literal.Const(e)
case (e: Execution.GetCounters[a], f) =>
widen(
Literal.Unary[Execution, a, (a, ExecutionCounters)](
f(e.prev),
Execution.GetCounters(_: Execution[a])
)
)
case (e: Execution.ResetCounters[a], f) =>
Literal.Unary(f(e.prev), Execution.ResetCounters(_: Execution[a]))
case (e: Execution.WithNewCache[a], f) =>
Literal.Unary(f(e.prev), Execution.WithNewCache(_: Execution[a]))
case (e: Execution.TransformedConfig[a], f) =>
Literal.Unary(f(e.prev), Execution.TransformedConfig(_: Execution[a], e.fn))
case (e: Execution.OnComplete[a], f) =>
Literal.Unary(f(e.prev), Execution.OnComplete(_: Execution[a], e.fn))
case (e: Execution.RecoverWith[a], f) =>
Literal.Unary(f(e.prev), Execution.RecoverWith(_: Execution[a], e.fn))
case (e: Execution.Mapped[a, b], f) =>
Literal.Unary(f(e.prev), Execution.Mapped(_: Execution[a], e.fn))
case (e: Execution.FlatMapped[a, b], f) =>
Literal.Unary(f(e.prev), Execution.FlatMapped(_: Execution[a], e.fn))
case (e: Execution.Zipped[a, b], f) =>
Literal.Binary(f(e.one), f(e.two), Execution.Zipped(_: Execution[a], _: Execution[b]))
}
}
)
/**
* If `Execution` is `FlowDefExecution` or `WriteExecution`, we consider it slow,
* since it will schedule some expensive work, like a Hadoop or Spark job.
*
* If `Execution` is `FlatMapped` or `UniqueIdExecution`, we also consider it slow, since
* we don't know which execution it can produce.
*
* Everything else we consider fast compared to `FlowDefExecution` and `WriteExecution`.
*/
def isFastExecution[A](e: Execution[A]): Boolean =
areFastExecution(e :: Nil)
/**
* If `Execution` is `FlowDefExecution` or `WriteExecution`, we consider it slow,
* since it will schedule some expensive work, like a Hadoop or Spark job.
*
* If `Execution` is `FlatMapped` or `UniqueIdExecution`, we also consider it slow, since
* we don't know which execution it can produce.
*
* Everything else we consider fast compared to `FlowDefExecution` and `WriteExecution`.
*/
@tailrec
def areFastExecution(es: List[Execution[Any]]): Boolean =
es match {
case Nil => true
case h :: tail =>
h match {
case Execution.UniqueIdExecution(_) => false
case Execution.FlowDefExecution(_) => false
case Execution.WriteExecution(_, _, _) => false
case Execution.FlatMapped(_, _) => false
case Execution.ReaderExecution => areFastExecution(tail)
case Execution.FutureConst(_) => areFastExecution(tail)
case Execution.GetCounters(e) => areFastExecution(e :: tail)
case Execution.ResetCounters(e) => areFastExecution(e :: tail)
case Execution.WithNewCache(e) => areFastExecution(e :: tail)
case Execution.TransformedConfig(e, _) => areFastExecution(e :: tail)
case Execution.OnComplete(e, _) => areFastExecution(e :: tail)
case Execution.RecoverWith(e, _) => areFastExecution(e :: tail)
case Execution.Mapped(e, _) => areFastExecution(e :: tail)
case Execution.Zipped(one, two) => areFastExecution(one :: two :: tail)
}
}
/**
* This is a rather complex optimization rule, but also very important. After this runs, there will only be
* 1 WriteExecution in a graph, other than within recoverWith/flatMap/uniqueId nodes.
*
* This is the best we can do without running those functions. The motivation for this is to allow the user
* to write Executions as is convenient in code, but still have full access to a TypedPipe graph when
* planning a stage. Without this, we can wind up recomputing work that we don't need to do.
*/
case object ZipWrite extends Rule[Execution] {
import Execution._
/*
* First we define some case class functions to make sure
* the rule is reproducible and doesn't break equality
*/
case class Twist[A, B, C]() extends Function1[((A, B), C), (A, (B, C))] {
def apply(in: ((A, B), C)) =
(in._1._1, (in._1._2, in._2))
}
case class UnTwist[A, B, C]() extends Function1[(A, (B, C)), ((A, B), C)] {
def apply(in: (A, (B, C))) =
((in._1, in._2._1), in._2._2)
}
case class TwistSwap[A, B, C]() extends Function1[(A, (B, C)), (B, (A, C))] {
def apply(in: (A, (B, C))) =
(in._2._1, (in._1, in._2._2))
}
case class ComposedFn[A1, A2, A, B1, B2, B](
fn1: Function1[(A1, A2), A],
fn2: Function1[(B1, B2), B]
) extends Function1[((A1, B1), (A2, B2)), (A, B)] {
override def apply(v1: ((A1, B1), (A2, B2))): (A, B) =
(fn1(v1._1._1, v1._2._1), fn2(v1._1._2, v1._2._2))
}
case class ComposeWriteFn[A, B, C, D, E](
fn1: ((A, B, C, ConcurrentExecutionContext)) => Future[D],
fn2: ((A, B, C, ConcurrentExecutionContext)) => Future[E]
) extends Function1[(A, B, C, ConcurrentExecutionContext), Future[(D, E)]] {
def apply(tup: (A, B, C, ConcurrentExecutionContext)): Future[(D, E)] =
Execution.failFastZip(fn1(tup), fn2(tup))(tup._4)
}
def mergeWrite[A, B](w1: WriteExecution[A], w2: WriteExecution[B]): WriteExecution[(A, B)] = {
val newFn = ComposeWriteFn(w1.result, w2.result)
WriteExecution(w1.head, w1.tail ::: (w2.head :: w2.tail), newFn)
}
/**
* This is the fundamental type we use to optimize zips, basically we expand graphs of WriteExecution,
* Zipped, Mapped. Our goal is to optimize any `Execution`'s DAG to have at most one write.
*
* This is achieved by optimizing any `Execution` to either:
* - `NonWrite` execution
* - `Write` execution
* - composed execution which has both write and non write.
*/
private sealed trait FlattenedZip[+A]
private object FlattenedZip {
final case class NonWrite[T](nonWrite: Execution[T]) extends FlattenedZip[T]
final case class Write[T](write: WriteExecution[T]) extends FlattenedZip[T]
final case class Composed[T1, T2, T](
write: WriteExecution[T1],
nonWrite: Execution[T2],
compose: Function1[(T1, T2), T]
) extends FlattenedZip[T]
def toExecution[A](ex: FlattenedZip[A]): Execution[A] = ex match {
case NonWrite(nonWrite) => nonWrite
case Write(write) => write
case c @ Composed(_, _, _) => c.write.zip(c.nonWrite).map(c.compose)
}
def map[A, B](ex: FlattenedZip[A], fn: A => B): FlattenedZip[B] = ex match {
case NonWrite(nonWrite) =>
NonWrite(nonWrite.map(fn))
case Write(write) =>
Write(WriteExecution(write.head, write.tail, MapWrite.ComposeMap(write.result, fn)))
case Composed(write, nonWrite, compose) =>
Composed(write, nonWrite, ComposedMapFn(compose, fn))
}
def zip[A, B](left: FlattenedZip[A], right: FlattenedZip[B]): FlattenedZip[(A, B)] =
(left, right) match {
case (left @ NonWrite(_), right @ NonWrite(_)) =>
NonWrite(left.nonWrite.zip(right.nonWrite))
case (left @ NonWrite(_), right @ Write(_)) =>
Composed(right.write, left.nonWrite, Swap[B, A]())
case (left @ NonWrite(_), right @ Composed(_, _, _)) =>
zipNonWriteComposed(left, right)
case (left @ Write(_), right @ NonWrite(_)) =>
Composed(left.write, right.nonWrite, Identity[(A, B)]())
case (left @ Write(_), right @ Write(_)) =>
Write(mergeWrite(left.write, right.write))
case (left @ Write(_), right @ Composed(_, _, _)) =>
zipWriteComposed(left, right)
case (left @ Composed(_, _, _), right @ NonWrite(_)) =>
map(zipNonWriteComposed(right, left), Swap[B, A]())
case (left @ Composed(_, _, _), right @ Write(_)) =>
map(zipWriteComposed(right, left), Swap[B, A]())
case (left @ Composed(_, _, _), right @ Composed(_, _, _)) =>
Composed(
mergeWrite(left.write, right.write),
left.nonWrite.zip(right.nonWrite),
ComposedFn(left.compose, right.compose)
)
}
private def zipNonWriteComposed[A, B1, B2, B](
left: NonWrite[A],
right: Composed[B1, B2, B]
): Composed[B1, (B2, A), (A, B)] =
Composed(
right.write,
right.nonWrite.zip(left.nonWrite),
ComposedMapFn(ComposedMapFn(UnTwist(), MapLeft[(B1, B2), A, B](right.compose)), Swap[B, A]())
)
private def zipWriteComposed[A, B1, B2, B](
left: Write[A],
right: Composed[B1, B2, B]
): Composed[(A, B1), B2, (A, B)] =
Composed(
mergeWrite(left.write, right.write),
right.nonWrite,
ComposedMapFn(Twist(), MapRight[A, (B1, B2), B](right.compose))
)
/**
* Convert an Execution to the Flattened (tuple-ized) representation
*/
def apply[A](ex: Execution[A]): FlattenedZip[A] =
ex match {
case Zipped(left, right) => zip(apply(left), apply(right))
case Mapped(that, fn) => map(apply(that), fn)
case write @ WriteExecution(_, _, _) => FlattenedZip.Write(write)
case notZipMap => FlattenedZip.NonWrite(notZipMap)
}
}
/**
* Apply the optimization of merging all zipped/mapped WriteExecution into a single value. If ex is
* already optimal (0 or 1 write) return None
*/
def optimize[A](ex: Execution[A]): Option[Execution[A]] = {
def writes(execution: Execution[_]): Int = {
@tailrec
def loop(executions: List[Execution[_]], acc: Int): Int = executions match {
case Nil => acc
case head :: tail =>
head match {
case Zipped(left, right) => loop(left :: right :: tail, acc)
case Mapped(that, _) => loop(that :: tail, acc)
case WriteExecution(_, _, _) => loop(tail, acc + 1)
case _ => loop(tail, acc)
}
}
loop(execution :: Nil, 0)
}
// only optimize if there are 2 or more writes, otherwise we create an infinite loop
if (writes(ex) > 1)
Some(FlattenedZip.toExecution(FlattenedZip(ex)))
else
None
}
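// Sketch of the effect (assumption, added for illustration; not part of the
// original file): given two zipped writes,
//   val e = write1.zip(write2).map(fn)
// optimize(e) returns Some(e2) where e2 contains a single merged
// WriteExecution, so the planner sees the whole TypedPipe graph at once.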
def apply[A](on: Dag[Execution]) = {
case z @ Zipped(_, _) => optimize(z)
case _ =>
// since this optimization only applies to zips, there
// is no need to check on nodes that aren't zips.
None
}
}
object ZipMap extends PartialRule[Execution] {
case class MapLeft[S, T, B](fn: S => B) extends (((S, T)) => (B, T)) {
override def apply(st: (S, T)): (B, T) = (fn(st._1), st._2)
}
case class MapRight[S, T, B](fn: T => B) extends (((S, T)) => (S, B)) {
override def apply(st: (S, T)): (S, B) = (st._1, fn(st._2))
}
override def applyWhere[T](on: Dag[Execution]) = {
case Execution.Zipped(Execution.Mapped(left, fn), right) =>
Execution.Zipped(left, right).map(MapLeft(fn))
case Execution.Zipped(left, Execution.Mapped(right, fn)) =>
Execution.Zipped(left, right).map(MapRight(fn))
}
}
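// Rule sketch (assumption, added for illustration; not part of the original
// file): ZipMap pulls maps outside of zips so ZipWrite can later merge the
// underlying writes, e.g.
//   a.map(f).zip(b)  ==>  a.zip(b).map(MapLeft(f))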
object ZipFlatMap extends PartialRule[Execution] {
case class LeftZipRight[S, T, B](left: Execution[B], fn: S => Execution[T])
extends (S => Execution[(B, T)]) {
private val fun = fn.andThen(left.zip)
override def apply(s: S): Execution[(B, T)] = fun(s)
}
case class RightZipLeft[S, T, B](right: Execution[B], fn: S => Execution[T])
extends (S => Execution[(T, B)]) {
private val fun = fn.andThen(_.zip(right))
override def apply(s: S): Execution[(T, B)] = fun(s)
}
case class NestedZip[S, T, B, A](right: Execution[B], lfn: S => Execution[T], rfn: B => Execution[A])
extends (S => Execution[(T, A)]) {
private val fun = lfn.andThen { lr =>
Execution.FlatMapped(right, rfn.andThen(lr.zip))
}
override def apply(s: S): Execution[(T, A)] = fun(s)
}
override def applyWhere[T](on: Dag[Execution]) = {
case Execution.Zipped(Execution.FlatMapped(left, lfn), Execution.FlatMapped(right, rfn))
if isFastExecution(left) && isFastExecution(right) =>
Execution.FlatMapped(left, NestedZip(right, lfn, rfn))
case Execution.Zipped(Execution.FlatMapped(left, fn), right) if isFastExecution(left) =>
Execution.FlatMapped(left, RightZipLeft(right, fn))
case Execution.Zipped(left, Execution.FlatMapped(right, fn)) if isFastExecution(right) =>
Execution.FlatMapped(right, LeftZipRight(left, fn))
}
}
object MapWrite extends PartialRule[Execution] {
case class ComposeMap[A, B, C, D, E](
fn1: ((A, B, C, ConcurrentExecutionContext)) => Future[D],
fn2: D => E
) extends Function1[(A, B, C, ConcurrentExecutionContext), Future[E]] {
def apply(tup: (A, B, C, ConcurrentExecutionContext)): Future[E] =
fn1(tup).map(fn2)(tup._4)
}
override def applyWhere[T](on: Dag[Execution]) = {
case Execution.Mapped(Execution.WriteExecution(h, t, f1), f2) =>
Execution.WriteExecution(h, t, ComposeMap(f1, f2))
}
}
case object FuseMaps extends PartialRule[Execution] {
import Execution._
def applyWhere[A](on: Dag[Execution]) = { case Mapped(Mapped(ex, fn0), fn1) =>
Mapped(ex, ComposedFunctions.ComposedMapFn(fn0, fn1))
}
}
val std: Rule[Execution] =
Rule.orElse(
List(
ZipWrite,
MapWrite,
ZipMap,
ZipFlatMap,
FuseMaps
)
)
def apply[A](e: Execution[A], r: Rule[Execution]): Execution[A] =
try {
Dag.applyRule(e, toLiteral, r)
} catch {
case _: StackOverflowError => e
}
def stdOptimizations[A](e: Execution[A]): Execution[A] =
apply(e, std)
}
|
twitter/scalding
|
scalding-core/src/main/scala/com/twitter/scalding/ExecutionOptimizationRules.scala
|
Scala
|
apache-2.0
| 16,134
|
import scala.scalanative.native._, stdio._
object Hello {
def main(args: Array[String]): Unit = {
throw new Exception()
}
}
|
phdoerfler/scala-native
|
tests/run/errors-reported/changes/Hello-1.scala
|
Scala
|
bsd-3-clause
| 133
|
package org.phenoscape.kb.ingest.zfin
import org.apache.commons.lang3.StringUtils
import org.phenoscape.kb.ingest.util.Vocab
import org.phenoscape.scowl._
import org.semanticweb.owlapi.apibinding.OWLManager
import org.semanticweb.owlapi.model.{IRI, OWLAxiom}
import scala.collection.mutable
import scala.io.Source
object ZFINPreviousGeneNamesToOWL {
val factory = OWLManager.getOWLDataFactory
val hasRelatedSynonym = factory.getOWLAnnotationProperty(Vocab.HAS_RELATED_SYNONYM)
def convert(data: Source): Set[OWLAxiom] = data.getLines.flatMap(translate).toSet
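// Example input line (illustration, assumed tab-separated ZFIN export):
//   ZDB-GENE-000000-1 <tab> symbol <tab> name <tab> oldName
// items(0) is the gene ID and items(3) the previous name used below.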
def translate(line: String): Set[OWLAxiom] = {
val items = line.split("\\t")
if (!items(0).startsWith("ZDB-GENE")) {
Set.empty
} else {
val axioms = mutable.Set.empty[OWLAxiom]
val geneID = StringUtils.stripToNull(items(0))
val previousName = StringUtils.stripToNull(items(3))
val geneIRI = IRI.create("http://zfin.org/" + geneID)
val gene = Individual(geneIRI)
axioms.add(Declaration(gene))
axioms.add(geneIRI Annotation (hasRelatedSynonym, previousName))
axioms.toSet
}
}
}
|
phenoscape/phenoscape-kb-ingest
|
src/main/scala/org/phenoscape/kb/ingest/zfin/ZFINPreviousGeneNamesToOWL.scala
|
Scala
|
mit
| 1,120
|
package com.sksamuel.elastic4s.tasks
import com.sksamuel.elastic4s.ElasticsearchClientUri
import com.sksamuel.elastic4s.http.{ElasticDsl, HttpClient}
import com.sksamuel.elastic4s.testkit.SharedElasticSugar
import org.scalatest.{FlatSpec, Matchers}
class TasksTest extends FlatSpec with SharedElasticSugar with Matchers with ElasticDsl {
val http = HttpClient(ElasticsearchClientUri("elasticsearch://" + node.ipAndPort))
"list tasks" should "include all fields" in {
val resp = http.execute {
listTasks()
}.await
resp.nodes.head._2.host shouldBe "local"
resp.nodes.head._2.roles shouldBe Seq("master", "data", "ingest")
resp.nodes.head._2.tasks.values.forall(_.startTime.toMillis > 0) shouldBe true
resp.nodes.head._2.tasks.values.forall(_.runningTime.toMillis > 0) shouldBe true
}
}
|
aroundus-inc/elastic4s
|
elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/tasks/TasksTest.scala
|
Scala
|
apache-2.0
| 825
|
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 the "License";
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalawebtest.integration.gauge
import org.scalawebtest.integration.ScalaWebTestBaseSpec
import dotty.xml.interpolator.*
/**
* Some HTML element attributes, such as the class attribute, allow white space.
* Apart from separating multiple attribute values, this white space must be ignored.
*/
class WhitespaceInAttributesGaugeSpec extends ScalaWebTestBaseSpec {
path = "/nonNormalWhitespaceAttributes.jsp"
"An element with an empty attribute" should "be matched by a gauge" in {
fits (
xml"""
<div class="">
<div class="first"></div>
</div>
"""
)
fits (
xml"""
<div>
<div class="first"></div>
</div>
"""
)
}
"A preceding space" should "not affect a gauge" in {
fits(xml"""<div class="precedingSpace"></div>""")
}
"A surrounding space" should "not affect a gauge" in {
fits(xml"""<div class="surroundingSpace"></div>""")
}
"A preceding tab" should "not affect a gauge" in {
fits(xml"""<div class="precedingTab"></div>""")
}
"A surrounding tab" should "not affect a gauge" in {
fits(xml"""<div class="surroundingSpace"></div>""")
}
"White spaces" should "be treated as value separators" in {
fits(
xml"""<div class="space separated"></div>"""
)
}
"Line breaks" should "be treated like spaces in" in {
fits(xml"""<div class="line breaks"><div id="lineBreakChild"></div></div>""")
}
}
|
unic/ScalaWebTest
|
scalawebtest-integration/src/it/scala/org/scalawebtest/integration/gauge/WhitespaceInAttributesGaugeSpec.scala
|
Scala
|
apache-2.0
| 2,066
|
package ml.combust.mleap.core.feature
import ml.combust.mleap.core.types._
import org.scalatest.FunSpec
class RegexTokenizerModelSpec extends FunSpec {
describe("regex tokenizer model") {
val model = RegexTokenizerModel(regex = """\s""".r, matchGaps = true,
tokenMinLength = 3, lowercaseText = true)
it("has the right input schema") {
assert(model.inputSchema.fields ==
Seq(StructField("input", ScalarType.String.nonNullable)))
}
it("has the right output schema") {
assert(model.outputSchema.fields ==
Seq(StructField("output", ListType(BasicType.String))))
}
}
}
|
combust/mleap
|
mleap-core/src/test/scala/ml/combust/mleap/core/feature/RegexTokenizerModelSpec.scala
|
Scala
|
apache-2.0
| 627
|
package meowsynth
object Tagline {
val taglines = Seq(
"Meow meow meow!",
"You make me want to meow.",
"What's new, pussycat?",
"Purrfect harmony.",
"I'd like to teach the world to meow.",
"Built with cats!",
"Now with twice the meow.",
"No cats were harmed during the making of this music."
)
def random = taglines((math.random * taglines.length).toInt)
}
|
davegurnell/meowsynth
|
src/main/scala/meowsynth/Tagline.scala
|
Scala
|
apache-2.0
| 397
|
package fr.univ.nantes.roomanager.bean
import java.util.Calendar
import scala.beans.BeanProperty
/**
* @author Pierre Gaultier
* @author Alexis Giraudet
*/
class ReservationBean(@BeanProperty var id_salle: Int,
@BeanProperty var id_demandeur: Int,
@BeanProperty var id_typeManifestation: Int,
@BeanProperty var id_typeDuree: Int,
@BeanProperty var dateResa: Calendar) extends BaseBean {}
|
Giraudux/roomanager
|
src/main/scala/fr/univ/nantes/roomanager/bean/ReservationBean.scala
|
Scala
|
mit
| 482
|
/*
* Copyright 2016 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.regex.java8
import java.time.ZonedDateTime
import kantan.regex.{GroupDecoder, MatchDecoder}
import kantan.regex.java8.arbitrary._
import kantan.regex.laws.discipline.{DisciplineSuite, GroupDecoderTests, MatchDecoderTests, SerializableTests}
class ZonedDateTimeDecoderTests extends DisciplineSuite {
checkAll("GroupDecoder[ZonedDateTime]", GroupDecoderTests[ZonedDateTime].decoder[Int, Int])
checkAll("GroupDecoder[ZonedDateTime]", SerializableTests[GroupDecoder[ZonedDateTime]].serializable)
checkAll("MatchDecoder[ZonedDateTime]", MatchDecoderTests[ZonedDateTime].decoder[Int, Int])
checkAll("MatchDecoder[ZonedDateTime]", SerializableTests[MatchDecoder[ZonedDateTime]].serializable)
}
|
nrinaudo/kantan.regex
|
java8/src/test/scala/kantan/regex/java8/ZonedDateTimeDecoderTests.scala
|
Scala
|
apache-2.0
| 1,315
|
package blended.mgmt.repo.rest.internal
import blended.akka.http.HttpContext
import blended.akka.http.SimpleHttpContext
import blended.domino.TypesafeConfigWatching
import blended.mgmt.repo.ArtifactRepo
import blended.security.BlendedPermissionManager
import blended.util.logging.Logger
import domino.DominoActivator
import domino.service_watching.ServiceWatcherEvent.AddingService
import domino.service_watching.ServiceWatcherEvent.ModifiedService
import domino.service_watching.ServiceWatcherEvent.RemovedService
class ArtifactRepoRestActivator
extends DominoActivator
with TypesafeConfigWatching {
private[this] val log = Logger[ArtifactRepoRestActivator]
whenBundleActive {
log.info(s"Activating bundle ${bundleContext.getBundle().getSymbolicName()}")
whenServicePresent[BlendedPermissionManager] { mgr =>
val repoRoutes = new ArtifactRepoRoutesImpl(mgr)
onStop {
repoRoutes.clearRepos()
}
log.info("Registering route under context path: repo")
new SimpleHttpContext("repo", repoRoutes.httpRoute).providesService[HttpContext]
watchServices[ArtifactRepo] {
case AddingService(repo, r) =>
repoRoutes.addRepo(repo)
case ModifiedService(repo, r) =>
// nothing to do
case RemovedService(repo, r) =>
repoRoutes.removeRepo(repo)
}
}
}
}
|
lefou/blended
|
blended.mgmt.repo.rest/src/main/scala/blended/mgmt/repo/rest/internal/ArtifactRepoRestActivator.scala
|
Scala
|
apache-2.0
| 1,374
|
package net.liftmodules
import _root_.net.liftweb._
import http._
import common._
/**
* ==FoBo AngularJS Toolkit Module==
* This FoBo toolkit module provides AngularJS API and Resource components to the FoBo Module,
* but can also be used as-is, see below for setup information.
*
* If you are using this module via the FoBo/FoBo module see also [[net.liftmodules.fobo]] for setup information.
*/
package object foboajs {
override def toString() = {
foboajs.Toolkit.toString() + " " + foboajs.Resource
.toString() + " " + foboajs.API.toString()
}
/**
* Initiate FoBo's Angular Toolkit(s) in your bootstrap liftweb Boot.
* Using the Toolkit initiation you will bring in both the
* toolkit's resources and if present the FoBo API associated
* with the toolkit.
*
* @example
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.[Toolkit Object]
* }}}
* '''Note:''' To see available objects click on the round trait icon in the header of this page.
*/
sealed trait Toolkit
/**
* Initiate FoBo's Angular Resource(s) in your bootstrap liftweb Boot.
*
* @example
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.[Resource Object]
* }}}
* '''Note:''' To see available objects click on the round trait icon in the header of this page.
*/
sealed trait Resource
/**
* Initiate FoBo's Angular API in your bootstrap liftweb Boot.
*
* @example
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.API.init=fobo.API.[API Object]
* }}}
* '''Note:''' To see available objects click on the round trait icon in the header of this page.
*/
sealed trait API
/*=== Toolkit ============================================*/
object Toolkit extends Toolkit {
//we don't actually need to store the objects (for now) so let's just save
//the object name, we can easily change this if we need to
private type Store = List[String] //List[Toolkit]
private var store: Store = List()
def init: Store = store
def init_=(t: Toolkit): Store = {
store = if (store contains t.toString) store else t.toString :: store
store
}
override def toString() = "foboajs.Toolkit = " + store.toString()
/**
* Enable usage of FoBo's AngularJS API and resources version 1․5․3 in your bootstrap liftweb Boot.
* @version 1.5.3
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AngularJS153
* }}}
*
*/
case object AngularJS153 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AngularJS153
}
/**
* Enable usage of AngularJS i18n resource files in your bootstrap liftweb Boot.
* @version 1.5.3 i18n
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AngularJS153i18n
* }}}
*
*/
case object AngularJS153i18n extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AngularJS153i18n
}
/**
* Enable usage of FoBo's AngularJS API and resources version 1․4․8 in your bootstrap liftweb Boot.
* @version 1.4.8
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AngularJS148
* }}}
*
*/
case object AngularJS148 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AngularJS148
}
/**
* Enable usage of AngularJS i18n resource files in your bootstrap liftweb Boot.
* @version 1.4.8 i18n
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AngularJS148i18n
* }}}
*
*/
case object AngularJS148i18n extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AngularJS148i18n
}
/**
* Enable usage of FoBo's AngularJS API and resources version 1․4․1 in your bootstrap liftweb Boot.
* @version 1.4.1
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AngularJS141
* }}}
*
*/
case object AngularJS141 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AngularJS141
}
/**
* Enable usage of AngularJS i18n resource files in your bootstrap liftweb Boot.
* @version 1.4.1 i18n
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AngularJS141i18n
* }}}
*
*/
case object AngularJS141i18n extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AngularJS141i18n
}
/**
* Enable usage of FoBo's AngularJS API and resources version 1․3․15 in your bootstrap liftweb Boot.
* @version 1.3.15
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AngularJS1315
* }}}
*
*/
case object AngularJS1315 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AngularJS1315
}
/**
* Enable usage of AngularJS i18n resource files in your bootstrap liftweb Boot.
* @version 1.3.15 i18n
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AngularJS1315i18n
* }}}
*
*/
case object AngularJS1315i18n extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AngularJS1315i18n
}
/**
* Enable usage of FoBo's AngularJS API and resources version 1․2․19 in your bootstrap liftweb Boot.
* @version 1.2.19
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AngularJS1219
* }}}
*
*/
case object AngularJS1219 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AngularJS1219
}
/**
* Enable usage of AngularJS i18n resource files in your bootstrap liftweb Boot.
* @version 1.2.19 i18n
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AngularJS1219i18n
* }}}
*
*/
case object AngularJS1219i18n extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AngularJS1219i18n
}
/**
* Enable usage of FoBo's Angular Material API and resources version 1․0․8 in your bootstrap liftweb Boot.
* @version 1.0.8
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AJMaterial108
* }}}
*
*/
@deprecated("Use AJMaterial111 or later", "2.0.0")
case object AJMaterial108 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AJMaterial108
}
/**
* Enable usage of FoBo's Angular Material API and resources version 1․1․1 in your bootstrap liftweb Boot.
* @version 1.1.1
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AJMaterial111
* }}}
*
* @since v1.7
*/
case object AJMaterial111 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AJMaterial111
}
/**
* Enable usage of FoBo's Angular Material API and resources version 1․1․4 in your bootstrap liftweb Boot.
* @version 1.1.4
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AJMaterial114
* }}}
*
* @since v2.0
*/
case object AJMaterial114 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AJMaterial114
}
/**
* Enable usage of FoBo's Angular API and AngularUI-Bootstrap resources version 2․5․0 in your bootstrap liftweb Boot.
* @version 2.5.0
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AJSUIBootstrap250
* }}}
* @since v2.0
*/
case object AJSUIBootstrap250 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AJSUIBootstrap250
}
/**
    * Enable usage of FoBo's Angular API and AngularUI-Bootstrap resources version 0.10.0 in your bootstrap liftweb Boot.
* @version 0.10.0
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AJSUIBootstrap0100
* }}}
*
*/
@deprecated("Use AJSUIBootstrap250 or later", "2.0.0")
case object AJSUIBootstrap0100 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AJSUIBootstrap0100
}
/**
    * Enable usage of FoBo's Angular API and AngularUI-Bootstrap resources version 0.7.0 in your bootstrap liftweb Boot.
* @version 0.7.0
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AJSUIBootstrap070
* }}}
*
*/
@deprecated("Use AJSUIBootstrap0100 or later", "2.0.0")
case object AJSUIBootstrap070 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AJSUIBootstrap070
}
/**
    * Enable usage of FoBo's Angular API and AngularUI-Bootstrap resources version 0.2.0 in your bootstrap liftweb Boot.
* @version 0.2.0
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AJSUIBootstrap020
* }}}
*
*/
@deprecated("Use AJSUIBootstrap0100 or later", "2.0.0")
case object AJSUIBootstrap020 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AJSUIBootstrap020
}
/**
    * Enable usage of FoBo's Angular API and NG-Grid resources version 2.0.7 in your bootstrap liftweb Boot.
* @version 2.0.7
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AJSNGGrid207
* }}}
*
*/
case object AJSNGGrid207 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AJSNGGrid207
}
/**
    * Enable usage of FoBo's Angular API and UI-Grid resources version 3.0.7 in your bootstrap liftweb Boot.
* @version 3.0.7
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Toolkit.init=fobo.Toolkit.AJSUIGrid307
* }}}
*
*/
case object AJSUIGrid307 extends Toolkit {
foboajsapi.API.Angular1
foboajsres.Resource.AJSUIGrid307
}
}
/*=== Resource ============================================*/
object Resource extends Resource {
    //we don't actually need to store the objects (for now) so let's just save
    //the object name; we can easily change this if we need to
private type Store = List[String] //List[Resource]
private var store: Store = List()
def init: Store = store
def init_=(t: Resource): Store = {
store = if (store contains t.toString) store else t.toString :: store
store
}
override def toString() = "foboajs.Resource = " + store.toString()
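    // A minimal usage sketch (hypothetical Boot code): repeated assignments of the
    // same resource are de-duplicated by the store above, e.g.
    //   fobo.Resource.init = fobo.Resource.AngularJS153
    //   fobo.Resource.init = fobo.Resource.AngularJS153
    //   fobo.Resource.init            // => List("AngularJS153")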
/**
    * Enable usage of FoBo's AngularJS version 1.5.3 resource files in your bootstrap liftweb Boot.
* @version 1.5.3
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AngularJS153
* }}}
*
*/
case object AngularJS153 extends Resource {
foboajsres.Resource.AngularJS153
}
/**
    * Enable usage of AngularJS i18n resource files in your bootstrap liftweb Boot.
* @version 1.5.3 i18n
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AngularJS153i18n
* }}}
*
*/
case object AngularJS153i18n extends Resource {
foboajsres.Resource.AngularJS153i18n
}
/**
    * Enable usage of FoBo's AngularJS version 1.4.8 resource files in your bootstrap liftweb Boot.
* @version 1.4.8
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AngularJS148
* }}}
*
*/
case object AngularJS148 extends Resource {
foboajsres.Resource.AngularJS148
}
/**
    * Enable usage of AngularJS i18n resource files in your bootstrap liftweb Boot.
* @version 1.4.8 i18n
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AngularJS148i18n
* }}}
*
*/
case object AngularJS148i18n extends Resource {
foboajsres.Resource.AngularJS148i18n
}
/**
    * Enable usage of AngularJS version 1.4.1 resource files in your bootstrap liftweb Boot.
* @version 1.4.1
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
    * fobo.Resource.init=fobo.Resource.AngularJS141
* }}}
*
*/
case object AngularJS141 extends Resource {
foboajsres.Resource.AngularJS141
}
/**
    * Enable usage of AngularJS i18n resource files in your bootstrap liftweb Boot.
* @version 1.4.1 i18n
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
    * fobo.Resource.init=fobo.Resource.AngularJS141i18n
* }}}
*
*/
case object AngularJS141i18n extends Resource {
foboajsres.Resource.AngularJS141i18n
}
/**
    * Enable usage of AngularJS version 1.3.15 resource files in your bootstrap liftweb Boot.
* @version 1.3.15
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
    * fobo.Resource.init=fobo.Resource.AngularJS1315
* }}}
*
*/
case object AngularJS1315 extends Resource {
foboajsres.Resource.AngularJS1315
}
/**
* Enable usage of AngularJS i18n resource files in your bootstrap liftweb Boot.
* @version 1.3.15 i18n
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
    * fobo.Resource.init=fobo.Resource.AngularJS1315i18n
* }}}
*
*/
case object AngularJS1315i18n extends Resource {
foboajsres.Resource.AngularJS1315i18n
}
/**
    * Enable usage of AngularJS version 1.2.19 resource files in your bootstrap liftweb Boot.
* @version 1.2.19
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AngularJS1219
* }}}
*
*/
case object AngularJS1219 extends Resource {
foboajsres.Resource.AngularJS1219
}
/**
* Enable usage of AngularJS i18n resource files in your bootstrap liftweb Boot.
* @version 1.2.19 i18n
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AngularJS1219i18n
* }}}
*
*/
case object AngularJS1219i18n extends Resource {
foboajsres.Resource.AngularJS1219i18n
}
/**
    * Enable usage of Angular Material version 1.0.8 resource files in your bootstrap liftweb Boot.
* @version 1.0.8
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AJMaterial108
* }}}
*
*/
@deprecated("Use AJMaterial111 or later", "2.0.0")
case object AJMaterial108 extends Resource {
foboajsres.Resource.AJMaterial108
}
/**
    * Enable usage of Angular Material version 1.1.1 resource files in your bootstrap liftweb Boot.
* @version 1.1.1
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AJMaterial111
* }}}
*
*/
case object AJMaterial111 extends Resource {
foboajsres.Resource.AJMaterial111
}
/**
    * Enable usage of Angular Material version 1.1.4 resource files in your bootstrap liftweb Boot.
* @version 1.1.4
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AJMaterial114
* }}}
    * @since v2.0
*/
case object AJMaterial114 extends Resource {
foboajsres.Resource.AJMaterial114
}
/**
    * Enable usage of AngularUI-Bootstrap version 2.5.0 resource files in your bootstrap liftweb Boot.
* @version 2.5.0
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AJSUIBootstrap250
* }}}
* @since v2.0
*/
case object AJSUIBootstrap250 extends Resource {
foboajsres.Resource.AJSUIBootstrap250
}
/**
    * Enable usage of AngularUI-Bootstrap version 0.10.0 resource files in your bootstrap liftweb Boot.
* @version 0.10.0
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AJSUIBootstrap0100
* }}}
*
*/
@deprecated("Use AJSUIBootstrap250 or later", "2.0.0")
case object AJSUIBootstrap0100 extends Resource {
foboajsres.Resource.AJSUIBootstrap0100
}
/**
    * Enable usage of AngularUI-Bootstrap version 0.7.0 resource files in your bootstrap liftweb Boot.
* @version 0.7.0
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AJSUIBootstrap070
* }}}
*
*/
@deprecated("Use AJSUIBootstrap0100 or later", "2.0.0")
case object AJSUIBootstrap070 extends Resource {
foboajsres.Resource.AJSUIBootstrap070
}
/**
    * Enable usage of AngularUI-Bootstrap version 0.2.0 resource files in your bootstrap liftweb Boot.
* @version 0.2.0
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AJSUIBootstrap020
* }}}
*
*/
@deprecated("Use AJSUIBootstrap0100 or later", "2.0.0")
case object AJSUIBootstrap020 extends Resource {
foboajsres.Resource.AJSUIBootstrap020
}
/**
    * Enable usage of NG-Grid version 2.0.7 resource files in your bootstrap liftweb Boot.
* @version 2.0.7
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AJSNGGrid207
* }}}
*
*/
case object AJSNGGrid207 extends Resource {
foboajsres.Resource.AJSNGGrid207
}
/**
    * Enable usage of UI-Grid version 3.0.7 resource files in your bootstrap liftweb Boot.
* @version 3.0.7
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.Resource.init=fobo.Resource.AJSUIGrid307
* }}}
*
*/
case object AJSUIGrid307 extends Resource {
foboajsres.Resource.AJSUIGrid307
}
}
/*=== API ============================================*/
object API extends API {
    //we don't actually need to store the objects (for now) so let's just save
    //the object name; we can easily change this if we need to
private type Store = List[String] //List[API]
private var store: Store = List()
def init: Store = store
def init_=(t: API): Store = {
store = if (store contains t.toString) store else t.toString :: store
store
}
override def toString() = "foboajs.API = " + store.toString()
/**
    * Enable usage of FoBo's AngularJS API 1.x.x in your bootstrap liftweb Boot.
* @version 1.x.x
*
* @example
*
* {{{
* import net.liftmodules.{foboajs => fobo}
* :
* fobo.API.init=fobo.API.Angular1
* }}}
*
*/
case object Angular1 extends API {
foboajsapi.API.Angular1
}
}
}
|
karma4u101/FoBo
|
Angular/AngularJS/src/main/scala/net/liftmodules/foboajs/foboajs.scala
|
Scala
|
apache-2.0
| 21,668
|
package org.jetbrains.plugins.hocon.editor
import com.intellij.codeInsight.editorActions.moveUpDown.StatementUpDownMover.MoveInfo
import com.intellij.codeInsight.editorActions.moveUpDown.{LineMover, LineRange}
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.util.Key
import com.intellij.psi.{PsiElement, PsiFile}
import org.jetbrains.plugins.hocon.editor.HoconObjectEntryMover.{PrefixModKey, PrefixModification}
import org.jetbrains.plugins.hocon.psi._
import scala.annotation.tailrec
/**
* An implementation of [[com.intellij.codeInsight.editorActions.moveUpDown.StatementUpDownMover]] which can
* move entire HOCON object entries (object fields or include statements).
*
 * The entry being moved is required to be the only entry on the lines it occupies, i.e. there may not be any other
 * entry (or a left/right object brace) in the first or last line occupied by the entry being moved. If this
 * requirement is not met, the "move statement" action falls back to "move line".
* <p/>
 * If the entry is "movable" (as defined above), four scenarios are possible:
* <p/>
* 1. An object field may be "taken out" of its enclosing object field and have its prefix prepended, e.g.
* {{{
* a {
* |b = c
* }
* }}}
* After "move statement up":
* {{{
* a.|b = c
* a {
* }
* }}}
* <p/>
 * 2. An object field may be "inserted" into an adjacent object field and have its prefix removed, i.e. the reverse
 * of "taking out". This is only possible when the path of the target field is a prefix of the path of the source
 * field. Also, the caret must NOT be at a position inside the path prefix that needs to be removed.
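 * For illustration (a sketch mirroring the example above), "move statement down" on:
 * {{{
 * a.|b = c
 * a {
 * }
 * }}}
 * yields:
 * {{{
 * a {
 *   |b = c
 * }
 * }}}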
* <p/>
 * 3. If neither "taking out" nor "inserting" is possible, the object entry is simply swapped with its adjacent entry.
* <p/>
 * 4. If there is no adjacent entry to swap with, the object entry is simply swapped with the adjacent line.
*
* @author ghik
*/
class HoconObjectEntryMover extends LineMover {
override def checkAvailable(editor: Editor, file: PsiFile, info: MoveInfo, down: Boolean): Boolean =
super.checkAvailable(editor, file, info, down) && !editor.getSelectionModel.hasSelection &&
(file match {
case hoconFile: HoconPsiFile =>
checkAvailableHocon(editor, hoconFile, info, down)
case _ =>
false
})
private def checkAvailableHocon(editor: Editor, file: HoconPsiFile, info: MoveInfo, down: Boolean): Boolean = {
val document = editor.getDocument
val offset = editor.getCaretModel.getOffset
val element = file.findElementAt(offset)
if (element == null) return false
val currentLine = document.getLineNumber(offset)
def startLine(el: PsiElement) =
document.getLineNumber(el.getTextRange.getStartOffset)
def endLine(el: PsiElement) =
document.getLineNumber(el.getTextRange.getEndOffset)
def firstNonCommentLine(el: PsiElement) =
document.getLineNumber(el.getTextOffset)
def canInsertBefore(entry: HObjectEntry) = {
val lineStart = document.getLineStartOffset(startLine(entry))
entry.parent.exists(_.getTextRange.getStartOffset <= lineStart) &&
entry.previousEntry.forall(_.getTextRange.getEndOffset < lineStart)
}
def canInsertAfter(entry: HObjectEntry) = {
val lineEnd = document.getLineEndOffset(endLine(entry))
entry.parent.exists(_.getTextRange.getEndOffset >= lineEnd) &&
entry.nextEntry.forall(_.getTextRange.getStartOffset > lineEnd)
}
    // Checks that the lines occupied by this entry do not overlap with any adjacent entry or
    // some other part of the enclosing object
def movableLines(entry: HObjectEntry): Boolean =
canInsertBefore(entry) && canInsertAfter(entry)
// Finds ancestor object entry that can be "grabbed and moved" by current offset
@tailrec def enclosingAnchoredEntry(el: PsiElement): Option[HObjectEntry] = el match {
case _: PsiFile => None
case _ if firstNonCommentLine(el) != currentLine => None
case entry: HObjectEntry if movableLines(entry) => Some(entry)
case _ => enclosingAnchoredEntry(el.getParent)
}
def isByEdge(entry: HObjectEntry) = !entry.parent.exists(_.isToplevel) && { // todo suspicious
if (down) entry.nextEntry.forall(ne => entry.parent.exists(pp => startLine(ne) == endLine(pp)))
else entry.previousEntry.forall(pe => entry.parent.exists(pp => endLine(pe) == startLine(pp)))
}
def keyString(keyedField: HKeyedField) =
keyedField.key.map(_.getText).getOrElse("")
def inSingleLine(entry: HObjectEntry) =
firstNonCommentLine(entry) == endLine(entry)
def lineRange(el: PsiElement) =
new LineRange(startLine(el), endLine(el) + 1)
def singleLineRange(line: Int) =
new LineRange(line, line + 1)
def adjacentMovableEntry(entry: HObjectEntry) =
if (down) entry.nextEntry.filter(canInsertAfter)
else entry.previousEntry.filter(canInsertBefore)
def fieldToAscendOutOf(field: HObjectField): Option[(HObjectField, List[String])] =
if (isByEdge(field)) {
def edgeLine(element: PsiElement) =
if (down) endLine(element) else firstNonCommentLine(element)
def canInsert(field: HObjectField) =
if (down) canInsertAfter(field) else canInsertBefore(field)
field.parent.flatMap(_.prefixingField).map(_.enclosingObjectField)
.filter(of => field.parent.exists(pp => edgeLine(of) == edgeLine(pp)) && canInsert(of))
.map(of => (of, of.keyedField.fieldsInPathForward.map(keyString).toList))
} else None
def canInsertInto(field: HObjectField) =
!inSingleLine(field) && {
val lineToInsertAfter = if (down) firstNonCommentLine(field) else endLine(field) - 1
file.elementsAt(document.getLineEndOffset(lineToInsertAfter)).collectFirst {
case entries: HObjectEntries => entries.prefixingField.map(_.enclosingObjectField).contains(field)
case _: HKeyedField => false
} getOrElse false
}
def adjacentEntry(entry: HObjectEntry) =
if (down) entry.nextEntry else entry.previousEntry
def fieldToDescendInto(field: HObjectField): Option[(HObjectField, List[String])] =
for {
adjacentField <- adjacentEntry(field).collect({ case f: HObjectField => f }).filter(canInsertInto)
prefixToRemove <- {
val prefix = adjacentField.keyedField.fieldsInPathForward.map(keyString).toList
val removablePrefix = field.keyedField.fieldsInPathForward.takeWhile {
case prefixed: HPrefixedField => prefixed.subField.getTextRange.contains(offset)
case _ => false
}.map(keyString).toList
if (removablePrefix.startsWith(prefix)) Some(prefix) else None
}
} yield (adjacentField, prefixToRemove)
def trySpecializedFieldMove(objField: HObjectField) = {
val sourceRange = lineRange(objField)
fieldToAscendOutOf(objField).map { case (enclosingField, prefixToAdd) =>
val targetRange =
if (down) new LineRange(sourceRange.endLine, endLine(enclosingField) + 1)
else new LineRange(startLine(enclosingField), sourceRange.startLine)
val mod = PrefixModification(objField.getTextOffset, 0, prefixToAdd.mkString("", ".", "."))
(sourceRange, targetRange, Some(mod))
} orElse fieldToDescendInto(objField).map { case (adjacentField, prefixToRemove) =>
val targetRange =
if (down) new LineRange(sourceRange.endLine, firstNonCommentLine(adjacentField) + 1)
else new LineRange(endLine(adjacentField), sourceRange.startLine)
val prefixStr = prefixToRemove.mkString("", ".", ".")
val needsGuard = document.getCharsSequence.charAt(objField.getTextOffset + prefixStr.length).isWhitespace
val mod = PrefixModification(objField.getTextOffset, prefixStr.length, if (needsGuard) "\\"\\"" else "")
(sourceRange, targetRange, Some(mod))
}
}
def tryEntryMove(entry: HObjectEntry) = {
val sourceRange = lineRange(entry)
adjacentMovableEntry(entry).map { adjacentEntry =>
(sourceRange, lineRange(adjacentEntry), None)
} orElse {
val maxLinePos = editor.offsetToLogicalPosition(document.getTextLength)
val maxLine = if (maxLinePos.column == 0) maxLinePos.line else maxLinePos.line + 1
val nearLine = if (down) sourceRange.endLine else sourceRange.startLine - 1
if (nearLine >= 0 && nearLine < maxLine)
Some((sourceRange, singleLineRange(nearLine), None))
else None
}
}
val rangesOpt: Option[(LineRange, LineRange, Option[PrefixModification])] =
enclosingAnchoredEntry(element).flatMap {
case objField: HObjectField =>
trySpecializedFieldMove(objField) orElse tryEntryMove(objField)
case include: HInclude =>
tryEntryMove(include)
}
rangesOpt.foreach { case (source, target, prefixMod) =>
info.toMove = source
info.toMove2 = target
info.putUserData(PrefixModKey, prefixMod)
}
rangesOpt.isDefined
}
override def beforeMove(editor: Editor, info: MoveInfo, down: Boolean): Unit =
info.getUserData(PrefixModKey).foreach {
case PrefixModification(offset, length, replacement) =>
// we need to move caret manually when adding prefix exactly at caret position
val caretModel = editor.getCaretModel
val shouldMoveCaret = length == 0 && caretModel.getOffset == offset
editor.getDocument.replaceString(offset, offset + length, replacement)
if (shouldMoveCaret) {
caretModel.moveToOffset(caretModel.getOffset + replacement.length)
}
}
}
object HoconObjectEntryMover {
case class PrefixModification(offset: Int, length: Int, replacement: String)
val PrefixModKey = new Key[Option[PrefixModification]]("PrefixMod")
}
|
ghik/intellij-hocon
|
src/org/jetbrains/plugins/hocon/editor/HoconObjectEntryMover.scala
|
Scala
|
apache-2.0
| 9,904
|
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.nisp.helpers
import java.time.LocalDate
import play.api.http.Status
import play.api.libs.json.{Json, Reads}
import uk.gov.hmrc.domain.{Generator, Nino}
import scala.concurrent.Future
import scala.io.Source
import scala.util.Random
import scala.concurrent.ExecutionContext.Implicits.global
import uk.gov.hmrc.http.HttpResponse
import uk.gov.hmrc.nisp.models.citizen.CitizenDetailsResponse
import resource._
import uk.gov.hmrc.auth.core.retrieve.Name
import uk.gov.hmrc.nisp.controllers.auth.NispAuthedUser
import uk.gov.hmrc.nisp.models.UserName
object TestAccountBuilder {
def randomNino: Nino = Nino(new Generator(new Random()).nextNino.nino.replaceFirst("MA", "AA"))
  // lazy to avoid a forward reference to regularNino, which is defined below
  lazy val nispAuthedUser =
    NispAuthedUser(regularNino, LocalDate.now, UserName(Name(Some("first"), Some("name"))), None, None, false)
val nonExistentNino: Nino = randomNino
val regularNino: Nino = randomNino
val mqpNino: Nino = randomNino
val forecastOnlyNino: Nino = randomNino
val contractedOutBTestNino: Nino = Nino(randomNino.nino.replaceAll("[02468]", "1"))
val fullUserNino: Nino = randomNino
val blankNino: Nino = randomNino
val notFoundNino: Nino = randomNino
val spaUnderConsiderationNino: Nino = randomNino
val spaUnderConsiderationNoFlagNino: Nino = randomNino
val spaUnderConsiderationExclusionAmountDisNino: Nino = randomNino
val spaUnderConsiderationExclusionIoMNino: Nino = randomNino
val spaUnderConsiderationExclusionMwrreNino: Nino = randomNino
val spaUnderConsiderationExclusionOverSpaNino: Nino = randomNino
val spaUnderConsiderationExclusionMultipleNino: Nino = randomNino
val spaUnderConsiderationExclusionNoFlagNino: Nino = randomNino
val urBannerNino: Nino = randomNino
val noUrBannerNino: Nino = randomNino
val invalidKeyNino: Nino = randomNino
val cachedNino: Nino = randomNino
val noNameNino: Nino = randomNino
val weakNino: Nino = randomNino
val abroadNino: Nino = randomNino
val mqpAbroadNino: Nino = randomNino
val hrpNino: Nino = randomNino
val fillGapSingle: Nino = randomNino
val fillGapsMultiple: Nino = randomNino
val noQualifyingYears: Nino = randomNino
val backendNotFound: Nino = randomNino
val excludedAll: Nino = randomNino
val excludedAllButDead: Nino = randomNino
val excludedAllButDeadMCI: Nino = randomNino
val excludedDissonanceIomMwrreAbroad: Nino = randomNino
val excludedIomMwrreAbroad: Nino = randomNino
val excludedMwrreAbroad: Nino = randomNino
val excludedMwrre: Nino = randomNino
val excludedAbroad: Nino = randomNino
val internalServerError: Nino = randomNino
val mappedTestAccounts = Map(
regularNino -> "regular",
mqpNino -> "mqp",
forecastOnlyNino -> "forecastonly",
contractedOutBTestNino -> "contractedout",
fullUserNino -> "fulluser",
blankNino -> "blank",
invalidKeyNino -> "invalidkey",
noNameNino -> "noname",
abroadNino -> "abroad",
mqpAbroadNino -> "mqp_abroad",
hrpNino -> "homeresponsibilitiesprotection",
fillGapSingle -> "fillgaps-singlegap",
fillGapsMultiple -> "fillgaps-multiple",
noQualifyingYears -> "no-qualifying-years",
backendNotFound -> "backend-not-found",
spaUnderConsiderationNino -> "spa-under-consideration",
spaUnderConsiderationNoFlagNino -> "spa-under-consideration-no-flag",
spaUnderConsiderationExclusionAmountDisNino -> "spa-under-consideration-exclusion-amount-dis",
spaUnderConsiderationExclusionIoMNino -> "spa-under-consideration-exclusion-iom",
spaUnderConsiderationExclusionMwrreNino -> "spa-under-consideration-exclusion-mwrre",
spaUnderConsiderationExclusionOverSpaNino -> "spa-under-consideration-exclusion-over-spa",
spaUnderConsiderationExclusionMultipleNino -> "spa-under-consideration-exclusion-multiple",
spaUnderConsiderationExclusionNoFlagNino -> "spa-under-consideration-exclusion-no-flag",
urBannerNino -> "showurbanner",
noUrBannerNino -> "hideurbanner",
excludedAll -> "excluded-all",
excludedAllButDead -> "excluded-all-but-dead",
excludedAllButDeadMCI -> "excluded-all-but-dead-mci",
excludedDissonanceIomMwrreAbroad -> "excluded-dissonance-iom-mwrre-abroad",
excludedIomMwrreAbroad -> "excluded-iom-mwrre-abroad",
excludedMwrreAbroad -> "excluded-mwrre-abroad",
excludedMwrre -> "excluded-mwrre",
excludedAbroad -> "excluded-abroad"
)
def directJsonResponse(nino: Nino, api: String): CitizenDetailsResponse =
jsonResponseByType[CitizenDetailsResponse](nino, api)
def jsonResponseByType[A](nino: Nino, api: String)(implicit fjs: Reads[A]): A =
managed(Source.fromFile(s"test/resources/${mappedTestAccounts(nino)}/$api.json"))
.acquireAndGet(resource => Json.parse(resource.mkString.replace("<NINO>", nino.nino)).as[A])
def getRawJson(nino: Nino, api: String) =
managed(Source.fromFile(s"test/resources/${mappedTestAccounts(nino)}/$api.json"))
.acquireAndGet(resource => Json.parse(resource.mkString.replace("<NINO>", nino.nino)))
private def fileContents(filename: String): Future[String] = Future {
managed(Source.fromFile(filename)).acquireAndGet(_.mkString)
}
def jsonResponse(nino: Nino, api: String): Future[HttpResponse] =
fileContents(s"test/resources/${mappedTestAccounts(nino)}/$api.json").map { string: String =>
HttpResponse(Status.OK, Some(Json.parse(string.replace("<NINO>", nino.nino))))
}
}
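// A minimal usage sketch in a spec (the "citizen-details" api name is hypothetical):
//   val details = TestAccountBuilder.jsonResponseByType[CitizenDetailsResponse](
//     TestAccountBuilder.regularNino, "citizen-details")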
|
hmrc/nisp-frontend
|
test/uk/gov/hmrc/nisp/helpers/TestAccountBuilder.scala
|
Scala
|
apache-2.0
| 7,163
|
/**
* @author Yuuto
*/
package yuuto.enhancedinventories.compat.inventorytools
import net.minecraft.block.Block
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.init.Blocks
import net.minecraft.item.ItemStack
import net.minecraft.tileentity.TileEntity
import net.minecraft.world.World
import net.minecraftforge.common.util.ForgeDirection
import yuuto.inventorytools.api.dolly.BlockData
import yuuto.inventorytools.api.dolly.DollyHandlerRegistry
import yuuto.inventorytools.api.dolly.handlers.defaults.block.DefaultDollyBlockHandler
import yuuto.inventorytools.api.dolly.handlers.defaults.tile.IDollyTileHandler
import yuuto.enhancedinventories.proxy.ProxyCommon
import yuuto.enhancedinventories.block.BlockImprovedChest
import yuuto.enhancedinventories.block.BlockLocker
import yuuto.enhancedinventories.block.BlockCabinet
import yuuto.enhancedinventories.util.LogHelperEI
import yuuto.enhancedinventories.block.traits.TBlockReverseable
import net.minecraft.util.MathHelper
object ConnectiveDollyBlockHandler extends DefaultDollyBlockHandler{
override def onPlaced(blockData:BlockData, player:EntityPlayer, world:World, x:Int, y:Int, z:Int, side:Int, hitX:Float, hitY:Float, hitZ:Float):Boolean={
var targetSide:ForgeDirection = ForgeDirection.getOrientation(side);
var targX:Int=x;
var targY:Int=y;
var targZ:Int=z;
var targetBlock:Block = world.getBlock(targX, targY, targZ);
val blockStack:ItemStack=new ItemStack(blockData.block, 1, blockData.meta);
if(blockData.tileData != null && blockData.tileData.hasKey("stackData")){
blockStack.setTagCompound(blockData.tileData.getCompoundTag("stackData"))
}
if(targetBlock != null && !targetBlock.isReplaceable(world, targX, targY, targZ)){
if(targetBlock == Blocks.snow)
targetSide=ForgeDirection.UP;
if(targetBlock != Blocks.vine && targetBlock != Blocks.tallgrass && targetBlock != Blocks.deadbush){
targX+=targetSide.offsetX;
targY+=targetSide.offsetY;
targZ+=targetSide.offsetZ;
}
}
if(!player.canPlayerEdit(targX, targY, targZ, side, blockStack))
return false;
if(targY==255 && blockData.block.getMaterial().isSolid())
return false;
if(!(world.canPlaceEntityOnSide(blockData.block, targX, targY, targZ, false, side, player, blockStack)))
return false;
if(!canPlaceBlock(blockStack, blockData, player, world, targX, targY, targZ, side, hitX, hitY, hitZ))
return false;
var handler=DollyHandlerRegistry.getTileHandler(blockData.handlerName);
blockData.meta=blockData.block.onBlockPlaced(world, targX, targY, targZ, side, hitX, hitY, hitZ, blockData.meta);
if(!world.setBlock(targX, targY, targZ, blockData.block, blockData.meta, 3))
return false;
if(world.getBlock(targX, targY, targZ) != blockData.block)
return true;
if(handler!=null){
val tile:TileEntity=world.getTileEntity(targX, targY, targZ);
handler.onPlaced(tile, blockData, player, world, targX, targY, targZ, side, hitX, hitY, hitZ);
}
blockData.block.onBlockPlacedBy(world, targX, targY, targZ, player, blockStack);
blockData.block.onPostBlockPlaced(world, targX, targY, targZ, blockData.meta);
return true;
}
private def canPlaceBlock(blockStack:ItemStack, blockData:BlockData, player:EntityPlayer, world:World, x:Int, y:Int, z:Int, side:Int, hitX:Float, hitY:Float, hitZ:Float):Boolean={
if(blockData.block.isInstanceOf[BlockImprovedChest]){
return blockData.block.asInstanceOf[BlockImprovedChest].canPlaceBlockAt(blockStack, world, x, y, z);
}
if(blockData.block.isInstanceOf[BlockLocker]){
val l:Int = MathHelper.floor_double((player.rotationYaw * 4.0F / 360.0F) + 0.5D) & 3;
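      // note: l is the player's yaw quadrant (0-3); it is mapped below to a
      // horizontal ForgeDirection ordinal (2=NORTH, 5=EAST, 3=SOUTH, 4=WEST)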
var dir:ForgeDirection = ForgeDirection.UNKNOWN;
if(l == 0)
dir = ForgeDirection.getOrientation(2);
else if(l == 1)
dir = ForgeDirection.getOrientation(5);
else if(l == 2)
dir = ForgeDirection.getOrientation(3);
else if(l == 3)
dir = ForgeDirection.getOrientation(4);
if(dir == ForgeDirection.UNKNOWN)
return false;
val angle:Double = TBlockReverseable.getRotation(dir.getOpposite());
val yaw:Double = ((player.rotationYaw % 360) + 360) % 360;
return blockData.block.asInstanceOf[BlockLocker].canPlaceBlockAt(blockStack, world, x, y, z, TBlockReverseable.angleDifference(angle, yaw) > 0);
}
if(blockData.block.isInstanceOf[BlockCabinet]){
return blockData.block.asInstanceOf[BlockCabinet].canPlaceBlockAt(blockStack, world, x, y, z);
}
return false;
}
}
|
AnimeniacYuuto/EnhancedInventories
|
src/main/scala/yuuto/enhancedinventories/compat/inventorytools/ConnectiveDollyBlockHandler.scala
|
Scala
|
gpl-2.0
| 4,628
|
// Copyright 2016 Carl Pulley
import com.typesafe.sbt.SbtGhPages.ghpages
import com.typesafe.sbt.SbtGit.git
import com.typesafe.sbt.SbtSite.site
import sbt.Keys._
object ScalaDoc {
val settings =
site.settings ++
ghpages.settings ++
site.includeScaladoc() ++
Seq(git.remoteRepo := s"https://github.com/carlpulley/${name.value}.git")
}
|
carlpulley/docker-compose-testkit
|
project/ScalaDoc.scala
|
Scala
|
apache-2.0
| 361
|
/*
* Copyright (c) 2014-2015 by its authors. Some rights reserved.
* See the project homepage at: http://www.monifu.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monifu.reactive.internals.operators
import monifu.concurrent.Cancelable
import monifu.concurrent.atomic.padded.Atomic
import monifu.reactive.Ack.{Continue, Cancel}
import monifu.reactive.internals._
import monifu.reactive.exceptions.CompositeException
import monifu.reactive.observers.BufferedSubscriber
import monifu.reactive.{OverflowStrategy, Ack, Observer, Observable}
import monifu.reactive.observables.GroupedObservable
import scala.annotation.tailrec
import scala.concurrent.Future
import scala.util.control.NonFatal
private[reactive] object groupBy {
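  // A minimal usage sketch (method names assumed from monifu's public API):
  //   Observable.fromIterable(1 to 10)
  //     .groupBy(_ % 2)
  //     .mergeMap(group => group.map(i => (group.key, i)))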
/** Implementation for [[Observable.groupBy]] */
def apply[T,K](source: Observable[T], os: OverflowStrategy.Synchronous, keyFn: T => K): Observable[GroupedObservable[K,T]] = {
Observable.create { subscriber =>
import subscriber.{scheduler => s}
source.onSubscribe(new Observer[T] { self =>
private[this] var isDone = false
private[this] val downstream = BufferedSubscriber(subscriber, os)
private[this] val cacheRef = Atomic(Map.empty[K, Observer[T]])
@tailrec
private[this] def recycleKey(key: K): Unit = {
val current = cacheRef.get
if (!cacheRef.compareAndSet(current, current - key))
recycleKey(key)
}
@tailrec
def onNext(elem: T): Future[Ack] = {
if (isDone) Cancel else {
val cache = cacheRef.get
var streamError = true
val result = try {
val key = keyFn(elem)
streamError = false
if (cache.contains(key)) {
cache(key).onNext(elem)
// if downstream cancels, we retry
.onCancelStreamOnNext(self, elem)
}
else {
val onCancel = Cancelable(recycleKey(key))
val (observer, observable) =
GroupedObservable.broadcast[K,T](key, onCancel)
if (cacheRef.compareAndSet(cache, cache.updated(key, observer)))
downstream.onNext(observable).fastFlatMap {
case Continue =>
// pushing the first element
observer.onNext(elem).mapToContinue
case Cancel =>
val errors = completeAll()
if (errors.nonEmpty)
self.onError(CompositeException(errors))
Cancel
}
else
null // this will trigger a tailrec retry
}
}
catch {
case NonFatal(ex) =>
if (!streamError) Future.failed(ex) else {
self.onError(ex)
Cancel
}
}
if (result == null)
onNext(elem)
else
result
}
}
private[this] def completeAll(): Seq[Throwable] = {
val cache = cacheRef.get
if (!cacheRef.compareAndSet(cache, Map.empty))
completeAll()
else
cache.values.foldLeft(Vector.empty[Throwable]) { (acc, o) =>
try {
o.onComplete()
acc
}
catch {
case NonFatal(ex) =>
acc :+ ex
}
}
}
def onError(ex: Throwable): Unit = {
if (!isDone) {
isDone = true
val errors = completeAll()
if (errors.nonEmpty)
downstream.onError(CompositeException(ex +: errors))
else
downstream.onError(ex)
}
}
def onComplete(): Unit = {
if (!isDone) {
isDone = true
val errors = completeAll()
if (errors.nonEmpty)
downstream.onError(CompositeException(errors))
else
downstream.onComplete()
}
}
})
}
}
}
|
virtualirfan/monifu
|
monifu/shared/src/main/scala/monifu/reactive/internals/operators/groupBy.scala
|
Scala
|
apache-2.0
| 4,710
|
/*
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*/
package org.locationtech.geomesa.filter
import org.opengis.filter.expression.Literal
/**
 * Holder for a property name, its literal value(s), and a flag indicating the order
 * in which they appeared (flipped = literal before property) in the original filter
 */
case class PropertyLiteral(name: String,
literal: Literal,
secondary: Option[Literal],
flipped: Boolean = false)
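
// An illustrative reading of the fields (hypothetical filters): for "height > 5",
// name = "height" and literal = 5; for "5 < height" the operands arrive reversed,
// so flipped = true; for "height BETWEEN 2 AND 5", secondary holds the upper bound.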
|
vpipkt/geomesa
|
geomesa-filter/src/main/scala/org/locationtech/geomesa/filter/PropertyLiteral.scala
|
Scala
|
apache-2.0
| 689
|
package leo.modules.calculus
import leo.datastructures.{Signature, Term}
import leo.datastructures.Term._
import leo.modules.HOLSignature._
import leo.{Checked, LeoTestSuite}
/**
* Created by lex on 09.01.17.
*/
class MatchingTest extends LeoTestSuite {
private val IMPL = HOPatternMatching // HOMatching
private def preCheck(vargen: FreshVarGen, s: Term, t: Term)(implicit sig: Signature): Unit = {
assert(Term.wellTyped(s), s"${s.pretty(sig)} not well-typed")
assert(Term.wellTyped(t), s"${t.pretty(sig)} not well-typed")
}
private def shouldMatch(vargen: FreshVarGen, s: Term, t: Term)(implicit sig: Signature): Unit = {
val result = IMPL.matchTerms(vargen, s, t).iterator
assert(result.nonEmpty, "Terms should have matched")
result.foreach { case (termSubst, typeSubst) =>
assert(s.substitute(termSubst, typeSubst).etaExpand == t.etaExpand, s"substitution ${termSubst.pretty} does not match")
}
}
private def shouldNotMatch(vargen: FreshVarGen, s: Term, t: Term)(implicit sig: Signature): Unit = {
val result = IMPL.matchTerms(vargen, s, t).iterator
assert(result.isEmpty, "Terms should not have matched")
}
test("f(x,x) = f(a,a)", Checked){
implicit val s = getFreshSignature
val a = mkAtom(s.addUninterpreted("a", i))
val f = mkAtom(s.addUninterpreted("f", i ->: i ->: i))
val vargen = freshVarGenFromBlank
val x = vargen(i)
val t1 : Term = mkTermApp(f , Seq(x,x))
val t2 : Term = mkTermApp(f , Seq(a,a))
preCheck(vargen, t1, t2)
shouldMatch(vargen, t1, t2)
}
test("x(a) = f(a,a)", Checked){
implicit val s = getFreshSignature
val vargen = freshVarGenFromBlank
val a = mkAtom(s.addUninterpreted("a", i))
val f = mkAtom(s.addUninterpreted("f", i ->: i ->: i))
val t1 : Term = mkTermApp(vargen(i ->: i),a)
val t2 : Term = mkTermApp(f , Seq(a,a))
preCheck(vargen, t1, t2)
shouldMatch(vargen, t1, t2)
}
test("f(x,g(y)) = f(a,g(f(a,g(a))))", Checked){
implicit val s = getFreshSignature
val a = mkAtom(s.addUninterpreted("a", i))
val f = mkAtom(s.addUninterpreted("f", i ->: i ->: i))
val g = mkAtom(s.addUninterpreted("g", i ->: i))
val vargen = freshVarGenFromBlank
val x = vargen(i)
val y = vargen(i)
val t1 : Term = mkTermApp(f , Seq(x, mkTermApp(g, y)))
val t2 : Term = mkTermApp(f , Seq(a, mkTermApp(g, mkTermApp(f, Seq(a, mkTermApp(g, a))))))
preCheck(vargen, t1, t2)
shouldMatch(vargen, t1, t2)
}
test("f(x,g(x)) = f(a,g(f(a,a)))", Checked){
implicit val s = getFreshSignature
val a = mkAtom(s.addUninterpreted("a", i))
val f = mkAtom(s.addUninterpreted("f", i ->: i ->: i))
val g = mkAtom(s.addUninterpreted("g", i ->: i))
val vargen = freshVarGenFromBlank
val x = vargen(i)
val t1 : Term = mkTermApp(f , Seq(x, mkTermApp(g, x)))
val t2 : Term = mkTermApp(f , Seq(a, mkTermApp(g, mkTermApp(f, Seq(a,a)))))
preCheck(vargen, t1, t2)
shouldNotMatch(vargen, t1, t2)
}
test("f(x,g(y)) = f(a,g(f(z,z)))", Checked){
implicit val s = getFreshSignature
val a = mkAtom(s.addUninterpreted("a", i))
val f = mkAtom(s.addUninterpreted("f", i ->: i ->: i))
val g = mkAtom(s.addUninterpreted("g", i ->: i))
val vargen = freshVarGenFromBlank
val x = vargen(i)
val y = vargen(i)
val z = vargen(i)
val t1 : Term = mkTermApp(f , Seq(x, mkTermApp(g, y)))
val t2 : Term = mkTermApp(f , Seq(a, mkTermApp(g, mkTermApp(f, Seq(z,z)))))
preCheck(vargen, t1, t2)
shouldMatch(vargen, t1, t2)
}
test("f(x,g(a)) = f(a,g(z))", Checked){
implicit val s = getFreshSignature
val a = mkAtom(s.addUninterpreted("a", i))
val f = mkAtom(s.addUninterpreted("f", i ->: i ->: i))
val g = mkAtom(s.addUninterpreted("g", i ->: i))
val vargen = freshVarGenFromBlank
val x = vargen(i)
val z = vargen(i)
val t1 : Term = mkTermApp(f , Seq(x, mkTermApp(g, a)))
val t2 : Term = mkTermApp(f , Seq(a, mkTermApp(g, Seq(z))))
preCheck(vargen, t1, t2)
shouldNotMatch(vargen, t1, t2)
}
test("(f(a) = x) = (f(a) = g(a))", Checked){
import leo.modules.HOLSignature.{=== => EQ}
implicit val s = getFreshSignature
val a = mkAtom(s.addUninterpreted("a", i))
val f = mkAtom(s.addUninterpreted("f", i ->: i))
val g = mkAtom(s.addUninterpreted("g", i ->: i))
val vargen = freshVarGenFromBlank
val x = vargen(i)
val t1 : Term = EQ(mkTermApp(f , a), x)
val t2 : Term = EQ(mkTermApp(f , a), mkTermApp(g, a))
preCheck(vargen, t1, t2)
shouldMatch(vargen, t1, t2)
}
test("(p(a) = true) = (f(a) = g(a))", Checked){
import leo.modules.HOLSignature.{=== => EQ}
implicit val s = getFreshSignature
val a = mkAtom(s.addUninterpreted("a", i))
val f = mkAtom(s.addUninterpreted("f", i ->: i))
val g = mkAtom(s.addUninterpreted("g", i ->: i))
val p = mkAtom(s.addUninterpreted("p", i ->: o))
val vargen = freshVarGenFromBlank
val t1 : Term = EQ(mkTermApp(p , a), LitTrue)
val t2 : Term = EQ(mkTermApp(f , a), mkTermApp(g, a))
preCheck(vargen, t1, t2)
shouldNotMatch(vargen, t1, t2)
}
// Thanks to Petar for reporting this!
// p ⋅ (λ[$i]. (3:$i -> $o ⋅ (1:$i;⊥));λ[$i]. (3:$i -> $o ⋅ (1:$i;⊥));⊥)
// p ⋅ (λ[$i]. (2:$i -> $o ⋅ (1:$i;⊥));λ[$i]. ($false);⊥)
  test("p(X,X) = p(Y,lambda.F)", Checked){
import leo.modules.HOLSignature.{=== => EQ}
implicit val s = getFreshSignature
val p = mkAtom(s.addUninterpreted("p", (i ->: o) ->: (i ->: o) ->: o))
val vargen = freshVarGenFromBlank
val x = vargen(i ->: o)
val y = vargen(i ->: o)
val t1 : Term = p(y, y)
val t2 : Term = p(x, λ(i)(LitFalse))
preCheck(vargen, t1, t2)
shouldNotMatch(vargen, t1, t2)
}
}
|
leoprover/Leo-III
|
src/test/scala/leo/modules/calculus/MatchingTest.scala
|
Scala
|
bsd-3-clause
| 5,817
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command
import scala.collection.mutable
import org.json4s.JsonAST.{JArray, JString}
import org.json4s.jackson.JsonMethods._
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.catalyst.{FunctionIdentifier, SQLConfHelper, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.{GlobalTempView, LocalTempView, PersistedView, ViewType}
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType, SessionCatalog, TemporaryViewRelation}
import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, SubqueryExpression, UserDefinedExpression}
import org.apache.spark.sql.catalyst.plans.logical.{AnalysisOnlyCommand, LogicalPlan, Project, View}
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.NamespaceHelper
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}
import org.apache.spark.sql.types.{MetadataBuilder, StructType}
import org.apache.spark.sql.util.SchemaUtils
/**
 * Create or replace a view with the given query plan. When creating a permanent view, this
 * command generates some view-specific properties (e.g. view default database, view query
 * output column names) and stores them as table properties in the metastore.
*
* @param name the name of this view.
* @param userSpecifiedColumns the output column names and optional comments specified by users,
* can be Nil if not specified.
* @param comment the comment of this view.
* @param properties the properties of this view.
* @param originalText the original SQL text of this view, can be None if this view is created via
* Dataset API.
* @param plan the logical plan that represents the view; this is used to generate the logical
* plan for temporary view and the view schema.
 * @param allowExisting if true, and if the view already exists, do nothing; if false, and if
 *                the view already exists, throw an analysis exception.
 * @param replace if true, and if the view already exists, update it; if false, and if the view
 *                already exists, throw an analysis exception.
* @param viewType the expected view type to be created with this command.
* @param isAnalyzed whether this command is analyzed or not.
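 *
 * For example (an illustrative SQL sketch):
 * {{{
 *   CREATE OR REPLACE VIEW v0 (id COMMENT 'primary id') AS SELECT id FROM tbl;
 * }}}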
*/
case class CreateViewCommand(
name: TableIdentifier,
userSpecifiedColumns: Seq[(String, Option[String])],
comment: Option[String],
properties: Map[String, String],
originalText: Option[String],
plan: LogicalPlan,
allowExisting: Boolean,
replace: Boolean,
viewType: ViewType,
isAnalyzed: Boolean = false) extends RunnableCommand with AnalysisOnlyCommand {
import ViewHelper._
override protected def withNewChildrenInternal(
newChildren: IndexedSeq[LogicalPlan]): CreateViewCommand = {
assert(!isAnalyzed)
copy(plan = newChildren.head)
}
// `plan` needs to be analyzed, but shouldn't be optimized so that caching works correctly.
override def childrenToAnalyze: Seq[LogicalPlan] = plan :: Nil
def markAsAnalyzed(): LogicalPlan = copy(isAnalyzed = true)
if (viewType == PersistedView) {
require(originalText.isDefined, "'originalText' must be provided to create permanent view")
}
if (allowExisting && replace) {
throw QueryCompilationErrors.createViewWithBothIfNotExistsAndReplaceError()
}
private def isTemporary = viewType == LocalTempView || viewType == GlobalTempView
// Disallows 'CREATE TEMPORARY VIEW IF NOT EXISTS' to be consistent with 'CREATE TEMPORARY TABLE'
if (allowExisting && isTemporary) {
throw QueryCompilationErrors.defineTempViewWithIfNotExistsError()
}
// Temporary view names should NOT contain database prefix like "database.table"
if (isTemporary && name.database.isDefined) {
val database = name.database.get
throw QueryCompilationErrors.notAllowedToAddDBPrefixForTempViewError(database)
}
override def run(sparkSession: SparkSession): Seq[Row] = {
if (!isAnalyzed) {
throw QueryCompilationErrors.logicalPlanForViewNotAnalyzedError()
}
val analyzedPlan = plan
if (userSpecifiedColumns.nonEmpty &&
userSpecifiedColumns.length != analyzedPlan.output.length) {
throw QueryCompilationErrors.createViewNumColumnsMismatchUserSpecifiedColumnLengthError(
analyzedPlan.output.length, userSpecifiedColumns.length)
}
val catalog = sparkSession.sessionState.catalog
    // When creating a permanent view, it is not allowed to reference temporary objects.
// This should be called after `qe.assertAnalyzed()` (i.e., `child` can be resolved)
verifyTemporaryObjectsNotExists(catalog, isTemporary, name, analyzedPlan)
verifyAutoGeneratedAliasesNotExists(analyzedPlan, isTemporary, name)
if (viewType == LocalTempView) {
val aliasedPlan = aliasPlan(sparkSession, analyzedPlan)
val tableDefinition = createTemporaryViewRelation(
name,
sparkSession,
replace,
catalog.getRawTempView,
originalText,
analyzedPlan,
aliasedPlan)
catalog.createTempView(name.table, tableDefinition, overrideIfExists = replace)
} else if (viewType == GlobalTempView) {
val db = sparkSession.sessionState.conf.getConf(StaticSQLConf.GLOBAL_TEMP_DATABASE)
val viewIdent = TableIdentifier(name.table, Option(db))
val aliasedPlan = aliasPlan(sparkSession, analyzedPlan)
val tableDefinition = createTemporaryViewRelation(
viewIdent,
sparkSession,
replace,
catalog.getRawGlobalTempView,
originalText,
analyzedPlan,
aliasedPlan)
catalog.createGlobalTempView(name.table, tableDefinition, overrideIfExists = replace)
} else if (catalog.tableExists(name)) {
val tableMetadata = catalog.getTableMetadata(name)
if (allowExisting) {
// Handles `CREATE VIEW IF NOT EXISTS v0 AS SELECT ...`. Does nothing when the target view
// already exists.
} else if (tableMetadata.tableType != CatalogTableType.VIEW) {
throw QueryCompilationErrors.tableIsNotViewError(name)
} else if (replace) {
// Detect cyclic view reference on CREATE OR REPLACE VIEW.
val viewIdent = tableMetadata.identifier
checkCyclicViewReference(analyzedPlan, Seq(viewIdent), viewIdent)
        // uncache the cached data before replacing an existing view
logDebug(s"Try to uncache ${viewIdent.quotedString} before replacing.")
CommandUtils.uncacheTableOrView(sparkSession, viewIdent.quotedString)
// Handles `CREATE OR REPLACE VIEW v0 AS SELECT ...`
// Nothing we need to retain from the old view, so just drop and create a new one
catalog.dropTable(viewIdent, ignoreIfNotExists = false, purge = false)
catalog.createTable(prepareTable(sparkSession, analyzedPlan), ignoreIfExists = false)
} else {
// Handles `CREATE VIEW v0 AS SELECT ...`. Throws exception when the target view already
// exists.
throw QueryCompilationErrors.viewAlreadyExistsError(name)
}
} else {
// Create the view if it doesn't exist.
catalog.createTable(prepareTable(sparkSession, analyzedPlan), ignoreIfExists = false)
}
Seq.empty[Row]
}
/**
* If `userSpecifiedColumns` is defined, alias the analyzed plan to the user specified columns,
* else return the analyzed plan directly.
*/
private def aliasPlan(session: SparkSession, analyzedPlan: LogicalPlan): LogicalPlan = {
if (userSpecifiedColumns.isEmpty) {
analyzedPlan
} else {
val projectList = analyzedPlan.output.zip(userSpecifiedColumns).map {
case (attr, (colName, None)) => Alias(attr, colName)()
case (attr, (colName, Some(colComment))) =>
val meta = new MetadataBuilder().putString("comment", colComment).build()
Alias(attr, colName)(explicitMetadata = Some(meta))
}
session.sessionState.executePlan(Project(projectList, analyzedPlan)).analyzed
}
}
/**
   * Returns a [[CatalogTable]] that can be saved in the catalog. Generates the view-specific
   * properties (e.g. view default database, view query output column names), stores them as
   * properties in the CatalogTable, and also creates the proper schema for the view.
*/
private def prepareTable(session: SparkSession, analyzedPlan: LogicalPlan): CatalogTable = {
if (originalText.isEmpty) {
throw QueryCompilationErrors.createPersistedViewFromDatasetAPINotAllowedError()
}
val aliasedSchema = CharVarcharUtils.getRawSchema(
aliasPlan(session, analyzedPlan).schema)
val newProperties = generateViewProperties(
properties, session, analyzedPlan, aliasedSchema.fieldNames)
CatalogTable(
identifier = name,
tableType = CatalogTableType.VIEW,
storage = CatalogStorageFormat.empty,
schema = aliasedSchema,
properties = newProperties,
viewOriginalText = originalText,
viewText = originalText,
comment = comment
)
}
}
/**
 * Alter a view with the given query plan. If the view name contains a database prefix, this
 * command alters the permanent view matching the given name, or throws an exception if the view
 * does not exist. Otherwise, this command first tries to alter a temporary view; if no such view
 * exists, it tries a permanent view next, and if that does not exist either, it throws an
 * exception.
*
* @param name the name of this view.
* @param originalText the original SQL text of this view. Note that we can only alter a view by
* SQL API, which means we always have originalText.
* @param query the logical plan that represents the view; this is used to generate the new view
* schema.
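 *
 * For example (an illustrative SQL sketch):
 * {{{
 *   ALTER VIEW v0 AS SELECT id, name FROM tbl;
 * }}}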
*/
case class AlterViewAsCommand(
name: TableIdentifier,
originalText: String,
query: LogicalPlan,
isAnalyzed: Boolean = false) extends RunnableCommand with AnalysisOnlyCommand {
import ViewHelper._
override protected def withNewChildrenInternal(
newChildren: IndexedSeq[LogicalPlan]): AlterViewAsCommand = {
assert(!isAnalyzed)
copy(query = newChildren.head)
}
override def childrenToAnalyze: Seq[LogicalPlan] = query :: Nil
def markAsAnalyzed(): LogicalPlan = copy(isAnalyzed = true)
override def run(session: SparkSession): Seq[Row] = {
val isTemporary = session.sessionState.catalog.isTempView(name)
verifyTemporaryObjectsNotExists(session.sessionState.catalog, isTemporary, name, query)
verifyAutoGeneratedAliasesNotExists(query, isTemporary, name)
if (isTemporary) {
alterTemporaryView(session, query)
} else {
alterPermanentView(session, query)
}
Seq.empty[Row]
}
private def alterTemporaryView(session: SparkSession, analyzedPlan: LogicalPlan): Unit = {
val catalog = session.sessionState.catalog
val getRawTempView: String => Option[TemporaryViewRelation] = if (name.database.isEmpty) {
catalog.getRawTempView
} else {
catalog.getRawGlobalTempView
}
val tableDefinition = createTemporaryViewRelation(
name,
session,
replace = true,
getRawTempView,
Some(originalText),
analyzedPlan,
aliasedPlan = analyzedPlan)
session.sessionState.catalog.alterTempViewDefinition(name, tableDefinition)
}
private def alterPermanentView(session: SparkSession, analyzedPlan: LogicalPlan): Unit = {
val viewMeta = session.sessionState.catalog.getTableMetadata(name)
// Detect cyclic view reference on ALTER VIEW.
val viewIdent = viewMeta.identifier
checkCyclicViewReference(analyzedPlan, Seq(viewIdent), viewIdent)
logDebug(s"Try to uncache ${viewIdent.quotedString} before replacing.")
CommandUtils.uncacheTableOrView(session, viewIdent.quotedString)
val newProperties = generateViewProperties(
viewMeta.properties, session, analyzedPlan, analyzedPlan.schema.fieldNames)
val newSchema = CharVarcharUtils.getRawSchema(analyzedPlan.schema)
val updatedViewMeta = viewMeta.copy(
schema = newSchema,
properties = newProperties,
viewOriginalText = Some(originalText),
viewText = Some(originalText))
session.sessionState.catalog.alterTable(updatedViewMeta)
}
}
/**
* A command for users to get views in the given database.
* If a databaseName is not given, the current database will be used.
* The syntax of using this command in SQL is:
* {{{
* SHOW VIEWS [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
* }}}
*/
case class ShowViewsCommand(
databaseName: String,
tableIdentifierPattern: Option[String],
override val output: Seq[Attribute]) extends LeafRunnableCommand {
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
// Show the information of views.
val views = tableIdentifierPattern.map(catalog.listViews(databaseName, _))
.getOrElse(catalog.listViews(databaseName, "*"))
views.map { tableIdent =>
val namespace = tableIdent.database.toArray.quoted
val tableName = tableIdent.table
val isTemp = catalog.isTempView(tableIdent)
Row(namespace, tableName, isTemp)
}
}
}
object ViewHelper extends SQLConfHelper with Logging {
private val configPrefixDenyList = Seq(
SQLConf.MAX_NESTED_VIEW_DEPTH.key,
"spark.sql.optimizer.",
"spark.sql.codegen.",
"spark.sql.execution.",
"spark.sql.shuffle.",
"spark.sql.adaptive.",
// ignore optimization configs used in `RelationConversions`
"spark.sql.hive.convertMetastoreParquet",
"spark.sql.hive.convertMetastoreOrc",
"spark.sql.hive.convertInsertingPartitionedTable",
"spark.sql.hive.convertMetastoreCtas",
SQLConf.ADDITIONAL_REMOTE_REPOSITORIES.key)
private val configAllowList = Seq(
SQLConf.DISABLE_HINTS.key
)
/**
   * Capture a view SQL config if it either:
   * 1. exists in the allow list, or
   * 2. does not exist in the deny list.
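   *
   * For example (illustrative keys): "spark.sql.optimizer.excludedRules" is dropped by
   * the deny list, while a modifiable conf such as "spark.sql.ansi.enabled" is captured.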
*/
private def shouldCaptureConfig(key: String): Boolean = {
configAllowList.exists(prefix => key.equals(prefix)) ||
!configPrefixDenyList.exists(prefix => key.startsWith(prefix))
}
import CatalogTable._
/**
* Generate the view query output column names in `properties`.
*/
private def generateQueryColumnNames(columns: Seq[String]): Map[String, String] = {
val props = new mutable.HashMap[String, String]
if (columns.nonEmpty) {
props.put(VIEW_QUERY_OUTPUT_NUM_COLUMNS, columns.length.toString)
columns.zipWithIndex.foreach { case (colName, index) =>
props.put(s"$VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX$index", colName)
}
}
props.toMap
}
/**
* Remove the view query output column names in `properties`.
*/
private def removeQueryColumnNames(properties: Map[String, String]): Map[String, String] = {
// We can't use `filterKeys` here, as the map returned by `filterKeys` is not serializable,
// while `CatalogTable` should be serializable.
properties.filterNot { case (key, _) =>
key.startsWith(VIEW_QUERY_OUTPUT_PREFIX)
}
}
/**
* Convert the view SQL configs to `properties`.
*/
private def sqlConfigsToProps(conf: SQLConf): Map[String, String] = {
val modifiedConfs = conf.getAllConfs.filter { case (k, _) =>
conf.isModifiable(k) && shouldCaptureConfig(k)
}
val props = new mutable.HashMap[String, String]
for ((key, value) <- modifiedConfs) {
props.put(s"$VIEW_SQL_CONFIG_PREFIX$key", value)
}
props.toMap
}
/**
* Remove the view SQL configs in `properties`.
*/
private def removeSQLConfigs(properties: Map[String, String]): Map[String, String] = {
// We can't use `filterKeys` here, as the map returned by `filterKeys` is not serializable,
// while `CatalogTable` should be serializable.
properties.filterNot { case (key, _) =>
key.startsWith(VIEW_SQL_CONFIG_PREFIX)
}
}
/**
* Convert the temporary object names to `properties`.
*/
private def referredTempNamesToProps(
viewNames: Seq[Seq[String]], functionsNames: Seq[String]): Map[String, String] = {
val viewNamesJson =
JArray(viewNames.map(nameParts => JArray(nameParts.map(JString).toList)).toList)
val functionsNamesJson = JArray(functionsNames.map(JString).toList)
val props = new mutable.HashMap[String, String]
props.put(VIEW_REFERRED_TEMP_VIEW_NAMES, compact(render(viewNamesJson)))
props.put(VIEW_REFERRED_TEMP_FUNCTION_NAMES, compact(render(functionsNamesJson)))
props.toMap
}
/**
* Remove the temporary object names in `properties`.
*/
private def removeReferredTempNames(properties: Map[String, String]): Map[String, String] = {
// We can't use `filterKeys` here, as the map returned by `filterKeys` is not serializable,
// while `CatalogTable` should be serializable.
properties.filterNot { case (key, _) =>
key.startsWith(VIEW_REFERRED_TEMP_VIEW_NAMES) ||
key.startsWith(VIEW_REFERRED_TEMP_FUNCTION_NAMES)
}
}
/**
* Generate the view properties in CatalogTable, including:
* 1. view default database that is used to provide the default database name on view resolution.
   * 2. the output column names of the query that creates the view; these are used to map the
   *    output of the view child to the view output during view resolution.
* 3. the SQL configs when creating the view.
*
* @param properties the `properties` in CatalogTable.
* @param session the spark session.
* @param analyzedPlan the analyzed logical plan that represents the child of a view.
* @return new view properties including view default database and query column names properties.
*/
def generateViewProperties(
properties: Map[String, String],
session: SparkSession,
analyzedPlan: LogicalPlan,
fieldNames: Array[String],
tempViewNames: Seq[Seq[String]] = Seq.empty,
tempFunctionNames: Seq[String] = Seq.empty): Map[String, String] = {
    // For CreateViewCommand, queryOutput may be different from fieldNames.
val queryOutput = analyzedPlan.schema.fieldNames
val conf = session.sessionState.conf
// Generate the query column names, throw an AnalysisException if there exists duplicate column
// names.
SchemaUtils.checkColumnNameDuplication(
fieldNames, "in the view definition", conf.resolver)
// Generate the view default catalog and namespace, as well as captured SQL configs.
val manager = session.sessionState.catalogManager
removeReferredTempNames(removeSQLConfigs(removeQueryColumnNames(properties))) ++
catalogAndNamespaceToProps(manager.currentCatalog.name, manager.currentNamespace) ++
sqlConfigsToProps(conf) ++
generateQueryColumnNames(queryOutput) ++
referredTempNamesToProps(tempViewNames, tempFunctionNames)
}
/**
* Recursively search the logical plan to detect cyclic view references, throw an
* AnalysisException if cycle detected.
*
* A cyclic view reference is a cycle of reference dependencies, for example, if the following
* statements are executed:
* CREATE VIEW testView AS SELECT id FROM tbl
* CREATE VIEW testView2 AS SELECT id FROM testView
* ALTER VIEW testView AS SELECT * FROM testView2
* The view `testView` references `testView2`, and `testView2` also references `testView`,
* therefore a reference cycle (testView -> testView2 -> testView) exists.
*
* @param plan the logical plan we detect cyclic view references from.
* @param path the path between the altered view and current node.
* @param viewIdent the table identifier of the altered view, we compare two views by the
* `desc.identifier`.
*/
def checkCyclicViewReference(
plan: LogicalPlan,
path: Seq[TableIdentifier],
viewIdent: TableIdentifier): Unit = {
plan match {
case v: View =>
val ident = v.desc.identifier
val newPath = path :+ ident
// If the table identifier equals `viewIdent`, the current view node is the same as the
// altered view: we have detected a view reference cycle and should throw an AnalysisException.
if (ident == viewIdent) {
throw QueryCompilationErrors.recursiveViewDetectedError(viewIdent, newPath)
} else {
v.children.foreach { child =>
checkCyclicViewReference(child, newPath, viewIdent)
}
}
case _ =>
plan.children.foreach(child => checkCyclicViewReference(child, path, viewIdent))
}
// Detect cyclic references from subqueries.
plan.expressions.foreach { expr =>
expr match {
case s: SubqueryExpression =>
checkCyclicViewReference(s.plan, path, viewIdent)
case _ => // Do nothing.
}
}
}
def verifyAutoGeneratedAliasesNotExists(
child: LogicalPlan, isTemporary: Boolean, name: TableIdentifier): Unit = {
if (!isTemporary && !conf.allowAutoGeneratedAliasForView) {
child.output.foreach { attr =>
if (attr.metadata.contains("__autoGeneratedAlias")) {
throw QueryCompilationErrors
.notAllowedToCreatePermanentViewWithoutAssigningAliasForExpressionError(name,
attr.name)
}
}
}
}
/**
* Permanent views are not allowed to reference temporary objects, including temporary functions and views.
*/
def verifyTemporaryObjectsNotExists(
catalog: SessionCatalog,
isTemporary: Boolean,
name: TableIdentifier,
child: LogicalPlan): Unit = {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
if (!isTemporary) {
val (tempViews, tempFunctions) = collectTemporaryObjects(catalog, child)
tempViews.foreach { nameParts =>
throw QueryCompilationErrors.notAllowedToCreatePermanentViewByReferencingTempViewError(
name, nameParts.quoted)
}
tempFunctions.foreach { funcName =>
throw QueryCompilationErrors.notAllowedToCreatePermanentViewByReferencingTempFuncError(
name, funcName)
}
}
}
/**
* Collect all temporary views and functions and return the identifiers separately.
*/
private def collectTemporaryObjects(
catalog: SessionCatalog, child: LogicalPlan): (Seq[Seq[String]], Seq[String]) = {
def collectTempViews(child: LogicalPlan): Seq[Seq[String]] = {
child.flatMap {
case view: View if view.isTempView =>
val ident = view.desc.identifier
Seq(ident.database.toSeq :+ ident.table)
case plan => plan.expressions.flatMap(_.flatMap {
case e: SubqueryExpression => collectTempViews(e.plan)
case _ => Seq.empty
})
}.distinct
}
def collectTempFunctions(child: LogicalPlan): Seq[String] = {
child.flatMap {
case plan =>
plan.expressions.flatMap(_.flatMap {
case e: SubqueryExpression => collectTempFunctions(e.plan)
case e: UserDefinedExpression
if catalog.isTemporaryFunction(FunctionIdentifier(e.name)) =>
Seq(e.name)
case _ => Seq.empty
})
}.distinct
}
(collectTempViews(child), collectTempFunctions(child))
}
/**
* Returns a [[TemporaryViewRelation]] that contains information about a temporary view
* to create, given an analyzed plan of the view. If a temp view is to be replaced and it is
* cached, it will be uncached before being replaced.
*
* @param name the name of the temporary view to create/replace.
* @param session the spark session.
* @param replace if true and the existing view is cached, it will be uncached.
* @param getRawTempView the function that returns an optional raw plan of the local or
* global temporary view.
* @param originalText the original SQL text of this view, can be None if this view is created via
* Dataset API or spark.sql.legacy.storeAnalyzedPlanForView is set to true.
* @param analyzedPlan the logical plan that represents the view; this is used to generate the
* logical plan for temporary view and the view schema.
* @param aliasedPlan the aliased logical plan based on the user specified columns. If there are
* no user specified columns, this should be the same as `analyzedPlan`.
*/
def createTemporaryViewRelation(
name: TableIdentifier,
session: SparkSession,
replace: Boolean,
getRawTempView: String => Option[TemporaryViewRelation],
originalText: Option[String],
analyzedPlan: LogicalPlan,
aliasedPlan: LogicalPlan): TemporaryViewRelation = {
val uncache = getRawTempView(name.table).map { r =>
needsToUncache(r, aliasedPlan)
}.getOrElse(false)
if (replace && uncache) {
logDebug(s"Try to uncache ${name.quotedString} before replacing.")
checkCyclicViewReference(analyzedPlan, Seq(name), name)
CommandUtils.uncacheTableOrView(session, name.quotedString)
}
if (!conf.storeAnalyzedPlanForView && originalText.nonEmpty) {
TemporaryViewRelation(
prepareTemporaryView(
name,
session,
analyzedPlan,
aliasedPlan.schema,
originalText.get))
} else {
TemporaryViewRelation(
prepareTemporaryViewStoringAnalyzedPlan(name, aliasedPlan),
Some(aliasedPlan))
}
}
/**
* Checks whether the temp view being replaced needs to be uncached.
*/
private def needsToUncache(
rawTempView: TemporaryViewRelation,
aliasedPlan: LogicalPlan): Boolean = rawTempView.plan match {
// No need to uncache if the to-be-replaced temp view plan and the new plan produce the
// same result.
case Some(p) => !p.sameResult(aliasedPlan)
// If TemporaryViewRelation doesn't store the analyzed view, always uncache.
case None => true
}
/**
* Returns a [[CatalogTable]] that contains information for a temporary view.
* Generates the view-specific properties (e.g. view default database, view query output
* column names), stores them as properties in the CatalogTable, and also creates
* the proper schema for the view.
*/
private def prepareTemporaryView(
viewName: TableIdentifier,
session: SparkSession,
analyzedPlan: LogicalPlan,
viewSchema: StructType,
originalText: String): CatalogTable = {
val catalog = session.sessionState.catalog
val (tempViews, tempFunctions) = collectTemporaryObjects(catalog, analyzedPlan)
// TBLPROPERTIES is not allowed for a temporary view, so we don't use it when
// generating temporary view properties.
val newProperties = generateViewProperties(
Map.empty, session, analyzedPlan, viewSchema.fieldNames, tempViews, tempFunctions)
CatalogTable(
identifier = viewName,
tableType = CatalogTableType.VIEW,
storage = CatalogStorageFormat.empty,
schema = viewSchema,
viewText = Some(originalText),
properties = newProperties)
}
/**
* Returns a [[CatalogTable]] that contains information for the temporary view storing
* an analyzed plan.
*/
private def prepareTemporaryViewStoringAnalyzedPlan(
viewName: TableIdentifier,
analyzedPlan: LogicalPlan): CatalogTable = {
CatalogTable(
identifier = viewName,
tableType = CatalogTableType.VIEW,
storage = CatalogStorageFormat.empty,
schema = analyzedPlan.schema,
properties = Map((VIEW_STORING_ANALYZED_PLAN, "true")))
}
}
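// Editor's note: a minimal, self-contained sketch of the recursive traversal used by
// checkCyclicViewReference above, with a toy Node tree standing in for View/LogicalPlan.
// Node and detectCycle are hypothetical names, not Spark APIs.
object CycleCheckSketch {
  final case class Node(name: String, children: Seq[Node] = Nil)
  def detectCycle(node: Node, path: Seq[String], target: String): Unit = {
    val newPath = path :+ node.name
    if (node.name == target) {
      sys.error(s"Recursive view $target detected (cycle: ${newPath.mkString(" -> ")})")
    }
    node.children.foreach(detectCycle(_, newPath, target))
  }
  // detectCycle(Node("v2", Seq(Node("v1"))), Seq("v1"), "v1")
  //   fails with cycle: v1 -> v2 -> v1
}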
|
chuckchen/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
|
Scala
|
apache-2.0
| 28,510
|
package spark.storage
import java.util.LinkedHashMap
import java.util.concurrent.ArrayBlockingQueue
import spark.{SizeEstimator, Utils}
import java.nio.ByteBuffer
import collection.mutable.ArrayBuffer
/**
* Stores blocks in memory, either as ArrayBuffers of deserialized Java objects or as
* serialized ByteBuffers.
*/
private class MemoryStore(blockManager: BlockManager, maxMemory: Long)
extends BlockStore(blockManager) {
case class Entry(value: Any, size: Long, deserialized: Boolean, var dropPending: Boolean = false)
private val entries = new LinkedHashMap[String, Entry](32, 0.75f, true)
private var currentMemory = 0L
// Object used to ensure that only one thread is putting blocks and if necessary, dropping
// blocks from the memory store.
private val putLock = new Object()
logInfo("MemoryStore started with capacity %s.".format(Utils.memoryBytesToString(maxMemory)))
def freeMemory: Long = maxMemory - currentMemory
override def getSize(blockId: String): Long = {
entries.synchronized {
entries.get(blockId).size
}
}
override def putBytes(blockId: String, _bytes: ByteBuffer, level: StorageLevel) {
// Work on a duplicate - since the original input might be used elsewhere.
val bytes = _bytes.duplicate()
bytes.rewind()
if (level.deserialized) {
val values = blockManager.dataDeserialize(blockId, bytes)
val elements = new ArrayBuffer[Any]
elements ++= values
val sizeEstimate = SizeEstimator.estimate(elements.asInstanceOf[AnyRef])
tryToPut(blockId, elements, sizeEstimate, true)
} else {
tryToPut(blockId, bytes, bytes.limit, false)
}
}
override def putValues(
blockId: String,
values: ArrayBuffer[Any],
level: StorageLevel,
returnValues: Boolean)
: PutResult = {
if (level.deserialized) {
val sizeEstimate = SizeEstimator.estimate(values.asInstanceOf[AnyRef])
tryToPut(blockId, values, sizeEstimate, true)
PutResult(sizeEstimate, Left(values.iterator))
} else {
val bytes = blockManager.dataSerialize(blockId, values.iterator)
tryToPut(blockId, bytes, bytes.limit, false)
PutResult(bytes.limit(), Right(bytes.duplicate()))
}
}
override def getBytes(blockId: String): Option[ByteBuffer] = {
val entry = entries.synchronized {
entries.get(blockId)
}
if (entry == null) {
None
} else if (entry.deserialized) {
Some(blockManager.dataSerialize(blockId, entry.value.asInstanceOf[ArrayBuffer[Any]].iterator))
} else {
Some(entry.value.asInstanceOf[ByteBuffer].duplicate()) // Doesn't actually copy the data
}
}
override def getValues(blockId: String): Option[Iterator[Any]] = {
val entry = entries.synchronized {
entries.get(blockId)
}
if (entry == null) {
None
} else if (entry.deserialized) {
Some(entry.value.asInstanceOf[ArrayBuffer[Any]].iterator)
} else {
val buffer = entry.value.asInstanceOf[ByteBuffer].duplicate() // Doesn't actually copy data
Some(blockManager.dataDeserialize(blockId, buffer))
}
}
override def remove(blockId: String): Boolean = {
entries.synchronized {
val entry = entries.get(blockId)
if (entry != null) {
entries.remove(blockId)
currentMemory -= entry.size
logInfo("Block %s of size %d dropped from memory (free %d)".format(
blockId, entry.size, freeMemory))
true
} else {
false
}
}
}
override def clear() {
entries.synchronized {
entries.clear()
}
logInfo("MemoryStore cleared")
}
/**
* Return the RDD ID that a given block ID is from, or null if it is not an RDD block.
*/
private def getRddId(blockId: String): String = {
if (blockId.startsWith("rdd_")) {
blockId.split('_')(1)
} else {
null
}
}
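  // Editor's note: e.g. getRddId("rdd_5_3") == "5", while a non-RDD id such as
  // "broadcast_2" returns null.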
/**
* Try to put in a set of values, if we can free up enough space. The value should either be
* an ArrayBuffer if deserialized is true or a ByteBuffer otherwise. Its (possibly estimated)
* size must also be passed by the caller.
*
* Locks on the object putLock to ensure that each put request and its associated block
* dropping are done by only one thread at a time. Otherwise, while one thread is dropping
* blocks to free memory for one block, another thread may use up the freed space for
* another block.
*/
private def tryToPut(blockId: String, value: Any, size: Long, deserialized: Boolean): Boolean = {
// TODO: It's possible to optimize the locking by locking entries only when selecting blocks
// to be dropped. Once the to-be-dropped blocks have been selected and the lock on entries has
// been released, it must be ensured that those to-be-dropped blocks are not double counted for
// freeing up more space for another block that needs to be put. Only then can the actual
// dropping of blocks (and writing to disk if necessary) proceed in parallel.
putLock.synchronized {
if (ensureFreeSpace(blockId, size)) {
val entry = new Entry(value, size, deserialized)
entries.synchronized { entries.put(blockId, entry) }
currentMemory += size
if (deserialized) {
logInfo("Block %s stored as values to memory (estimated size %s, free %s)".format(
blockId, Utils.memoryBytesToString(size), Utils.memoryBytesToString(freeMemory)))
} else {
logInfo("Block %s stored as bytes to memory (size %s, free %s)".format(
blockId, Utils.memoryBytesToString(size), Utils.memoryBytesToString(freeMemory)))
}
true
} else {
// Tell the block manager that we couldn't put it in memory so that it can drop it to
// disk if the block allows disk storage.
val data = if (deserialized) {
Left(value.asInstanceOf[ArrayBuffer[Any]])
} else {
Right(value.asInstanceOf[ByteBuffer].duplicate())
}
blockManager.dropFromMemory(blockId, data)
false
}
}
}
/**
* Tries to free up a given amount of space to store a particular block, but can fail and return
* false if either the block is bigger than our memory or it would require replacing another
* block from the same RDD (which leads to a wasteful cyclic replacement pattern for RDDs
* that don't fit into memory, and which we want to avoid).
*
* Assumes that a lock is held by the caller to ensure only one thread is dropping blocks.
* Otherwise, the freed space may fill up before the caller puts in their new value.
*/
private def ensureFreeSpace(blockIdToAdd: String, space: Long): Boolean = {
logInfo("ensureFreeSpace(%d) called with curMem=%d, maxMem=%d".format(
space, currentMemory, maxMemory))
if (space > maxMemory) {
logInfo("Will not store " + blockIdToAdd + " as it is larger than our memory limit")
return false
}
if (maxMemory - currentMemory < space) {
val rddToAdd = getRddId(blockIdToAdd)
val selectedBlocks = new ArrayBuffer[String]()
var selectedMemory = 0L
// This is synchronized to ensure that the set of entries is not changed
// (because of getValue or getBytes) while traversing the iterator, as that
// can lead to exceptions.
entries.synchronized {
val iterator = entries.entrySet().iterator()
while (maxMemory - (currentMemory - selectedMemory) < space && iterator.hasNext) {
val pair = iterator.next()
val blockId = pair.getKey
if (rddToAdd != null && rddToAdd == getRddId(blockId)) {
logInfo("Will not store " + blockIdToAdd + " as it would require dropping another " +
"block from the same RDD")
return false
}
selectedBlocks += blockId
selectedMemory += pair.getValue.size
}
}
if (maxMemory - (currentMemory - selectedMemory) >= space) {
logInfo(selectedBlocks.size + " blocks selected for dropping")
for (blockId <- selectedBlocks) {
val entry = entries.synchronized { entries.get(blockId) }
// This should never be null as only one thread should be dropping
// blocks and removing entries. However, the check is still here for
// future safety.
if (entry != null) {
val data = if (entry.deserialized) {
Left(entry.value.asInstanceOf[ArrayBuffer[Any]])
} else {
Right(entry.value.asInstanceOf[ByteBuffer].duplicate())
}
blockManager.dropFromMemory(blockId, data)
}
}
return true
} else {
return false
}
}
return true
}
override def contains(blockId: String): Boolean = {
entries.synchronized { entries.containsKey(blockId) }
}
}
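// Editor's note: a self-contained sketch of the eviction strategy in ensureFreeSpace above.
// A LinkedHashMap constructed with accessOrder = true iterates entries from least- to
// most-recently used, so walking its iterator picks LRU victims first. All names here
// (LruEvictionSketch, put) are illustrative, not Spark APIs.
object LruEvictionSketch {
  private val entries = new java.util.LinkedHashMap[String, Long](32, 0.75f, true)
  private var used = 0L
  private val max = 100L
  def put(id: String, size: Long): Boolean = {
    if (size > max) return false
    // Select LRU victims until the new entry fits; remove them only after iterating.
    val victims = scala.collection.mutable.ArrayBuffer.empty[String]
    var freed = 0L
    val it = entries.entrySet().iterator()
    while (used - freed + size > max && it.hasNext) {
      val e = it.next()
      victims += e.getKey
      freed += e.getValue
    }
    if (used - freed + size > max) return false
    victims.foreach { v => used -= entries.remove(v) }
    entries.put(id, size)
    used += size
    true
  }
}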
|
baeeq/incubator-spark
|
core/src/main/scala/spark/storage/MemoryStore.scala
|
Scala
|
bsd-3-clause
| 8,885
|
package parse
import models.Coord
import play.api.libs.functional.syntax._
import play.api.libs.json._
/** GoogleJsonRead
* @author Stephen Johnson
*/
object GoogleJsonRead {
implicit val coordReads: Reads[Coord] =
((JsPath \\ "lat").read[Double] and (JsPath \\ "lng").read[Double]) (Coord.apply _)
}
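// Editor's note: hypothetical usage sketch of coordReads above, assuming
// models.Coord is case class Coord(lat: Double, lng: Double).
object GoogleJsonReadSketch {
  import GoogleJsonRead._
  def parseExample: JsResult[Coord] =
    Json.parse("""{"lat": 51.5074, "lng": -0.1278}""").validate[Coord]
  // => JsSuccess(Coord(51.5074, -0.1278))
}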
|
peregin55/travel
|
app/parse/GoogleJsonRead.scala
|
Scala
|
gpl-3.0
| 310
|
package de.tu_berlin.impro3.spark.spatio_temporal_dynamics.metrics
import de.tu_berlin.impro3.spark.spatio_temporal_dynamics._
import geo.Location
import model.HashTag
import org.apache.spark.rdd.RDD
/** The spread metric. */
object Spread {
def apply(set: OccurSet): Double = {
val occurs = set.values.toStream
val total = occurs.map { _.size }.sum
val locations = occurs.flatMap { _.map { _.location } }
val midpoint = Location.midpoint(locations)
val distance = locations.map { _ <-> midpoint }.sum
distance / total
}
def apply(cluster: Cluster): Metric1[Double] =
cluster.measure(apply)
def byText(tags: RDD[HashTag]): Metric1[Double] =
measureByText(tags) { apply }
}
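// Editor's note: the spread metric above is the mean distance of tag occurrences from their
// geographic midpoint. A self-contained toy version over 2D Euclidean points (the real code
// uses the geodesic Location.midpoint and the <-> distance operator):
object SpreadSketch {
  def spread(points: Seq[(Double, Double)]): Double = {
    val n = points.size
    val mid = (points.map(_._1).sum / n, points.map(_._2).sum / n)
    points.map { case (x, y) => math.hypot(x - mid._1, y - mid._2) }.sum / n
  }
  // spread(Seq((0.0, 0.0), (2.0, 0.0))) == 1.0: both points are 1 unit from midpoint (1, 0)
}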
|
joroKr21/spatio-temporal-dynamics
|
impro3-ws14-spark/src/main/scala/de/tu_berlin/impro3/spark/spatio_temporal_dynamics/metrics/Spread.scala
|
Scala
|
apache-2.0
| 730
|
package controllers.s_self_employment
import play.api.Play._
import play.api.mvc._
import models.view.CachedClaim
import models.domain._
import models.view.Navigable
import models.view.ClaimHandling.ClaimResult
import play.api.i18n._
object SelfEmployment extends Controller with CachedClaim with Navigable with I18nSupport {
override val messagesApi: MessagesApi = current.injector.instanceOf[MMessages]
def completed = claimingWithCheck {implicit claim => implicit request => implicit request2lang =>
redirect
}
def completedSubmit = claimingWithCheck { implicit claim => implicit request => implicit request2lang =>
redirect
}
def presentConditionally(c: => ClaimResult)(implicit claim: Claim, request: Request[AnyContent]): ClaimResult = {
val beenInPreview = claim.checkYAnswers.previouslySavedClaim.isDefined
val emp = claim.questionGroup[YourIncomes].getOrElse(YourIncomes())
// Lazy because they are evaluated on use, and only if we've been in preview.
lazy val previousEmp = claim.checkYAnswers.previouslySavedClaim.get.questionGroup[YourIncomes]
lazy val previousSEValue = previousEmp.get.beenSelfEmployedSince1WeekBeforeClaim
val SEValue = emp.beenSelfEmployedSince1WeekBeforeClaim
// This part of the condition was removed following review of the user story about allowing
// your-income/employment/self-employment to be changed on Check Your Answers.
if (models.domain.SelfEmployment.visible) c // && (!beenInPreview || beenInPreview && SEValue == yes && previousSEValue == no)) c
else redirect
}
private def redirect()(implicit claim: Claim, lang: Lang, messages: Messages, request: Request[AnyContent]): ClaimResult =
claim -> Redirect(controllers.s_employment.routes.GEmploymentAdditionalInfo.present())
}
|
Department-for-Work-and-Pensions/ClaimCapture
|
c3/app/controllers/s_self_employment/SelfEmployment.scala
|
Scala
|
mit
| 1,839
|
package com.socrata.http.server.routing
import org.scalatest.{MustMatchers, FunSuite}
import scala.collection.LinearSeq
class PathTreeBuilderTest extends FunSuite with MustMatchers {
// It would be nice to just make sure that the builders produce the correct pathtrees.
// Unfortunately, they introduce an anonymous function to do the typecasting from the
// List[Any] that the pathtree produces to the types that the functions expect, so
// we can't just use == to compare automatically and manually built trees. Thus,
// check the behavior instead.
test("No patterns -- deprecated flexmatch marker") {
val pt = PathTreeBuilder[String]("/a/b") { "fix" } merge PathTreeBuilder[String]("/a/b/*") { xs:LinearSeq[String] => ("flex" +: xs).mkString(",") }
pt(List("a", "b")) must equal (Some("fix"))
pt(List("a", "b", "c")) must equal (Some("flex,c"))
pt(List("q", "w")) must equal (None)
}
test("No patterns") {
val pt = PathTreeBuilder[String]("/a/b") { "fix" } merge PathTreeBuilder[String]("/a/b/+") { xs:LinearSeq[String] => ("flex" +: xs).mkString(",") }
pt(List("a", "b")) must equal (Some("fix"))
pt(List("a", "b", "c")) must equal (Some("flex,c"))
pt(List("q", "w")) must equal (None)
}
test("Patterns") {
val pt = PathTreeBuilder[String]("/a/b") { "fix" } merge PathTreeBuilder[String]("/a/{Int}") { i:Int => "pat " + i }
pt(List("a", "b")) must equal (Some("fix"))
pt(List("a", "42")) must equal (Some("pat 42"))
pt(List("a", "c")) must equal (None)
}
test("? alias for String") {
val pt = PathTreeBuilder[String]("/a/?") (identity[String] _)
pt(List("a", "b")) must equal (Some("b"))
pt(List("a", "42")) must equal (Some("42"))
pt(List("a", "")) must equal (Some(""))
pt(List("a")) must equal (None)
}
test("empty") {
val pt = PathTreeBuilder[String]("/") { "x" }
pt(List("")) must equal (Some("x"))
}
test("initial flexmatch -- deprecated marker") {
val pt = PathTreeBuilder[Seq[String]]("/*")(identity[Seq[String]] _)
pt(List("q", "w")) must equal (Some(List("q","w")))
pt(List("")) must equal (Some(List("")))
pt(Nil) must equal (None)
}
test("initial flexmatch") {
val pt = PathTreeBuilder[Seq[String]]("/+")(identity[Seq[String]] _)
pt(List("q", "w")) must equal (Some(List("q","w")))
pt(List("")) must equal (Some(List("")))
pt(Nil) must equal (None)
}
test("empty component at the end") {
val pt = PathTreeBuilder[String]("/a/") { "a" }
pt(List("a")) must equal (None)
pt(List("a","")) must equal (Some("a"))
}
test("empty component in the middle") {
val pt = PathTreeBuilder[String]("/a//b") { "ab" }
pt(List("a","b")) must equal (None)
pt(List("a","","b")) must equal (Some("ab"))
}
test("optionally typed component -- default matcher") {
val pt = PathTreeBuilder[OptionallyTypedPathComponent[String]]("/{{String}}")(identity[OptionallyTypedPathComponent[String]] _)
pt(List("a")) must equal (Some(OptionallyTypedPathComponent("a", None)))
pt(List("a.txt")) must equal (Some(OptionallyTypedPathComponent("a", Some("txt"))))
pt(List("a.")) must equal (Some(OptionallyTypedPathComponent("a", Some(""))))
}
test("explicitly optionally typed component -- default matcher") {
val pt = PathTreeBuilder[OptionallyTypedPathComponent[String]]("/{{String:}}")(identity[OptionallyTypedPathComponent[String]] _)
pt(List("a")) must equal (Some(OptionallyTypedPathComponent("a", None)))
pt(List("a.txt")) must equal (Some(OptionallyTypedPathComponent("a", Some("txt"))))
pt(List("a.")) must equal (Some(OptionallyTypedPathComponent("a", Some(""))))
}
test("optionally typed component -- named function") {
def r(s: String) = "[a-z]+".r.pattern.matcher(s).matches
val pt = PathTreeBuilder[OptionallyTypedPathComponent[String]]("/{{String:r}}")(identity[OptionallyTypedPathComponent[String]] _)
pt(List("a")) must equal (Some(OptionallyTypedPathComponent("a", None)))
pt(List("a.txt")) must equal (Some(OptionallyTypedPathComponent("a", Some("txt"))))
pt(List("a.")) must equal (None)
pt(List("a.7")) must equal (None)
}
test("typed component -- default matcher") {
val pt = PathTreeBuilder[TypedPathComponent[String]]("/{{String!}}")(identity[TypedPathComponent[String]] _)
pt(List("a")) must equal (None)
pt(List("a.txt")) must equal (Some(TypedPathComponent("a", "txt")))
pt(List("a.")) must equal (Some(TypedPathComponent("a", "")))
}
test("typed component -- named function") {
def r(s: String) = "[a-z]+".r.pattern.matcher(s).matches
val pt = PathTreeBuilder[TypedPathComponent[String]]("/{{String!r}}")(identity[TypedPathComponent[String]] _)
pt(List("a")) must equal (None)
pt(List("a.txt")) must equal (Some(TypedPathComponent("a", "txt")))
pt(List("a.")) must equal (None)
pt(List("a.7")) must equal (None)
}
}
|
socrata-platform/socrata-http
|
socrata-http-server/src/test/scala/com/socrata/http/server/routing/PathTreeBuilderTest.scala
|
Scala
|
apache-2.0
| 4,933
|
package coder.simon.slots.common
object Utils {
type Substitute = (Symbol, Symbol) => Option[Symbol]
def toSymbolArray(reelsConfig: Array[Array[Int]]) = reelsConfig.map(l => l.map(Symbol))
def toSymbolPayTable(rawPayTable: Map[Int, Map[Int, Int]]) = rawPayTable.map { case (s, m) => (Symbol(s), m) }.toMap
def toLinesDef1(rawLines: Map[String, List[Int]]) = rawLines.mapValues(line => line.zipWithIndex.map { case (y, x) => Cell(x, y) })
def toLinesDef2(rawLines: Map[String, List[Int]]) = rawLines.mapValues(line => line.map { x => Cell(x, 0) })
def substSimple(first: Symbol, second: Symbol): Option[Symbol] = if (first == second) Some(second) else None
def substWild(subst: Substitute)(first: Symbol, second: Symbol) = substSimple(first, second).orElse(subst(first, second)).orElse(subst(second, first))
def parseLine(subst: Substitute)(symbols: List[Symbol]) = {
def loop(ss: List[Symbol], rep: Symbol, count: Int): (Symbol, Int) = ss match {
case head :: tail => subst(rep, head).fold((rep, count))(r => loop(tail, r, count + 1))
case Nil => (rep, count)
}
loop(symbols.tail, symbols.head, 1)
}
def parseScatter(isScatter: Symbol => Boolean)(reelArray: ReelArray) =
reelArray.foldLeft(Map.empty[Symbol, Int]) {
(map, reel) => reel.foldLeft(map)((acc, s) => if (isScatter(s)) acc.updated(s, acc.getOrElse(s, 0) + 1) else acc)
}
def parseFloat(symbol: Symbol)(symbols: List[Symbol]) = (symbol, symbols.count(_ == symbol))
def getMul(payTable: Map[Symbol, Map[Int, Int]])(symbol: Symbol, count: Int) = payTable.get(symbol).flatMap(m2 => m2.get(count))
}
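// Editor's note: a hypothetical walk-through of parseLine above, assuming the package's
// Symbol(Int) domain type and treating symbol 0 as a wild that substitutes for anything:
object UtilsSketch {
  import Utils._
  val wild = Symbol(0)
  val wildSubst: Substitute = (a, b) =>
    if (a == wild) Some(b) else None // substWild also tries the swapped argument order
  // parseLine(substWild(wildSubst))(List(wild, Symbol(1), Symbol(1), Symbol(2)))
  //   == (Symbol(1), 3): the wild matches 1, 1 matches 1, and 2 breaks the run.
}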
|
erlangxk/fpscala
|
src/main/scala/coder/simon/slots/common/Utils.scala
|
Scala
|
mit
| 1,639
|
package sampler.empirical
import org.scalatest.FreeSpec
class EmpiricalTableTest extends FreeSpec {
"Observation count" in pending
"Probability table" in pending
}
|
tearne/Sampler
|
sampler-core/src/test/scala/sampler/empirical/EmpiricalTableTest.scala
|
Scala
|
apache-2.0
| 172
|
package com.twitter.finagle.http.codec
/**
* ChannelBufferUsageTracker tracks the channel buffer used by outstanding requests. An exception
* will be thrown if the total size exceeds a limit.
* ChannelBufferManager uses ChannelBufferUsageTracker.
*/
import com.twitter.finagle.stats.{NullStatsReceiver, StatsReceiver}
import org.jboss.netty.channel._
import org.jboss.netty.buffer.ChannelBuffer
import com.twitter.finagle.ChannelBufferUsageException
import com.twitter.util.StorageUnit
import com.twitter.conversions.storage._
class ChannelBufferUsageTracker(
limit: StorageUnit,
statsReceiver: StatsReceiver = NullStatsReceiver
) {
private[this] object state {
var currentUsage = 0L
var maxUsage = 0L
var usageLimit = limit
}
// It is probably not necessary to use synchronized methods here. We
// can change this if there is a performance problem.
private[this] val currentUsageStat =
statsReceiver.addGauge("channel_buffer_current_usage") { currentUsage.inBytes }
private[this] val maxUsageStat =
statsReceiver.addGauge("channel_buffer_max_usage") { maxUsage.inBytes }
def currentUsage: StorageUnit = synchronized { state.currentUsage.bytes }
def maxUsage: StorageUnit = synchronized { state.maxUsage.bytes }
def usageLimit(): StorageUnit = synchronized { state.usageLimit }
def setUsageLimit(limit: StorageUnit) = synchronized { state.usageLimit = limit }
def increase(size: Long) = synchronized {
if (state.currentUsage + size > state.usageLimit.inBytes) {
throw new ChannelBufferUsageException(
"Channel buffer usage exceeded limit ("
+ currentUsage + ", " + size + " vs. " + usageLimit + ")")
} else {
state.currentUsage += size
if (currentUsage > maxUsage)
state.maxUsage = state.currentUsage
}
}
def decrease(size: Long) = synchronized {
if (state.currentUsage < size) {
throw new ChannelBufferUsageException(
"invalid ChannelBufferUsageTracker decrease operation ("
+ size + " vs. " + currentUsage + ")")
} else {
state.currentUsage -= size
}
}
}
class ChannelBufferManager(usageTracker: ChannelBufferUsageTracker)
extends SimpleChannelHandler
{
private[this] var bufferUsage = 0L
override def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent) {
e.getMessage match {
case buffer: ChannelBuffer => increaseBufferUsage(buffer.capacity())
case _ => ()
}
super.messageReceived(ctx, e)
}
override def writeComplete(ctx: ChannelHandlerContext, e: WriteCompletionEvent) {
clearBufferUsage()
super.writeComplete(ctx, e)
}
override def channelClosed(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
clearBufferUsage()
super.channelClosed(ctx, e)
}
private[this] def increaseBufferUsage(size: Long) = {
// Don't change the order of the following statements, as usageTracker may throw an exception.
usageTracker.increase(size)
bufferUsage += size
}
private[this] def clearBufferUsage() = {
// Don't change the order of the following statements, as usageTracker may throw an exception.
usageTracker.decrease(bufferUsage)
bufferUsage = 0
}
}
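// Editor's note: hypothetical usage of ChannelBufferUsageTracker above; the .megabyte
// syntax comes from the already-imported com.twitter.conversions.storage._ :
object UsageTrackerSketch {
  val tracker = new ChannelBufferUsageTracker(limit = 1.megabyte)
  // tracker.increase(512 * 1024) // ok: usage is now 512 KiB
  // tracker.increase(600 * 1024) // throws ChannelBufferUsageException (would exceed 1 MiB)
}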
|
travisbrown/finagle
|
finagle-http/src/main/scala/com/twitter/finagle/http/codec/ChannelBufferManager.scala
|
Scala
|
apache-2.0
| 3,234
|
package lila.tournament
import org.specs2.mutable.Specification
object ArenaTournamentColorHistoryTest {
def apply(s: String): ArenaTournamentColorHistory = {
s.foldLeft(ArenaTournamentColorHistory(None)) { (acc, c) =>
c match {
case 'W' => acc.incColor(1)
case 'B' => acc.incColor(-1)
}
}
}
def toTuple2(history: ArenaTournamentColorHistory): (Int, Int) = (history.strike, history.balance)
def unpack(s: String): (Int, Int) = toTuple2(apply(s))
def couldPlay(s1: String, s2: String, maxStreak: Int): Boolean = apply(s1).couldPlay(apply(s2), maxStreak)
def sameColors(s1: String, s2: String): Boolean = apply(s1).sameColors(apply(s2))
def firstGetsWhite(s1: String, s2: String): Boolean = apply(s1).firstGetsWhite(apply(s2))
}
class ArenaTournamentColorHistoryTest extends Specification {
import ArenaTournamentColorHistoryTest.{ apply, couldPlay, firstGetsWhite, sameColors, toTuple2, unpack }
"arena tournament color history" should {
"hand tests" in {
unpack("WWW") must be equalTo ((3, 3))
unpack("WWWB") must be equalTo ((-1, 2))
unpack("BBB") must be equalTo ((-3, -3))
unpack("BBBW") must be equalTo ((1, -2))
unpack("WWWBBB") must be equalTo ((-3, 0))
}
"couldPlay" in {
couldPlay("WWW", "WWW", 3) must beFalse
couldPlay("BBB", "BBB", 3) must beFalse
couldPlay("BB", "BB", 3) must beTrue
}
"sameColors" in {
sameColors("WWW", "W") must beTrue
sameColors("BBB", "B") must beTrue
}
"firstGetsWhite" in {
firstGetsWhite("WWW", "WW") must beFalse
firstGetsWhite("WW", "WWW") must beTrue
firstGetsWhite("BB", "B") must beTrue
firstGetsWhite("B", "BB") must beFalse
firstGetsWhite("WW", "BWW") must beFalse
firstGetsWhite("BB", "WBB") must beTrue
}
"serialization" in {
toTuple2(ArenaTournamentColorHistory(Some(-1))) must be equalTo ((0x7fff, 0x7fff))
toTuple2(ArenaTournamentColorHistory(Some(0))) must be equalTo ((-0x8000, -0x8000))
}
"min/(max)Value incColor" in {
val minh = ArenaTournamentColorHistory.minValue
toTuple2(minh.incColor(-1)) must be equalTo toTuple2(minh)
val maxh = ArenaTournamentColorHistory.maxValue
toTuple2(maxh.incColor(1)) must be equalTo toTuple2(maxh)
}
"equals" in {
apply("") must be equalTo apply("")
apply("WBW") must be equalTo apply("W")
}
}
}
|
antma/scala-tools
|
src/test/scala/ArenaTournamentColorHistoryTest.scala
|
Scala
|
mit
| 2,496
|
/*
* Copyright 2010 LinkedIn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.producer
private[kafka] class DefaultPartitioner[T] extends Partitioner[T] {
private val random = new java.util.Random
def partition(key: T, numPartitions: Int): Int = {
if(key == null)
random.nextInt(numPartitions)
    else
      // Guard against negative hash codes: `%` can return a negative remainder.
      (key.hashCode % numPartitions + numPartitions) % numPartitions
}
}
|
tcrayford/hafka
|
kafka/core/src/main/scala/kafka/producer/DefaultPartitioner.scala
|
Scala
|
bsd-3-clause
| 889
|
package org.jetbrains.plugins.scala.lang.psi.impl.search
import com.intellij.openapi.util.Pair
import com.intellij.psi._
import com.intellij.psi.impl.search.JavaOverridingMethodsSearcher
import com.intellij.psi.impl.source.PsiMethodImpl
import com.intellij.psi.search.searches.{AllOverridingMethodsSearch, OverridingMethodsSearch}
import com.intellij.psi.util.PsiUtil
import com.intellij.util.{Processor, QueryExecutor}
import org.jetbrains.plugins.scala.extensions.{PsiElementExt, PsiMemberExt, PsiTypeExt, inReadAction}
import org.jetbrains.plugins.scala.finder.ScalaSourceFilterScope
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.impl.search.JavaRawOverridingSearcher._
import org.jetbrains.plugins.scala.lang.psi.light.{PsiMethodWrapper, ScFunctionWrapper}
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.project.ProjectContext
import org.jetbrains.plugins.scala.util.ScEquivalenceUtil
/**
* Nikolay.Tropin
* 24-May-17
*/
/*
 * Raw types from Java are viewed as existential types by scalac, but the Java overriding-method
 * search doesn't know about that.
 */
class JavaRawOverridingSearcher extends QueryExecutor[PsiMethod, OverridingMethodsSearch.SearchParameters] {
override def execute(qParams: OverridingMethodsSearch.SearchParameters, consumer: Processor[PsiMethod]): Boolean = {
val method = qParams.getMethod
method match {
case m: PsiMethodImpl if hasRawTypeParam(m) =>
val cClass = inReadAction(m.getContainingClass)
if (cClass == null) return true
val wrapper = rawMethodWrapper(m, cClass)
val scalaScope = ScalaSourceFilterScope(wrapper.getProject, qParams.getScope)
val newParams = new OverridingMethodsSearch.SearchParameters(wrapper, scalaScope, qParams.isCheckDeep)
val newProcessor = new Processor[PsiMethod] {
override def process(t: PsiMethod): Boolean = {
if (isSuperMethodForScala(m, t)) consumer.process(t)
else true
}
}
new JavaOverridingMethodsSearcher().execute(newParams, newProcessor)
case _ =>
true
}
}
}
class JavaRawAllOverridingSearcher extends QueryExecutor[Pair[PsiMethod, PsiMethod], AllOverridingMethodsSearch.SearchParameters] {
override def execute(qParams: AllOverridingMethodsSearch.SearchParameters,
consumer: Processor[Pair[PsiMethod, PsiMethod]]): Boolean = {
val clazz = qParams.getPsiClass
val potentials = inReadAction {
clazz.getMethods.collect {
case m: PsiMethodImpl if hasRawTypeParam(m) && PsiUtil.canBeOverriden(m) => m
}
}
for (superMethod <- potentials) {
inReadAction {
val wrapper = rawMethodWrapper(superMethod, clazz)
val scalaScope = ScalaSourceFilterScope(wrapper.getProject, qParams.getScope)
val params = new OverridingMethodsSearch.SearchParameters(wrapper, scalaScope, /*checkDeep*/ true)
val processor = new Processor[PsiMethod] {
override def process(t: PsiMethod): Boolean = {
if (isSuperMethodForScala(superMethod, t))
consumer.process(new Pair(superMethod, t))
else true
}
}
val continue = new JavaOverridingMethodsSearcher().execute(params, processor)
if (!continue) return false
}
}
true
}
}
private[search] object JavaRawOverridingSearcher {
def hasRawTypeParam(method: PsiMethodImpl): Boolean = inReadAction {
val parameters = method.getParameterList.getParameters
parameters.map(_.getType).exists(isRaw)
}
def isRaw(t: PsiType): Boolean = t match {
case ct: PsiClassType => ct.isRaw
case _ => false
}
def rawMethodWrapper(m: PsiMethod, cClass: PsiClass): PsiMethod = {
val methodCopy = inReadAction(m.copy.asInstanceOf[PsiMethod])
new PsiMethodWrapper(m.getManager, methodCopy, cClass) {
override protected def returnType: ScType = {
Option(m.getReturnType).map(_.toScType()).orNull
}
override protected def parameterListText: String = {
val params = m.getParameterList.getParameters.map(withExistentials)
params.mkString("(", ", ", ")")
}
}
}
private def withExistentials(p: PsiParameter): String = {
val paramType: PsiType = p.getType
if (!isRaw(paramType)) return p.getText
implicit val pc: ProjectContext = p.projectContext
val asViewedFromScala = paramType.toScType().toPsiType
val typeText = asViewedFromScala.getCanonicalText
s"$typeText ${p.getName}"
}
def isSuperMethodForScala(superMethod: PsiMethod, subMethod: PsiMethod): Boolean = {
val scFun = subMethod match {
case wr: ScFunctionWrapper => wr.delegate
case fun: ScFunction => fun
case _ => return false
}
inReadAction {
val superMethodClasses = scFun.superMethods.map(_.containingClass)
superMethodClasses.exists(ScEquivalenceUtil.areClassesEquivalent(_, superMethod.containingClass))
}
}
}
|
loskutov/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/impl/search/JavaRawOverridingSearcher.scala
|
Scala
|
apache-2.0
| 5,065
|
package org.jetbrains.plugins.scala
package annotator
package gutter
import com.intellij.codeInsight.navigation.actions.GotoDeclarationHandler
import com.intellij.openapi.actionSystem.DataContext
import com.intellij.openapi.editor.Editor
import com.intellij.psi.{PsiElement, PsiFile, PsiMethod, ResolveResult}
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScAssignStmt, ScReferenceExpression, ScSelfInvocation}
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScTypeDefinition}
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult
import org.jetbrains.plugins.scala.lang.resolve.processor.DynamicResolveProcessor.{isDynamicReference, resolveDynamic}
/**
* User: Alexander Podkhalyuzin
* Date: 22.11.2008
*/
class ScalaGoToDeclarationHandler extends GotoDeclarationHandler {
def getActionText(context: DataContext): String = null
def getGotoDeclarationTargets(_sourceElement: PsiElement, offset: Int, editor: Editor): Array[PsiElement] = {
if (_sourceElement == null) return null
val containingFile: PsiFile = _sourceElement.getContainingFile
if (containingFile == null) return null
val sourceElement = containingFile.findElementAt(offset)
if (sourceElement == null) return null
if (!sourceElement.getLanguage.isKindOf(ScalaLanguage.INSTANCE)) return null
if (sourceElement.getNode.getElementType == ScalaTokenTypes.tASSIGN) {
return sourceElement.getParent match {
case assign: ScAssignStmt =>
val elem = assign.assignNavigationElement
Option(elem).toArray
case _ => null
}
}
if (sourceElement.getNode.getElementType == ScalaTokenTypes.kTHIS) {
sourceElement.getParent match {
case self: ScSelfInvocation =>
self.bind match {
case Some(elem) => return Array(elem)
case None => return null
}
case _ => return null
}
}
/**
 * Extra targets:
 *
 *  - actualElement: a type alias used to access a constructor.
 *    See also [[org.jetbrains.plugins.scala.findUsages.TypeAliasUsagesSearcher]]
 *  - innerResolveResult#element: an `apply` method
 */
def handleScalaResolveResult(scalaResolveResult: ScalaResolveResult): Seq[PsiElement] = {
val all = Seq(scalaResolveResult.getActualElement, scalaResolveResult.element) ++ scalaResolveResult.innerResolveResult.map(_.getElement)
scalaResolveResult.element match {
case f: ScFunction if f.isSynthetic =>
val actualElement =
f.syntheticCaseClass.getOrElse(scalaResolveResult.getActualElement)
Seq(actualElement).flatMap(goToTargets)
case c: PsiMethod if c.isConstructor =>
val clazz = c.containingClass
if (clazz == scalaResolveResult.getActualElement) Seq(scalaResolveResult.element).flatMap(goToTargets)
else all.distinct flatMap goToTargets
case _ =>
all.distinct flatMap goToTargets
}
}
def handleDynamicResolveResult(dynamicResolveResult: Seq[ResolveResult]): Seq[PsiElement] = {
dynamicResolveResult.distinct.map(_.getElement).flatMap(goToTargets)
}
if (sourceElement.getNode.getElementType == ScalaTokenTypes.tIDENTIFIER) {
val file = sourceElement.getContainingFile
val ref = file.findReferenceAt(sourceElement.getTextRange.getStartOffset)
if (ref == null) return null
val targets = ref match {
case expression: ScReferenceExpression if isDynamicReference(expression) =>
handleDynamicResolveResult(resolveDynamic(expression))
case resRef: ScReferenceElement =>
resRef.bind() match {
case Some(x) => handleScalaResolveResult(x)
case None => return null
}
case r =>
Set(r.resolve()) flatMap goToTargets
}
return targets.toArray
}
null
}
private def goToTargets(element: PsiElement): Seq[PsiElement] = {
element match {
case null => Seq.empty
case fun: ScFunction =>
Seq(fun.getSyntheticNavigationElement.getOrElse(element))
case td: ScTypeDefinition if td.isSynthetic =>
td.syntheticContainingClass match {
case Some(containingClass) => Seq(containingClass)
case _ => Seq(element)
}
case o: ScObject if o.isSyntheticObject =>
Seq(ScalaPsiUtil.getCompanionModule(o).getOrElse(element))
case param: ScParameter =>
ScalaPsiUtil.parameterForSyntheticParameter(param).map(Seq[PsiElement](_)).getOrElse(Seq[PsiElement](element))
case _ => Seq(element)
}
}
}
|
triplequote/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/annotator/gutter/ScalaGoToDeclarationHandler.scala
|
Scala
|
apache-2.0
| 5,051
|
package ca.aretex.labs.data
import java.io.InputStream
import ca.aretex.labs.data.jsonmodel.Person
import org.junit.Test
/**
* Created by Choungmo Fofack on 5/4/17.
*/
class LazyJsonArrayReaderEngineTest {
val expectedSmallSize = 7
val expectedLargeSize = 10000
@Test
def testReadStreamSmallFile() = {
val relativeResourceFilepath = "/small_jsonarray_data.json"
case class Color(color: String, value: String)
val inputStream : InputStream = this.getClass.getResourceAsStream(relativeResourceFilepath)
val lazyJsonEngine = new LazyJsonArrayReaderEngine[Color]()
val colors = lazyJsonEngine.readStream(inputStream)
assert(expectedSmallSize == colors.size)
}
@Test
def testReadStreamLargeFile() = {
val relativeResourceFilepath = "/large_jsonarray_data.json"
val inputStream : InputStream = this.getClass.getResourceAsStream(relativeResourceFilepath)
val lazyJsonEngine = new LazyJsonArrayReaderEngine[Person]()
val persons = lazyJsonEngine.readStream(inputStream)
assert(expectedLargeSize == persons.size)
}
}
|
nicaiseeric/lazy-json
|
src/test/scala/ca/aretex/labs/data/LazyJsonArrayReaderEngineTest.scala
|
Scala
|
apache-2.0
| 1,087
|
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.chill
import scala.collection.mutable.{Map => MMap}
class EnumerationSerializer extends KSerializer[Enumeration#Value] {
private val enumMethod = "scala$Enumeration$$outerEnum"
private val outerMethod = classOf[Enumeration#Value].getMethod(enumMethod)
// Cache the enum lookup:
private val enumMap = MMap[Enumeration#Value, Enumeration]()
private def enumOf(v: Enumeration#Value): Enumeration =
enumMap.synchronized {
// TODO: hacky, but not clear how to fix:
enumMap.getOrElseUpdate(
v,
outerMethod
.invoke(v)
.asInstanceOf[scala.Enumeration]
)
}
def write(kser: Kryo, out: Output, obj: Enumeration#Value): Unit = {
val enum = enumOf(obj)
// Note due to the ObjectSerializer, this only really writes the class.
kser.writeClassAndObject(out, enum)
// Now, we just write the ID:
out.writeInt(obj.id)
}
def read(kser: Kryo, in: Input, cls: Class[Enumeration#Value]): Enumeration#Value = {
// Note: due to the ObjectSerializer, this only really reads the class.
val enum = kser.readClassAndObject(in).asInstanceOf[Enumeration]
enum(in.readInt).asInstanceOf[Enumeration#Value]
}
}
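// Editor's note: a self-contained sketch of the reflection round-trip used above:
// recover the outer Enumeration from a Value via the synthetic outerEnum accessor,
// then look the value back up by its stable id, as read() does.
object Weekday extends Enumeration { val Mon, Tue, Wed = Value }
object EnumRoundTripSketch {
  def roundTrip(v: Enumeration#Value): Enumeration#Value = {
    val outerMethod = classOf[Enumeration#Value].getMethod("scala$Enumeration$$outerEnum")
    val outer = outerMethod.invoke(v).asInstanceOf[Enumeration]
    outer(v.id)
  }
  // roundTrip(Weekday.Tue) == Weekday.Tue
}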
|
twitter/chill
|
chill-scala/src/main/scala/com/twitter/chill/EnumerationSerializer.scala
|
Scala
|
apache-2.0
| 1,772
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.classes.HeroicCharacterClass
import io.truthencode.ddo.model.classes.HeroicCharacterClass._
import io.truthencode.ddo.support.requisite.{ClassRequisiteImpl, FeatRequisiteImpl, FreeFeat}
/**
* This feat negates the penalties from using bucklers, small shields, and large shields.
*/
protected[feats] trait ShieldProficiency
extends FeatRequisiteImpl with ClassRequisiteImpl with Passive with FreeFeat {
self: GeneralFeat =>
override def grantToClass: Seq[(HeroicCharacterClass, Int)] = firstLevelClasses
private def firstLevelClasses =
List(Barbarian, Cleric, FavoredSoul, Fighter, Paladin, Ranger, Rogue).map((_, 1))
}
|
adarro/ddo-calc
|
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/ShieldProficiency.scala
|
Scala
|
apache-2.0
| 1,382
|
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package bootstrap.liftweb
import net.liftweb.common._
import javax.servlet.{ServletContextEvent,ServletContextListener}
import org.springframework.web.context.{WebApplicationContext,ContextLoaderListener}
import org.springframework.web.context.support.WebApplicationContextUtils
import org.springframework.core.io.{ClassPathResource => CPResource,FileSystemResource => FSResource}
import java.io.File
import com.typesafe.config.ConfigException
/**
* A context loader listener for initializing Spring webapp context
* and logging.
*
* Spring application context is initialized here because:
* - a Java annotation-based web application context can ONLY be initialized via a context param
*   or filter (no comment on that...), see:
* http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/htmlsingle/spring-framework-reference.html#beans-java-instantiating-container-web
* - It's one of the first things to be done on servlet loading, and it can access ServletContext,
* which is necessary to be able to call WebApplicationContextUtils.getWebApplicationContext
*
*/
class LiftInitContextListener extends ContextLoaderListener {
//choose what Logback.xml file to use
val JVM_CONFIG_FILE_KEY = "logback.configurationFile"
val DEFAULT_CONFIG_FILE_NAME = "logback.xml"
val logbackFile = System.getProperty(JVM_CONFIG_FILE_KEY) match {
case null | "" => //use default location in classpath
val path = new CPResource(DEFAULT_CONFIG_FILE_NAME).getURL
println("JVM property -D%s is not defined, use configuration file in classpath: /%s".format(JVM_CONFIG_FILE_KEY, path))
path
case x => //so, it should be a full path, check it
val config = new FSResource(new File(x))
if(config.exists && config.isReadable) {
println("Use configuration file defined by JVM property -D%s : %s".format(JVM_CONFIG_FILE_KEY, config.getPath))
config.getURL
} else {
println("ERROR: Can not find configuration file specified by JVM property %s: %s ; abort".format(JVM_CONFIG_FILE_KEY, config.getPath))
throw new javax.servlet.UnavailableException("Configuration file not found: %s".format(config.getPath))
}
}
override def contextInitialized(sce:ServletContextEvent) : Unit = {
Logger.setup = Full(Logback.withFile(logbackFile))
/// init all our non-spring services ///
val ms = System.currentTimeMillis()
RudderConfig.init
//init Spring
super.contextInitialized(sce)
//initializing webapp context
WebApplicationContextUtils.getWebApplicationContext(sce.getServletContext) match {
//it's really an error here !
case null => sys.error("Error when getting the application context from the web context. Missing ContextLoaderListener.")
case c => LiftSpringApplicationContext.setToNewContext(c)
}
}
override def contextDestroyed(sce:ServletContextEvent) : Unit = {
//nothing special to do for us, only call super
super.contextDestroyed(sce)
}
}
|
armeniaca/rudder
|
rudder-web/src/main/scala/bootstrap/liftweb/LiftInitContextListener.scala
|
Scala
|
gpl-3.0
| 4,717
|
package com.betfair.robots.racing
import akka.actor.Actor
import com.betfair.domain.{LimitOrder, PersistenceType, PlaceInstruction, Side}
import com.betfair.service.BetfairServiceNG
import org.joda.time
import org.joda.time.{DateTime, DateTimeZone}
import scala.collection.mutable
import scala.concurrent._
import scala.language.postfixOps
import scala.util.{Failure, Success}
class MonitorPrices(betfairServiceNG: BetfairServiceNG, sessionToken: String,
marketId: String, marketStartTime: Option[DateTime])
(implicit executionContext: ExecutionContext) extends Actor {
def receive = {
case _ => {
println((new time.DateTime(DateTimeZone.UTC)) + " - monitoring prices starting - " + marketId)
var preRace = true
var count = 1
// monitor market until the off
while (count < 600) {
betfairServiceNG.getPriceBoundRunners(sessionToken, marketId = marketId,
lowerPrice = 1.00, higherPrice = 12.0
) onComplete {
case Success(Some(runners)) if (runners.size > 0) =>
for (runner <- runners) {
println((new time.DateTime(DateTimeZone.UTC))
+ "," + marketId
+ "," + runner.selectionId
+ "," + count
+ "," + runner.ex.get.availableToBack.toList.reverse.mkString
+ "," + runner.ex.get.availableToLay.mkString)
}
count += 1
case _ =>
preRace = false
}
Thread.sleep(500)
}
println((new time.DateTime(DateTimeZone.UTC)) + " - monitoring prices ending - " + marketId)
}
}
}
|
city81/betfair-service-ng
|
src/main/scala/com/betfair/robots/racing/MonitorPrices.scala
|
Scala
|
bsd-2-clause
| 1,711
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import scala.collection.JavaConverters._
import org.apache.spark.{broadcast, TaskContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Attribute, SortOrder, SpecializedGetters, UnsafeProjection}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.plans.physical.Partitioning
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
import org.apache.spark.sql.execution.vectorized.{OffHeapColumnVector, OnHeapColumnVector, WritableColumnVector}
import org.apache.spark.sql.types._
import org.apache.spark.sql.vectorized.{ColumnarBatch, ColumnVector}
import org.apache.spark.util.Utils
/**
* Holds a user defined rule that can be used to inject columnar implementations of various
* operators in the plan. The [[preColumnarTransitions]] [[Rule]] can be used to replace
* [[SparkPlan]] instances with versions that support a columnar implementation. After this
* Spark will insert any transitions necessary. This includes transitions from row to columnar
* [[RowToColumnarExec]] and from columnar to row [[ColumnarToRowExec]]. At this point the
* [[postColumnarTransitions]] [[Rule]] is called to allow replacing any of the implementations
* of the transitions or doing cleanup of the plan, like inserting stages to build larger batches
* for more efficient processing, or stages that transition the data to/from an accelerator's
* memory.
*/
class ColumnarRule {
def preColumnarTransitions: Rule[SparkPlan] = plan => plan
def postColumnarTransitions: Rule[SparkPlan] = plan => plan
}
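// Editor's note: a minimal sketch of a plugin-provided ColumnarRule. A real plugin would
// pattern-match specific operators in preColumnarTransitions and swap in SparkPlan nodes
// whose supportsColumnar is true, then tidy up the inserted transitions afterwards.
class NoopColumnarRuleSketch extends ColumnarRule {
  override def preColumnarTransitions: Rule[SparkPlan] = plan => plan.transformUp {
    case p => p // hypothetical: replace supported operators with columnar versions here
  }
  override def postColumnarTransitions: Rule[SparkPlan] = plan => plan
}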
/**
* A trait that is used as a tag to indicate a transition from columns to rows. This allows plugins
* to replace the current [[ColumnarToRowExec]] with an optimized version and still have operations
* that walk a spark plan looking for this type of transition properly match it.
*/
trait ColumnarToRowTransition extends UnaryExecNode
/**
* Provides a common executor to translate an [[RDD]] of [[ColumnarBatch]] into an [[RDD]] of
* [[InternalRow]]. This is inserted whenever such a transition is determined to be needed.
*
* The implementation is based off of similar implementations in
* [[org.apache.spark.sql.execution.python.ArrowEvalPythonExec]] and
* [[MapPartitionsInRWithArrowExec]]. Eventually this should replace those implementations.
*/
case class ColumnarToRowExec(child: SparkPlan) extends ColumnarToRowTransition with CodegenSupport {
// supportsColumnar must be called only on the driver side, see also SPARK-37779.
assert(Utils.isInRunningSparkTask || child.supportsColumnar)
override def output: Seq[Attribute] = child.output
override def outputPartitioning: Partitioning = child.outputPartitioning
override def outputOrdering: Seq[SortOrder] = child.outputOrdering
// `ColumnarToRowExec` processes the input RDD directly, which is kind of a leaf node in the
// codegen stage and needs to do the limit check.
protected override def canCheckLimitNotReached: Boolean = true
override lazy val metrics: Map[String, SQLMetric] = Map(
"numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
"numInputBatches" -> SQLMetrics.createMetric(sparkContext, "number of input batches")
)
override def doExecute(): RDD[InternalRow] = {
val numOutputRows = longMetric("numOutputRows")
val numInputBatches = longMetric("numInputBatches")
// This avoids calling `output` in the RDD closure, so that we don't need to include the entire
// plan (this) in the closure.
val localOutput = this.output
child.executeColumnar().mapPartitionsInternal { batches =>
val toUnsafe = UnsafeProjection.create(localOutput, localOutput)
batches.flatMap { batch =>
numInputBatches += 1
numOutputRows += batch.numRows()
batch.rowIterator().asScala.map(toUnsafe)
}
}
}
/**
* Generate [[ColumnVector]] expressions for our parent to consume as rows.
* This is called once per [[ColumnVector]] in the batch.
*/
private def genCodeColumnVector(
ctx: CodegenContext,
columnVar: String,
ordinal: String,
dataType: DataType,
nullable: Boolean): ExprCode = {
val javaType = CodeGenerator.javaType(dataType)
val value = CodeGenerator.getValueFromVector(columnVar, dataType, ordinal)
val isNullVar = if (nullable) {
JavaCode.isNullVariable(ctx.freshName("isNull"))
} else {
FalseLiteral
}
val valueVar = ctx.freshName("value")
val str = s"columnVector[$columnVar, $ordinal, ${dataType.simpleString}]"
val code = code"${ctx.registerComment(str)}" + (if (nullable) {
code"""
boolean $isNullVar = $columnVar.isNullAt($ordinal);
$javaType $valueVar = $isNullVar ? ${CodeGenerator.defaultValue(dataType)} : ($value);
"""
} else {
code"$javaType $valueVar = $value;"
})
ExprCode(code, isNullVar, JavaCode.variable(valueVar, dataType))
}
/**
* Produce code to process the input iterator as [[ColumnarBatch]]es.
* This produces an [[org.apache.spark.sql.catalyst.expressions.UnsafeRow]] for each row in
* each batch.
*/
override protected def doProduce(ctx: CodegenContext): String = {
// PhysicalRDD always just has one input
val input = ctx.addMutableState("scala.collection.Iterator", "input",
v => s"$v = inputs[0];")
// metrics
val numOutputRows = metricTerm(ctx, "numOutputRows")
val numInputBatches = metricTerm(ctx, "numInputBatches")
val columnarBatchClz = classOf[ColumnarBatch].getName
val batch = ctx.addMutableState(columnarBatchClz, "batch")
val idx = ctx.addMutableState(CodeGenerator.JAVA_INT, "batchIdx") // init as batchIdx = 0
val columnVectorClzs = child.vectorTypes.getOrElse(
Seq.fill(output.indices.size)(classOf[ColumnVector].getName))
val (colVars, columnAssigns) = columnVectorClzs.zipWithIndex.map {
case (columnVectorClz, i) =>
val name = ctx.addMutableState(columnVectorClz, s"colInstance$i")
(name, s"$name = ($columnVectorClz) $batch.column($i);")
}.unzip
val nextBatch = ctx.freshName("nextBatch")
val nextBatchFuncName = ctx.addNewFunction(nextBatch,
s"""
|private void $nextBatch() throws java.io.IOException {
| if ($input.hasNext()) {
| $batch = ($columnarBatchClz)$input.next();
| $numInputBatches.add(1);
| $numOutputRows.add($batch.numRows());
| $idx = 0;
| ${columnAssigns.mkString("", "\\n", "\\n")}
| }
|}""".stripMargin)
ctx.currentVars = null
val rowidx = ctx.freshName("rowIdx")
val columnsBatchInput = (output zip colVars).map { case (attr, colVar) =>
genCodeColumnVector(ctx, colVar, rowidx, attr.dataType, attr.nullable)
}
val localIdx = ctx.freshName("localIdx")
val localEnd = ctx.freshName("localEnd")
val numRows = ctx.freshName("numRows")
val shouldStop = if (parent.needStopCheck) {
s"if (shouldStop()) { $idx = $rowidx + 1; return; }"
} else {
"// shouldStop check is eliminated"
}
s"""
|if ($batch == null) {
| $nextBatchFuncName();
|}
|while ($limitNotReachedCond $batch != null) {
| int $numRows = $batch.numRows();
| int $localEnd = $numRows - $idx;
| for (int $localIdx = 0; $localIdx < $localEnd; $localIdx++) {
| int $rowidx = $idx + $localIdx;
| ${consume(ctx, columnsBatchInput).trim}
| $shouldStop
| }
| $idx = $numRows;
| $batch = null;
| $nextBatchFuncName();
|}
""".stripMargin
}
override def inputRDDs(): Seq[RDD[InternalRow]] = {
Seq(child.executeColumnar().asInstanceOf[RDD[InternalRow]]) // Hack because of type erasure
}
override protected def withNewChildInternal(newChild: SparkPlan): ColumnarToRowExec =
copy(child = newChild)
}
/**
* Provides an optimized set of APIs to append row based data to an array of
* [[WritableColumnVector]].
*/
private[execution] class RowToColumnConverter(schema: StructType) extends Serializable {
private val converters = schema.fields.map {
f => RowToColumnConverter.getConverterForType(f.dataType, f.nullable)
}
final def convert(row: InternalRow, vectors: Array[WritableColumnVector]): Unit = {
var idx = 0
while (idx < row.numFields) {
converters(idx).append(row, idx, vectors(idx))
idx += 1
}
}
}
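// Usage sketch (illustrative): a converter is built once per schema and reused,
// appending one value per column for every incoming row:
//
//   val converter = new RowToColumnConverter(schema)
//   rows.foreach(row => converter.convert(row, vectors))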
/**
* Provides an optimized set of APIs to extract a column from a row and append it to a
* [[WritableColumnVector]].
*/
private object RowToColumnConverter {
private abstract class TypeConverter extends Serializable {
def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit
}
private final case class BasicNullableTypeConverter(base: TypeConverter) extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit = {
if (row.isNullAt(column)) {
cv.appendNull
} else {
base.append(row, column, cv)
}
}
}
private final case class StructNullableTypeConverter(base: TypeConverter) extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit = {
if (row.isNullAt(column)) {
cv.appendStruct(true)
} else {
base.append(row, column, cv)
}
}
}
private def getConverterForType(dataType: DataType, nullable: Boolean): TypeConverter = {
val core = dataType match {
case BinaryType => BinaryConverter
case BooleanType => BooleanConverter
case ByteType => ByteConverter
case ShortType => ShortConverter
case IntegerType | DateType | _: YearMonthIntervalType => IntConverter
case FloatType => FloatConverter
case LongType | TimestampType | _: DayTimeIntervalType => LongConverter
case DoubleType => DoubleConverter
case StringType => StringConverter
case CalendarIntervalType => CalendarConverter
case at: ArrayType => ArrayConverter(getConverterForType(at.elementType, at.containsNull))
case st: StructType => new StructConverter(st.fields.map(
(f) => getConverterForType(f.dataType, f.nullable)))
case dt: DecimalType => new DecimalConverter(dt)
case mt: MapType => MapConverter(getConverterForType(mt.keyType, nullable = false),
getConverterForType(mt.valueType, mt.valueContainsNull))
case unknown => throw QueryExecutionErrors.unsupportedDataTypeError(unknown.toString)
}
if (nullable) {
dataType match {
case CalendarIntervalType => new StructNullableTypeConverter(core)
case _: StructType => new StructNullableTypeConverter(core)
case _ => new BasicNullableTypeConverter(core)
}
} else {
core
}
}
private object BinaryConverter extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit = {
val bytes = row.getBinary(column)
cv.appendByteArray(bytes, 0, bytes.length)
}
}
private object BooleanConverter extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit =
cv.appendBoolean(row.getBoolean(column))
}
private object ByteConverter extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit =
cv.appendByte(row.getByte(column))
}
private object ShortConverter extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit =
cv.appendShort(row.getShort(column))
}
private object IntConverter extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit =
cv.appendInt(row.getInt(column))
}
private object FloatConverter extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit =
cv.appendFloat(row.getFloat(column))
}
private object LongConverter extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit =
cv.appendLong(row.getLong(column))
}
private object DoubleConverter extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit =
cv.appendDouble(row.getDouble(column))
}
private object StringConverter extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit = {
val data = row.getUTF8String(column).getBytes
cv.appendByteArray(data, 0, data.length)
}
}
private object CalendarConverter extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit = {
val c = row.getInterval(column)
cv.appendStruct(false)
cv.getChild(0).appendInt(c.months)
cv.getChild(1).appendInt(c.days)
cv.getChild(2).appendLong(c.microseconds)
}
}
private case class ArrayConverter(childConverter: TypeConverter) extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit = {
val values = row.getArray(column)
val numElements = values.numElements()
cv.appendArray(numElements)
val arrData = cv.arrayData()
for (i <- 0 until numElements) {
childConverter.append(values, i, arrData)
}
}
}
private case class StructConverter(childConverters: Array[TypeConverter]) extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit = {
cv.appendStruct(false)
val data = row.getStruct(column, childConverters.length)
for (i <- 0 until childConverters.length) {
childConverters(i).append(data, i, cv.getChild(i))
}
}
}
private case class DecimalConverter(dt: DecimalType) extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit = {
val d = row.getDecimal(column, dt.precision, dt.scale)
if (dt.precision <= Decimal.MAX_INT_DIGITS) {
cv.appendInt(d.toUnscaledLong.toInt)
} else if (dt.precision <= Decimal.MAX_LONG_DIGITS) {
cv.appendLong(d.toUnscaledLong)
} else {
val integer = d.toJavaBigDecimal.unscaledValue
val bytes = integer.toByteArray
cv.appendByteArray(bytes, 0, bytes.length)
}
}
}
private case class MapConverter(keyConverter: TypeConverter, valueConverter: TypeConverter)
extends TypeConverter {
override def append(row: SpecializedGetters, column: Int, cv: WritableColumnVector): Unit = {
val m = row.getMap(column)
val keys = cv.getChild(0)
val values = cv.getChild(1)
val numElements = m.numElements()
cv.appendArray(numElements)
val srcKeys = m.keyArray()
val srcValues = m.valueArray()
for (i <- 0 until numElements) {
keyConverter.append(srcKeys, i, keys)
valueConverter.append(srcValues, i, values)
}
}
}
}
/**
* A trait that is used as a tag to indicate a transition from rows to columns. This allows plugins
* to replace the current [[RowToColumnarExec]] with an optimized version and still have operations
* that walk a spark plan looking for this type of transition properly match it.
*/
trait RowToColumnarTransition extends UnaryExecNode
/**
* Provides a common executor to translate an [[RDD]] of [[InternalRow]] into an [[RDD]] of
* [[ColumnarBatch]]. This is inserted whenever such a transition is determined to be needed.
*
* This is similar to some of the code in ArrowConverters.scala and
* [[org.apache.spark.sql.execution.arrow.ArrowWriter]]. That code is more specialized
* to convert [[InternalRow]] to Arrow formatted data, but in the future if we make
* [[OffHeapColumnVector]] internally Arrow formatted we may be able to replace much of that code.
*
* This is also similar to
* [[org.apache.spark.sql.execution.vectorized.ColumnVectorUtils.populate()]] and
* [[org.apache.spark.sql.execution.vectorized.ColumnVectorUtils.toBatch()]]. toBatch is only ever
* called from tests and can probably be removed, but populate is used by both Orc and Parquet
* to initialize partition and missing columns. There is some chance that we could replace
* populate with [[RowToColumnConverter]], but the performance requirements are different and it
* would only be to reduce code.
*/
case class RowToColumnarExec(child: SparkPlan) extends RowToColumnarTransition {
override def output: Seq[Attribute] = child.output
override def outputPartitioning: Partitioning = child.outputPartitioning
override def outputOrdering: Seq[SortOrder] = child.outputOrdering
override def doExecute(): RDD[InternalRow] = {
child.execute()
}
override def doExecuteBroadcast[T](): broadcast.Broadcast[T] = {
child.doExecuteBroadcast()
}
override def supportsColumnar: Boolean = true
override lazy val metrics: Map[String, SQLMetric] = Map(
"numInputRows" -> SQLMetrics.createMetric(sparkContext, "number of input rows"),
"numOutputBatches" -> SQLMetrics.createMetric(sparkContext, "number of output batches")
)
override def doExecuteColumnar(): RDD[ColumnarBatch] = {
val enableOffHeapColumnVector = conf.offHeapColumnVectorEnabled
val numInputRows = longMetric("numInputRows")
val numOutputBatches = longMetric("numOutputBatches")
// Instead of creating a new config we are reusing columnBatchSize. In the future if we do
// combine with some of the Arrow conversion tools we will need to unify some of the configs.
val numRows = conf.columnBatchSize
// This avoids calling `schema` in the RDD closure, so that we don't need to include the entire
// plan (this) in the closure.
val localSchema = this.schema
child.execute().mapPartitionsInternal { rowIterator =>
if (rowIterator.hasNext) {
new Iterator[ColumnarBatch] {
private val converters = new RowToColumnConverter(localSchema)
private val vectors: Seq[WritableColumnVector] = if (enableOffHeapColumnVector) {
OffHeapColumnVector.allocateColumns(numRows, localSchema)
} else {
OnHeapColumnVector.allocateColumns(numRows, localSchema)
}
private val cb: ColumnarBatch = new ColumnarBatch(vectors.toArray)
TaskContext.get().addTaskCompletionListener[Unit] { _ =>
cb.close()
}
override def hasNext: Boolean = {
rowIterator.hasNext
}
override def next(): ColumnarBatch = {
cb.setNumRows(0)
vectors.foreach(_.reset())
var rowCount = 0
while (rowCount < numRows && rowIterator.hasNext) {
val row = rowIterator.next()
converters.convert(row, vectors.toArray)
rowCount += 1
}
cb.setNumRows(rowCount)
numInputRows += rowCount
numOutputBatches += 1
cb
}
}
} else {
Iterator.empty
}
}
}
override protected def withNewChildInternal(newChild: SparkPlan): RowToColumnarExec =
copy(child = newChild)
}
/**
* Apply any user defined [[ColumnarRule]]s and find the correct place to insert transitions
* to/from columnar formatted data.
*
* @param columnarRules custom columnar rules
* @param outputsColumnar whether or not the produced plan should output columnar format.
*/
case class ApplyColumnarRulesAndInsertTransitions(
columnarRules: Seq[ColumnarRule],
outputsColumnar: Boolean)
extends Rule[SparkPlan] {
/**
* Inserts a transition to columnar formatted data.
*/
private def insertRowToColumnar(plan: SparkPlan): SparkPlan = {
if (!plan.supportsColumnar) {
// The tree feels kind of backwards
// Columnar Processing will start here, so transition from row to columnar
RowToColumnarExec(insertTransitions(plan, outputsColumnar = false))
} else if (!plan.isInstanceOf[RowToColumnarTransition]) {
plan.withNewChildren(plan.children.map(insertRowToColumnar))
} else {
plan
}
}
/**
* Inserts RowToColumnarExecs and ColumnarToRowExecs where needed.
*/
private def insertTransitions(plan: SparkPlan, outputsColumnar: Boolean): SparkPlan = {
if (outputsColumnar) {
insertRowToColumnar(plan)
} else if (plan.supportsColumnar && !plan.supportsRowBased) {
// `outputsColumnar` is false but the plan only outputs columnar format, so add a
// to-row transition here.
ColumnarToRowExec(insertRowToColumnar(plan))
} else if (!plan.isInstanceOf[ColumnarToRowTransition]) {
plan.withNewChildren(plan.children.map(insertTransitions(_, outputsColumnar = false)))
} else {
plan
}
}
def apply(plan: SparkPlan): SparkPlan = {
var preInsertPlan: SparkPlan = plan
columnarRules.foreach(r => preInsertPlan = r.preColumnarTransitions(preInsertPlan))
var postInsertPlan = insertTransitions(preInsertPlan, outputsColumnar)
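// Post rules run in reverse registration order, so the last rule to pre-process
// is the first to post-process.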
columnarRules.reverse.foreach(r => postInsertPlan = r.postColumnarTransitions(postInsertPlan))
postInsertPlan
}
}
|
holdenk/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/Columnar.scala
|
Scala
|
apache-2.0
| 22,532
|
package com.fingerco
package processors
// Spark
import org.apache.spark.streaming.kinesis._
import org.apache.spark.streaming.{ Duration, StreamingContext }
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.storage.StorageLevel
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream
// JSON Parsing
import scala.util.parsing.json.JSON
// Scraping
import org.jsoup.Jsoup
import models.WebContent
object ScrapeStreamProcessor {
val userAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36"
def setup(appConfig: AppConfig, streamingCtx: StreamingContext) = {
val scrapeStream = getScrapeStream(appConfig, streamingCtx)
scrapeStream.foreachRDD { rdd =>
rdd.foreach{ scrape_req =>
val url = scrape_req("url").asInstanceOf[String]
val contents = Jsoup.connect(url).userAgent(userAgent).get().html()
val identifier = scrape_req
.getOrElse("identifier", java.util.UUID.randomUUID.toString)
.asInstanceOf[String]
WebContent(
url = url,
startUrl = scrape_req.getOrElse("startUrl", url).asInstanceOf[String],
identifier = identifier,
contents = contents
).save()
println(s"Scraped: $url")
}
}
}
private def getScrapeStream(
appConfig: AppConfig,
streamingCtx: StreamingContext): DStream[Map[String, Any]] = {
val stream = KinesisUtils.createStream(
streamingCtx,
appConfig.appName,
appConfig.streamName,
s"kinesis.${appConfig.regionName}.amazonaws.com",
appConfig.regionName,
InitialPositionInStream.LATEST,
Duration(appConfig.checkpointInterval),
StorageLevel.MEMORY_ONLY
)
stream
.map { byteArray => new String(byteArray) }
// Parse defensively: drop records that fail to parse instead of crashing the batch on .get.
.flatMap { jsonStr => JSON.parseFull(jsonStr).map(_.asInstanceOf[Map[String, Any]]) }
}
}
|
fingerco/watcher-url-scraper
|
src/main/scala/com/fingerco/processors/ScrapeStreamProcessor.scala
|
Scala
|
apache-2.0
| 1,958
|
import sbt._
import Keys._
object MyBuild extends Build {
lazy val buildSettings = Defaults.defaultSettings ++ Seq(
version := "0.1-SNAPSHOT",
organization := "com.simba",
scalaVersion := Option(System.getProperty("scala.version")).getOrElse("2.10.4")
)
lazy val sprayactorsnippets = Project(
id = "SprayActorSnippets",
base = file("."),
settings = buildSettings ++
sbtassembly.Plugin.assemblySettings // apply the shared buildSettings defined above
)
}
|
ShiZhan/live-triples
|
project/Build.scala
|
Scala
|
apache-2.0
| 455
|
package xsbt.test
import java.io.File
import org.specs2.mutable.Specification
import org.specs2.matcher.FileMatchers
import sbt._
import sbt.io.IO
import sbt.io.Path._
object FileCommandsSpec extends Specification with FileMatchers {
"The touch command" should {
"touch a file that doesn't exist" in withTmpDir { dir =>
fileCommands(dir)("touch", List("foo"))
dir / "foo" must exist
}
"update the timestamp of a file that does exist" in withTmpDir { dir =>
val file = dir / "foo"
IO.write(file, "x")
file.setLastModified(1000L)
fileCommands(dir)("touch", List("foo"))
file.lastModified() must beGreaterThan(1000L)
}
}
"The delete command" should {
"delete a file" in withTmpDir { dir =>
IO.write(dir / "foo", "x")
fileCommands(dir)("delete", List("foo"))
dir / "foo" must not(exist)
}
"delete a directory" in withTmpDir { dir =>
IO.write(dir / "foo" / "bar", "x")
fileCommands(dir)("delete", List("foo"))
dir / "foo" must not(exist)
}
}
"The exists command" should {
"succeed if a file exists" in withTmpDir { dir =>
IO.write(dir / "foo", "x")
fileCommands(dir)("exists", List("foo"))
ok
}
"fail if a file doesn't exist" in withTmpDir { dir =>
fileCommands(dir)("exists", List("foo")) must throwAn[Exception]
}
}
"The mkdir command" should {
"make a directory" in withTmpDir { dir =>
fileCommands(dir)("mkdir", List("foo"))
dir / "foo" must beADirectory
}
"make all directories" in withTmpDir { dir =>
fileCommands(dir)("mkdir", List("foo/bar"))
dir / "foo" / "bar" must beADirectory
}
}
"The absent command" should {
"succeed if a file is absent" in withTmpDir { dir =>
fileCommands(dir)("absent", List("foo"))
ok
}
"fail if a file is not absent" in withTmpDir { dir =>
IO.write(dir / "foo", "x")
fileCommands(dir)("absent", List("foo")) must throwAn[Exception]
}
}
"The newer command" should {
"succeed if a file is newer" in withTmpDir { dir =>
val file1 = dir / "foo"
IO.write(file1, "x")
file1.setLastModified(1000L)
val file2 = dir / "bar"
IO.write(file2, "x")
file2.setLastModified(2000L)
fileCommands(dir)("newer", List("bar", "foo"))
ok
}
"fail if a file is not newer" in withTmpDir { dir =>
val file1 = dir / "foo"
IO.write(file1, "x")
file1.setLastModified(1000L)
val file2 = dir / "bar"
IO.write(file2, "x")
file2.setLastModified(2000L)
fileCommands(dir)("newer", List("foo", "bar")) must throwAn[Exception]
}
"fail if the tested file doesn't exist" in withTmpDir { dir =>
val file1 = dir / "foo"
IO.write(file1, "x")
file1.setLastModified(1000L)
fileCommands(dir)("newer", List("bar", "foo")) must throwAn[Exception]
}
"succeed if the target file doesn't exist" in withTmpDir { dir =>
val file1 = dir / "foo"
IO.write(file1, "x")
file1.setLastModified(1000L)
fileCommands(dir)("newer", List("foo", "bar"))
ok
}
}
"The copy-file command" should {
"copy a file" in withTmpDir { dir =>
IO.write(dir / "foo", "x")
fileCommands(dir)("copy-file", List("foo", "bar"))
dir / "bar" must exist
IO.read(dir / "bar") must_== "x"
}
}
def fileCommands(dir: File) = new FileCommands(dir)
def withTmpDir[A](block: File => A): A = {
val tmpDir = File.createTempFile("filecommands", "")
try {
tmpDir.delete()
tmpDir.mkdir()
block(tmpDir)
} finally {
IO.delete(tmpDir)
}
}
}
|
mdedetrich/sbt
|
scripted/base/src/test/scala/xsbt/test/FileCommandsSpec.scala
|
Scala
|
bsd-3-clause
| 3,699
|
/**
* Copyright 2015 Ram Sriharsha
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package magellan
import org.scalatest.FunSuite
class PointSuite extends FunSuite {
test("touches") {
val point = new Point(0.0, 0.0)
assert(!point.touches(point))
}
test("intersects") {
val point = new Point(0.0, 0.0)
assert(point.intersects(point))
}
test("distance between points") {
val point1 = new Point(0.0, 0.0)
val point2 = new Point(1.0, 1.0)
assert(point1.distance(point2) == Math.sqrt(2))
}
}
|
simonellistonball/magellan
|
src/test/scala/magellan/PointSuite.scala
|
Scala
|
apache-2.0
| 1,049
|
package spinoco.protocol.kafka
/**
* Response from the Kafka broker
* @param correlationId Correlation id as presented in the request
* @param response Response received from Kafka
*/
case class ResponseMessage (
correlationId: Int
, response: Response
)
|
Spinoco/protocol
|
kafka/src/main/scala/spinoco/protocol/kafka/ResponseMessage.scala
|
Scala
|
mit
| 270
|
/* File: Gen.scala (Ch 8)
* Authors: Paul Chiusano and Runar Bjarnason
* Url: https://github.com/fpinscala/fpinscala
*
* Description: This is a modified version of the file Gen.scala
* that accompanies the book "Functional Programming in Scala" by
* Chiusano and Bjarnason. This version of the file includes
* solutions to some of the exercises in
*
* CHAPTER 8: Property-based testing
*
* The solutions herein are by William DeMeo <williamdemeo@gmail.com>.
* They are at best imperfect, and possibly wrong. Official solutions by
* Chiusano and Bjarnason are available in the github repo mentioned above.
*/
package fpinscala.testing
import fpinscala.laziness.Stream
import fpinscala.state._
import fpinscala.parallelism._
import fpinscala.parallelism.Par.Par
import Gen._
import Prop._
import java.util.concurrent.{Executors,ExecutorService}
//==== begin: Prop class ============================================================================
case class Prop(run: (MaxTestSize, NumberOfTests, RNG) => Result) {
// Ex 8.9a Implement && for composing Prop values.
def &&(that: Prop): Prop = Prop {
(maxsize, numtests, rng) => this.run(maxsize, numtests, rng) match {
case Passed | Proved => that.run(maxsize, numtests, rng)
case notPassedOrProved => notPassedOrProved
}
}
// Ex 8.9b Implement || for composing Prop values.
def ||(that: Prop): Prop = Prop {
(maxsize, numtests, rng) => this.run(maxsize, numtests, rng) match {
case Falsified(msg, _) => that.tag(msg).run(maxsize, numtests, rng)
case notFalsified => notFalsified
}
}
// The tag method provides a way of testing a Prop and, upon failure, prepending
// the given message to the Prop's own failure message.
def tag(msg: String) = Prop {
(maxsize, numtests, rng) => this.run(maxsize, numtests, rng) match {
case Falsified(mymsg, numsuc) => Falsified(msg+"\\n" + mymsg, numsuc)
case notFalsified => notFalsified
}
}
// Notice that in the case of failure we don't know which property was responsible, the left or
// the right. Can you devise a way of handling this? perhaps by allowing Prop values to be
// assigned a tag or label which gets displayed in the event of a failure?
}
//===== end: Prop class =======================================================================
//===== BEGIN: Prop companion object ===========================================================
object Prop {
type FailedCase = String // (type alias) strings that describe a test failure (indicate why a test failed)
type SuccessCount = Int // (type alias) how many tests have passed
type NumberOfTests = Int
type MaxTestSize = Int
// Justification for the new type called `Result`: (cf. p.133, fpinscala)
// (notes about this have been moved to bottom of this file)
// Result type is the return type of a Prop's run method.
sealed trait Result { def isFalsified: Boolean }
// A Prop's run method returns Proved if there is only one thing to test and it passes.
case object Proved extends Result { def isFalsified = false }
// A Prop's run method will return the Passed object if all tests succeed.
case object Passed extends Result { def isFalsified = false }
// A Prop's run method will return a Falsified object in case of failure.
case class Falsified (failure_string: FailedCase, num_successful: SuccessCount)
extends Result {
def isFalsified = true
}
def apply(f: (NumberOfTests,RNG) => Result): Prop = Prop { (_,n,rng) => f(n,rng) }
// run helper method
def run(p: Prop,
maxSize: Int = 100,
testCases: Int = 100,
rng: RNG = RNG.Simple(System.currentTimeMillis)): Unit =
p.run(maxSize, testCases, rng) match {
case Falsified(msg, n) =>
println(s"! Falsified after $n passed tests:\\n $msg")
case Passed =>
println(s"+ OK, passed $testCases tests.")
case Proved =>
println(s"+ OK, proved property.")
}
//--- First implementation of forAll, for "static" test sizes ---
/* Produce an infinite random stream from a `Gen` and a starting `RNG`. */
def randomStream[A](g: Gen[A])(rng: RNG): Stream[A] =
Stream.unfold(rng)(rng => Some(g.sample.run(rng)))
def forAll[A](as: Gen[A])(f: A => Boolean) = Prop {
(n,rng) => randomStream(as)(rng).zip(Stream.from(0)).take(n).map {
case (a, i) => try {
if (f(a)) Passed else Falsified(a.toString, i)
} catch { case e: Exception => Falsified(buildMsg(a, e), i) }
}.find(_.isFalsified).getOrElse(Passed)
}
def buildMsg[A](s: A, e: Exception): String =
s"test case: $s\\n" +
s"generated an exception: ${e.getMessage}\\n" +
s"stack trace:\\n ${e.getStackTrace.mkString("\\n")}"
//--- We could also add a random predicate generator...
def randomStreamWithPredicate[A,B](agen: Gen[A])(pred: Gen[A=>Boolean])(rng: RNG):
Stream[(A, A=>Boolean)] = Stream.unfold(rng){rng =>
val (a, rng2) = agen.sample.run(rng)
val (p, rng3) = pred.sample.run(rng2)
Some((a,p),rng3)
}
// ...this would allow us to run forAll on generated A's *and* generated predicates.
// It might seem weird because, usually, we have a specific predicate in mind that
// we're trying to test on lots of randomly generated data. However... (this remark
// is continued at bottom of this file)
def forAll2[A](as: Gen[A])(fs: Gen[A => Boolean]) = Prop {
(n,rng) => randomStreamWithPredicate(as)(fs)(rng).zip(Stream.from(0)).take(n).map {
case ((a,f), i) => try {
if (f(a)) Passed else Falsified((a,f).toString, i)
} catch { case e: Exception => Falsified(buildMsg2(a,f,e), i) }
}.find(_.isFalsified).getOrElse(Passed)
}
def forAll3[A](as: Gen[A])(preds: Gen[A => Boolean]) = Prop { (n,rng) =>
// Start by generating a stream of properties to test:
val pStream = randomStream(as)(rng) map (a => forAll(preds)(p => p(a)) ) // each case returns a Prop
// Take the first n and, using && combinator, reduce them to a single conjunction:
val pConj = pStream.take(n).toList.reduce(_ && _)
// Test the conjunction of n properties:
pConj.run(n, n, rng) // todo: get rid of dependence on MaxTestSize
}
def buildMsg2[A](a: A, p: A=>Boolean, e: Exception): String =
s"test case val: $a\\n" +
s"test case fun: $p\\n" +
s"generated an exception: ${e.getMessage}\\n" +
s"stack trace:\\n ${e.getStackTrace.mkString("\\n")}"
//--- Second implementation of forAll, for "dynamic" test sizes ---
def forAll[A](sg: SGen[A])(predicate: A => Boolean): Prop = forAll(sg.forSize)(predicate)
def forAll[A](g: Int => Gen[A])(predicate: A => Boolean): Prop = Prop {
(maxSize, numTests, rng) =>
val casesPerSize = (numTests + (maxSize - 1)) / maxSize // ceiling of numTests / maxSize
val props: Stream[Prop] =
Stream.from(0).take((numTests min maxSize) + 1).map( i => forAll(g(i))(predicate))
val prop: Prop = props.map( p =>
Prop { (mx, _, rng) => p.run(mx, casesPerSize, rng) } ).toList.reduce(_ && _)
prop.run(maxSize, numTests, rng)
}
//--- BEGIN: Helper methods for streamlining Par tests -------------------------------
val ES: ExecutorService = Executors.newCachedThreadPool
// check is for proving properties or laws.
def check(p: => Boolean) = Prop { (_, _, _) => if (p) Proved else Falsified("()", 0) }
// Lift equality relation into Par using map2.
def equal[A](p1: Par[A], p2: Par[A]): Par[Boolean] = Par.map2(p1,p2)(_ == _)
val S = weighted(
(choose(1,4).map(Executors.newFixedThreadPool), .75), // create fixed thread pool (TP) 75% of the time
(unit(Executors.newCachedThreadPool), .25) // create unbounded TP 25% of the time.
)
def forAllPar[A](g: Gen[A]) (f: A => Par[Boolean]): Prop =
forAll(S ** g) { case (s,a) => f(a)(s).get }
def checkPar(p: Par[Boolean]): Prop = forAllPar(Gen.unit(()))(_ => p)
def provePar(p: Par[Boolean]) = Prop { (_,_,_) =>
if (p(ES).get) Proved else Falsified("()", 0) }
//---- END: Helper methods for streamlining Par tests ----------------------------
//====== END: Prop companion object =============================================================
}
case class Gen[+A](sample: State[RNG, A]){
/* Gen[A] is something that knows how to generate values of type A. It could randomly generate these values.
* We already developed an interface for a purely functional random number generator RNG (Ch. 6), and we
* showed how to make it convenient to combine computations that made use of it.
*
* We let `Gen` be a type that wraps a state transition over a random number generator:
*
* case class Gen[A](sample: State[RNG,A])
*
* Recall `case class State[S,A](run: S => (A,S))` so the `sample` object wrapped inside a Gen
* has a run function that takes a rng1:RNG and gives back a pair (a, rng2): (A, RNG).
*/
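// Example (illustrative, not from the book): running a generator by hand with a
// seeded RNG yields a value paired with the next RNG state:
//   val (n, nextRng) = Gen.choose(0, 10).sample.run(RNG.Simple(42L))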
def map[B](f: A => B): Gen[B] = Gen(this.sample.map(f))
// Recall, the map method of State[S,A] class is
// def map[B](f: A => B): State[S, B] = flatMap(a => unit(f(a)))
def map2[B,C](g: Gen[B])(f: (A,B) => C): Gen[C] = Gen(this.sample.map2(g.sample)(f))
// Recall, the map2 method of State[S,A] class has signature
// def map2[B,C](sb: State[S, B])(f: (A, B) => C): State[S, C]
// Ex 8.6 In the Gen class, implement `flatMap`, then use it to implement a more dynamic `listOfN`.
def flatMap[B](f: A => Gen[B]): Gen[B] = Gen(this.sample.flatMap[B](a => f(a).sample))
// f ** g is syntactic sugar for combining two generators f and g to produce a pair generator.
def **[B] (g: Gen[B]): Gen[(A,B)] = this.map2(g)((_,_))
// Generate a Gen[List[A]] with length of list given by input parameter `n`
def listOfLength(n: Int): Gen[List[A]] = Gen.listOfLength(n, this)
// alias to listOfLength function of the companion object
// Generate a Gen[List[A]] with length of list generated by the given int generator.
def listOfGeneratedLength(glen: Gen[Int]): Gen[List[A]] =
glen flatMap(n => this.listOfLength(n))
// The book calls this `listOfN`
// Ex 8.10 Implement helper functions for converting Gen to SGen.
def unsized: SGen[A] = SGen(_ => this)
}
//==== begin: Gen companion object ======================================
object Gen {
// Ex 8.4 Implement Gen.choose which generates ints in the range [start, stopExclusive).
def choose(start: Int, stopExclusive: Int): Gen[Int] =
Gen ( State(RNG.nonNegativeLessThan(stopExclusive -start)).map(_ + start))
// RNG.nonNegativeLessThan returns a Rand[Int], which is an alias for RNG => (Int, RNG).
// nonNegativeLessThan(b-a) gives a random int in [0, b-a).
// map(_+a) take that int to the interval [a, b), as desired.
// This is simpler than the official solution.
// Ex 8.5 Let's see what else we can implement using this representation of Gen.
// Try implementing unit, boolean, and listOfN.
def unit[A](a: => A): Gen[A] = Gen(State.unit[RNG,A](a)) // (Scala will infer the type of State.unit here.)
def integer: Gen[Int] = Gen(State(RNG.int))
def boolean: Gen[Boolean] = Gen(State(RNG.boolean))
def double: Gen[Double] = Gen(State(RNG.double))
// Generate a Gen[List[A]] by repeated use of the given generator `g`.
// Length of list is given by input parameter `n`
def listOfLength[A](n: Int, g: Gen[A]): Gen[List[A]]=
Gen(State.sequence(List.fill(n)(g.sample)))
// Here, List.fill(n)(g.sample) results in (g.sample, g.sample, ..., g.sample): List[State[S,A]].
// Ex 8.12 Implement a listOf combinator that doesn't accept an explicit length. It should return an
// SGen instead of a Gen. The implementation should generate lists of the requested size.
def listOf[A](g: Gen[A]): SGen[List[A]] = SGen(n => g.listOfLength(n))
// Ex 8.13 Define listOf1 for generating nonempty lists, and then update your
// specification of max to use this generator.
def nonEmptyListOf[A](g: Gen[A]): SGen[List[A]] = SGen(n => g.listOfLength(n max 1))
// Not the most efficient implementation, but it's simple.
// This generates ASCII strings.
def stringN(n: Int): Gen[String] =
listOfLength(n, choose(0,127)).map(_.map(_.toChar).mkString)
// This generates ASCII strings of random length.
def stringGenN(g: Gen[Int]): Gen[String] =
(choose(0,127).listOfGeneratedLength(g)).map(_.map(_.toChar).mkString)
// generates strings of varying length
val string: SGen[String] = SGen(stringN)
// p.131: If we can generate a single Int in some range, do we need a new primitive to
// generate an (Int,Int) pair in some range? Answer: No, we don't need new primitives.
// It's very easy to get a list of two random integers in the interval [a,b):
def intListOfTwoInts(a: Int, b: Int): Gen[List[Int]] = listOfLength[Int](2, choose(a,b))
// It's only slightly harder to get a pair of two random integers in the interval [a,b):
def intPair(a: Int, b: Int): Gen[(Int,Int)] =
Gen(listOfLength[Int](2, choose(a,b)).sample.map{ case List(x,y) => (x,y) })
// Generate triples of type A.
def triple[A](g: Gen[A]): Gen[(A,A,A)] =
Gen(listOfLength[A](3, g).sample.map{ case List(x,y,z) => (x,y,z) })
// p.131: Can we produce a Gen[Option[A]] from a Gen[A]? Answer: yes.
def genToOpt[A](g: Gen[A]): Gen[Option[A]] = Gen(g.sample.map[Option[A]](a => Some(a)))
// p.131: What about a Gen[A] from a Gen[Option[A]]?
// Answer: Yes, if we know what to do with None cases. Here's one possibility:
def genFromOpt[A](g: Gen[Option[A]]): Gen[A] =
Gen(g.sample.map[A]{
case None => sys.error("None")
case Some(a) => a
}
)
// Ex 8.7 Implement union, for combining two generators of the same type into one, by pulling
// values from each generator with equal likelihood.
def union_first_try[A](g1: Gen[A], g2: Gen[A]): Gen[A] =
Gen ( State ( rng => RNG.boolean(rng) match {
case (true, _) => g1.sample.run(rng)
case (false, _) => g2.sample.run(rng)
} ) )
// Actually, it's much easier than this. We simply want to return g1 (or g2) itself, depending
// on the value of a randomly generated boolean, so...
def union[A](g1: Gen[A], g2: Gen[A]): Gen[A] = boolean.flatMap[A](x => if(x) g1 else g2)
// Ex 8.8 Implement weighted, a version of union that accepts a weight for each Gen and
// generates values from each Gen with probability proportional to its weight.
def weighted[A](t1: (Gen[A],Double), t2: (Gen[A],Double)): Gen[A] = {
  val g1Threshold = t1._2.abs / (t1._2.abs + t2._2.abs) // normalize so the weights need not sum to 1
  double.flatMap[A](x => if (x < g1Threshold) t1._1 else t2._1)
}
///////////////////////////////////////////////////////
// //
// How to Generate Functions at Random //
// //
///////////////////////////////////////////////////////
/* Ex 8.18 Come up with some other properties that takeWhile should satisfy.
* Can you think of a property expressing the relationship between takeWhile and dropWhile?
* Solution:
* First, let's record the example given in the book:
* Ex 1. For all s: List[A] and all f: A => Boolean, the following evaluates to true:
* s.takeWhile(f).forall(f)
* Here's my example:
* Ex 2. For all s: List[A] and all f: A => Boolean, the following evaluates to true:
* s.takeWhile(f) append s.dropWhile(f) == s
*/
// We could take the approach of only examining particular arguments--i.e., specific
// higher-order functions. For instance, here's a more specific property for takeWhile:
val isEven = (i: Int) => i % 2 == 0
val takeWhileProp =
Prop.forAll(Gen.listOf(Gen.integer))(ns => ns.takeWhile(isEven).forall(isEven))
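// Example (mine, not from the book): Ex 2 above (takeWhile and dropWhile
// partition a list), using the same fixed-predicate approach:
val takeDropWhileProp =
  Prop.forAll(Gen.listOf(Gen.integer))(ns =>
    ns.takeWhile(isEven) ++ ns.dropWhile(isEven) == ns)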
/* This works, but is there a way we could let the testing framework handle generating
* functions to use with takeWhile? To make this concrete, let's suppose we have a Gen[Int]
* and would like to produce a Gen[String => Int]. What are some ways we could do that?
* Well, we could produce String => Int functions that simply ignore their input string and
* delegate to the underlying Gen[Int] :
*/
def genStringIntConstFn(g: Gen[Int]): Gen[String => Int] =
g map (i => (s => i))
/* This approach isn't sufficient though. We're simply generating constant functions that ignore
* their input. In the case of takeWhile, where we need a function that returns a Boolean, this
* will be a function that always returns true or always returns false---clearly not very
* interesting for testing the behavior of our function.
*/
/* Ex 8.19 (Hard) We want to generate a function that uses its argument in some way to select which
* Int to return. Can you think of a good way of expressing this? This is a very open-ended and
* challenging design exercise. See what you can discover about this problem and if there's a nice
* general solution that you can incorporate into the library we've developed so far.
*
* _Solution_ One very simple solution that's a little better than the constant function
* approach above would be to take as input a Gen[Int], as well as specific function mapping
* String to Int, and then use both of these inputs to randomly generate a function; e.g., we
* could multiply the return value of that function by the randomly generated Int, as follows:
*/
def genStringIntFn(g: Gen[Int])(f: String => Int): Gen[String => Int] =
g map (i => (s => i*f(s)))
// Here's an example use:
val fn: Gen[String => Int] = genStringIntFn(choose(-10,10))(_.length)
//(added type signature here just so Scala will complain if the type of fn is not as I expected.)
// Another approach suggested in the hint given in the textbook companion is similar.
// We could set the seed of the random int generator equal to the hashcode of the given string.
// Slightly more general, instead of using hashcode, specify the function you want here:
def h[A](a:A): Long = ???
def genFn[A,B](g: Gen[B]): Gen[A => B] = Gen {
State { (rng: RNG) =>
val (seed, rng2) = rng.nextInt
val f = (a:A) => g.sample.run(RNG.Simple(seed.toLong ^ h(a)))._1
(f, rng2)
}
}
// We could make a trait to abstract out the h function needed in the implementation above.
// This is simple-minded/easy-to-understand. We'll generalize it later.
trait Seeder[-A] {
def seedFn : A => Long
}
def genFn_from_seedFn[A,B](in: Seeder[A])(out: Gen[B]): Gen[A => B] = Gen {
State { (rng: RNG) =>
val (seed, rng2) = rng.nextInt
val f = (a:A) => out.sample.run(RNG.Simple(seed.toLong ^ in.seedFn(a)))._1
(f, rng2)
}
}
// Example use:
val seeder = new Seeder[String]{
def seedFn = (s:String) => s.hashCode.toLong
}
// This should give the same function generator as that discussed in the book companion.
def genStringFn[B](g: Gen[B]): Gen[String => B] = genFn_from_seedFn[String,B](seeder)(g)
// And finally, for the more general version,
trait CoGen[-A] {
def sample(a: A, rng: RNG): RNG
}
def fn[A,B](in: CoGen[A])(out: Gen[B]): Gen[A => B] = Gen {
State { (rng: RNG) =>
val (_, rng2) = rng.nextInt // advance the RNG; the generated value itself is unused here
val f = (a: A) => out.sample.run(in.sample(a, rng))._1
(f,rng2) }
}
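// Example (mine, not from the book): a CoGen for Int that perturbs the RNG with
// the argument, so each input gets its own deterministic random output:
val intCoGen = new CoGen[Int] {
  def sample(a: Int, rng: RNG): RNG = RNG.Simple(rng.nextInt._1.toLong ^ a)
}
val intPreds: Gen[Int => Boolean] = fn(intCoGen)(boolean)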
} //==== end: Gen companion object ======================================
case class SGen[+A](forSize: Int => Gen[A]){
def apply(n: Int): Gen[A] = forSize(n)
// Ex 8.11 SGen supports many of the same ops as Gen. Define some convenience functions on SGen that
// simply delegate to the corresponding functions on Gen.
def flatMap[B](f: A => SGen[B]): SGen[B] =
SGen(n => this.forSize(n).flatMap { x => f(x).forSize(n) })
// This delegates to Gen.flatMap, threading the same size n through both generators.
}
/* Ex 8.1 To get used to thinking about testing in this way, come up with properties that
* specify the implementation of a sum: List[Int] => Int function. You don't have to write
* your properties down as executable ScalaCheck code--an informal description is fine.
* Here are some ideas to get you started:
* Reversing a list and summing it should give the same as summing the original list.
* What should the sum be if all elements of the list are the same value?
* Can you think of other properties?
*
* Possible Answers:
* - check commutative and associative laws?
* - summing a list of 0's should equal 0.
* - summing an n-element list where all elements are c should equal n*c.
* - sum(h::tail) should equal h + sum(tail)
* - sum(Nil) = sys.error?
* - sum(l1 append l2) = sum(l1) + sum(l2)
* - sum(l1 append (l2 append l3)) = sum(l1) + sum(l2) + sum(l3) = sum((l1 append l2) append l3)
* (This test wouldn't be necessary if we have already tested append such that the
* two arguments to sum that appear here give the same list.)
*/
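/* Example (mine): one property from the list above, written as an executable Prop:
 *
 *   val sumReverseProp =
 *     Prop.forAll(Gen.listOf(Gen.integer))(ns => ns.reverse.sum == ns.sum)
 */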
/* Ex 8.2 What properties specify a function that finds the maximum of a List[Int]?
*
* Answer:
* l.forAll(x => (x <= maximum)) // every element of the list is bounded above by maximum
* && l.exists(x => (x == maximum)) // the maximum occurs in the list.
*
*/
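/* Example (mine): the max specification above as an executable Prop; nonEmptyListOf
 * sidesteps the empty-list case, where List.max throws:
 *
 *   val maxProp = Prop.forAll(Gen.nonEmptyListOf(Gen.integer)) { ns =>
 *     val mx = ns.max
 *     ns.forall(_ <= mx) && ns.exists(_ == mx)
 *   }
 */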
/* Ex 8.3 Assuming the following representation of Prop, implement && as a method of Prop.
*
* trait Prop { def check: Boolean }
*
* Answer:
*
* trait Prop {
* def check: Boolean
* def &&(that: Prop): Prop = new Prop {
* def check = Prop.this.check && that.check
* }
*/
// =========vvvvvvvvvv MISCELLANEOUS NOTES AND COMMENTS vvvvvvvvvvv=========
/* Initially, the return type of a Prop was Option, with None returned in case of failure.
* This seemed reasonable, but it was refined so we could get a message indicating reason for
* failure and an int showing how many tests passed before the failure. The Either type seemed
* appropriate for this purpose. Refining further, we decided that the number of tests to run should
* be passed in to the Prop type itself. Therefore, if all tests pass, there's no reason to report
* the number of successes, since it will equal the input parameter in all such cases. So, we are
* back to the Option type. But it's strange to use the Option type when we want None to denote
* success and Some((message, n)) to denote failure. We want the return type of Prop to clearly
* represent our intention and the Option type seems to be the opposite of our intention. So we give
* up on the built-in types and create our own that is the most appropriate return type for Prop.
* We name this type `Result`.
*/
/* Chapter 8 is clear up to Section 8.3, where the authors discuss test case minimization.
* They begin by referring to an abstract notion of size of a test case, and refer to the
* "smallest" or "simplest" test case. Then they introduce a new case class
*
* case class SGen[+A](forSize: Int => Gen[A])
*
* which has a function forSize that accepts a test "size" and produces a generator of tests of that size.
* The problem is that the authors never clarify what size really means.
*/
/* Old forAll code:
* Use randomStream to get random stream of A's, then zip that with a stream of ints to get a stream of
* pairs of type (A, Int). Then, take(n) from the stream of pairs apply the map which tests the predicate.
*
* def forAll[A](g: Int => Gen[A])(predicate: A => Boolean): Prop = Prop {
* (n, rng) => randomStream(g)(rng).zip(Stream.from(0)).take(n).map {
* case (a, i) => try {
* if (predicate(a)) Passed else Falsified(a.toString, i)
* } catch { case e: Exception => Falsified(buildMsg(a, e), i) }
* }.find(_.isFalsified).getOrElse(Passed)
* }
*
* def randomStream[A](g: Gen[A])(rng: RNG): Stream[A] = Stream.unfold(rng)(rng => Some(g.sample.run(rng)))
*/
/* It seems all these type aliases are obfuscating matters. Wouldn't it be easier if we simply let
* Gen wrap a function of type RNG => (A, RNG), rather than have it wrap a State that wraps a
* function of type RNG => (A, RNG)? The problem with this seemingly simpler solution is that it
* doesn't allow us to use the State class methods (like the map method we used above).
*/
/* **Sec 8.2.4 Generators that depend on generated values**
* Suppose we'd like a `Gen[(String,String)]` that generates pairs where the second string contains
* only characters from the first. Or that we had a `Gen[Int]` that chooses an integer between 0 and 11,
* and we'd like to make a `Gen[List[Double]]` that then generates lists of whatever length is chosen.
* In both of these cases there's a dependency--we generate a value, and then use that value to determine
* what generator to use next. For this we need `flatMap`, which lets one generator depend on another.
*/
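/* Example (mine): the length-dependent list generator described above, via flatMap:
 *
 *   val doubles: Gen[List[Double]] = Gen.choose(0, 12).flatMap(n => Gen.double.listOfLength(n))
 *
 * This is exactly the pattern that Gen.listOfGeneratedLength captures.
 */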
/* Remarks on motivation for random generation of higher-order functions. As mentioned above,
* the purpose of forAll2 is to run forAll on generated A's *and* generated predicates.
* It might seem weird because, usually, we have a specific predicate in mind that
* we're trying to test on lots of randomly generated data. However, consider the example on
* page 142. To test the takeWhile function, we want to check that for every list
* ls: List[A], and for every predicate f: A => Boolean, the expression
* ls.takeWhile(f).forall(f)
* results in true. In this case, a random generator of higher-order functions, like
* f: A => Boolean, is precisely what we need.
*/
|
williamdemeo/fpinscala_wjd
|
exercises/src/main/scala/fpinscala/testing/Gen.scala
|
Scala
|
mit
| 25,465
|
/*
* Copyright (c) 2013-2014 Sanoma Oyj. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.sanoma.cda.geoip
import org.scalatest.FunSuite
import org.scalatest.prop.PropertyChecks
import org.scalatest.matchers.ShouldMatchers._
import com.sanoma.cda.geo._
class IpLocation_test extends FunSuite with PropertyChecks {
test("jDoubleOptionify") {
val jNull: java.lang.Double = null
val jNull_expected: Option[Double] = None
IpLocation.jDoubleOptionify(jNull) should be === jNull_expected
val jOk: java.lang.Double = 2.5
val jOk_expected: Option[Double] = Some(2.5)
IpLocation.jDoubleOptionify(jOk) should be === jOk_expected
}
test("combineLatLong") {
IpLocation.combineLatLong(None, None) should be === None
IpLocation.combineLatLong(Some(2.5), None) should be === None
IpLocation.combineLatLong(None, Some(3.6)) should be === None
IpLocation.combineLatLong(Some(2.5), Some(3.6)) should be === Some(Point(2.5, 3.6))
}
test("implicit conversions") {
def Point2List(p: Point): List[Double] = List(p.latitude, p.longitude)
def Tuple2List(p: (Double, Double)): List[Double] = List(p._1, p._2)
val p1 = Point(62,10)
val t2 = (62.0,10.0)
Point2List(p1) should be === Tuple2List(t2)
Point2List(t2) should be === Tuple2List(p1)
}
}
|
ornicar/maxmind-geoip2-scala
|
src/test/scala/com/sanoma/cda/geoip/IpLocation_test.scala
|
Scala
|
apache-2.0
| 1,920
|
val cfg = osc.UDP.Config()
cfg.localAddress = "192.168.0.77"
val t = osc.UDP.Transmitter(cfg)
t.connect()
t.localPort
t.isConnected
t.dump()
val tgt = new java.net.InetSocketAddress("192.168.0.25", 0x4C69)
t.send(osc.Message("/foo"), tgt)
t.send(osc.Message("/led", 0x080808, 0x080808, 0x080808, 0xFFFFFF), tgt)
t.send(osc.Message("/led", 0x080007, 0x080407, 0x080607, 0x00A0), tgt)
t.send(osc.Message("/led", 0xFF0000, 0x00FF00, 0x0000FF, 0x1F1F1F, 0x3F3F3F, 0x5F5F5F, 0x7F7F7F, 0x9F9F9F, 0xBFBFBF, 0xDFDFDF, 0xFFFFFF), tgt)
t.send(osc.Message("/led", 0,0,0,0,0,0,0,0,0,0,0), tgt)
t.send(osc.Message("/led", 0xDFDFDF, 0xFFFFFF), tgt)
var running = true
var delay = 10
val th = new Thread {
override def run(): Unit = {
val rnd = new util.Random
while (running) {
val rgb1 = rnd.nextInt & 0xFFFFFF
val rgb2 = rnd.nextInt & 0xFFFFFF
t.send(osc.Message("/led", rgb1, rgb2), tgt)
val d = math.max(1, math.min(1000, delay))
Thread.sleep(d)
}
}
}
t.dump(osc.Dump.Off)
th.start()
delay = 100
running = false
running = true
var power = 2.0
val th = new Thread {
override def run(): Unit = {
val rnd = new util.Random
while (running) {
@inline def mkRGB(): Int = {
val v = (math.pow(rnd.nextDouble(), power) * 0xFF).toInt // Double has no .pow method in the standard library
val rgb = (v << 16) | (v << 8) | v
rgb
}
val rgb1 = mkRGB()
val rgb2 = mkRGB()
t.send(osc.Message("/led", rgb1, rgb2), tgt)
val d = math.max(1, math.min(1000, delay))
Thread.sleep(d)
}
}
}
th.start()
delay = 10
power = 4
power = 8
running = false
t.send(osc.Message("/led", 0,0), tgt)
|
Sciss/AnemoneActiniaria
|
notes/OscLightTest.scala
|
Scala
|
gpl-3.0
| 1,634
|
package code
package commons
import org.joda.time._
import java.util.Date
import java.util.Calendar
import java.util.GregorianCalendar
import java.util.TimeZone
import java.text.SimpleDateFormat
import net.liftweb.http.S
import scala.language.reflectiveCalls
/**
* Common time handling functions.
*
* @author David Csakvari
*/
object TimeUtils {
val TIME_FORMAT = "HH:mm"
val ISO_DATE_FORMAT = "yyyy-MM-dd"
val YEAR_FORMAT = "yyyy"
def format(format: String, time: Long) = {
  new SimpleDateFormat(format).format(time)
}
def parse(format: String, data: String) = {
new SimpleDateFormat(format).parse(data)
}
def currentTime = new Date().getTime
def currentDayStartInMs: Long = currentDayStartInMs(0)
def getOffset(time: Long) = {
deltaInDays(new Date(dayStartInMs(time)), new Date(currentDayStartInMs))
}
def currentDayStartInMs(offsetInDays: Int): Long = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date)
cal.set(Calendar.HOUR_OF_DAY, 0)
cal.set(Calendar.MINUTE, 0)
cal.set(Calendar.SECOND, 0)
cal.set(Calendar.MILLISECOND, 0)
cal.add(Calendar.DATE, offsetInDays)
cal.getTime().getTime
}
def currentDayEndInMs(offsetInDays: Int): Long = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date)
cal.set(Calendar.HOUR_OF_DAY, 23)
cal.set(Calendar.MINUTE, 59)
cal.set(Calendar.SECOND, 59)
cal.set(Calendar.MILLISECOND, 999)
cal.add(Calendar.DATE, offsetInDays)
cal.getTime().getTime
}
def dayStartInMs(time: Long): Long = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date(time))
cal.set(Calendar.HOUR_OF_DAY, 0)
cal.set(Calendar.MINUTE, 0)
cal.set(Calendar.SECOND, 0)
cal.set(Calendar.MILLISECOND, 0)
cal.getTime().getTime
}
def dayEndInMs(time: Long): Long = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date(time))
cal.set(Calendar.HOUR_OF_DAY, 23)
cal.set(Calendar.MINUTE, 59)
cal.set(Calendar.SECOND, 59)
cal.set(Calendar.MILLISECOND, 999)
cal.getTime().getTime
}
def getDeltaFrom(hour: Int, min: Int): Long = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date())
val timeNow = cal.getTimeInMillis
cal.set(Calendar.HOUR_OF_DAY, hour)
cal.set(Calendar.MINUTE, min)
cal.set(Calendar.SECOND, 0)
cal.set(Calendar.MILLISECOND, 0)
timeNow - cal.getTimeInMillis
}
def getDeltaFrom(hour: Int, min: Int, offsetInDays: Int): Long = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date())
val timeNow = cal.getTimeInMillis
cal.set(Calendar.HOUR_OF_DAY, hour)
cal.set(Calendar.MINUTE, min)
cal.set(Calendar.SECOND, 0)
cal.set(Calendar.MILLISECOND, 0)
cal.add(Calendar.DAY_OF_YEAR, offsetInDays)
timeNow - cal.getTimeInMillis
}
def chopToMinute(time: Long): Long = {
  val cal: Calendar = new GregorianCalendar
  cal.setTime(new Date(time))
  cal.set(Calendar.SECOND, 0)
  cal.set(Calendar.MILLISECOND, 0)
  cal.getTimeInMillis
}
def deltaInDays(from: Date, to: Date): Int = {
Days.daysBetween(new DateTime(from), new DateTime(to)).getDays()
}
def currentMonthStartInMs(offsetInDays: Int): Long = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date)
cal.set(Calendar.HOUR_OF_DAY, 0)
cal.set(Calendar.MINUTE, 0)
cal.set(Calendar.SECOND, 0)
cal.set(Calendar.MILLISECOND, 0)
cal.add(Calendar.DATE, offsetInDays)
cal.set(Calendar.DAY_OF_MONTH, 1)
cal.getTimeInMillis
}
def currentMonthStartInOffset(offsetInDays: Int): Int = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date)
cal.set(Calendar.HOUR_OF_DAY, 0)
cal.set(Calendar.MINUTE, 0)
cal.set(Calendar.SECOND, 0)
cal.set(Calendar.MILLISECOND, 0)
cal.add(Calendar.DATE, offsetInDays)
cal.set(Calendar.DAY_OF_MONTH, 1)
deltaInDays(new Date(currentDayStartInMs(offsetInDays)), cal.getTime())
}
def currentMonthEndInOffset(offsetInDays: Int): Int = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date)
cal.set(Calendar.HOUR_OF_DAY, 0)
cal.set(Calendar.MINUTE, 0)
cal.set(Calendar.SECOND, 0)
cal.set(Calendar.MILLISECOND, 0)
cal.add(Calendar.DATE, offsetInDays)
cal.set(Calendar.DAY_OF_MONTH, cal.getActualMaximum(Calendar.DAY_OF_MONTH))
deltaInDays(new Date(currentDayStartInMs(offsetInDays)), cal.getTime())
}
def currentMonth(offsetInDays: Int): Int = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date)
cal.set(Calendar.HOUR_OF_DAY, 0)
cal.set(Calendar.MINUTE, 0)
cal.set(Calendar.SECOND, 0)
cal.set(Calendar.MILLISECOND, 0)
cal.add(Calendar.DATE, offsetInDays)
cal.set(Calendar.DAY_OF_MONTH, cal.getActualMaximum(Calendar.DAY_OF_MONTH))
cal.get(Calendar.MONTH)
}
def currentYear(offsetInDays: Int): Int = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date)
cal.set(Calendar.HOUR_OF_DAY, 0)
cal.set(Calendar.MINUTE, 0)
cal.set(Calendar.SECOND, 0)
cal.set(Calendar.MILLISECOND, 0)
cal.add(Calendar.DATE, offsetInDays)
cal.set(Calendar.DAY_OF_MONTH, cal.getActualMaximum(Calendar.DAY_OF_MONTH))
cal.get(Calendar.YEAR)
}
def currentDayOfWeek(offsetInDays: Int): Int = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date)
cal.set(Calendar.HOUR_OF_DAY, 0)
cal.set(Calendar.MINUTE, 0)
cal.set(Calendar.SECOND, 0)
cal.set(Calendar.MILLISECOND, 0)
cal.add(Calendar.DATE, offsetInDays)
cal.get(Calendar.DAY_OF_WEEK)
}
def currentHour(): Int = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date)
cal.get(Calendar.HOUR_OF_DAY)
}
def currentMinute(): Int = {
val cal: Calendar = new GregorianCalendar
cal.setTime(new Date)
cal.get(Calendar.MINUTE)
}
def isWeekend(dt: DateTime): Boolean = {
(dt.getDayOfWeek() == DateTimeConstants.SATURDAY || dt.getDayOfWeek() == DateTimeConstants.SUNDAY)
}
def isWeekend(offsetInDays: Int): Boolean = {
isWeekend(new DateTime().withTime(0, 0, 0, 0).plusDays(offsetInDays))
}
def getLastDayOfMonth(dt: DateTime): Int = {
  dt.dayOfMonth().withMaximumValue().getDayOfMonth()
}
def getPreviousMonthOffset: Int = {
  val today: DateTime = new DateTime(currentTime).withDayOfMonth(1)
  deltaInDays(today.toDate, today.minusMonths(1).toDate)
}
def offsetToDailyInterval(offset: Int): Interval = new Interval(currentDayStartInMs(offset), currentDayEndInMs(offset))
}
|
dodie/time-admin
|
src/main/scala/code/service/TimeUtils.scala
|
Scala
|
apache-2.0
| 6,656
|
package org.randi3.web.snippet
import scala.xml._
import scala.xml.NodeSeq
import org.randi3.web.lib.DependencyFactory
import net.liftweb.http.SHtml._
import net.liftweb.http._
import js.JsCmds.Replace
import net.liftweb.util.Helpers._
import scalaz.NonEmptyList
import org.randi3.model.criterion._
import constraint._
import org.randi3.web.util.{Utility, CurrentLoggedInUser, CurrentEDCTrial}
import collection.mutable.{HashMap, ListBuffer}
import collection.mutable
import net.liftweb.common.Empty
import org.randi3.randomization.RandomizationMethod
import org.randi3.model._
import org.apache.commons.math3.random.MersenneTwister
import org.joda.time.LocalDate
import scala.Left
import org.randi3.randomization.configuration.OrdinalConfigurationType
import org.randi3.randomization.configuration.IntegerConfigurationType
import org.randi3.randomization.configuration.BooleanConfigurationType
import scala.Some
import xml.Node
import scalaz._
import Scalaz._
import scala.Right
import org.randi3.randomization.configuration.DoubleConfigurationType
import org.randi3.web.model._
import org.randi3.edc.model.openClinica._
class EdcEditSnippet extends StatefulSnippet {
def dispatch: EdcEditSnippet#DispatchIt = {
case "add" => add _
case "edit" => edit _
}
private val armsTmp = new ListBuffer[TreatmentArmTmp]()
private val randomizationPluginManager = DependencyFactory.get.randomizationPluginManager
private val randomizationMethods = randomizationPluginManager.getPluginNames
private val randomizationMethodSelect = {
randomizationMethods map {
s => (s, s)
} toSeq
}
private var randomizationMethodTmp = generateEmptyRandomizationMethodConfig(randomizationMethods.head)
private val criterionsTmp = new ListBuffer[CriterionTmp]()
private var isStratifiedByTrialSite = false
private var treatmentCriterion: Option[Criterion[Any, Constraint[Any]]] = None
private var dataSetId = -1
private def add(nodeSeq: NodeSeq): NodeSeq = {
if (CurrentEDCTrial.isEmpty) S.redirectTo("/edcTrial/listRemote")
val trial = CurrentEDCTrial.get.get
CurrentEDCTrial.set(None)
clearFields
val selectedCriterions = new mutable.HashSet[Criterion[Any, Constraint[Any]]] ()
def save() {
Trial(
name = trial.identifier,
abbreviation = trial.identifier,
description = trial.description,
startDate = new LocalDate(),
endDate = new LocalDate(),
status = TrialStatus.IN_PREPARATION,
treatmentArms = createTreatmentArms(armsTmp),
criterions = createCriterionsList(criterionsTmp),
participatingSites = List(CurrentLoggedInUser.get.get.site),
randomizationMethod = None,
stages = Map(),
identificationCreationType = TrialSubjectIdentificationCreationType.EXTERNAL,
isTrialOpen = true,
isStratifiedByTrialSite = isStratifiedByTrialSite
).toEither match {
case Left(x) => S.error("edcTrialMsg", x.toString)
case Right(newTrial) => {
//TODO Random Config
val randomMethod = randomizationPluginManager.getPlugin(randomizationMethodTmp.name).get.randomizationMethod(new MersenneTwister(), newTrial, randomizationMethodTmp.getConfigurationProperties).toOption.get
val trialWithMethod = newTrial.copy(randomizationMethod = Some(randomMethod))
val treatmentItem = if (treatmentCriterion.isDefined) trial.getMappedElementsFromCriteria(treatmentCriterion.get) else None
DependencyFactory.get.openClinicaService.createNewLocalTrial(trial.copy(trial = Some(trialWithMethod), treatmentItem = treatmentItem, connection = trial.connection.copy(dataSetId = dataSetId))).toEither match {
case Left(x) => S.error("edcTrialMsg", x)
case Right(trialOC) => S.redirectTo("/edcTrial/list")
}
}
}
}
def identifierField(failure: Boolean = false): Elem = {
val id = "identifier"
generateEntry(id, failure, {
<span id={id}>{trial.identifier}</span>
})
}
def nameField(failure: Boolean = false): Elem = {
val id = "name"
generateEntry(id, failure, {
<span id={id}>{trial.name}</span>
})
}
def descriptionField(failure: Boolean = false): Elem = {
val id = "description"
generateEntry(id, failure, {
<div id={id}>{trial.description}</div>
})
}
def dataSetIdField(failure: Boolean = false): Elem = {
val id = "dataSetId"
generateEntry(id, failure, {
ajaxText(dataSetId.toString, v => {
// ignore non-numeric input rather than throwing a NumberFormatException
asInt(v).foreach(n => dataSetId = n)
}, "id" -> id)
})
}
bind("edcTrial", nodeSeq,
"identifier" -> identifierField(),
"name" -> nameField(),
"description" -> descriptionField(),
"treatmentItem" -> generateTreatmentArmsSelect(nodeSeq, trial),
"treatmentArms" -> generateTreatmentArms(nodeSeq),
"items" -> <table class="randi2Table">
<thead>
<tr>
<th></th>
<th>Name</th>
<th>Description</th>
<th>Type</th>
<th></th>
</tr>
</thead>
<tfoot>
</tfoot>
<tbody>
{
trial.getAllCriteria().flatMap(criterion =>
<tr>
<td>{ajaxCheckbox(false, check => {
if (check){
selectedCriterions.add(criterion)
}else {
selectedCriterions.remove(criterion)
}
clearAndGenerateCriterionTmp(selectedCriterions.toList)
})}</td>
<td>{criterion.name}</td>
<td>{criterion.description}</td>
<td>{criterion.getClass.getSimpleName}</td>
<td>{criterion.toString}</td>
</tr>
)
}
</tbody>
</table>
,
"dataSetId" -> dataSetIdField(),
"randomizationMethodSelect" -> randomizationMethodSelectField,
"randomizationConfig" -> generateRandomizationConfigField,
"cancel" -> submit(S.?("cancel"), () => S.redirectTo("/"), "class" -> "btnCancel"),
"save" -> submit(S.?("save"), save _, "class" -> "btnSend")
)
}
private def generateTreatmentArmsSelect(nodeSeq: NodeSeq, trial: TrialOC): NodeSeq = {
val criterionSeq = trial.getAllCriteria().filter(criterion => criterion.isInstanceOf[OrdinalCriterion]).map(criteria => (criteria, criteria.name))
generateEntry("treatmentArmItem", false, {
ajaxSelectObj(criterionSeq, Empty, (criterion: Criterion[Any, Constraint[Any]]) => {
if (criterion.isInstanceOf[OrdinalCriterion]){
treatmentCriterion = Some(criterion)
armsTmp.clear()
criterion.asInstanceOf[OrdinalCriterion].values.foreach(
value => {
armsTmp.append(new TreatmentArmTmp(id = Int.MinValue, version = 0, name = value, description = criterion.description + ": " + value, plannedSize = 0))
})
}
Replace("treatmentArms", generateTreatmentArms(nodeSeq))
})
})
}
private def generateTreatmentArms(xhtml: NodeSeq): NodeSeq = {
<div id="treatmentArms">
{val result = new ListBuffer[Node]()
for (i <- armsTmp.indices) {
val arm = armsTmp(i)
result += <div class="singleField">
<fieldset>
<legend>Treatment arm
</legend>
<ul>
<li>
<label for={"armName" + i}>Name</label>{arm.name}
</li>
<li>
<label for={"armDescription" + i}>Description</label>{arm.description}
</li>
<li>
<label for={"armPlannedSize" + i}>Planned size</label>{ajaxText(arm.plannedSize.toString, v => asInt(v).foreach(n => arm.plannedSize = n), "id" -> ("armPlannedSize" + i))}
</li>
</ul>
</fieldset>
</div>
}
NodeSeq fromSeq result}
</div>
}
def randomizationMethodSelectField: NodeSeq = {
ajaxSelect(randomizationMethodSelect, Empty, v => {
randomizationMethodTmp = generateEmptyRandomizationMethodConfig(v)
Replace("randomizationConfig", generateRandomizationConfigField)
})
}
private def edit(nodeSeq: NodeSeq): NodeSeq = {
<div>add</div>
}
private def clearFields {
}
def clearAndGenerateCriterionTmp(criteria: List[Criterion[Any, Constraint[Any]]]) {
criterionsTmp.clear()
criteria.foreach( criterion =>
if (criterion.isInstanceOf[OrdinalCriterion]) {
val values = new ListBuffer[String]()
criterion.asInstanceOf[OrdinalCriterion].values.foreach(s => values += s)
criterionsTmp += new CriterionTmp(criterion.id, criterion.version, "OrdinalCriterion", criterion.name, criterion.description, Some(values), getInclusionConstraintTmp(criterion.asInstanceOf[Criterion[Any, Constraint[Any]]]), getStrataTmp(criterion.asInstanceOf[Criterion[Any, Constraint[Any]]]))
} else if (criterion.isInstanceOf[DateCriterion])
criterionsTmp += new CriterionTmp(criterion.id, criterion.version, "DateCriterion", criterion.name, criterion.description, None, getInclusionConstraintTmp(criterion.asInstanceOf[Criterion[Any, Constraint[Any]]]), getStrataTmp(criterion.asInstanceOf[Criterion[Any, Constraint[Any]]]))
else if (criterion.isInstanceOf[IntegerCriterion])
criterionsTmp += new CriterionTmp(criterion.id, criterion.version, "IntegerCriterion", criterion.name, criterion.description, None, getInclusionConstraintTmp(criterion.asInstanceOf[Criterion[Any, Constraint[Any]]]), getStrataTmp(criterion.asInstanceOf[Criterion[Any, Constraint[Any]]]))
else if (criterion.isInstanceOf[DoubleCriterion])
criterionsTmp += new CriterionTmp(criterion.id, criterion.version, "DoubleCriterion", criterion.name, criterion.description, None, getInclusionConstraintTmp(criterion.asInstanceOf[Criterion[Any, Constraint[Any]]]), getStrataTmp(criterion.asInstanceOf[Criterion[Any, Constraint[Any]]]))
else if (criterion.isInstanceOf[FreeTextCriterion])
criterionsTmp += new CriterionTmp(criterion.id, criterion.version, "FreeTextCriterion", criterion.name, criterion.description, None, getInclusionConstraintTmp(criterion.asInstanceOf[Criterion[Any, Constraint[Any]]]), getStrataTmp(criterion.asInstanceOf[Criterion[Any, Constraint[Any]]]))
)
}
private def generateEmptyRandomizationMethodConfig(randomizationMethodName: String): RandomizationMethodConfigTmp = {
val plugin = randomizationPluginManager.getPlugin(randomizationMethodName).get
val stages = plugin.randomizationConfigurationOptions()._2
val configurations = plugin.randomizationConfigurationOptions()._1
val methodConfigsTmp = configurations.map(config => {
if (config.getClass == classOf[BooleanConfigurationType]) {
new RandomizationMethodConfigEntryTmp(config.asInstanceOf[BooleanConfigurationType], true)
} else if (config.getClass == classOf[DoubleConfigurationType]) {
new RandomizationMethodConfigEntryTmp(config.asInstanceOf[DoubleConfigurationType], 0.0)
} else if (config.getClass == classOf[IntegerConfigurationType]) {
new RandomizationMethodConfigEntryTmp(config.asInstanceOf[IntegerConfigurationType], 0)
} else if (config.getClass == classOf[OrdinalConfigurationType]) {
new RandomizationMethodConfigEntryTmp(config.asInstanceOf[OrdinalConfigurationType], config.asInstanceOf[OrdinalConfigurationType].options.head)
} else {
// fail fast: without this branch the map silently yields Unit and the cast below fails at runtime
throw new IllegalArgumentException("Unknown configuration type: " + config.getClass.getName)
}
})
new RandomizationMethodConfigTmp(name = plugin.name, i18nName = plugin.i18nName, description = plugin.description, canBeUsedWithStratification = plugin.canBeUsedWithStratification, configurationEntries = methodConfigsTmp.asInstanceOf[List[RandomizationMethodConfigEntryTmp[Any]]], stages = stages)
}
private def generateRandomizationMethodConfig(randomizationMethod: Option[RandomizationMethod]): RandomizationMethodConfigTmp = {
if (randomizationMethod.isEmpty) generateEmptyRandomizationMethodConfig(randomizationMethods.head)
else {
val method = randomizationMethod.get
val plugin = randomizationPluginManager.getPluginForMethod(method).get
val stages = plugin.randomizationConfigurationOptions()._2
val configurations = plugin.getRandomizationConfigurations(method.id)
val methodConfigsTmp = configurations.map(configProp => {
val config = configProp.configurationType
if (config.getClass == classOf[BooleanConfigurationType]) {
new RandomizationMethodConfigEntryTmp(config.asInstanceOf[BooleanConfigurationType], configProp.value)
} else if (config.getClass == classOf[DoubleConfigurationType]) {
new RandomizationMethodConfigEntryTmp(config.asInstanceOf[DoubleConfigurationType], configProp.value)
} else if (config.getClass == classOf[IntegerConfigurationType]) {
new RandomizationMethodConfigEntryTmp(config.asInstanceOf[IntegerConfigurationType], configProp.value)
} else if (config.getClass == classOf[OrdinalConfigurationType]) {
new RandomizationMethodConfigEntryTmp(config.asInstanceOf[OrdinalConfigurationType], configProp.value)
}
})
new RandomizationMethodConfigTmp(name = plugin.name, i18nName = plugin.i18nName, description = plugin.description, canBeUsedWithStratification = plugin.canBeUsedWithStratification, configurationEntries = methodConfigsTmp.asInstanceOf[List[RandomizationMethodConfigEntryTmp[Any]]], stages = stages)
}
}
private def generateRandomizationConfigField: Elem = {
<div id="randomizationConfig">
<fieldset>
<legend>General informations</legend>
<ul>
<li>
<label for="randomizationMethodName">Name:
</label>
<span id="randomizationMethodName">
{randomizationMethodTmp.name}
</span>
</li>
<li>
<label for="randomizationMethodDescription">Description:
</label>
<span id="randomizationMethodDescription">
{randomizationMethodTmp.description}
</span>
</li>
</ul>
</fieldset>{if (!randomizationMethodTmp.configurationEntries.isEmpty) {
<fieldset>
<legend>Configurations</legend>
<ul>
{randomizationMethodTmp.configurationEntries.flatMap(configuration => {
<li>
<label for={configuration.configurationType.name}>
{configuration.configurationType.name}
:
<span class="tooltip">
<img src="/images/icons/help16.png" alt={configuration.configurationType.description} title={configuration.configurationType.description}/> <span class="info">
{configuration.configurationType.description}
</span>
</span>
</label>{ajaxText(configuration.value.toString, v => {
//TODO check value
configuration.value = v
}, "id" -> configuration.configurationType.name)}
</li>
})}
</ul>
</fieldset>
} else <div></div>}{if (randomizationMethodTmp.canBeUsedWithStratification) {
val criterionList = criterionsTmp
<fieldset>
<legend>{S.?("trial.stratification")}</legend>
<ul>
<li>
<label for="trialSiteStratification" >{S.?("trial.trialSiteStratification")}:</label>
{ checkbox(isStratifiedByTrialSite, value => isStratifiedByTrialSite = value, "id" -> "trialSiteStratification")}
</li>
{val result = new ListBuffer[Node]()
for (i <- criterionList.indices) {
val criterion = criterionList(i)
result += generateStratumConfig("stratum-" + criterion.name.replace(' ', '_'), criterion)
}
NodeSeq fromSeq result}
</ul>
</fieldset>
} else <div></div>}
</div>
}
private def getInclusionConstraintTmp(crit: Criterion[Any, Constraint[Any]]): Option[ConstraintTmp] = {
if (crit.inclusionConstraint.isDefined) {
getConstraintTmp(crit.inclusionConstraint.get, crit)
} else None
}
private def getStrataTmp(crit: Criterion[Any, Constraint[Any]]): ListBuffer[ConstraintTmp] = {
val result = new ListBuffer[ConstraintTmp]()
crit.strata.foreach(constraint => {
val constrTmp = getConstraintTmp(constraint, crit)
if (constrTmp.isDefined) result.append(constrTmp.get)
})
result
}
private def getConstraintTmp(constraint: Constraint[Any], crit: Criterion[Any, Constraint[Any]]): Option[ConstraintTmp] = {
if (constraint.isInstanceOf[OrdinalConstraint]) {
val actConstraint = constraint.asInstanceOf[OrdinalConstraint]
val values = new mutable.HashSet[(Boolean, String)]()
actConstraint.expectedValues.foreach(element => {
values.add(true, element)
})
val actCriterion = crit.asInstanceOf[OrdinalCriterion]
actCriterion.values.foreach(value => {
if(!values.map(elem =>elem._2).contains(value))
values.add(false, value)
})
Some(new ConstraintTmp(id = actConstraint.id, version = actConstraint.version, ordinalValues = values))
} else if (constraint.isInstanceOf[IntegerConstraint]) {
val actConstraint = constraint.asInstanceOf[IntegerConstraint]
val firstValue = actConstraint.firstValue match {
case None => None
case Some(value) => Some(value.toString)
}
val secondValue = actConstraint.secondValue match {
case None => None
case Some(value) => Some(value.toString)
}
Some(new ConstraintTmp(id = actConstraint.id, version = actConstraint.version, minValue = firstValue, maxValue = secondValue))
} else if (constraint.isInstanceOf[DoubleConstraint]) {
val actConstraint = constraint.asInstanceOf[DoubleConstraint]
val firstValue = actConstraint.firstValue match {
case None => None
case Some(value) => Some(value.toString)
}
val secondValue = actConstraint.secondValue match {
case None => None
case Some(value) => Some(value.toString)
}
Some(new ConstraintTmp(id = actConstraint.id, version = actConstraint.version, minValue = firstValue, maxValue = secondValue))
} else if (constraint.isInstanceOf[DateConstraint]) {
val actConstraint = constraint.asInstanceOf[DateConstraint]
val firstValue = actConstraint.firstValue match {
case None => None
case Some(value) => Some(value.toString)
}
val secondValue = actConstraint.secondValue match {
case None => None
case Some(value) => Some(value.toString)
}
Some(new ConstraintTmp(id = actConstraint.id, version = actConstraint.version, minValue = firstValue, maxValue = secondValue))
} else if (constraint.isInstanceOf[FreeTextConstraintNotEmpty]) {
val actConstraint = constraint.asInstanceOf[FreeTextConstraintNotEmpty]
Some(new ConstraintTmp(id = actConstraint.id, version = actConstraint.version, minValue = None, maxValue = None))
} else None
}
private def generateStratumConfig(id: String, criterion: CriterionTmp): Elem = {
if(criterion.typ != "FreeTextCriterion"){
<div class="singleField" id={id}>
<fieldset>
<legend>
{criterion.typ}
</legend>
<ul>
<li>
<label>Name</label>{criterion.name}
</li>
<li>
<label>Description</label>{criterion.description}
</li>
</ul>
<div>
{ajaxButton("add stratum", () => {
val constraint = new ConstraintTmp()
if (criterion.typ == "OrdinalCriterion") {
constraint.ordinalValues.clear()
criterion.values.get.foreach(value => {
constraint.ordinalValues.add((false, value))
})
}
criterion.strata.append(constraint)
Replace(id, generateStratumConfig(id, criterion))
})}
{ajaxButton("remove stratum", () => {
criterion.strata.remove(criterion.strata.size-1)
Replace(id, generateStratumConfig(id, criterion))
})}
</div>{val result = new ListBuffer[Node]()
for (i <- criterion.strata.indices) {
val constraint = criterion.strata(i)
result += <div class="singleField">
{//TODO stratum configuration
generateStratumElement(id + i, criterion, constraint)}
</div>
}
NodeSeq fromSeq result}
</fieldset>
</div>
}else <div></div>
}
private def generateStratumElement(id: String, criterion: CriterionTmp, constraint: ConstraintTmp): Elem = {
<fieldset id={id} class="inclusionConstraint">
<legend>Constraint</legend>{if (criterion.typ != "OrdinalCriterion") {
<ul>
<li>
{ajaxCheckbox(constraint.minValue.isDefined, v => {
if (!v) {
constraint.minValue = None
} else {
constraint.minValue = Some("")
}
Replace(id, generateStratumElement(id, criterion, constraint))
}, "style" -> "width: 20px;")}
lower boundary?
{if (constraint.minValue.isDefined) {
ajaxText(constraint.minValue.get, v => {
constraint.minValue = Some(v)
})
}}
</li>
<li>
{ajaxCheckbox(constraint.maxValue.isDefined, v => {
if (!v) {
constraint.maxValue = None
} else {
constraint.maxValue = Some("")
}
Replace(id, generateStratumElement(id, criterion, constraint))
}, "style" -> "width: 20px;")}
upper boundary?
{if (constraint.maxValue.isDefined) {
ajaxText(constraint.maxValue.get, v => {
constraint.maxValue = Some(v)
})
}}
</li>
</ul>
} else {
val ordinalValues = constraint.ordinalValues
ordinalValues.toList.sortWith((elem1, elem2) => elem1._2.compareTo(elem2._2) < 0).flatMap(value => {
<div>
{ajaxCheckbox(value._1, v => {
ordinalValues.remove(value)
ordinalValues.add((v, value._2))
Replace(id, generateStratumElement(id, criterion, constraint))
})}<span>
{value._2}
</span>
</div>
})
}}
</fieldset>
}
private def generateEntry(id: String, failure: Boolean, element: Elem): Elem = {
<li id={id + "Li"} class={if (failure) "errorHint" else ""}>
<label for={id}>
{id}
</label>{element}<lift:msg id={id + "Msg"} errorClass="err"/>
</li>
}
private def showErrorMessage(id: String, errors: NonEmptyList[String]) {
S.error(id + "Msg", "<-" + errors.list.reduce((acc, el) => acc + ", " + el))
}
private def clearErrorMessage(id: String) {
S.error(id + "Msg", "")
}
//TODO Refactor duplicated code
private def createTreatmentArms(arms: ListBuffer[TreatmentArmTmp]): List[TreatmentArm] = {
val result = ListBuffer[TreatmentArm]()
arms.foreach(arm =>
TreatmentArm(id = arm.id, version = arm.version, name = arm.name, description = arm.description, plannedSize = arm.plannedSize).toEither match {
case Left(x) => S.error(x.toString()) //TODO error handling
case Right(treatmentArm) => result += treatmentArm
}
)
result.toList
}
private def createCriterionsList(criterions: ListBuffer[CriterionTmp]): List[Criterion[Any, Constraint[Any]]] = {
val result = ListBuffer[Criterion[Any, Constraint[Any]]]()
criterions.foreach(criterionTmp => (criterionTmp.typ match {
case "DateCriterion" => DateCriterion(id = criterionTmp.id, version = criterionTmp.version, name = criterionTmp.name, description = criterionTmp.description, inclusionConstraint = createInclusionConstraint(criterionTmp), strata = createStrata(criterionTmp))
case "IntegerCriterion" => IntegerCriterion(id = criterionTmp.id, version = criterionTmp.version, name = criterionTmp.name, description = criterionTmp.description, inclusionConstraint = createInclusionConstraint(criterionTmp), strata = createStrata(criterionTmp))
case "DoubleCriterion" => DoubleCriterion(id = criterionTmp.id, version = criterionTmp.version, name = criterionTmp.name, description = criterionTmp.description, inclusionConstraint = createInclusionConstraint(criterionTmp), strata = createStrata(criterionTmp))
case "FreeTextCriterion" => FreeTextCriterion(id = criterionTmp.id, version = criterionTmp.version, name = criterionTmp.name, description = criterionTmp.description, inclusionConstraint = createInclusionConstraint(criterionTmp), strata = createStrata(criterionTmp))
case "OrdinalCriterion" => OrdinalCriterion(id = criterionTmp.id, version = criterionTmp.version, name = criterionTmp.name, description = criterionTmp.description, values = criterionTmp.values.get.toSet, inclusionConstraint = createInclusionConstraint(criterionTmp), strata = createStrata(criterionTmp))
}).asInstanceOf[ValidationNel[String, Criterion[Any, Constraint[Any]]]].toEither match {
case Left(x) => S.error(x.toString()) //TODO error handling
case Right(criterion) => result += criterion
}
)
result.toList
}
private def createInclusionConstraint[T](criterionTmp: CriterionTmp): Option[T] = {
if (criterionTmp.inclusionConstraint.isDefined) {
createConstraint(criterionTmp, criterionTmp.inclusionConstraint.get)
} else {
None
}
}
private def createStrata[T <: Constraint[Any]](criterionTmp: CriterionTmp): List[T] = {
val list: List[T] = criterionTmp.strata.toList.
map(constraintTmp => createConstraint(criterionTmp, constraintTmp).asInstanceOf[Option[T]]).
filter(elem => elem.isDefined).map(elem => elem.get)
list
}
private def createConstraint[T](criterionTmp: CriterionTmp, constraint: ConstraintTmp): Option[T] = {
criterionTmp.typ match {
case "DateCriterion" => {
val min = if (constraint.minValue.isDefined) Some(new LocalDate(Utility.slashDate.parse(constraint.minValue.get).getTime)) else None
val max = if (constraint.maxValue.isDefined) Some(new LocalDate(Utility.slashDate.parse(constraint.maxValue.get).getTime)) else None
Some(DateConstraint(constraint.id, constraint.version, List(min, max)).toOption.get.asInstanceOf[T])
}
case "IntegerCriterion" => {
val min = if (constraint.minValue.isDefined) Some(constraint.minValue.get.toInt) else None
val max = if (constraint.maxValue.isDefined) Some(constraint.maxValue.get.toInt) else None
Some(IntegerConstraint(constraint.id, constraint.version, List(min, max)).toOption.get.asInstanceOf[T])
}
case "DoubleCriterion" => {
val min = if (constraint.minValue.isDefined) Some(constraint.minValue.get.toDouble) else None
val max = if (constraint.maxValue.isDefined) Some(constraint.maxValue.get.toDouble) else None
Some(DoubleConstraint(constraint.id, constraint.version, List(min, max)).toOption.get.asInstanceOf[T])
}
case "FreeTextCriterion" => {
Some(FreeTextConstraintNotEmpty(constraint.id, constraint.version).toOption.get.asInstanceOf[T])
}
case "OrdinalCriterion" => {
Some(OrdinalConstraint(constraint.id, constraint.version, constraint.ordinalValues.toList.filter(entry => entry._1).map(entry => Some(entry._2))).toOption.get.asInstanceOf[T])
}
case _ => None
}
}
}
|
dschrimpf/randi3-web
|
src/main/scala/org/randi3/web/snippet/EdcEditSnippet.scala
|
Scala
|
gpl-3.0
| 27,645
|
package dbtarzan.gui
import akka.actor.ActorRef
import dbtarzan.db._
import dbtarzan.gui.browsingtable._
import dbtarzan.gui.info.{ColumnsTable, IndexesInfo, Info, QueryInfo}
import dbtarzan.gui.orderby.OrderByEditorStarter
import dbtarzan.gui.tabletabs.TTableForMapWithId
import dbtarzan.gui.util.JFXUtil
import dbtarzan.localization.Localization
import dbtarzan.messages._
import scalafx.Includes._
import scalafx.event.ActionEvent
import scalafx.scene.Parent
import scalafx.scene.control.{Menu, MenuBar, MenuItem}
import scalafx.scene.layout.BorderPane
import scalafx.stage.Stage
/* table + constraint input box + foreign keys */
class BrowsingTable(dbActor : ActorRef, guiActor : ActorRef, structure : DBTableStructure, queryId : QueryId, localization: Localization)
extends TControlBuilder with TTableForMapWithId {
private val log = new Logger(guiActor)
private val foreignKeyList = new ForeignKeyList(log)
private val foreignKeyListWithTitle = JFXUtil.withTitle(foreignKeyList.control, localization.foreignKeys)
private val columnsTable = new ColumnsTable(structure.columns, guiActor, localization)
private val queryInfo = new QueryInfo(SqlBuilder.buildSql(structure), localization)
private val indexInfo = new IndexesInfo(guiActor, localization)
private val info = new Info(columnsTable, queryInfo, indexInfo, localization, () => {
dbActor ! QueryIndexes(queryId)
})
private val dbTable = new DBTable(structure)
private val table = new Table(dbActor, guiActor, queryId, dbTable, localization)
private val foreignKeysInfoSplitter = new ForeignKeysInfoSplitter(foreignKeyListWithTitle, info)
private val splitter = new BrowsingTableSplitter(table, foreignKeysInfoSplitter)
private var useNewTable : (DBTableStructure, Boolean) => Unit = (table, closeCurrentTab) => {}
private var rowDetailsView : Option[RowDetailsView] = None
private val rowDetailsApplicant = new RowDetailsApplicant(structure)
private val queryText = new QueryText(structure.columns) {
onEnter((text, closeCurrentTab) => {
val tableWithFilters = dbTable.withAdditionalFilter(Filter(text))
useNewTable(tableWithFilters, closeCurrentTab)
})
}
table.setRowClickListener(row => openRowDisplay(row))
table.setRowDoubleClickListener(row => {
switchRowDetailsView()
openRowDisplay(row)
})
splitter.splitPanelWithoutRowDetailsView()
private val progressBar = new TableProgressBar(removeProgressBar)
private val layout = new BorderPane {
top = buildTop()
center = splitter.control
bottom = progressBar.control
}
foreignKeyList.onForeignKeySelected(openTableConnectedByForeignKey)
private def openRowDisplay(row: Row): Unit = {
rowDetailsView.foreach(details => {
if (noMaxFieldSize())
details.displayRow(row)
else
requestRowToDisplayInDetailsView(row, details)
})
}
private def requestRowToDisplayInDetailsView(row: Row, details: RowDetailsView): Unit = {
val query = rowDetailsApplicant.buildRowQueryFromRow(row)
query match {
case Some(rowStructure) => dbActor ! QueryOneRow(queryId, rowStructure)
case None => {
log.warning(localization.warningNoPrimaryKeyInTable(structure.description.name))
details.displayRow(row)
}
}
}
private def noMaxFieldSize(): Boolean =
structure.attributes.maxFieldSize.isEmpty
def orderByField(field : Field) : Unit = {
val orderByFields = OrderByFields(List(OrderByField(field, OrderByDirection.ASC)))
val newStructure = dbTable.withOrderByFields(orderByFields)
useNewTable(newStructure, false)
}
def startOrderByEditor() : Unit = {
OrderByEditorStarter.openOrderByEditor(stage(), dbTable, useNewTable, localization)
}
private def removeProgressBar() : Unit =
layout.bottom = null
private def stage() : Stage =
new Stage(layout.scene.window().asInstanceOf[javafx.stage.Stage])
private def buildOrderByMenu() = new Menu(localization.orderBy) {
items = dbTable.fields.map(f =>
new MenuItem(f.name) {
onAction = { e: ActionEvent => guiActor ! RequestOrderByField(queryId, f) }
}) :+ new MenuItem(localization.more) {
onAction = { e: ActionEvent => guiActor ! RequestOrderByEditor(queryId) }
}
}
private def openTableConnectedByForeignKey(key : ForeignKey, closeCurrentTab : Boolean) : Unit = {
log.debug("Selected "+key)
if(closeCurrentTab)
guiActor ! RequestRemovalThisTab(queryId)
val checkedRows = table.getCheckedRows
val foreignTableId = TableId(queryId.tableId.databaseId, key.to.table)
if(checkedRows.nonEmpty) {
dbActor ! QueryColumnsFollow(foreignTableId, FollowKey(dbTable.fields, key, checkedRows))
} else {
dbActor ! QueryColumns(foreignTableId)
log.warning(localization.noRowsFromForeignKey(key.name, key.to.table))
}
}
private def buildTop() : BorderPane = new BorderPane {
stylesheets += "orderByMenuBar.css"
left = TableMenu.buildMainMenu(guiActor, queryId, localization)
center = JFXUtil.withLeftTitle(queryText.textBox, localization.where+":")
right = new MenuBar {
menus = List(buildOrderByMenu())
stylesheets += "orderByMenuBar.css"
}
}
def switchRowDetailsView() : Unit = {
rowDetailsView match {
case None =>
showRowDetailsView()
case Some(_) => {
splitter.splitPanelWithoutRowDetailsView()
rowDetailsView = None
}
}
}
private def showRowDetailsView(): Unit = {
table.selectOneIfNoneSelected()
table.firstSelectedRow().foreach(row => {
val view = new RowDetailsView(dbTable)
splitter.splitPanelWithRowDetailsView(view)
rowDetailsView = Some(view)
openRowDisplay(row)
})
}
/* if someone enters a query in the text box on top of the table, it creates a new table that depends on this query */
def onNewTable(useTable : (DBTableStructure, Boolean) => Unit) : Unit = {
useNewTable = useTable
}
/* adds the following rows to the table */
def addRows(rows : ResponseRows) : Unit = {
table.addRows(rows.rows)
progressBar.receivedRows()
}
def addOneRow(oneRow: ResponseOneRow): Unit =
rowDetailsView.foreach(details => details.displayRow(oneRow.row))
def rowsError(ex : Exception) : Unit = queryText.showError()
/* adds the foreign keys to the foreign key list */
def addForeignKeys(keys : ResponseForeignKeys) : Unit = {
foreignKeyList.addForeignKeys(keys.keys)
table.addForeignKeys(keys.keys)
progressBar.receivedForeignKeys()
}
/* adds the foreign keys to the foreign key list */
def addPrimaryKeys(keys : ResponsePrimaryKeys) : Unit = {
table.addPrimaryKeys(keys.keys)
progressBar.receivedPrimaryKeys()
rowDetailsApplicant.addPrimaryKeys(keys.keys)
}
def addIndexes(indexes: ResponseIndexes): Unit =
indexInfo.addRows(indexes.indexes.indexes)
def copySelectionToClipboard(includeHeaders : Boolean) : Unit =
table.copySelectionToClipboard(includeHeaders)
def checkAllTableRows() : Unit =
table.checkAll(true)
def checkNoTableRows() : Unit =
table.checkAll(false)
def getId : QueryId = queryId
def rowsNumber: Int = table.rowsNumber
def control : Parent = layout
}
|
aferrandi/dbtarzan
|
src/main/scala/dbtarzan/gui/BrowsingTable.scala
|
Scala
|
apache-2.0
| 7,337
|
package controllers
import play.api._
import play.api.i18n.Messages
import play.api.mvc._
object Application extends Controller {
def index = Action { implicit request =>
request.session.get("userId") match {
case None => Ok(views.html.index())
case Some(userId) =>
Ok(views.html.index(request.session.get(userId).getOrElse("")))
}
}
def home = Action { implicit request =>
request.session.get("userId") match {
case None => Redirect(routes.Application.index)
case Some(userId) =>
Ok(views.html.index(request.session.get(userId).getOrElse("")))
}
}
/**
* Redirect To Login Page When Login Failed Via Social Networks
*/
def loginFailureViaSocialNetworks: Action[play.api.mvc.AnyContent] = Action { implicit request =>
Redirect("/user/signIn").flashing("error" -> Messages("error"))
}
/**
* JavaScript routes
*/
def javascriptRoutes = Action { implicit request =>
Ok(
Routes.javascriptRouter("jsRoutes")(
routes.javascript.UserController.sendMailOnForgotPassword
)).as("text/javascript")
}
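// Illustrative client-side use of the generated router (a sketch; the exact
// signature of sendMailOnForgotPassword is an assumption, not shown here):
// jsRoutes.controllers.UserController.sendMailOnForgotPassword(email).ajax({
// success: function(data) { console.log(data); }
// });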
}
|
knoldus/Play-Starter-Template
|
app/controllers/Application.scala
|
Scala
|
apache-2.0
| 1,265
|
package justin.httpapi
import java.util.UUID
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.testkit.ScalatestRouteTest
import justin.db.client.{ActorRefStorageNodeClient, GetValueResponse, WriteValueResponse}
import justin.db.Data
import justin.db.actors.StorageNodeActorRef
import justin.db.replica.{R, W}
import justin.httpapi.HttpRouter.PutValue
import org.scalatest.{FlatSpec, Matchers}
import spray.json.{JsString, _}
import scala.concurrent.Future
class HttpRouterTest extends FlatSpec with Matchers with ScalatestRouteTest {
behavior of "Http Router"
/**
* GET part
*/
it should "get \\"OK\\" http code for successful read result" in {
val id = UUID.randomUUID()
val data = Data(id, "value")
val r = 1
val router = new HttpRouter(getFound(data))
Get(s"/get?id=${id.toString}&r=$r") ~> Route.seal(router.routes) ~> check {
status shouldBe StatusCodes.OK
responseAs[String].parseJson shouldBe JsObject("value" -> JsString(data.value))
header[VectorClockHeader] shouldBe Some(VectorClockHeader(data.vclock))
}
}
it should "get \"NotFound\" http code for missing data" in {
val value = "value"
val id = UUID.randomUUID().toString
val r = 1
val router = new HttpRouter(notFound(value))
Get(s"/get?id=$id&r=$r") ~> Route.seal(router.routes) ~> check {
status shouldBe StatusCodes.NotFound
responseAs[String].parseJson shouldBe JsObject("value" -> JsString(s"Couldn't found value with id $id"))
header[VectorClockHeader] shouldBe None
}
}
it should "get \\"BadRequest\\" http code for unsuccessful read result" in {
val id = UUID.randomUUID().toString
val r = 1
val error = "Bad Request"
val router = new HttpRouter(badRequest(error))
Get(s"/get?id=$id&r=$r") ~> Route.seal(router.routes) ~> check {
status shouldBe StatusCodes.BadRequest
responseAs[String].parseJson shouldBe JsObject("value" -> JsString(error))
header[VectorClockHeader] shouldBe None
}
}
it should "get \\"InternalServerError\\" http code for unsuccessful read result" in {
val id = UUID.randomUUID().toString
val r = 1
val error = "Internal Server Error"
val router = new HttpRouter(internalServerError(error))
Get(s"/get?id=$id&r=$r") ~> Route.seal(router.routes) ~> check {
status shouldBe StatusCodes.InternalServerError
responseAs[String].parseJson shouldBe JsObject("value" -> JsString(error))
header[VectorClockHeader] shouldBe None
}
}
it should "get \\"MultipleChoices\\" http code for conflicted read result" in {
val id = UUID.randomUUID()
val data1 = Data(id, "value-1")
val data2 = data1.copy(value = "value-2")
val r = 1
val router = new HttpRouter(multipleChoices(List(data1, data2)))
Get(s"/get?id=${id.toString}&r=$r") ~> Route.seal(router.routes) ~> check {
status shouldBe StatusCodes.MultipleChoices
header[VectorClockHeader] shouldBe None
responseAs[String].parseJson shouldBe JsArray(
JsObject("id" -> JsString(id.toString), "value" -> JsString("value-1"), "vclock" -> JsString("W10=")),
JsObject("id" -> JsString(id.toString), "value" -> JsString("value-2"), "vclock" -> JsString("W10="))
)
}
}
private def getFound(data: Data) = new ActorRefStorageNodeClient(StorageNodeActorRef(null)) {
override def get(id: UUID, r: R): Future[GetValueResponse] = Future.successful(GetValueResponse.Found(data))
}
private def notFound(value: String) = new ActorRefStorageNodeClient(StorageNodeActorRef(null)) {
override def get(id: UUID, r: R): Future[GetValueResponse] = Future.successful(GetValueResponse.NotFound(id))
}
private def badRequest(error: String) = new ActorRefStorageNodeClient(StorageNodeActorRef(null)) {
override def get(id: UUID, r: R): Future[GetValueResponse] = Future.successful(GetValueResponse.Failure(error))
}
private def multipleChoices(conflicts: List[Data]) = new ActorRefStorageNodeClient(StorageNodeActorRef(null)) {
override def get(id: UUID, r: R): Future[GetValueResponse] = Future.successful(GetValueResponse.Conflicts(conflicts))
}
private def internalServerError(error: String) = new ActorRefStorageNodeClient(StorageNodeActorRef(null)) {
override def get(id: UUID, r: R): Future[GetValueResponse] = Future.failed(new Exception(error))
}
/**
* PUT part
*/
it should "get \\"NoContent\\" http code for successful write result" in {
val putValue = PutValue(id = UUID.randomUUID(), value = "value", w = 3)
val router = new HttpRouter(successfulWrite(putValue))
Post("/put", putValue) ~> Route.seal(router.routes) ~> check {
status shouldBe StatusCodes.NoContent
}
}
it should "get \\"BadRequest\\" http code for unsuccessful write result" in {
val putValue = PutValue(id = UUID.randomUUID(), value = "value", w = 3)
val error = "unsuccessfully written data"
val router = new HttpRouter(unsuccessfulWrite(error))
Post("/put", putValue) ~> Route.seal(router.routes) ~> check {
status shouldBe StatusCodes.BadRequest
responseAs[String].parseJson shouldBe JsObject("value" -> JsString(error))
}
}
it should "get \\"MultipleChoices\\" http code for conflicted write result" in {
val putValue = PutValue(id = UUID.randomUUID(), value = "value", w = 3)
val router = new HttpRouter(conflictedWrite)
Post("/put", putValue) ~> Route.seal(router.routes) ~> check {
status shouldBe StatusCodes.MultipleChoices
responseAs[String].parseJson shouldBe JsObject("value" -> JsString("Multiple Choices"))
}
}
private def successfulWrite(putValue: PutValue) = new ActorRefStorageNodeClient(StorageNodeActorRef(null)) {
override def write(data: Data, w: W): Future[WriteValueResponse] = Future.successful(WriteValueResponse.Success(data.id))
}
private def unsuccessfulWrite(error: String) = new ActorRefStorageNodeClient(StorageNodeActorRef(null)) {
override def write(data: Data, w: W): Future[WriteValueResponse] = Future.successful(WriteValueResponse.Failure(error))
}
private def conflictedWrite = new ActorRefStorageNodeClient(StorageNodeActorRef(null)) {
override def write(data: Data, w: W): Future[WriteValueResponse] = Future.successful(WriteValueResponse.Conflict)
}
}
|
speedcom/JustinDB
|
justin-http-api/src/test/scala/justin/httpapi/HttpRouterTest.scala
|
Scala
|
apache-2.0
| 6,744
|
package com.ignition.script
import scala.xml.{ Elem, Node }
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{ DataType, StructType }
import org.json4s.{ JString, JValue, jvalue2monadic }
import com.ignition.types.TypeUtils
/**
* Row expression processor. It evaluates a data row and returns the result.
*
* @author Vlad Orzhekhovskiy
*/
trait RowExpression[T <: DataType] extends Serializable {
/**
* The static return type of the expression. If None, this means that it cannot be
* determined statically.
*/
def targetType: Option[T]
/**
* Computes the dynamic return type of the expression.
*/
def computeTargetType(schema: StructType) = evaluate(schema) _ andThen TypeUtils.typeForValue
/**
* Evaluates the data row and computes the result.
*/
def evaluate(schema: StructType)(row: Row): Any
/**
* Converts the expression into XML.
*/
def toXml: Elem
/**
* Converts the expression into JSON.
*/
def toJson: JValue
}
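// A minimal usage sketch (illustrative; concrete implementations are obtained
// via the companion object that follows):
// val expr: RowExpression[_] = RowExpression.fromJson(json)
// val value = expr.evaluate(schema)(row) // computed result for one row
// val dynType = expr.computeTargetType(schema)(row) // DataType of that result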
/**
* Row Expression companion object.
*/
object RowExpression {
def fromXml(xml: Node) = xml match {
case <xpath>{ _* }</xpath> => XPathExpression.fromXml(xml)
case <json>{ _* }</json> => JsonPathExpression.fromXml(xml)
case <mvel>{ _* }</mvel> => MvelExpression.fromXml(xml)
case x => throw new IllegalArgumentException(s"Unknown expression element: ${x.label}")
}
def fromJson(json: JValue) = json \\ "type" match {
case JString("xpath") => XPathExpression.fromJson(json)
case JString("mvel") => MvelExpression.fromJson(json)
case JString("json") => JsonPathExpression.fromJson(json)
case x @ _ => throw new IllegalArgumentException(s"Unknown expression type: $x")
}
}
|
uralian/ignition
|
src/main/scala/com/ignition/script/RowExpression.scala
|
Scala
|
apache-2.0
| 1,625
|
// -*- mode: Scala;-*-
// Filename: JSONAMQPDispatcher.scala
// Authors: lgm
// Creation: Tue May 26 06:40:08 2009
// Copyright: Not supplied
// Description:
// ------------------------------------------------------------------------
package net.liftweb.amqp
//import _root_.com.eaio.uuid.UUID
import _root_.com.rabbitmq.client._
import _root_.scala.actors.Actor
import _root_.scala.collection.mutable.Stack
import _root_.java.io.ObjectInputStream
import _root_.java.io.ByteArrayInputStream
import _root_.java.util.Timer
import _root_.java.util.TimerTask
import java.util.UUID
import _root_.com.thoughtworks.xstream.XStream
import _root_.com.thoughtworks.xstream.io.json.JettisonMappedXmlDriver
import javax.persistence.EntityManager
import javax.persistence.EntityManagerFactory
import javax.persistence.Persistence
import com.biosimilarity.lift.lib.amqp.RabbitFactory
//import org.apache.log4j.Logger
//import org.apache.log4j.PropertyConfigurator
import java.util.Properties
import java.io.FileInputStream
import java.io.IOException
class JSONAMQPDispatcher[T](
factory: ConnectionFactory,
host: String,
port: Int
) extends AMQPDispatcher[T](RabbitFactory.getConnection(factory, host, port)) {
override def configure(channel: Channel) {
// Set up the exchange and queue
channel.exchangeDeclare("mult", "direct")
channel.queueDeclare("mult_queue", true, false, false, null)
channel.queueBind("mult_queue", "mult", "routeroute")
// Use the short version of the basicConsume method for convenience.
channel.basicConsume("mult_queue", false, new SerializedConsumer(channel, this))
}
}
trait JSONHandler {
def handle( contents: String ) : Unit = {
new XStream( new JettisonMappedXmlDriver() ).fromXML( contents )
}
}
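// Illustrative round trip with the same driver (a sketch; XStream's toXML
// emits JSON text when backed by JettisonMappedXmlDriver):
// val xstream = new XStream(new JettisonMappedXmlDriver())
// val json = xstream.toXML(List(1, 2, 3))
// new JSONHandler {}.handle(json)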
trait JSONToScalaHandler {
def handle( dummy : String )( contents: String ) : Unit = {
new XStream( new JettisonMappedXmlDriver() ).fromXML( contents )
}
}
trait JSONToSQLHandler {
self : IdSupplier =>
var _objectCaching : Boolean = true
def objectCachingOn : Boolean = _objectCaching
def turnObjectCachingOn : Unit = {
_objectCaching = true
}
def turnObjectCachingOff : Unit = {
_objectCaching = false
}
var _incoming : Option[Stack[java.lang.Object]] = None
def inComing : Stack[java.lang.Object] = {
_incoming match {
case Some( inC ) => inC
case None => {
val inC = new Stack[java.lang.Object] ()
_incoming = Some( inC )
inC
}
}
}
def recent : Option[java.lang.Object] = {
if ( inComing.isEmpty ) {
None
}
else {
Some( inComing.pop )
}
}
def acquire( obj : java.lang.Object ) = {
inComing.push( obj )
}
var _emf : Option[EntityManagerFactory] = None
def entityMgrFactory( db : String ) : EntityManagerFactory = {
_emf match {
case Some( emf ) => emf
case None => {
val emf = Persistence.createEntityManagerFactory( db )
_emf = Some( emf )
emf
}
}
}
var _em : Option[EntityManager] = None
def entityManager( db : String ) : EntityManager = {
_em match {
case Some( em ) => em
case None => {
val em = entityMgrFactory( db ).createEntityManager()
_em = Some( em )
em
}
}
}
def handle( db : String )( contents: String ) : Unit = {
var obj : java.lang.Object = null;
try {
obj =
new XStream(
new JettisonMappedXmlDriver()
).fromXML(
contents.replace(
"Absyn", "Absyn.persistence.sql"
)
);
if ( objectCachingOn ) {
acquire( obj );
}
//generateIds( obj );
try {
entityManager( db ).getTransaction().begin();
entityManager( db ).persist( obj );
entityManager( db ).getTransaction().commit();
}
catch {
case e : Throwable => {
println( "persistence error attempting to store " + obj )
e.printStackTrace
}
}
}
catch {
case e : Throwable => {
println( "marshaling error" )
e.printStackTrace
}
}
}
}
class JSONAMQPListener( host : String ) {
val LOG_PROPERTIES_FILE : String =
"src/main/resources/Log4J.properties";
val factory = new ConnectionFactory()
factory.setUsername("guest")
factory.setPassword("guest")
factory.setVirtualHost("/")
factory.setRequestedHeartbeat(0)
val amqp =
new JSONAMQPDispatcher[String](
factory,
//"localhost",
host,
5672
)
def configureLogging() {
// val logProperties : Properties = new Properties();
// val log : Logger =
// Logger.getLogger(classOf[JSONAMQPListener]);
// try {
// logProperties.load(new FileInputStream(LOG_PROPERTIES_FILE));
// PropertyConfigurator.configure(logProperties);
// log.info("Logging initialized.");
// }
// catch {
// case e => e.printStackTrace
// }
}
def testHandle = {
val jal = new net.liftweb.amqp.JSONAMQPListener( "localhost" )
jal.amqp ! net.liftweb.amqp.AMQPReconnect( 4 )
jal.jsonListener.handle( "rlambda_production" )(
new com.thoughtworks.xstream.XStream(
new com.thoughtworks.xstream.io.json.JettisonMappedXmlDriver
).toXML(
//(new com.biosimilarity.reflection.model.REPL).read( "lambda x.x" )
"fix me"
)
)
}
amqp.start
// JSON Listener
class JSONListener(
logging : Boolean,
rcrs : Boolean,
fOUT : Boolean
)
extends Actor
//with JSONToSQLHandler
with JSONToScalaHandler
with IdSupplier {
// ID Generation
override def recurse() = rcrs
override def hasDepth( pojo : java.lang.Object ) : Boolean = {
altHasDepth( pojo )
}
override def failOnUnknownType() = fOUT
override def stdAction() : Action = {
( subject : ActedOn ) => {
if (reallyHasSetId( subject ))
reallyCallSetId(
subject,
getNextId()
);
for (field <- subject.getClass.getDeclaredFields
if (field.getName.contains( "idSuper" )
|| field.getName.contains( "uuid" )))
yield {
// reflectively break java access control mechanisms
val accessible = field.isAccessible;
field.setAccessible( true );
field.set( subject, getNextId() )
// put java access mechanisms back in place
field.setAccessible( accessible );
}
}
}
override def getNextId() = {
UUID.randomUUID + ""
}
def act = {
react {
case msg@AMQPMessage( contents : String ) => {
if ( logging ) {
println("received: " + msg)
};
handle( "stockholm" )( contents );
act
}
}
}
}
val jsonListener =
new JSONListener( true, true, false )
jsonListener.start
amqp ! AMQPAddListener( jsonListener )
}
|
leithaus/strategies
|
src/main/scala/net/liftweb/amqp/JSONAMQPDispatcher.scala
|
Scala
|
cc0-1.0
| 6,713
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.rules.dataSet
import org.apache.calcite.plan.{RelOptRule, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.flink.table.plan.nodes.FlinkConventions
import org.apache.flink.table.plan.nodes.dataset.DataSetValues
import org.apache.flink.table.plan.nodes.logical.FlinkLogicalValues
class DataSetValuesRule
extends ConverterRule(
classOf[FlinkLogicalValues],
FlinkConventions.LOGICAL,
FlinkConventions.DATASET,
"DataSetValuesRule")
{
def convert(rel: RelNode): RelNode = {
val values: FlinkLogicalValues = rel.asInstanceOf[FlinkLogicalValues]
val traitSet: RelTraitSet = rel.getTraitSet.replace(FlinkConventions.DATASET)
new DataSetValues(
rel.getCluster,
traitSet,
rel.getRowType,
values.getTuples,
description)
}
}
object DataSetValuesRule {
val INSTANCE: RelOptRule = new DataSetValuesRule
}
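// Illustrative registration (a sketch; in practice Flink's planner collects
// rule INSTANCEs into a Calcite RuleSet):
// import org.apache.calcite.tools.RuleSets
// val rules = RuleSets.ofList(DataSetValuesRule.INSTANCE)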
|
ueshin/apache-flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/rules/dataSet/DataSetValuesRule.scala
|
Scala
|
apache-2.0
| 1,776
|
/*
* Backup.scala
* (Backup)
*
* Copyright (c) 2014-2018 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.backup
import java.awt.Color
import java.awt.datatransfer.DataFlavor
import java.io.{ByteArrayOutputStream, FileOutputStream}
import javax.swing.TransferHandler
import javax.swing.TransferHandler.TransferSupport
import de.sciss.desktop.impl.{SwingApplicationImpl, WindowHandlerImpl, WindowImpl}
import de.sciss.desktop.{Desktop, FileDialog, LogPane, Menu, OptionPane, Window, WindowHandler}
import de.sciss.file._
import scala.concurrent.{ExecutionContext, Future, blocking}
import scala.swing.Swing._
import scala.swing.{BorderPanel, Button, FlowPanel, Label}
import scala.util.matching.Regex
import scala.util.{Failure, Success}
object Backup extends SwingApplicationImpl[Unit]("Backup") {
val initVolume : String = "Mnemo1" // Mnemo1 or Mnemo2
val volumes : File = if (Desktop.isMac) file("/Volumes") else file("/media") / sys.props("user.name")
val initTargetDir: File = volumes / initVolume / "CDs"
val ejectDVD : Boolean = true
val dvdDrive : String = "cdrom"
val dvdDir : File = file("/media") / dvdDrive // / "*"
val shell : String = "/bin/bash"
val askPass : String = "/usr/bin/ssh-askpass"
val sudo : String = "sudo"
val cp : String = "cp"
val touch : String = "touch"
val chmod : String = "chmod"
val eject : String = "eject"
val volName : String = "volname"
val VolNameExp : Regex = "(\\\\S+)\\\\s*\\\\n*".r // somehow 'volname' has trailing spaces and a line feed
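// e.g. (illustrative) extracting the volume name from raw 'volname' output:
// val VolNameExp(name) = "MYDISC \n" // name == "MYDISC"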
lazy val menuFactory = Menu.Root()
private lazy val wh = new WindowHandlerImpl(this, menuFactory)
private var ongoing: Future[Int] = Future.successful(0)
private val colrFg = Color.white
private val colrBg = Color.darkGray
private var targetDir = initTargetDir
private var batch = List.empty[File]
override def init(): Unit = {
de.sciss.submin.Submin.install(true)
val initText = "Drop Volume to Backup"
lazy val ggSink = new Label(initText)
val log = LogPane(rows = 20, columns = 80)
def process(source: File, name: String, callEject: Boolean): Boolean = {
if (!ongoing.isCompleted) return false
val target = targetDir / name
val title = s"Archiving '$name'"
def bail(s: String): Unit = {
val opt = OptionPane.message(s, OptionPane.Message.Error)
opt.show(Some(fr), title)
}
if (!targetDir.isDirectory) {
bail(s"Backup directory '$targetDir' not found")
ggSink.text = initText
return false
}
if (target.exists()) {
bail(s"Target '$target' already exists")
ggSink.text = initText
return false
}
ggSink.text = s"Processing '$name'..."
val scr = s"""#!$shell
|$cp -Rpv \\"$source\\" \\"$target\\"
|$chmod -R u-w \\"$target\\"
|$touch -amr \\"$source\\" \\"$target\\"
|${if (callEject) s"$eject $dvdDrive" else ""}
|""".stripMargin
val scrF = File.createTemp(suffix = ".sh")
val scrS = new FileOutputStream(scrF)
scrS.write(scr.getBytes("utf8"))
scrS.close()
import ExecutionContext.Implicits.global
import sys.process._
val pb = Process(List(sudo, "-A", "sh", scrF.path), None, "SUDO_ASKPASS" -> askPass) #> log.outputStream
log.clear()
ggCD.enabled = false
ongoing = Future(blocking(pb.!))
val strFut = ongoing.map {
case 0 => s"Backup of '$name' succeeded."
case code => s"Backup of '$name' failed with code $code."
} recover {
case ex => s"${ex.getClass.getSimpleName} - ${ex.getMessage}"
}
strFut.foreach(s => onEDT {
ggSink.text = s
ggCD.enabled = true
})
ongoing.onComplete {
case Success(code) if code != 0 =>
onEDT {
bail(s"Copy process returned with code $code")
}
case Failure(ex) =>
onEDT {
wh.showDialog(Some(fr), ex -> title)
}
case _ =>
onEDT {
batch match {
case head :: tail =>
batch = tail
process(source = head, name = head.base, callEject = callEject)
case _ =>
}
}
}
true
}
lazy val ggCD: Button = Button("DVD-ROM") {
import sys.process._
val os = new ByteArrayOutputStream
val code = (volName #> os).!
os.close()
if (code == 0) {
val VolNameExp(name) = os.toString("utf8")
batch = Nil
process(dvdDir.getCanonicalFile, name, callEject = ejectDVD)
} else {
val opt = OptionPane.message("No DVD in drive", OptionPane.Message.Error)
opt.show(Some(fr))
}
}
// NOTE: on Linux must use JDK 7 to offer javaFileListFlavor!
lazy val th: TransferHandler = new TransferHandler {
override def canImport(support: TransferSupport): Boolean =
support.isDataFlavorSupported(DataFlavor.javaFileListFlavor)
override def importData(support: TransferSupport): Boolean =
canImport(support) && {
import scala.collection.JavaConverters._
val data: List[File] = support.getTransferable.getTransferData(DataFlavor.javaFileListFlavor)
.asInstanceOf[java.util.List[File]].asScala.toList.sortBy(_.lastModified())
data match {
case head :: tail =>
batch = tail
process(head, head.base, callEject = false)
case _ => false
}
}
}
lazy val dSink = ggSink.preferredSize
ggSink.border = CompoundBorder(CompoundBorder(EmptyBorder(8), BeveledBorder(Lowered)), EmptyBorder(8))
dSink.width += 36
dSink.height += 36
ggSink.preferredSize = dSink // bug in WebLaF
ggSink.peer.setTransferHandler(th)
ggSink.foreground = colrFg
ggSink.background = colrBg
ggCD.focusable = false
log.background = colrBg
log.foreground = colrFg
log.component.focusable = false
lazy val lbTarget = new Label(targetDir.path)
lazy val ggTarget = Button("Change...") {
FileDialog.folder(init = Some(targetDir)).show(Some(fr)).foreach { f =>
targetDir = f
lbTarget.text = f.path
}
}
lazy val pTarget = new FlowPanel(new Label("Target Directory:"), lbTarget, ggTarget)
lazy val pTop: BorderPanel = new BorderPanel {
add(pTarget, BorderPanel.Position.North )
add(ggSink , BorderPanel.Position.Center)
add(ggCD , BorderPanel.Position.East )
foreground = colrFg
background = colrBg
}
lazy val fr: Window = new WindowImpl {
def handler: WindowHandler = wh
title = "Backup | Archiving"
alwaysOnTop = true
contents = new BorderPanel {
background = colrBg
add(pTop , BorderPanel.Position.North )
add(log.component, BorderPanel.Position.Center)
}
pack()
closeOperation = Window.CloseIgnore
reactions += {
case Window.Closing(_) =>
val doQuit = ongoing.isCompleted || {
val opt = OptionPane.confirmation("Ongoing backup process. Really abort and quit?")
opt.show(Some(fr), "Quit") == OptionPane.Result.Yes
}
if (doQuit) sys.exit(0)
}
}
fr.front()
}
}
|
Sciss/Backup
|
src/main/scala/de/sciss/backup/Backup.scala
|
Scala
|
gpl-3.0
| 7,701
|
/**
* © 2019 Refinitiv. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell
import com.typesafe.scalalogging.LazyLogging
/**
* Created by gilad on 8/6/15.
*/
package object domain extends LazyLogging {
def addIndexTime(infoton: Infoton, indexTime: Option[Long], force: Boolean = false): Infoton = infoton match {
// format: off
case i:ObjectInfoton if force || i.systemFields.indexTime.isEmpty => i.copy(i.systemFields.copy(indexTime = indexTime))
case i:FileInfoton if force || i.systemFields.indexTime.isEmpty => i.copy(i.systemFields.copy(indexTime = indexTime))
case i:LinkInfoton if force || i.systemFields.indexTime.isEmpty => i.copy(i.systemFields.copy(indexTime = indexTime))
case i:DeletedInfoton if force || i.systemFields.indexTime.isEmpty => i.copy(i.systemFields.copy(indexTime = indexTime))
// format: on
case i if i.systemFields.indexTime.isDefined => {
logger.warn(
s"was asked to add indextime, but one is already supplied! uuid=${i.uuid}, path=${i.systemFields.path}, indexTime=${i.systemFields.indexTime.get}"
)
i
}
case _ => ???
}
def addIndexInfo(infoton: Infoton, indexTime: Option[Long], indexName: String, force: Boolean = false): Infoton =
infoton match {
// format: off
case i:ObjectInfoton if force || i.systemFields.indexTime.isEmpty => i.copy(i.systemFields.copy(indexTime = indexTime, indexName = indexName))
case i:FileInfoton if force || i.systemFields.indexTime.isEmpty => i.copy(i.systemFields.copy(indexTime = indexTime, indexName = indexName))
case i:LinkInfoton if force || i.systemFields.indexTime.isEmpty => i.copy(i.systemFields.copy(indexTime = indexTime, indexName = indexName))
case i:DeletedInfoton if force || i.systemFields.indexTime.isEmpty => i.copy(i.systemFields.copy(indexTime = indexTime, indexName = indexName))
// format: on
case i if i.systemFields.indexTime.isDefined => {
logger.warn(
s"was asked to add indextime, but one is already supplied! uuid=${i.uuid}, path=${i.systemFields.path}, indexTime=${i.systemFields.indexTime.get}"
)
i
}
case _ => ???
}
def addDc(infoton: Infoton, dc: String, force: Boolean = false): Infoton = infoton match {
// format: off
case i:ObjectInfoton if force || i.systemFields.dc=="na" => i.copy(i.systemFields.copy(dc = dc))
case i:FileInfoton if force || i.systemFields.dc=="na" => i.copy(i.systemFields.copy(dc = dc))
case i:LinkInfoton if force || i.systemFields.dc=="na" => i.copy(i.systemFields.copy(dc = dc))
case i:DeletedInfoton if force || i.systemFields.dc=="na" => i.copy(i.systemFields.copy(dc = dc))
// format: on
case i if i.systemFields.dc != "na" => {
logger.warn(s"was asked to add dc, but one is already supplied! uuid=${i.uuid}, path=${i.systemFields.path}, dc=${i.systemFields.dc}")
i
}
case _ => ???
}
def addDcAndIndexTimeForced(infoton: Infoton, dc: String, indexTime: Long): Infoton = infoton match {
// format: off
case i:ObjectInfoton => i.copy(i.systemFields.copy(dc = dc, indexTime = Some(indexTime)))
case i:FileInfoton => i.copy(i.systemFields.copy(dc = dc, indexTime = Some(indexTime)))
case i:LinkInfoton => i.copy(i.systemFields.copy(dc = dc, indexTime = Some(indexTime)))
case i:DeletedInfoton => i.copy(i.systemFields.copy(dc = dc, indexTime = Some(indexTime)))
// format: on
case _ => ???
}
def autoFixDcAndIndexTime(i: Infoton, dcIfNeeded: String): Option[Infoton] = {
if (i.systemFields.dc == "na" || i.systemFields.indexTime.isEmpty) {
val idxT = i.systemFields.indexTime.orElse(Some(i.systemFields.lastModified.getMillis))
val dc = if (i.systemFields.dc == "na") dcIfNeeded else i.systemFields.dc
i match {
// format: off
case i: ObjectInfoton => Some(i.copy(i.systemFields.copy(dc = dc, indexTime = idxT)))
case i: FileInfoton => Some(i.copy(i.systemFields.copy(dc = dc, indexTime = idxT)))
case i: LinkInfoton => Some(i.copy(i.systemFields.copy(dc = dc, indexTime = idxT)))
case i: DeletedInfoton => Some(i.copy(i.systemFields.copy(dc = dc, indexTime = idxT)))
// format: on
case _ => ???
}
} else None
}
}
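// Hedged usage sketch (editorial addition, not part of the original file):
// the helpers above treat dc == "na" and an empty indexTime as missing
// metadata, and leave already-populated values alone unless force = true.
// Assuming an `infoton: Infoton` received from a remote data center:
//
//   val stamped  = addDcAndIndexTimeForced(infoton, "remote-dc", System.currentTimeMillis())
//   val repaired = autoFixDcAndIndexTime(infoton, dcIfNeeded = "remote-dc") // None when nothing to fix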
|
e-orz/CM-Well
|
server/cmwell-domain/src/main/scala/cmwell/domain/package.scala
|
Scala
|
apache-2.0
| 4,881
|
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.serving.utils
import org.apache.logging.log4j.LogManager
trait Supportive {
def timing[T](name: String)(f: => T): T = {
val begin = System.nanoTime()
val result = f
val end = System.nanoTime()
val cost = (end - begin)
LogManager.getLogger(getClass).info(s"$name time elapsed [${cost / 1e6} ms].")
result
}
}
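// Hedged usage sketch (editorial addition, not part of the original file):
// mixing Supportive into an object to time a block of work. The elapsed wall
// time is logged in milliseconds and the block's result is returned unchanged;
// only the log4j-api dependency already used by the trait is assumed.
object SupportiveExample extends Supportive {
  def main(args: Array[String]): Unit = {
    val sum = timing("sum 1..1000000") {
      (1L to 1000000L).sum
    }
    println(sum)
  }
}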
|
intel-analytics/analytics-zoo
|
zoo/src/main/scala/com/intel/analytics/zoo/serving/utils/Supportive.scala
|
Scala
|
apache-2.0
| 977
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.examples.featurespec.composingbeforeandaftereach
import org.scalatest._
import collection.mutable.ListBuffer
trait Builder extends BeforeAndAfterEach { this: Suite =>
val builder = new StringBuilder
override def beforeEach() {
builder.append("ScalaTest is designed to ")
super.beforeEach() // To be stackable, must call super.beforeEach
}
override def afterEach() {
try super.afterEach() // To be stackable, must call super.afterEach
finally builder.clear()
}
}
trait Buffer extends BeforeAndAfterEach { this: Suite =>
val buffer = new ListBuffer[String]
override def afterEach() {
try super.afterEach() // To be stackable, must call super.afterEach
finally buffer.clear()
}
}
class ExampleSpec extends FeatureSpec with Builder with Buffer {
feature("Simplicity") {
scenario("User needs to read test code written by others") {
builder.append("encourage clear code!")
assert(builder.toString === "ScalaTest is designed to encourage clear code!")
assert(buffer.isEmpty)
buffer += "clear"
}
scenario("User needs to understand what the tests are doing") {
builder.append("be easy to reason about!")
assert(builder.toString === "ScalaTest is designed to be easy to reason about!")
assert(buffer.isEmpty)
buffer += "easy"
}
}
}
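// Editorial note (not part of the original file): because stackable traits
// compose via linearization, `FeatureSpec with Builder with Buffer` runs
// Builder.beforeEach before each scenario; on teardown, Buffer.afterEach
// delegates to Builder.afterEach via super before clearing the buffer, which
// is why both fixtures are reset between the two scenarios above.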
|
dotty-staging/scalatest
|
examples/src/test/scala/org/scalatest/examples/featurespec/composingbeforeandaftereach/ExampleSpec.scala
|
Scala
|
apache-2.0
| 1,964
|
package com.themillhousegroup.edn
import org.specs2.mutable.Specification
import com.themillhousegroup.edn.test.{ StreamChecking, EDNParsing }
import java.io.FileNotFoundException
class ParseableSourceSpec extends Specification with EDNParsing with StreamChecking {
"The ParseableSource object" should {
"Allow me to supply a scala.io.Source as the EDN source" in {
val p = EDNParser()
val src = scala.io.Source.fromURL(
getClass.getResource("/config.edn"))
import com.themillhousegroup.edn.ParseableSource._
val stream = p.asStream(src)
val s = stream.toSeq
s must haveSize(1)
val innerStream = s.head._2.asInstanceOf[Stream[(String, AnyRef)]]
keyStreamMustHave(
innerStream,
"http-server", "logging", "icarus", "environments", "private-file")
}
"Throw a FileNotFound if I ask for the impossible" in {
import com.themillhousegroup.edn.ParseableSource._
val p = EDNParser()
p.asStream("nonexistent.edn") must throwA[FileNotFoundException]
}
"Allow me to supply a simple filename as the EDN source" in {
import com.themillhousegroup.edn.ParseableSource._
val p = EDNParser()
val stream = p.asStream("/config.edn")
val s = stream.toSeq
s must haveSize(1)
val innerStream = s.head._2.asInstanceOf[Stream[(String, AnyRef)]]
keyStreamMustHave(
innerStream,
"http-server", "logging", "icarus", "environments", "private-file")
}
}
}
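// Editorial note (not part of the original file): the `import ParseableSource._`
// lines appear to bring implicit conversions into scope, which is what lets the
// same `EDNParser#asStream` call accept either a scala.io.Source or a plain
// classpath-resource filename, as exercised by the three examples above.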
|
themillhousegroup/edn-scala
|
src/test/scala/com/themillhousegroup/edn/ParseableSourceSpec.scala
|
Scala
|
gpl-2.0
| 1,525
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel
package scala.dsl
import builder.{RouteBuilderSupport, RouteBuilder}
import org.apache.camel.processor.MulticastParallelTest
class ExplicitMulticastTest extends ScalaTestSupport {
def testExplicitMulticast = {
"mock:a" expect { _.count = 3 }
"mock:b" expect { _.count = 3 }
"mock:c" expect { _.count = 3 }
"direct:a" ! ("<hello/>", "<hallo/>", "<bonjour/>")
"mock:a" assert()
"mock:b" assert()
"mock:c" assert()
}
val builder = new RouteBuilder {
// START SNIPPET: multicast
"direct:a" ==> {
multicast {
to ("mock:a")
to ("mock:b")
to ("mock:c")
}
}
// END SNIPPET: multicast
}
}
/**
* Scala DSL equivalent for the org.apache.camel.processor.MulticastParallelTest
*/
class SMulticastParallelTest extends MulticastParallelTest with RouteBuilderSupport {
override def createRouteBuilder = new RouteBuilder {
val appendBodies = (oldExchange: Exchange, newExchange: Exchange) => {
if (oldExchange == null) {
newExchange
} else {
        oldExchange.in = oldExchange.in[String] + newExchange.in[String]
oldExchange
}
}
"direct:start" ==> {
multicast.strategy(appendBodies).parallel {
to("direct:a")
to("direct:b")
}
to("mock:result")
}
"direct:a" delay(100 ms) setbody("A")
"direct:b" setbody("B")
}
}
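// Editorial note (not part of the original file): in the Scala DSL used above,
// `"mock:x" expect { _.count = 3 }` sets the mock endpoint's expectation,
// `"direct:a" ! (...)` sends each body into the route, and `"mock:x" assert()`
// verifies the expectation -- so testExplicitMulticast checks that the
// multicast delivers all three messages to each of the three mocks.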
|
kingargyle/turmeric-bot
|
components/camel-scala/src/test/scala/org/apache/camel/scala/dsl/MulticastTest.scala
|
Scala
|
apache-2.0
| 2,221
|
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.util.stream
import akka.stream.{Attributes, FlowShape, Inlet, Outlet}
import akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler}
// format: off
class StreamEventInspector[Elem](onUpstreamFinishInspection: () => Unit = () => {},
onUpstreamFailureInspection: Throwable => Unit = _ => {},
onDownstreamFinishInspection: () => Unit = () => {},
onPushInspection: Elem => Unit = (_: Elem) => {},
onPullInspection: () => Unit = () => {}) extends GraphStage[FlowShape[Elem, Elem]] {
// format: on
private val in = Inlet[Elem]("StreamEventInspector.in")
private val out = Outlet[Elem]("StreamEventInspector.out")
override val shape = FlowShape(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
setHandler(
in,
new InHandler {
override def onPush(): Unit = {
val elem = grab(in)
onPushInspection(elem)
push(out, elem)
}
override def onUpstreamFailure(ex: Throwable): Unit = {
onUpstreamFailureInspection(ex)
super.onUpstreamFailure(ex)
}
override def onUpstreamFinish(): Unit = {
onUpstreamFinishInspection()
super.onUpstreamFinish()
}
}
)
setHandler(
out,
new OutHandler {
override def onPull(): Unit = {
onPullInspection()
pull(in)
}
override def onDownstreamFinish(): Unit = {
onDownstreamFinishInspection()
super.onDownstreamFinish()
}
}
)
}
}
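// Hedged usage sketch (editorial addition, not part of the original file):
// wiring the inspector between a Source and a Sink to observe stream events.
// Assumes classic Akka Streams with an ActorMaterializer (newer Akka versions
// can materialize from the implicit ActorSystem instead).
object StreamEventInspectorExample extends App {
  import akka.actor.ActorSystem
  import akka.stream.ActorMaterializer
  import akka.stream.scaladsl.{Flow, Sink, Source}

  implicit val system: ActorSystem = ActorSystem("inspector-example")
  implicit val mat: ActorMaterializer = ActorMaterializer()

  Source(1 to 3)
    .via(Flow.fromGraph(new StreamEventInspector[Int](
      onPushInspection = i => println(s"pushed $i"),
      onUpstreamFinishInspection = () => println("upstream finished"))))
    .runWith(Sink.foreach[Int](println))
    .onComplete(_ => system.terminate())(system.dispatcher)
}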
|
hochgi/CM-Well
|
server/cmwell-util/src/main/scala/cmwell/util/stream/StreamEventInspector.scala
|
Scala
|
apache-2.0
| 2,401
|
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc
package transform
import scala.annotation.tailrec
import scala.reflect.internal.ClassfileConstants._
import scala.collection.{immutable, mutable}
import symtab._
import Flags._
import scala.reflect.internal.Mode._
abstract class Erasure extends InfoTransform
with scala.reflect.internal.transform.Erasure
with typechecker.Analyzer
with TypingTransformers
with ast.TreeDSL
with TypeAdaptingTransformer
{
import global._
import definitions._
import CODE._
val phaseName: String = "erasure"
val requiredDirectInterfaces = perRunCaches.newAnyRefMap[Symbol, mutable.Set[Symbol]]()
def newTransformer(unit: CompilationUnit): AstTransformer =
new ErasureTransformer(unit)
override def keepsTypeParams = false
// -------- erasure on types --------------------------------------------------------
// convert a numeric with a toXXX method
def numericConversion(tree: Tree, numericSym: Symbol): Tree = {
val mname = newTermName("to" + numericSym.name)
val conversion = tree.tpe member mname
assert(conversion != NoSymbol, s"$tree => $numericSym")
atPos(tree.pos)(Apply(Select(tree, conversion), Nil))
}
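  // Editorial illustration (not part of the original source): for a tree of
  // type Long with numericSym == IntClass, the method above builds the
  // equivalent of `tree.toInt`, i.e. Apply(Select(tree, toInt), Nil).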
private object NeedsSigCollector {
private val NeedsSigCollector_true = new NeedsSigCollector(true)
private val NeedsSigCollector_false = new NeedsSigCollector(false)
def apply(isClassConstructor: Boolean) = if (isClassConstructor) NeedsSigCollector_true else NeedsSigCollector_false
}
private class NeedsSigCollector(isClassConstructor: Boolean) extends TypeCollector(false) {
def apply(tp: Type): Unit =
if (!result) {
tp match {
case st: SubType =>
apply(st.supertype)
case TypeRef(pre, sym, args) =>
if (sym == ArrayClass) untilApply(args)
else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound || !args.isEmpty) result = true
else if (sym.isClass) apply(rebindInnerClass(pre, sym)) // #2585
else if (!sym.isTopLevel) apply(pre)
case PolyType(_, _) | ExistentialType(_, _) => result = true
case RefinedType(parents, _) =>
untilApply(parents)
case ClassInfoType(parents, _, _) =>
untilApply(parents)
case AnnotatedType(_, atp) =>
apply(atp)
case MethodType(params, resultType) =>
if (isClassConstructor) {
val sigParams = params match {
case head :: tail if head.isOuterParam => tail
case _ => params
}
this.foldOver(sigParams)
// skip the result type, it is Void in the signature.
} else {
tp.foldOver(this)
}
case _ =>
tp.foldOver(this)
}
}
@tailrec
private[this] def untilApply(ts: List[Type]): Unit =
if (! ts.isEmpty && ! result) { apply(ts.head) ; untilApply(ts.tail) }
}
override protected def verifyJavaErasure = settings.Xverify || settings.isDebug
private def needsJavaSig(sym: Symbol, tp: Type, throwsArgs: List[Type]) = !settings.Ynogenericsig && {
def needs(tp: Type) = NeedsSigCollector(sym.isClassConstructor).collect(tp)
needs(tp) || throwsArgs.exists(needs)
}
// only refer to type params that will actually make it into the sig, this excludes:
// * higher-order type parameters
// * type parameters appearing in method parameters
// * type members not visible in an enclosing template
private def isTypeParameterInSig(sym: Symbol, initialSymbol: Symbol) = (
!sym.isHigherOrderTypeParameter &&
sym.isTypeParameterOrSkolem && (
(initialSymbol.isMethod && initialSymbol.typeParams.contains(sym)) ||
(initialSymbol.ownersIterator.exists(encl => encl.isClass && !encl.hasPackageFlag && sym.isNestedIn(encl)))
)
)
/** This object is only used for sanity testing when -check:genjvm is set.
* In that case we make sure that the erasure of the `normalized` type
* is the same as the erased type that's generated. Normalization means
* unboxing some primitive types and further simplifications as they are done in jsig.
*/
val prepareSigMap = new TypeMap {
def squashBoxed(tp: Type): Type = tp.dealiasWiden match {
case t @ RefinedType(parents, decls) =>
val parents1 = parents mapConserve squashBoxed
if (parents1 eq parents) tp
else RefinedType(parents1, decls)
case t @ ExistentialType(tparams, tpe) =>
val tpe1 = squashBoxed(tpe)
if (tpe1 eq tpe) t
else ExistentialType(tparams, tpe1)
case t =>
if (boxedClass contains t.typeSymbol) ObjectTpe
else tp
}
def apply(tp: Type): Type = tp.dealiasWiden match {
case tp1 @ TypeBounds(lo, hi) =>
val lo1 = squashBoxed(apply(lo))
val hi1 = squashBoxed(apply(hi))
if ((lo1 eq lo) && (hi1 eq hi)) tp1
else TypeBounds(lo1, hi1)
case tp1 @ TypeRef(pre, sym, args) =>
def argApply(tp: Type) = {
val tp1 = apply(tp)
if (tp1.typeSymbol == UnitClass) ObjectTpe
else squashBoxed(tp1)
}
if (sym == ArrayClass && args.nonEmpty)
if (unboundedGenericArrayLevel(tp1) == 1) ObjectTpe
else mapOver(tp1)
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
ObjectTpe
else if (sym == UnitClass)
BoxedUnitTpe
else if (sym == NothingClass)
RuntimeNothingClass.tpe
else if (sym == NullClass)
RuntimeNullClass.tpe
else {
val pre1 = apply(pre)
val args1 = args mapConserve argApply
if ((pre1 eq pre) && (args1 eq args)) tp1
else TypeRef(pre1, sym, args1)
}
case tp1 @ MethodType(params, restpe) =>
val params1 = mapOver(params)
val restpe1 = if (restpe.typeSymbol == UnitClass) UnitTpe else apply(restpe)
if ((params1 eq params) && (restpe1 eq restpe)) tp1
else MethodType(params1, restpe1)
case tp1 @ RefinedType(parents, decls) =>
val parents1 = parents mapConserve apply
if (parents1 eq parents) tp1
else RefinedType(parents1, decls)
case t @ ExistentialType(tparams, tpe) =>
val tpe1 = apply(tpe)
if (tpe1 eq tpe) t
else ExistentialType(tparams, tpe1)
case tp1: ClassInfoType =>
tp1
case tp1 =>
mapOver(tp1)
}
}
private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.dealiasWiden match {
case RefinedType(parents, _) => parents map (_.dealiasWiden)
case tp => tp :: Nil
}
private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType]
/* Drop redundant types (ones which are implemented by some other parent) from the immediate parents.
* This is important on Android because there is otherwise an interface explosion.
*/
def minimizeParents(cls: Symbol, parents: List[Type]): List[Type] = if (parents.isEmpty) parents else {
val requiredDirect: Symbol => Boolean = requiredDirectInterfaces.getOrElse(cls, Set.empty)
var rest = parents.tail
var leaves = collection.mutable.ListBuffer.empty[Type] += parents.head
while (rest.nonEmpty) {
val candidate = rest.head
val candidateSym = candidate.typeSymbol
val required = requiredDirect(candidateSym) || !leaves.exists(t => t.typeSymbol isSubClass candidateSym)
if (required) {
leaves = leaves filter { t =>
val ts = t.typeSymbol
requiredDirect(ts) || !ts.isTraitOrInterface || !candidateSym.isSubClass(ts)
}
leaves += candidate
}
rest = rest.tail
}
leaves.toList
}
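  // Editorial illustration (not part of the original source): given
  // `trait A; trait B extends A; class C extends AnyRef with A with B`,
  // minimizeParents drops A from C's immediate parents because the retained
  // leaf B already implements it (unless A was recorded as required-direct).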
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
final def javaSig(sym0: Symbol, info: Type, markClassUsed: Symbol => Unit): Option[String] = enteringErasure { javaSig0(sym0, info, markClassUsed) }
@noinline
private final def javaSig0(sym0: Symbol, info: Type, markClassUsed: Symbol => Unit): Option[String] = {
val builder = new java.lang.StringBuilder(64)
val isTraitSignature = sym0.enclClass.isTrait
def superSig(cls: Symbol, parents: List[Type]): Unit = {
def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait
// a signature should always start with a class
def ensureClassAsFirstParent(tps: List[Type]) = tps match {
case Nil => ObjectTpe :: Nil
case head :: tail if isInterfaceOrTrait(head.typeSymbol) => ObjectTpe :: tps
case _ => tps
}
val minParents = minimizeParents(cls, parents)
val validParents =
if (isTraitSignature)
// java is unthrilled about seeing interfaces inherit from classes
minParents filter (p => isInterfaceOrTrait(p.typeSymbol))
else minParents
val ps = ensureClassAsFirstParent(validParents)
ps.foreach(boxedSig)
}
def boxedSig(tp: Type): Unit = jsig(tp, unboxedVCs = false)
def boundsSig(bounds: List[Type]): Unit = {
val (isTrait, isClass) = partitionConserve(bounds)(_.typeSymbol.isTrait)
isClass match {
case Nil => builder.append(':') // + boxedSig(ObjectTpe)
case x :: _ => builder.append(':'); boxedSig(x)
}
isTrait.foreach { tp =>
builder.append(':')
boxedSig(tp)
}
}
def paramSig(tsym: Symbol): Unit = {
builder.append(tsym.name)
boundsSig(hiBounds(tsym.info.bounds))
}
def polyParamSig(tparams: List[Symbol]): Unit = (
if (!tparams.isEmpty) {
builder.append('<')
tparams foreach paramSig
builder.append('>')
}
)
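    // Editorial illustration (not part of the original source): for
    // `def f[T <: java.lang.Number](x: T): T`, polyParamSig plus the method
    // signature should emit the generic signature "<T:Ljava/lang/Number;>(TT;)TT;".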
// Anything which could conceivably be a module (i.e. isn't known to be
// a type parameter or similar) must go through here or the signature is
// likely to end up with Foo<T>.Empty where it needs Foo<T>.Empty$.
def fullNameInSig(sym: Symbol): Unit = builder.append('L').append(enteringJVM(sym.javaBinaryNameString))
@noinline
def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, unboxedVCs: Boolean = true): Unit = {
@inline def jsig1(tp0: Type) = jsig(tp0, existentiallyBound = Nil, toplevel = false, unboxedVCs = true)
val tp = tp0.dealias
tp match {
case st: SubType =>
jsig(st.supertype, existentiallyBound, toplevel, unboxedVCs)
case ExistentialType(tparams, tpe) =>
jsig(tpe, tparams, toplevel, unboxedVCs)
case TypeRef(pre, sym, args) =>
def argSig(tp: Type): Unit =
if (existentiallyBound contains tp.typeSymbol) {
val bounds = tp.typeSymbol.info.bounds
if (!(AnyRefTpe <:< bounds.hi)) {
builder.append('+')
boxedSig(bounds.hi)
}
else if (!(bounds.lo <:< NullTpe)) {
builder.append('-')
boxedSig(bounds.lo)
}
else builder.append('*')
} else tp match {
case PolyType(_, res) =>
builder.append('*') // scala/bug#7932
case _ =>
boxedSig(tp)
}
def classSig(): Unit = {
markClassUsed(sym)
val preRebound = pre.baseType(sym.owner) // #2585
if (needsJavaSig(sym, preRebound, Nil)) {
val i = builder.length()
jsig(preRebound, existentiallyBound, toplevel = false, unboxedVCs = true)
if (builder.charAt(i) == 'L') {
builder.delete(builder.length() - 1, builder.length())// delete ';'
// If the prefix is a module, drop the '$'. Classes (or modules) nested in modules
// are separated by a single '$' in the filename: `object o { object i }` is o$i$.
if (preRebound.typeSymbol.isModuleClass)
builder.delete(builder.length() - 1, builder.length())
// Ensure every '.' in the generated signature immediately follows
// a close angle bracket '>'. Any which do not are replaced with '$'.
// This arises due to multiply nested classes in the face of the
// rewriting explained at rebindInnerClass.
// TODO revisit this. Does it align with javac for code that can be expressed in both languages?
val delimiter = if (builder.charAt(builder.length() - 1) == '>') '.' else '$'
builder.append(delimiter).append(sym.javaSimpleName)
} else fullNameInSig(sym)
} else fullNameInSig(sym)
if (!args.isEmpty) {
builder.append('<')
args foreach argSig
builder.append('>')
}
builder.append(';')
}
// If args isEmpty, Array is being used as a type constructor
if (sym == ArrayClass && args.nonEmpty) {
if (unboundedGenericArrayLevel(tp) == 1) jsig1(ObjectTpe)
else {
builder.append(ARRAY_TAG)
args.foreach(jsig1(_))
}
}
else if (isTypeParameterInSig(sym, sym0)) {
assert(!sym.isAliasType, "Unexpected alias type: " + sym)
builder.append(TVAR_TAG).append(sym.name).append(';')
}
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
jsig1(ObjectTpe)
else if (sym == UnitClass)
jsig1(BoxedUnitTpe)
else if (sym == NothingClass)
jsig1(RuntimeNothingClass.tpe)
else if (sym == NullClass)
jsig1(RuntimeNullClass.tpe)
else if (isPrimitiveValueClass(sym)) {
if (!unboxedVCs) jsig1(ObjectTpe)
else if (sym == UnitClass) jsig1(BoxedUnitTpe)
else builder.append(abbrvTag(sym))
}
else if (sym.isDerivedValueClass) {
if (unboxedVCs) {
val unboxedSeen = (tp memberType sym.derivedValueClassUnbox).finalResultType
jsig(unboxedSeen, existentiallyBound, toplevel, unboxedVCs = true)
} else classSig()
}
else if (sym.isClass)
classSig()
else
jsig(erasure(sym0)(tp), existentiallyBound, toplevel, unboxedVCs)
case PolyType(tparams, restpe) =>
assert(tparams.nonEmpty, tparams)
if (toplevel) polyParamSig(tparams)
jsig1(restpe)
case MethodType(params, restpe) =>
builder.append('(')
params foreach (p => {
val isClassOuterParam = sym0.isClassConstructor && p.isOuterParam
if (!isClassOuterParam) {
val tp = p.attachments.get[TypeParamVarargsAttachment] match {
case Some(att) =>
// For @varargs forwarders, a T* parameter has type Array[Object] in the forwarder
// instead of Array[T], as the latter would erase to Object (instead of Array[Object]).
// To make the generic signature correct ("[T", not "[Object"), an attachment on the
// parameter symbol stores the type T that was replaced by Object.
builder.append('['); att.typeParamRef
case _ => p.tpe
}
jsig1(tp)
}
})
builder.append(')')
if (restpe.typeSymbol == UnitClass || sym0.isConstructor) builder.append(VOID_TAG) else jsig1(restpe)
case RefinedType(parents, decls) =>
jsig(intersectionDominator(parents), existentiallyBound = Nil, toplevel = false, unboxedVCs = unboxedVCs)
case ClassInfoType(parents, _, _) =>
superSig(tp.typeSymbol, parents)
case AnnotatedType(_, atp) =>
jsig(atp, existentiallyBound, toplevel, unboxedVCs)
case BoundedWildcardType(bounds) =>
println("something's wrong: "+sym0+":"+sym0.tpe+" has a bounded wildcard type")
jsig(bounds.hi, existentiallyBound, toplevel, unboxedVCs)
case _ =>
val etp = erasure(sym0)(tp)
if (etp eq tp) throw new UnknownSig
else jsig1(etp)
}
}
val throwsArgs = sym0.annotations flatMap ThrownException.unapply
if (needsJavaSig(sym0, info, throwsArgs)) {
try {
jsig(info, toplevel = true)
throwsArgs.foreach { t =>
builder.append('^')
jsig(t, toplevel = true)
}
Some(builder.toString)
}
catch { case ex: UnknownSig => None }
}
else None
}
class UnknownSig extends Exception
/** Add calls to supermixin constructors
* `super[mix].$init$()`
* to tree, which is assumed to be the body of a constructor of class clazz.
*/
private def addMixinConstructorCalls(tree: Tree, clazz: Symbol): Tree = {
// TODO: move to constructors?
def mixinConstructorCalls: List[Tree] = {
for (mc <- clazz.mixinClasses.reverse if mc.isTrait && mc.primaryConstructor != NoSymbol)
yield atPos(tree.pos) {
Apply(SuperSelect(clazz, mc.primaryConstructor), Nil)
}
}
tree match {
case Block(Nil, expr) =>
// AnyVal constructor - have to provide a real body so the
// jvm doesn't throw a VerifyError. But we can't add the
// body until now, because the typer knows that Any has no
// constructor and won't accept a call to super.init.
assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz)
Block(List(Apply(gen.mkSuperInitCall, Nil)), expr)
case Block(stats, expr) =>
// needs `hasSymbolField` check because `supercall` could be a block (named / default args)
val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER)): @unchecked
treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr)
case x => throw new MatchError(x)
}
}
val deconstMap = new TypeMap {
// For some reason classOf[Foo] creates ConstantType(Constant(tpe)) with an actual Type for tpe,
// which is later translated to a Class. Unfortunately that means we have bugs like the erasure
// of Class[Foo] and classOf[Bar] not being seen as equivalent, leading to duplicate method
// generation and failing bytecode. See ticket #4753.
def apply(tp: Type): Type = tp match {
case PolyType(_, _) => mapOver(tp)
case MethodType(_, _) => mapOver(tp) // nullarymethod was eliminated during uncurry
case ConstantType(Constant(_: Type)) => ClassClass.tpe // all classOfs erase to Class
case ConstantType(value) => value.tpe.deconst
case _ => tp.deconst
}
}
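  // Editorial illustration (not part of the original source): under deconstMap,
  // both ConstantType(Constant(classOf[Foo])) and ConstantType(Constant(classOf[Bar]))
  // erase to Class, and a literal singleton type such as Int(1) widens to Int,
  // so signatures differing only in such constants compare as equal.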
// ## requires a little translation
private lazy val poundPoundMethods = Set[Symbol](Any_##, Object_##)
// Methods on Any/Object which we rewrite here while we still know what
// is a primitive and what arrived boxed.
private lazy val interceptedMethods = poundPoundMethods ++ primitiveGetClassMethods
// -------- erasure on trees ------------------------------------------
override def newTyper(context: Context) = new Eraser(context)
class EnterBridges(unit: CompilationUnit, root: Symbol) {
val site = root.thisType
val bridgesScope = newScope
val bridgeTarget = mutable.HashMap[Symbol, Symbol]()
val opc = enteringExplicitOuter { new overridingPairs.BridgesCursor(root) }
def computeAndEnter(): Unit = {
while (opc.hasNext) {
if (enteringExplicitOuter(!opc.low.isDeferred))
checkPair(opc.currentPair)
opc.next()
}
}
/** Check that a bridge only overrides members that are also overridden by the original member.
* This test is necessary only for members that have a value class in their type.
     * Such members are special because their types after erasure and after post-erasure differ.
* This means we generate them after erasure, but the post-erasure transform might introduce
* a name clash. The present method guards against these name clashes.
*
* @param member The original member
* @param other The overridden symbol for which the bridge was generated
* @param bridge The bridge
*/
def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): scala.collection.Seq[(Position, String)] = {
def fulldef(sym: Symbol) =
if (sym == NoSymbol) sym.toString
else s"$sym: ${sym.tpe} in ${sym.owner}"
val clashErrors = mutable.Buffer[(Position, String)]()
def clashError(what: String) = {
val pos = if (member.owner == root) member.pos else root.pos
val msg = sm"""bridge generated for member ${fulldef(member)}
|which overrides ${fulldef(other)}
|clashes with definition of $what;
|both have erased type ${exitingPostErasure(bridge.tpe)}"""
clashErrors += Tuple2(pos, msg)
}
for (bc <- root.baseClasses) {
if (settings.isDebug)
exitingPostErasure(println(
sm"""check bridge overrides in $bc
|${bc.info.nonPrivateDecl(bridge.name)}
|${site.memberType(bridge)}
|${site.memberType(bc.info.nonPrivateDecl(bridge.name) orElse IntClass)}
|${(bridge.matchingSymbol(bc, site))}"""))
def overriddenBy(sym: Symbol) =
sym.matchingSymbol(bc, site).alternatives filter (sym => !sym.isBridge)
for (overBridge <- exitingPostErasure(overriddenBy(bridge))) {
if (overBridge == member) {
clashError("the member itself")
} else {
val overMembers = overriddenBy(member)
if (!overMembers.exists(overMember =>
exitingPostErasure(overMember.tpe =:= overBridge.tpe))) {
clashError(fulldef(overBridge))
}
}
}
}
clashErrors
}
/** TODO - work through this logic with a fine-toothed comb, incorporating
* into SymbolPairs where appropriate.
*/
def checkPair(pair: SymbolPair): Unit = {
import pair._
val member = low
val other = high
val otpe = highErased
val bridgeNeeded = exitingErasure (
!member.isMacro &&
!(other.tpe =:= member.tpe) &&
!(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
{ var e = bridgesScope.lookupEntry(member.name)
while ((e ne null) && !((e.sym.tpe =:= otpe) && (bridgeTarget(e.sym) == member)))
e = bridgesScope.lookupNextEntry(e)
(e eq null)
}
)
if (!bridgeNeeded)
return
var newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY)
// If `member` is a ModuleSymbol, the bridge should not also be a ModuleSymbol. Otherwise we
// end up with two module symbols with the same name in the same scope, which is surprising
// when implementing later phases.
if (member.isModule) newFlags = (newFlags | METHOD) & ~(MODULE | STABLE)
val bridge = other.cloneSymbolImpl(root, newFlags).setPos(root.pos).setAnnotations(member.annotations)
debuglog("generating bridge from %s (%s): %s%s to %s: %s%s".format(
other, flagsToString(newFlags),
otpe, other.locationString, member,
specialErasure(root)(member.tpe), member.locationString)
)
// the parameter symbols need to have the new owner
bridge setInfo (otpe cloneInfo bridge)
bridgeTarget(bridge) = member
def sigContainsValueClass = (member.tpe exists (_.typeSymbol.isDerivedValueClass))
val shouldAdd = (
!sigContainsValueClass
|| (checkBridgeOverrides(member, other, bridge) match {
case Nil => true
case es if member.owner.isAnonymousClass => resolveAnonymousBridgeClash(member, bridge); true
case es => for ((pos, msg) <- es) reporter.error(pos, msg); false
})
)
if (shouldAdd) {
exitingErasure(root.info.decls enter bridge)
bridgesScope enter bridge
addBridge(bridge, member, other)
//bridges ::= makeBridgeDefDef(bridge, member, other)
}
}
protected def addBridge(bridge: Symbol, member: Symbol, other: Symbol): Unit = {} // hook for GenerateBridges
}
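  // Editorial illustration (not part of the original source): for
  // `class C extends Ordered[C] { def compare(that: C): Int = 0 }`, the logic
  // above enters a bridge `compare(that: Object): Int` whose body forwards
  // (with a cast) to the user-defined `compare(that: C): Int`.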
class GenerateBridges(unit: CompilationUnit, root: Symbol) extends EnterBridges(unit, root) {
var bridges = List.empty[Tree]
var toBeRemoved = immutable.Set.empty[Symbol]
def generate(): (List[Tree], immutable.Set[Symbol]) = {
super.computeAndEnter()
(bridges, toBeRemoved)
}
override def addBridge(bridge: Symbol, member: Symbol, other: Symbol): Unit = {
if (other.owner == root) {
exitingErasure(root.info.decls.unlink(other))
toBeRemoved += other
}
bridges ::= makeBridgeDefDef(bridge, member, other)
}
final def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = exitingErasure {
// type checking ensures we can safely call `other`, but unless `member.tpe <:< other.tpe`,
// calling `member` is not guaranteed to succeed in general, there's
// nothing we can do about this, except for an unapply: when this subtype test fails,
// return None without calling `member`
//
// TODO: should we do this for user-defined unapplies as well?
// does the first argument list have exactly one argument -- for user-defined unapplies we can't be sure
def maybeWrap(bridgingCall: Tree): Tree = {
val guardExtractor = ( // can't statically know which member is going to be selected, so don't let this depend on member.isSynthetic
(member.name == nme.unapply || member.name == nme.unapplySeq)
&& !exitingErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?)
import CODE._
val _false = FALSE
val pt = member.tpe.resultType
lazy val zero =
if (_false.tpe <:< pt) _false
else if (NoneModule.tpe <:< pt) REF(NoneModule)
else EmptyTree
if (guardExtractor && (zero ne EmptyTree)) {
val typeTest = gen.mkIsInstanceOf(REF(bridge.firstParam), member.tpe.params.head.tpe)
IF (typeTest) THEN bridgingCall ELSE zero
} else bridgingCall
}
val rhs = member.tpe match {
case MethodType(Nil, FoldableConstantType(c)) => Literal(c)
case _ =>
val sel: Tree = gen.mkAttributedSelect(gen.mkAttributedThis(root), member)
val bridgingCall = bridge.paramss.foldLeft(sel)((fun, vparams) => Apply(fun, vparams map Ident))
maybeWrap(bridgingCall)
}
DefDef(bridge, rhs)
}
}
/** The modifier typer which retypes with erased types. */
class Eraser(_context: Context) extends Typer(_context) {
val typeAdapter = new TypeAdapter { def typedPos(pos: Position)(tree: Tree): Tree = Eraser.this.typedPos(pos)(tree) }
import typeAdapter._
override protected def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = tree
/** Replace member references as follows:
*
* - `x == y` for == in class Any becomes `x equals y` with equals in class Object.
* - `x != y` for != in class Any becomes `!(x equals y)` with equals in class Object.
* - x.asInstanceOf[T] becomes x.$asInstanceOf[T]
* - x.isInstanceOf[T] becomes x.$isInstanceOf[T]
* - x.isInstanceOf[ErasedValueType(tref)] becomes x.isInstanceOf[tref.sym.tpe]
* - x.m where m is some other member of Any becomes x.m where m is a member of class Object.
* - x.m where x has unboxed value type T and m is not a directly translated member of T becomes T.box(x).m
* - x.m where x is a reference type and m is a directly translated member of value type T becomes x.TValue().m
* - All forms of x.m where x is a boxed type and m is a member of an unboxed class become
* x.m where m is the corresponding member of the boxed class.
*/
private def adaptMember(tree: Tree): Tree = {
//Console.println("adaptMember: " + tree);
tree match {
case Apply(ta @ TypeApply(sel @ Select(qual, name), targ :: Nil), List())
if tree.symbol == Any_asInstanceOf =>
val qual1 = typedQualifier(qual, NOmode, ObjectTpe) // need to have an expected type, see #3037
// !!! Make pending/run/t5866b.scala work. The fix might be here and/or in unbox1.
if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) {
val noNullCheckNeeded = targ.tpe match {
case ErasedValueType(_, underlying) => isPrimitiveValueType(underlying)
case _ => true
}
if (noNullCheckNeeded) unbox(qual1, targ.tpe)
else {
val untyped =
// util.trace("new asinstanceof test") {
gen.evalOnce(qual1, context.owner, fresh) { qual =>
If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullTpe)),
Literal(Constant(null)) setType targ.tpe,
unbox(qual(), targ.tpe))
}
// }
typed(untyped)
}
} else treeCopy.Apply(tree, treeCopy.TypeApply(ta, treeCopy.Select(sel, qual1, name), List(targ)), List())
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_isInstanceOf =>
targ.tpe match {
case ErasedValueType(clazz, _) => targ.setType(clazz.tpe)
case _ =>
}
tree
case Select(qual, name) =>
if (tree.symbol == NoSymbol) {
tree
} else if (name == nme.CONSTRUCTOR) {
if (tree.symbol.owner == AnyValClass) tree.symbol = ObjectClass.primaryConstructor
tree
} else if (tree.symbol == Any_asInstanceOf)
adaptMember(atPos(tree.pos)(Select(qual, Object_asInstanceOf)))
else if (tree.symbol == Any_isInstanceOf)
adaptMember(atPos(tree.pos)(Select(qual, Object_isInstanceOf)))
else if (tree.symbol.owner == AnyClass)
adaptMember(atPos(tree.pos)(Select(qual, getMember(ObjectClass, tree.symbol.name))))
else {
var qual1 = typedQualifier(qual)
if ((isPrimitiveValueType(qual1.tpe) && !isPrimitiveValueMember(tree.symbol)) ||
isErasedValueType(qual1.tpe))
qual1 = box(qual1)
else if (!isPrimitiveValueType(qual1.tpe) && isPrimitiveValueMember(tree.symbol))
qual1 = unbox(qual1, tree.symbol.owner.tpe)
def selectFrom(qual: Tree) = treeCopy.Select(tree, qual, name)
if (isPrimitiveValueMember(tree.symbol) && !isPrimitiveValueType(qual1.tpe)) {
tree.symbol = NoSymbol
selectFrom(qual1)
} else if (isMethodTypeWithEmptyParams(qual1.tpe)) { // see also adaptToType in TypeAdapter
assert(qual1.symbol.isStable, qual1.symbol)
adaptMember(selectFrom(applyMethodWithEmptyParams(qual1)))
} else if (!qual1.isInstanceOf[Super] && (!isJvmAccessible(qual1.tpe.typeSymbol, context) || !qual1.tpe.typeSymbol.isSubClass(tree.symbol.owner))) {
// A selection requires a cast:
// - In `(foo: Option[String]).get.trim`, the qualifier has type `Object`. We cast
// to the owner of `trim` (`String`), unless the owner is a non-accessible Java
// class, in which case a `QualTypeSymAttachment` is present (see below).
// - In `a.b().c()`, the qualifier `a.b()` may have an accessible type `X` before
// erasure, but a non-accessible type `Y` after erasure (scala/bug#10450). Again
// we cast to the owner of `c`, or, if that is not accessible either, to the
// class stored in the `QualTypeSymAttachment`.
//
// A `QualTypeSymAttachment` is present if the selected member's owner is not an
// accessible (java-defined) class, see `preErase`.
//
// Selections from `super` are not handled here because inserting a cast would not be
// legal code. Instead there's a special case in `typedSelectInternal`.
val qualTpe = tree.getAndRemoveAttachment[QualTypeSymAttachment] match {
case Some(a) => a.sym.tpe
case None => tree.symbol.owner.tpe
}
selectFrom(cast(qual1, qualTpe))
} else {
selectFrom(qual1)
}
}
case SelectFromArray(qual, name, erasure) =>
var qual1 = typedQualifier(qual)
if (!(qual1.tpe <:< erasure)) qual1 = cast(qual1, erasure)
Select(qual1, name) copyAttrs tree
case _ =>
tree
}
}
/** A replacement for the standard typer's adapt method.
*/
override protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree =
adaptToType(tree, pt)
/** A replacement for the standard typer's `typed1` method.
*/
override def typed1(tree: Tree, mode: Mode, pt: Type): Tree = {
val tree1 = try {
tree match {
case DefDef(_,_,_,_,_,_) if tree.symbol.isClassConstructor && tree.symbol.isPrimaryConstructor && tree.symbol.owner != ArrayClass =>
super.typed1(deriveDefDef(tree)(addMixinConstructorCalls(_, tree.symbol.owner)), mode, pt) // (3)
case Template(parents, self, body) =>
val parents1 = tree.symbol.owner.info.parents map (t => TypeTree(t) setPos tree.pos)
super.typed1(treeCopy.Template(tree, parents1, noSelfType, body), mode, pt)
case InjectDerivedValue(arg) =>
(tree.attachments.get[TypeRefAttachment]: @unchecked) match {
case Some(itype) =>
val tref = itype.tpe
val argPt = enteringErasure(erasedValueClassArg(tref))
log(s"transforming inject $arg -> $tref/$argPt")
val result = typed(arg, mode, argPt)
log(s"transformed inject $arg -> $tref/$argPt = $result:${result.tpe}")
return result setType ErasedValueType(tref.sym, result.tpe)
}
case _ =>
super.typed1(adaptMember(tree), mode, pt)
}
} catch {
case er: TypeError =>
Console.println("exception when typing " + tree+"/"+tree.getClass)
Console.println(er.msg + " in file " + context.owner.sourceFile)
er.printStackTrace
abort("unrecoverable error")
case ex: Exception =>
//if (settings.debug.value)
try Console.println("exception when typing " + tree)
finally throw ex
throw ex
}
def adaptCase(cdef: CaseDef): CaseDef = {
val newCdef = deriveCaseDef(cdef)(adaptToType(_, tree1.tpe))
newCdef setType newCdef.body.tpe
}
def adaptBranch(branch: Tree): Tree =
if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe)
tree1 match {
case fun: Function =>
fun.attachments.get[SAMFunction] match {
case Some(SAMFunction(samTp, _, _)) => fun setType specialScalaErasure(samTp)
case _ => fun
}
case If(cond, thenp, elsep) =>
treeCopy.If(tree1, cond, adaptBranch(thenp), adaptBranch(elsep))
case Match(selector, cases) =>
treeCopy.Match(tree1, selector, cases map adaptCase)
case Try(block, catches, finalizer) =>
treeCopy.Try(tree1, adaptBranch(block), catches map adaptCase, finalizer)
case Ident(_) | Select(_, _) =>
if (tree1.symbol.isOverloaded) {
val first = tree1.symbol.alternatives.head
val firstTpe = first.tpe
val sym1 = tree1.symbol.filter {
alt => alt == first || !(firstTpe looselyMatches alt.tpe)
}
if (tree.symbol ne sym1) {
tree1 setSymbol sym1 setType sym1.tpe
}
}
tree1
case _ =>
tree1
}
}
}
/** The erasure transformer */
class ErasureTransformer(unit: CompilationUnit) extends AstTransformer {
import overridingPairs.PairsCursor
private def doubleDefError(pair: SymbolPair): Unit = {
import pair._
if (!pair.isErroneous) {
val what = (
if (low.owner == high.owner) "double definition"
else if (low.owner == base) "name clash between defined and inherited member"
else "name clash between inherited members"
)
val when = if (exitingRefchecks(lowType matches highType)) "" else " after erasure: " + exitingPostErasure(highType)
reporter.error(pos,
s"""|$what:
|${exitingRefchecks(highString)} and
|${exitingRefchecks(lowString)}
|have same type$when""".trim.stripMargin
)
}
low setInfo ErrorType
}
private def sameTypeAfterErasure(sym1: Symbol, sym2: Symbol) =
exitingPostErasure(sym1.info =:= sym2.info) && !sym1.isMacro && !sym2.isMacro
/** TODO - adapt SymbolPairs so it can be used here. */
private def checkNoDeclaredDoubleDefs(base: Symbol): Unit = {
val decls = base.info.decls
// scala/bug#8010 force infos, otherwise makeNotPrivate in ExplicitOuter info transformer can trigger
      // a scope rehash while we're iterating and we can see the same entry twice!
// Inspection of SymbolPairs (the basis of OverridingPairs), suggests that it is immune
// from this sort of bug as it copies the symbols into a temporary scope *before* any calls to `.info`,
// ie, no variant of it calls `info` or `tpe` in `SymbolPair#exclude`.
//
// Why not just create a temporary scope here? We need to force the name changes in any case before
// we do these checks, so that we're comparing same-named methods based on the expanded names that actually
// end up in the bytecode.
exitingPostErasure(decls.foreach(_.info))
var e = decls.elems
while (e ne null) {
if (e.sym.isTerm) {
var e1 = decls lookupNextEntry e
while (e1 ne null) {
assert(e.sym ne e1.sym, s"Internal error: encountered ${e.sym.debugLocationString} twice during scope traversal. This might be related to scala/bug#8010.")
if (sameTypeAfterErasure(e.sym, e1.sym))
doubleDefError(new SymbolPair(base, e.sym, e1.sym))
e1 = decls lookupNextEntry e1
}
}
e = e.next
}
}
private class DoubleDefsCursor(root: Symbol) extends PairsCursor(root) {
// specialized members have no type history before 'specialize', causing double def errors for curried defs
override def exclude(sym: Symbol): Boolean = (
sym.isType
|| super.exclude(sym)
|| !sym.hasTypeAt(currentRun.refchecksPhase.id)
)
override def matches(high: Symbol) = !high.isPrivate
}
/** Emit an error if there is a double definition. This can happen if:
*
* - A template defines two members with the same name and erased type.
* - A template defines and inherits two members `m` with different types,
* but their erased types are the same.
* - A template inherits two members `m` with different types,
* but their erased types are the same.
*/
private def checkNoDoubleDefs(root: Symbol): Unit = {
checkNoDeclaredDoubleDefs(root)
def isErasureDoubleDef(pair: SymbolPair) = {
import pair._
log(s"Considering for erasure clash:\n$pair")
!exitingRefchecks(lowType matches highType) && sameTypeAfterErasure(low, high)
}
(new DoubleDefsCursor(root)).iterator filter isErasureDoubleDef foreach doubleDefError
}
/** Add bridge definitions to a template. This means:
*
* If there is a concrete member `m` which overrides a member in a base
* class of the template, and the erased types of the two members differ,
* and the two members are not inherited or defined by some parent class
* of the template, then a bridge from the overridden member `m1` to the
* member `m0` is added. The bridge has the erased type of `m1` and
* forwards to `m0`.
*
* No bridge is added if there is already a bridge to `m0` with the erased
* type of `m1` in the template.
*/
private def bridgeDefs(owner: Symbol): (List[Tree], immutable.Set[Symbol]) = {
assert(phase == currentRun.erasurePhase, phase)
new GenerateBridges(unit, owner).generate()
}
def addBridgesToTemplate(stats: List[Tree], base: Symbol): List[Tree] =
if (base.isTrait) stats
else {
val (bridges, toBeRemoved) = bridgeDefs(base)
if (bridges.isEmpty) stats
else (stats filterNot (stat => toBeRemoved contains stat.symbol)) ::: bridges
}
def addBridgesToLambda(lambdaClass: Symbol): Unit = {
assert(phase == currentRun.erasurePhase, phase)
assert(lambdaClass.isClass, lambdaClass)
new EnterBridges(unit, lambdaClass).computeAndEnter()
}
/** Transform tree at phase erasure before retyping it.
* This entails the following:
*
* - Remove all type parameters in class and method definitions.
* - Remove all abstract and alias type definitions.
* - Remove all type applications other than those involving a type test or cast.
* - Remove all empty trees in statements and definitions in a PackageDef.
* - Check that there are no double definitions in a template.
* - Add bridge definitions to a template.
* - Replace all types in type nodes and the EmptyTree object by their erasure.
* Type nodes of type Unit representing result types of methods are left alone.
* - Remove all instance creations new C(arg) where C is an inlined class.
* - Reset all other type attributes to null, thus enforcing a retyping.
*/
private val preTransformer = new TypingTransformer(unit) {
// Work around some incomplete path unification :( there are similar casts in SpecializeTypes
def context: Context = localTyper.context.asInstanceOf[Context]
// TODO: since the spec defines instanceOf checks in terms of pattern matching,
// this extractor should share code with TypeTestTreeMaker. The corresponding
// code is somewhat buried in and entangled with the pattern matching mechanics
// which makes this fiddly to do now.
object SingletonInstanceCheck {
def unapply(pt: Type): Option[(TermSymbol, Tree)] = {
def containsSingleton(tp: Type): Boolean =
tp.dealias match {
case SingleType(_, _) | ConstantType(_) | ThisType(_) | SuperType(_, _) => true
case RefinedType(parents, _) => parents.exists(containsSingleton)
case _ => false
}
if(containsSingleton(pt)) {
val cmpOp = if (pt.typeSymbol.isSubClass(AnyValClass)) Any_equals else Object_eq
val cmpArg = gen.mkAttributedQualifier(pt)
Some((cmpOp, cmpArg))
} else None
}
}
private def preEraseNormalApply(tree: Apply) = {
val fn = tree.fun
val args = tree.args
def qualifier = fn match {
case Select(qual, _) => qual
case TypeApply(Select(qual, _), _) => qual
case x => throw new MatchError(x)
}
// TODO: this should share logic with TypeTestTreeMaker in the pattern matcher,
// since `x.isInstanceOf[T]` is specified as the pattern match. The corresponding
// code is somewhat buried in and entangled with the pattern matching mechanics
// which makes this fiddly to do now.
def preEraseAsInstanceOf = {
(fn: @unchecked) match {
case TypeApply(Select(qual, _), List(targ)) =>
targ.tpe match {
case argTp if qual.tpe <:< argTp =>
atPos(tree.pos) { Typed(qual, TypeTree(argTp)) }
case argTp if isNumericValueClass(qual.tpe.typeSymbol) && isNumericValueClass(argTp.typeSymbol) =>
atPos(tree.pos)(numericConversion(qual, argTp.typeSymbol))
case _ =>
tree
}
}
// todo: also handle the case where the singleton type is buried in a compound
}
// TODO: this should share logic with TypeTestTreeMaker in the pattern matcher,
// since `x.isInstanceOf[T]` is specified as the pattern match. The corresponding
// code is somewhat buried in and entangled with the pattern matching mechanics
// which makes this fiddly to do now.
// `x match { case _: T => true case _ => false }` (modulo numeric conversion)
def preEraseIsInstanceOf = {
fn match {
case TypeApply(sel @ Select(qual, name), List(targ)) =>
if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefTpe)
reporter.error(sel.pos, "isInstanceOf cannot test if value types are references.")
def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
Apply(
TypeApply(
Select(q(), Object_isInstanceOf) setPos sel.pos,
List(TypeTree(tp) setPos targ.pos)) setPos fn.pos,
List()) setPos tree.pos
targ.tpe match {
case SingletonInstanceCheck(cmpOp, cmpArg) =>
atPos(tree.pos) { Apply(Select(cmpArg, cmpOp), List(qual)) }
case RefinedType(parents, decls) if (parents.lengthIs >= 2) =>
gen.evalOnce(qual, currentOwner, localTyper.fresh) { q =>
// Optimization: don't generate isInstanceOf tests if the static type
// conforms, because it always succeeds. (Or at least it had better.)
// At this writing the pattern matcher generates some instance tests
// involving intersections where at least one parent is statically known true.
// That needs fixing, but filtering the parents here adds an additional
// level of robustness (in addition to the short term fix.)
val parentTests = parents filterNot (qual.tpe <:< _)
if (parentTests.isEmpty) Literal(Constant(true))
else atPos(tree.pos) {
parentTests map mkIsInstanceOf(q) reduceRight gen.mkAnd
}
}
case TypeRef(_, SingletonClass, _) =>
atPos(tree.pos) {
if(qual.tpe <:< AnyRefTpe)
Apply(Select(qual, Object_ne), List(Literal(Constant(null)) setType NullTpe))
else
Literal(Constant(true))
}
case _ => tree
}
case _ => tree
}
}
if (fn.symbol == Any_asInstanceOf) {
preEraseAsInstanceOf
} else if (fn.symbol == Any_isInstanceOf) {
preEraseIsInstanceOf
} else if (fn.symbol.isOnlyRefinementMember) {
// !!! Another spot where we produce overloaded types (see test pos/t6301)
log(s"${fn.symbol.fullLocationString} originates in refinement class - call will be implemented via reflection.")
ApplyDynamic(qualifier, args) setSymbol fn.symbol setPos tree.pos
} else if (fn.symbol.isMethodWithExtension && !fn.symbol.tpe.isErroneous) {
Apply(gen.mkAttributedRef(extensionMethods.extensionMethod(fn.symbol)), qualifier :: args)
} else {
tree
}
}
private def preEraseApply(tree: Apply) = {
tree.fun match {
case TypeApply(fun @ Select(qual, name), args @ List(arg))
if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) &&
unboundedGenericArrayLevel(arg.tpe) > 0) => // !!! todo: simplify by having GenericArray also extract trees
val level = unboundedGenericArrayLevel(arg.tpe)
def isArrayTest(arg: Tree) =
gen.mkRuntimeCall(nme.isArray, List(arg, Literal(Constant(level))))
global.typer.typedPos(tree.pos) {
if (level == 1) isArrayTest(qual)
else gen.evalOnce(qual, currentOwner, localTyper.fresh) { qual1 =>
gen.mkAnd(
gen.mkMethodCall(
qual1(),
fun.symbol,
List(specialErasure(fun.symbol)(arg.tpe)),
Nil
),
isArrayTest(qual1())
)
}
}
case fn @ Select(qual, name) =>
val args = tree.args
if (fn.symbol.owner == ArrayClass) {
// Have to also catch calls to abstract types which are bounded by Array.
if (unboundedGenericArrayLevel(qual.tpe.widen) == 1 || qual.tpe.typeSymbol.isAbstractType) {
// convert calls to apply/update/length on generic arrays to
// calls of ScalaRunTime.array_xxx method calls
global.typer.typedPos(tree.pos) {
val arrayMethodName = name match {
case nme.apply => nme.array_apply
case nme.length => nme.array_length
case nme.update => nme.array_update
case nme.clone_ => nme.array_clone
case _ => reporter.error(tree.pos, "Unexpected array member, no translation exists.") ; nme.NO_NAME
}
gen.mkRuntimeCall(arrayMethodName, qual :: args)
}
} else {
// store exact array erasure in map to be retrieved later when we might
// need to do the cast in adaptMember
// Note: No specialErasure needed here because we simply cast, on
// elimination of SelectFromArray, no boxing or unboxing is done there.
treeCopy.Apply(
tree,
SelectFromArray(qual, name, erasure(tree.symbol)(qual.tpe)).copyAttrs(fn),
args)
}
}
else if (args.isEmpty && interceptedMethods(fn.symbol)) {
if (poundPoundMethods.contains(fn.symbol)) {
// This is unattractive, but without it we crash here on ().## because after
// erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
// This must be because some earlier transformation is being skipped on ##, but so
// far I don't know what. For null we now define null.## == 0.
def staticsCall(methodName: TermName): Tree = {
val newTree = gen.mkMethodCall(RuntimeStaticsModule, methodName, qual :: Nil)
global.typer.typed(newTree)
}
qual.tpe.typeSymbol match {
case UnitClass | NullClass => LIT(0)
case IntClass | ShortClass | ByteClass | CharClass => qual
case BooleanClass => If(qual, LIT(true.##), LIT(false.##))
case LongClass => staticsCall(nme.longHash)
case FloatClass => staticsCall(nme.floatHash)
case DoubleClass => staticsCall(nme.doubleHash)
case _ => staticsCall(nme.anyHash)
}
} else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
// Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen))))
} else if (primitiveGetClassMethods.contains(fn.symbol)) {
// if we got here then we're trying to send a primitive getClass method to either
            // a) an Any, in which case Object_getClass works because Any erases to Object. Or
//
// b) a non-primitive, e.g. because the qualifier's type is a refinement type where one parent
// of the refinement is a primitive and another is AnyRef. In that case
// we get a primitive form of _getClass trying to target a boxed value
            // so we need to replace that method name with Object_getClass to get correct behavior.
// See scala/bug#5568.
tree setSymbol Object_getClass
} else {
devWarning(s"The symbol '${fn.symbol}' was intercepted but didn't match any cases, that means the intercepted methods set doesn't match the code")
tree
}
} else qual match {
case New(tpt) if name == nme.CONSTRUCTOR && tpt.tpe.typeSymbol.isDerivedValueClass =>
// println("inject derived: "+arg+" "+tpt.tpe)
val List(arg) = args: @unchecked
val attachment = new TypeRefAttachment(tree.tpe.asInstanceOf[TypeRef])
InjectDerivedValue(arg) updateAttachment attachment
case _ =>
preEraseNormalApply(tree)
}
case _ =>
preEraseNormalApply(tree)
}
}
def preErase(tree: Tree): Tree = tree match {
case tree: Apply =>
preEraseApply(tree)
case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
fun.symbol != Object_asInstanceOf &&
fun.symbol != Object_isInstanceOf &&
fun.symbol != Object_synchronized) =>
// leave all other type tests/type casts, remove all other type applications
preErase(fun)
case Select(qual, name) =>
val sym = tree.symbol
val owner = sym.owner
if (owner.isRefinementClass) {
sym.allOverriddenSymbols filterNot (_.owner.isRefinementClass) match {
case overridden :: _ =>
log(s"${sym.fullLocationString} originates in refinement class - replacing with ${overridden.fullLocationString}.")
tree.symbol = overridden
case Nil =>
// Ideally this should not be reached or reachable; anything which would
// get here should have been caught in the surrounding Apply.
devWarning(s"Failed to rewrite reflective apply - now don't know what to do with " + tree)
return treeCopy.Select(tree, gen.mkAttributedCast(qual, qual.tpe.widen), name)
}
}
// This code may add an QualTypeSymAttachment to the Select tree. The referenced class is
// then used in erasure type checking as the type of the Select's qualifier. This fixes
// two situations where erasure type checking cannot assign a precise enough type.
//
// - In a `super.m` selection, erasure typing assigns the type of the superclass to the
// Super tree. This is wrong if `m` is a member of a trait (not the superclass). A
// special-case in `typedSelectInternal` by default assigns m's owner in this case.
// - In a non-super selection, the qualifier may erase to a type that doesn't define the
// selected member, for example the qualifier of `(q: Option[String]).get.trim` erases
// to Object. Similarly, the qualifier may erase to a Java class that *does* define the
// selected member but is not accessible (scala/bug#10450).
// Erasure's `adaptMember` detects these cases and, by default, introduces a cast to
// the member's owner.
//
// In both cases, using the member's owner is not legal if the member is defined in
// Java and the owner class is not accessible (scala/bug#7936, scala/bug#4283). In this
// situation we store a valid class type of the qualifier in the attachment.
// - For `super.m`, we store a direct parent of the current class
// - For a non-super selection, we store the non-erased class type of the qualifier
//
// In addition, for `super.m` selections, we also store a direct parent of the current
// class if `m` is defined in Java. This avoids the need for having the Java class as
// a direct parent (scala-dev#143).
if (qual.isInstanceOf[Super]) {
val qualSym = accessibleOwnerOrParentDefiningMember(sym, qual.tpe.typeSymbol.parentSymbolsIterator, context) match {
case Some(p) => p
case None =>
// There is no test for this warning, I have been unable to come up with an example that would trigger it.
// In a selection `a.m`, there must be a direct parent from which `m` can be selected.
reporter.error(tree.pos, s"Unable to emit super reference to ${sym.fullLocationString}, $owner is not accessible in ${context.enclClass.owner}")
owner
}
if (sym.isJavaDefined && qualSym.isTraitOrInterface)
requiredDirectInterfaces.getOrElseUpdate(context.enclClass.owner, mutable.Set.empty) += qualSym
if (qualSym != owner)
tree.updateAttachment(new QualTypeSymAttachment(qualSym))
} else if (!isJvmAccessible(owner, context)) {
val qualSym = qual.tpe.typeSymbol
if (qualSym != owner && isJvmAccessible(qualSym, context) && definesMemberAfterErasure(qualSym, sym))
tree.updateAttachment(new QualTypeSymAttachment(qualSym))
else
reporter.error(tree.pos, s"Unable to emit reference to ${sym.fullLocationString}, $owner is not accessible in ${context.enclClass.owner}")
}
tree
case Template(parents, self, body) =>
//Console.println("checking no dble defs " + tree)//DEBUG
checkNoDoubleDefs(tree.symbol.owner)
treeCopy.Template(tree, parents, noSelfType, addBridgesToTemplate(body, currentOwner))
case Match(selector, cases) =>
treeCopy.Match(tree, Typed(selector, TypeTree(selector.tpe)), cases)
case Literal(ct) =>
// We remove the original tree attachments in pre-erasure to free up memory
val cleanLiteral = tree.removeAttachment[OriginalTreeAttachment]
if (ct.tag == ClazzTag && ct.typeValue.typeSymbol != definitions.UnitClass) {
val typeValue = ct.typeValue.dealiasWiden
val erased = erasure(typeValue.typeSymbol) applyInArray typeValue
treeCopy.Literal(cleanLiteral, Constant(erased))
} else cleanLiteral
case ClassDef(_,_,_,_) =>
debuglog("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
copyClassDef(tree)(tparams = Nil)
case DefDef(_,_,_,_,_,_) =>
copyDefDef(tree)(tparams = Nil)
case TypeDef(_, _, _, _) =>
EmptyTree
case fun: Function =>
fun.attachments.get[SAMFunction] foreach {
samf => addBridgesToLambda(samf.synthCls)
}
fun
case _ =>
tree
}
override def transform(tree: Tree): Tree = {
// Reply to "!!! needed?" which adorned the next line: without it, build fails with:
// Exception in thread "main" scala.tools.nsc.symtab.Types$TypeError:
// value array_this is not a member of object scala.runtime.ScalaRunTime
//
// What the heck is array_this? See preTransformer in this file:
// gen.mkRuntimeCall("array_"+name, qual :: args)
if (tree.symbol == ArrayClass && !tree.isType) tree
else {
val tree1 = preErase(tree)
tree1 match {
case TypeApply(fun, targs @ List(targ)) if (fun.symbol == Any_asInstanceOf || fun.symbol == Object_synchronized) && targ.tpe == UnitTpe =>
// scala/bug#9066 prevent transforming `o.asInstanceOf[Unit]` to `o.asInstanceOf[BoxedUnit]`.
// adaptMember will then replace the call by a reference to BoxedUnit.UNIT.
treeCopy.TypeApply(tree1, transform(fun), targs).clearType()
case EmptyTree | TypeTree() =>
tree1 setType specialScalaErasure(tree1.tpe)
case ArrayValue(elemtpt, trees) =>
treeCopy.ArrayValue(
tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform).clearType()
case ValDef(_, _, tpt, rhs) =>
val vd1 = super.transform(tree1).clearType().asInstanceOf[ValDef]
vd1.tpt.tpe match {
case FoldableConstantType(_) if !vd1.rhs.isInstanceOf[Literal] =>
val deconst = vd1.tpt.tpe.deconst
vd1.tpt setType deconst
tree1.symbol.setInfo(deconst)
case _ =>
}
vd1
case DefDef(_, _, _, _, tpt, _) =>
// TODO: move this in some post-processing transform in the fields phase?
if (fields.symbolAnnotationsTargetFieldAndGetter(tree.symbol))
fields.dropFieldAnnotationsFromGetter(tree.symbol)
try super.transform(tree1).clearType()
finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType
case ApplyDynamic(qual, Literal(Constant(bootstrapMethodRef: Symbol)) :: _) =>
tree
case _: Apply if tree1 ne tree =>
/* some Apply trees get replaced (in `preEraseApply`) with one of
* their subtrees, which needs to be `preErase`d in its entirety,
* not just recursed over by super.transform(). */
transform(tree1)
case _ =>
super.transform(tree1).clearType()
}
}
}
}
/** The main transform function: Pretransform the tree, and then
* re-type it at phase erasure.next.
*/
override def transform(tree: Tree): Tree = {
val tree1 = preTransformer.transform(tree)
// log("tree after pretransform: "+tree1)
exitingErasure {
newTyper(rootContextPostTyper(unit, tree)).typed(tree1)
}
}
}
final def resolveAnonymousBridgeClash(sym: Symbol, bridge: Symbol): Unit = {
// TODO reinstate this after Delambdafy generates anonymous classes that meet this requirement.
// require(sym.owner.isAnonymousClass, sym.owner)
log(s"Expanding name of ${sym.debugLocationString} as it clashes with bridge. Renaming deemed safe because the owner is anonymous.")
sym.expandName(sym.owner)
bridge.resetFlag(BRIDGE)
}
/** Does this symbol compile to the underlying platform's notion of an interface,
* without requiring compiler magic before it can be instantiated?
*
* More specifically, we're interested in whether LambdaMetaFactory can instantiate this type,
* assuming it has a single abstract method. In other words, if we were to mix this
* trait into a class, it should not result in any compiler-generated members having to be
* implemented in ("mixed in to") this class (except for the SAM).
*
* Thus, the type must erase to a java interface, either by virtue of being defined as one,
* or by being a trait that:
* - is static (explicitouter or lambdalift may add disqualifying members)
* - extends only other traits that compile to pure interfaces (except for Any)
* - has no val/var members
*
* TODO: can we speed this up using the INTERFACE flag, or set it correctly by construction?
*/
final def compilesToPureInterface(tpSym: Symbol): Boolean = {
def ok(sym: Symbol) =
sym.isJavaInterface ||
sym.isTrait &&
// Unless sym.isStatic, even if the constructor is zero-argument now, it may acquire arguments in explicit outer or lambdalift.
// This is an impl restriction to simplify the decision of whether to expand the SAM during uncurry
// (when we don't yet know whether it will receive an outer pointer in explicit outer or whether lambda lift will add proxies for captures).
// When we delay sam expansion until after explicit outer & lambda lift, we could decide there whether
// to expand sam at compile time or use LMF, and this implementation restriction could be lifted.
sym.isStatic &&
// HACK: this is to rule out traits with an effectful initializer.
// The constructor only exists if the trait's template has statements.
// Sadly, we can't be more precise without access to the tree that defines the SAM's owner.
!sym.primaryConstructor.exists &&
(sym.isInterface || sym.info.decls.forall(mem => mem.isMethod || mem.isType)) // TODO OPT: && {sym setFlag INTERFACE; true})
// we still need to check our ancestors even if the INTERFACE flag is set, as it doesn't take inheritance into account
ok(tpSym) && tpSym.ancestors.forall(sym => (sym eq AnyClass) || (sym eq ObjectClass) || ok(sym))
}
final def isJvmAccessible(cls: Symbol, context: Context): Boolean = {
// Phase travel necessary, isAccessible is too lax after erasure for Java-defined members, see
// comment in its implementation.
!cls.isJavaDefined || enteringErasure(context.isAccessible(cls, cls.owner.thisType))
}
/**
* Check if a class defines a member after erasure. The phase travel is important for
* `trait T extends AClass`: after erasure (and in bytecode), `T` has supertype `Object`, not
* `AClass`.
*/
final def definesMemberAfterErasure(cls: Symbol, member: Symbol): Boolean =
exitingErasure(cls.tpe.member(member.name).alternatives.contains(member))
/**
* The goal of this method is to find a class that is accessible (in bytecode) and can be used
* to select `member`.
* - For constructors, it returns the `member.owner`. We can assume the class is accessible: if
* it wasn't, the typer would have rejected the program, as the class is referenced in source.
* - For Scala-defined members it also returns `member.owner`, all Scala-defined classes are
* public in bytecode.
* - For Java-defined members we prefer a direct parent over the owner, even if the owner is
* accessible. This way the owner doesn't need to be added as a direct parent, see scala-dev#143.
*/
final def accessibleOwnerOrParentDefiningMember(member: Symbol, parents: Iterator[Symbol], context: Context): Option[Symbol] = {
def eraseAny(cls: Symbol) = if (cls == AnyClass || cls == AnyValClass) ObjectClass else cls
if (member.isConstructor || !member.isJavaDefined) Some(eraseAny(member.owner))
else parents.find { p =>
val e = eraseAny(p)
isJvmAccessible(e, context) && definesMemberAfterErasure(e, member)
} orElse {
val e = eraseAny(member.owner)
if (isJvmAccessible(e, context)) Some(e) else None
}
}
private class TypeRefAttachment(val tpe: TypeRef)
}
|
lrytz/scala
|
src/compiler/scala/tools/nsc/transform/Erasure.scala
|
Scala
|
apache-2.0
| 68,692
|
package com.github.dtaniwaki.akka_pusher
import akka.actor.ActorSystem
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.HttpMethods._
import akka.stream.ActorMaterializer
import akka.http.scaladsl.model.MediaTypes._
import akka.stream.scaladsl.Flow
import org.joda.time.DateTimeUtils
import org.specs2.mock.Mockito
import com.github.dtaniwaki.akka_pusher.PusherModels._
import com.github.dtaniwaki.akka_pusher.attributes.{PusherChannelsAttributes, PusherChannelAttributes}
import com.typesafe.config.ConfigFactory
import org.specs2.mutable.Specification
import org.specs2.specification.process.RandomSequentialExecution
import org.specs2.matcher.{Expectable, Matcher}
import spray.json._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.util.{Try, Success}
import scala.language.reflectiveCalls
class PusherClientSpec extends Specification
with RandomSequentialExecution
with SpecHelper
with Mockito
with PusherJsonSupport
{
implicit val system: ActorSystem = ActorSystem("pusher-client")
implicit val materializer = ActorMaterializer()
DateTimeUtils.setCurrentMillisFixed(1452184169130L)
private def awaitResult[A](future: Future[A]) = Await.result(future, Duration.Inf)
private def pusherStub(responseBody: String) = new PusherClient() {
var consumedRequest: HttpRequest = null
override val pool: Flow[(HttpRequest, Int), (Try[HttpResponse], Int), Any] = Flow[(HttpRequest, Int)].map {
case (request, n) =>
consumedRequest = request
(Success(HttpResponse(status = StatusCodes.OK, entity = HttpEntity(ContentType(`application/json`), responseBody))), n)
}
}
class HttpGetRequestMatcher(uri: String) extends Matcher[HttpRequest] {
def apply[S <: HttpRequest](e: Expectable[S]) = {
val actualReq = e.value
val actualMethod = e.value.method
val actualUri = actualReq.uri.toString
result(
actualMethod == GET && actualUri == uri,
s"HttpRequest(${actualMethod}, ${actualUri})\\n is (GET, ${uri})",
s"HttpRequest(${actualMethod}, ${actualUri})\\n is not (GET, ${uri})",
e
)
}
}
class HttpPostRequestMatcher(uri: String, bodyJson: JsValue) extends Matcher[HttpRequest] {
def apply[S <: HttpRequest](e: Expectable[S]) = {
val actualReq = e.value
val actualMethod = e.value.method
val actualUri = actualReq.uri.toString
val actualBodyJson = awaitResult(actualReq.entity.toStrict(5 seconds).map(_.data.decodeString("UTF-8"))).parseJson
result(
actualMethod == POST && actualUri == uri && actualBodyJson == bodyJson,
s"HttpRequest(${actualMethod}, ${actualUri}, ${actualBodyJson})\\n is (POST, ${uri}, ${bodyJson})",
s"HttpRequest(${actualMethod}, ${actualUri}, ${actualBodyJson})\\n is not (POST, ${uri}, ${bodyJson})",
e
)
}
}
private def equalToHttpGetRequest(uri: String) = new HttpGetRequestMatcher(uri)
private def equalToHttpPostRequest(uri: String, jsonBody: JsValue) = new HttpPostRequestMatcher(uri, jsonBody)
"#constructor" should {
"accept the config by argument" in {
val pusher = new PusherClient(ConfigFactory.parseString("""pusher: {appId: "app0", key: "key0", secret: "secret0"}"""))
pusher.appId === "app0"
pusher.key === "key0"
pusher.secret === "secret0"
}
}
"#trigger(channels: Seq[String], event: String, data: T, socketId: Option[String] = None)" should {
"make a request to the the channels" in {
val pusher = pusherStub("")
val res = pusher.trigger(Seq("channel1", "channel2"), "event", "message", Some("123.234"))
awaitResult(res) === Success(Result(""))
pusher.consumedRequest must equalToHttpPostRequest(
"""http://api.pusherapp.com/apps/app/events?auth_key=key&auth_timestamp=1452184169&auth_version=1.0&body_md5=567be22be06070b3cf618f8bc59efa74&auth_signature=59a8f724d3487bde7dcc51a13d9b6d2747fb80d22ed824ada51d5b0e60f42c1e""",
"""{"data":"\\"message\\"","name":"event","channels":["channel1","channel2"],"socket_id":"123.234"}""".parseJson
)
}
"without socket" in {
"make a request to the channels" in {
val pusher = pusherStub("")
val res = pusher.trigger(Seq("channel1", "channel2"), "event", "message")
awaitResult(res) === Success(Result(""))
pusher.consumedRequest must equalToHttpPostRequest(
"""http://api.pusherapp.com/apps/app/events?auth_key=key&auth_timestamp=1452184169&auth_version=1.0&body_md5=a263789fb4fa6eea04c6f0f1ed57c827&auth_signature=5092137acbdb89a5c5f3d3b69b359c19c87b11a7e3e5492559d366ce011e259b""",
"""{"data":"\\"message\\"","name":"event","channels":["channel1","channel2"]}""".parseJson
)
}
}
}
"#trigger(channel: String, event: String, data: T)" should {
"make a request to the channel" in {
val pusher = pusherStub("")
val res = pusher.trigger("channel", "event", "message")
awaitResult(res) === Success(Result(""))
pusher.consumedRequest must equalToHttpPostRequest(
"""http://api.pusherapp.com/apps/app/events?auth_key=key&auth_timestamp=1452184169&auth_version=1.0&body_md5=4caaeb8a1a2a881977ab8dbbedb44165&auth_signature=edf2aa45294d7128b4d4669d9583eb27f5fb481e064668efea4eca955588cbf6""",
"""{"data":"\\"message\\"","name":"event","channels":["channel"]}""".parseJson
)
}
}
"#trigger(channel: String, event: String, data: T, socketId: Option[String])" should {
"make a request to the channel" in {
val pusher = pusherStub("")
val res = pusher.trigger("channel", "event", "message", Some("123.234"))
awaitResult(res) === Success(Result(""))
pusher.consumedRequest must equalToHttpPostRequest(
"""http://api.pusherapp.com/apps/app/events?auth_key=key&auth_timestamp=1452184169&auth_version=1.0&body_md5=9f368a70902d7977ad32b4fdd01e8929&auth_signature=cfdafb4b8f6cfd6ffe69f8531f7fa9411c01a49617ccab74ead60715f3453fdf""",
"""{"data":"\\"message\\"","name":"event","channels":["channel"],"socket_id":"123.234"}""".parseJson
)
}
}
"#trigger(Seq((channel: String, event: String, data: T, socketId: Option[String])))" should {
"make a request to the channels" in {
val pusher = pusherStub("")
val res = pusher.trigger(Seq(
("channel1", "event1", "message1", Some("123.234")),
("channel2", "event2", "message2", Some("234.345"))
))
awaitResult(res) === Success(Result(""))
pusher.consumedRequest must equalToHttpPostRequest(
"""http://api.pusherapp.com/apps/app/batch_events?auth_key=key&auth_timestamp=1452184169&auth_version=1.0&body_md5=27ca0add3a65dd8d8300d03f11c2cfdb&auth_signature=e5db0134b6da74adff472638f77d2f2f73d0f259584701995afc49ae33d66d33""",
"""
{"batch":[
{"data":"\\"message1\\"","name":"event1","channel":"channel1","socket_id":"123.234"},
{"data":"\\"message2\\"","name":"event2","channel":"channel2","socket_id":"234.345"}
]}
""".parseJson
)
}
}
"#channel(channelName: String, attributes: Seq[PusherChannelAttributes.Value] = Seq())" should {
"make a request to pusher" in {
val pusher = pusherStub("{}")
val res = pusher.channel("channel", Seq(PusherChannelAttributes.subscriptionCount, PusherChannelAttributes.userCount))
awaitResult(res) === Success(Channel())
pusher.consumedRequest must equalToHttpGetRequest(
"""http://api.pusherapp.com/apps/app/channels/channel?auth_key=key&auth_timestamp=1452184169&auth_version=1.0&info=subscription_count,user_count&auth_signature=135e1dada101b10e127f5ba7bfbbf810d24463fb0820922ba781a5cc47bf633e"""
)
}
"without attributes" in {
"make a request to pusher" in {
val pusher = pusherStub("{}")
val res = pusher.channel("channel")
awaitResult(res) === Success(Channel())
pusher.consumedRequest must equalToHttpGetRequest(
"""http://api.pusherapp.com/apps/app/channels/channel?auth_key=key&auth_timestamp=1452184169&auth_version=1.0&auth_signature=4898a6b178f2a53a88285f4f937bdce663dc43aa942dcb84fb84d86ec904c6aa"""
)
}
}
}
"(deprecated) #channel(channelName: String, attributes: Option[Seq[String]])" should {
"call the new channel function" in {
val clientMock = mock[PusherClient]
clientMock.channel(anyString, anyListOf[PusherChannelAttributes.Value]) returns Future(Success(Channel()))
clientMock.channel("channel", Seq(PusherChannelAttributes.userCount))
there was one(clientMock).channel("channel", Seq(PusherChannelAttributes.userCount))
}
}
"#channels(prefixFilter: String, attributes: Seq[PusherChannelsAttributes.Value] = Seq())" should {
"make a request to pusher" in {
val pusher = pusherStub("{}")
val res = pusher.channels("prefix", Seq(PusherChannelsAttributes.userCount))
awaitResult(res) === Success(ChannelMap())
pusher.consumedRequest must equalToHttpGetRequest(
"""http://api.pusherapp.com/apps/app/channels?auth_key=key&auth_timestamp=1452184169&auth_version=1.0&filter_by_prefix=prefix&info=user_count&auth_signature=5fbace1f69182f30f977fb6f7004fcab587fd2339fe3f5de4288ff734a86cf6e"""
)
}
"without attributes" in {
"make a request to pusher" in {
val pusher = pusherStub("{}")
val res = pusher.channels("prefix")
awaitResult(res) === Success(ChannelMap())
pusher.consumedRequest must equalToHttpGetRequest(
"""http://api.pusherapp.com/apps/app/channels?auth_key=key&auth_timestamp=1452184169&auth_version=1.0&filter_by_prefix=prefix&auth_signature=748998ffe9e177acfea1b7cb8213f5c969b79b9aa0d54e477538e26c5b998bf9"""
)
}
}
}
"(deprecated) #channels(prefixFilter: String, attributes: Option[Seq[String]])" should {
"call the new channel function" in {
val clientMock = mock[PusherClient]
clientMock.channels(anyString, Seq(any)) returns Future(Success(ChannelMap()))
clientMock.channels("channel", Seq(PusherChannelsAttributes.userCount))
there was one(clientMock).channels("channel", Seq(PusherChannelsAttributes.userCount))
}
}
"#users" should {
"make a request to pusher" in {
val pusher = pusherStub("""{"users" : []}""")
val res = pusher.users("channel")
awaitResult(res) === Success(UserList())
pusher.consumedRequest must equalToHttpGetRequest(
"""http://api.pusherapp.com/apps/app/channels/channel/users?auth_key=key&auth_timestamp=1452184169&auth_version=1.0&auth_signature=04baeea473d69c1c104b4b306c1fde000f75b2baf9a39a50d01d7fc5d9c80268"""
)
}
}
"#authenticate" should {
"return an authenticatedParams" in {
val pusher = spy(new PusherClient())
val channelData = ChannelData(
userId = "test user",
userInfo = Some(Map("foo" -> "bar"))
)
val res = pusher.authenticate("channel", "123.234", Some(channelData))
res === AuthenticatedParams("key:bd773eb7c2796dcfc240a894f0f4b5a438e901d97d2d474ea9fa34310d3e8357", Some("""{"user_id":"test user","user_info":{"foo":"bar"}}"""))
}
"without data" in {
"return an authenticatedParams" in {
val pusher = spy(new PusherClient())
val res = pusher.authenticate("channel", "123.234")
res === AuthenticatedParams("key:2e3527935cd952830573d54a9199cfac42d5aace747bf301c5517d2da8ef7c38", None)
}
}
}
"#validateSignature" should {
"with valid arguments" in {
"returns true" in {
val pusher = spy(new PusherClient())
val res = pusher.validateSignature("key", "773ba44693c7553d6ee20f61ea5d2757a9a4f4a44d2841ae4e95b52e4cd62db4", "foo")
res === true
}
}
"with invalid key" in {
"returns false" in {
val pusher = spy(new PusherClient())
val res = pusher.validateSignature("invalid", "773ba44693c7553d6ee20f61ea5d2757a9a4f4a44d2841ae4e95b52e4cd62db4", "foo")
res === false
}
}
"with invalid signature" in {
"returns false" in {
val pusher = spy(new PusherClient())
val res = pusher.validateSignature("key", "invalid", "foo")
res === false
}
}
}
"#shutdown" should {
"shutdown" in {
val pusher = new PusherClient()
{
pusher.shutdown()
} must not(throwA[Exception])
}
}
}
|
dtaniwaki/akka-pusher
|
src/test/scala/com/github/dtaniwaki/akka_pusher/PusherClientSpec.scala
|
Scala
|
mit
| 12,529
|
package pl.iterators.kebs.instances
import pl.iterators.kebs.instances.net.URIString
trait NetInstances extends URIString
object NetInstances extends NetInstances
|
theiterators/kebs
|
instances/src/main/scala/pl/iterators/kebs/instances/NetInstances.scala
|
Scala
|
mit
| 166
|
package controllers
import io.flow.healthcheck.v0.Client
import io.flow.healthcheck.v0.models.Healthcheck
import play.api.libs.ws._
import play.api.test._
class HealthchecksSpec extends PlaySpecification {
import scala.concurrent.ExecutionContext.Implicits.global
private val port = 9010
lazy val client = new Client(s"http://localhost:$port")
"GET /_internal_/healthcheck" in new WithServer(port=port) {
await(
client.healthchecks.getHealthcheck()
) must beEqualTo(
io.flow.healthcheck.v0.models.Healthcheck("healthy")
)
}
}
|
flowcommerce/splashpage
|
api/test/controllers/Healthchecks.scala
|
Scala
|
mit
| 567
|
package fr.univnantes.vroom.control
import fr.univnantes.vroom.core.Systeme
/**
* Parameterizable class representing a concrete command
* @param system Object used for the operation of the software
* @tparam R Return type
*/
abstract class Command[R](system: Systeme) {
/**
* Method that executes the command against the system
*/
def execute(): R
}
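// A minimal usage sketch (not part of the original file): the simplest
// possible concrete command, which ignores the system entirely. Real
// commands would read from or mutate `system` before returning a result.
class NoOpCommand(system: Systeme) extends Command[Unit](system) {
  override def execute(): Unit = ()
}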
|
Callidon/v-room
|
src/main/scala/fr/univnantes/vroom/control/Command.scala
|
Scala
|
mit
| 387
|
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.utils
import io.deepsense.deeplang.DeeplangIntegTestSupport
class SparkUtilsIntegSpec extends DeeplangIntegTestSupport {
"countOccurrencesWithKeyLimit operation" should {
"Return Some(result) if distinct value limit is not reached" in {
val data = Seq("A", "B", "C")
val satisfiedLimit = 3
val result = execute(data, satisfiedLimit)
result shouldBe defined
}
"Return None if distinct value limit is reached" in {
val data = Seq("A", "B", "C")
val unsatisfiedLimit = 2
val result = execute(data, unsatisfiedLimit)
result should not be defined
}
"Properly calculate amount of occurrences" in {
val data = Seq("A", "A", "A", "A", "A", "B", "B")
val result = execute(data, 3)
result shouldBe Some(Map("A" -> 5, "B" -> 2))
}
}
private def execute(data: Seq[String], limit: Int) = {
val rdd = sparkContext.parallelize(data)
SparkUtils.countOccurrencesWithKeyLimit(rdd, limit)
}
}
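// A reference sketch of the semantics exercised above (an illustrative
// assumption, not the actual SparkUtils implementation): count occurrences
// per distinct value, returning None once the number of distinct keys
// exceeds `limit`.
object SparkUtilsSketch {
  import org.apache.spark.rdd.RDD
  import scala.reflect.ClassTag

  def countOccurrencesWithKeyLimit[T: ClassTag](rdd: RDD[T], limit: Int): Option[Map[T, Long]] = {
    // Take one element more than the limit, so that "exactly limit distinct
    // keys" can be told apart from "more than limit" without a full collect.
    val counts = rdd.map(_ -> 1L).reduceByKey(_ + _).take(limit + 1)
    if (counts.length > limit) None else Some(counts.toMap)
  }
}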
|
deepsense-io/seahorse-workflow-executor
|
deeplang/src/it/scala/io/deepsense/deeplang/utils/SparkUtilsIntegSpec.scala
|
Scala
|
apache-2.0
| 1,619
|
class B {
def foo(a: A): Int = 1
}
|
som-snytt/xsbt
|
sbt/src/sbt-test/source-dependencies/value-class/changes/B0.scala
|
Scala
|
bsd-3-clause
| 37
|
/*
* Copyright 2013 websudos ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.websudos.phantom.dsl.query
import com.websudos.phantom.tables.Primitives
import org.scalatest.{FlatSpec, Matchers}
import com.websudos.phantom.Implicits._
import com.websudos.util.testing._
class AllowedBatchQueriesTest extends FlatSpec with Matchers {
val s = gen[String]
val b = BatchStatement
val p = Primitives
it should "allow using Insert queries in a Batch statement" in {
"BatchStatement().add(Primitives.insert)" should compile
}
it should " allow using an Insert.Value statement in a BatchStatement" in {
"BatchStatement().add(Primitives.insert.value(_.long, 4L))" should compile
}
it should "allow using an Update.Assignments statement in a BatchStatement" in {
"BatchStatement().add(Primitives.update.modify(_.long setTo 5L))" should compile
}
it should "allow using Update.Where queries in a BatchStatement" in {
"BatchStatement().add(Primitives.update.where(_.pkey eqs gen[String]))" should compile
}
it should "allow using Conditional Update.Where queries in a BatchStatement" in {
"BatchStatement().add(Primitives.update.where(_.pkey eqs gen[String]).onlyIf(_.long eqs 5L))" should compile
}
it should " allow using Conditional Assignments queries in a BatchStatement" in {
"BatchStatement().add(Primitives.update.where(_.pkey eqs gen[String]).modify(_.long setTo 10L).onlyIf(_.long eqs 5L))" should compile
}
it should " allow using Delete queries in a BatchStatement" in {
"BatchStatement().add(Primitives.delete)" should compile
}
it should "Delete.Where queries in a BatchStatement" in {
"BatchStatement().add(Primitives.delete)" should compile
}
}
|
nosheenzaza/phantom-data-centric
|
phantom-dsl/src/test/scala/com/websudos/phantom/dsl/query/AllowedBatchQueriesTest.scala
|
Scala
|
gpl-2.0
| 2,261
|
package io.github.chenfh5.lucene_analysis.pinyin
import org.elasticsearch.analysis.PinyinConfig
import org.elasticsearch.common.settings.Settings
import org.elasticsearch.index.analysis.PinyinAnalyzer
import org.slf4j.LoggerFactory
import io.github.chenfh5.lucene_analysis.CustomAnalyzer
object PinyinClient extends CustomAnalyzer {
private val LOG = LoggerFactory.getLogger(getClass.getName)
/*
* @see
* configuration -> https://github.com/medcl/elasticsearch-analysis-pinyin/tree/v1.8.2
* */
private lazy val pinyinSetting = {
Settings.builder()
.put("keep_first_letter", true)
.put("keep_full_pinyin", false) //necessary for my business
.put("keep_joined_full_pinyin", true)
.put("none_chinese_pinyin_tokenize", false)
.put("limit_first_letter_length", 30)
.put("keep_original", true)
}
private lazy val pinyinAnalyzer = {
val setting = pinyinSetting.build()
val pinyinAnalyzer = new PinyinAnalyzer(new PinyinConfig(setting))
LOG.info("this is the pinyinAnalyzer={}, initialized successfully", pinyinAnalyzer)
pinyinAnalyzer
}
def getPinyinTokens(inputText: String) = {
getTokens(inputText, pinyinAnalyzer)
}
}
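// A minimal usage sketch (illustrative only; the exact tokens depend on the
// analyzer settings configured above, which keep first letters and joined
// full pinyin but drop per-character full pinyin):
object PinyinClientExample {
  def main(args: Array[String]): Unit = {
    // For "刘德华", expect tokens along the lines of "ldh" and "liudehua".
    println(PinyinClient.getPinyinTokens("刘德华"))
  }
}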
|
chenfh5/test-spark-connect-es
|
src/main/scala/io/github/chenfh5/lucene_analysis/pinyin/PinyinClient.scala
|
Scala
|
apache-2.0
| 1,217
|
/*
* Copyright 2014 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.server.staticcontent
import cats.effect.IO
import org.http4s.Method.GET
import org.http4s._
import org.http4s.syntax.all._
class WebjarServiceFilterSuite extends Http4sSuite with StaticContentShared {
def routes: HttpRoutes[IO] =
webjarServiceBuilder[IO]
.withWebjarAssetFilter(webjar =>
webjar.library == "test-lib" && webjar.version == "1.0.0" && webjar.asset == "testresource.txt"
)
.toRoutes
test("Return a 200 Ok file") {
val req = Request[IO](GET, uri"/test-lib/1.0.0/testresource.txt")
val rb = runReq(req)
rb.flatMap { case (b, r) =>
assertEquals(r.status, Status.Ok)
b.assertEquals(testWebjarResource)
}
}
test("Not find filtered asset") {
val req = Request[IO](GET, uri"/test-lib/1.0.0/sub/testresource.txt")
val rb = runReq(req)
rb.flatMap { case (_, r) =>
IO.pure(r.status).assertEquals(Status.NotFound)
}
}
}
|
http4s/http4s
|
server/jvm/src/test/scala/org/http4s/server/staticcontent/WebjarServiceFilterSuite.scala
|
Scala
|
apache-2.0
| 1,532
|
package mesosphere.marathon.core.task.tracker.impl
import mesosphere.marathon.core.base.ConstantClock
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.core.task.tracker.TaskTracker
import mesosphere.marathon.core.task.tracker.impl.TaskOpProcessorImpl.StatusUpdateActionResolver
import mesosphere.marathon.state.PathId
import mesosphere.marathon.test.Mockito
import org.apache.mesos.Protos.TaskStatus
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ Matchers, GivenWhenThen, FunSuite }
import scala.concurrent.Future
/**
* Some specialized tests for statusUpdate action resolving.
*
* More tests are in [[mesosphere.marathon.tasks.TaskTrackerImplTest]]
*/
class StatusUpdateActionResolverTest
extends FunSuite with Mockito with GivenWhenThen with ScalaFutures with Matchers {
import scala.concurrent.ExecutionContext.Implicits.global
test("an update for a non-existing tasks is mapped to fail") {
val f = new Fixture
Given("a taskID without task")
val appId = PathId("/app")
val taskId = Task.Id.forApp(appId)
f.taskTracker.task(taskId) returns Future.successful(None)
And("a status update")
val update = TaskStatus.getDefaultInstance
When("resolve is called")
val action = f.actionResolver.resolve(taskId, update).futureValue
Then("getTAskAsync is called")
verify(f.taskTracker).task(taskId)
And("a fail action is returned")
action.getClass should be(classOf[TaskOpProcessor.Action.Fail])
action.asInstanceOf[TaskOpProcessor.Action.Fail].cause.getMessage should
equal(s"$taskId of app [$appId] does not exist")
And("there are no more interactions")
f.verifyNoMoreInteractions()
}
class Fixture {
val clock = ConstantClock()
val taskTracker = mock[TaskTracker]
val actionResolver = new StatusUpdateActionResolver(clock, taskTracker)
def verifyNoMoreInteractions(): Unit = {
noMoreInteractions(taskTracker)
}
}
}
|
pgkelley4/marathon
|
src/test/scala/mesosphere/marathon/core/task/tracker/impl/StatusUpdateActionResolverTest.scala
|
Scala
|
apache-2.0
| 1,978
|
package models
import scalikejdbc.specs2.mutable.AutoRollback
import org.specs2.mutable._
import scalikejdbc._
import org.joda.time.{LocalDate}
class CronLineSpec extends Specification {
"CronLine" should {
val cl = CronLine.syntax("cl")
"find by primary keys" in new AutoRollback {
val maybeFound = CronLine.find(1L)
maybeFound.isDefined should beTrue
}
"find by where clauses" in new AutoRollback {
val maybeFound = CronLine.findBy(sqls.eq(cl.id, 1L))
maybeFound.isDefined should beTrue
}
"find all records" in new AutoRollback {
val allResults = CronLine.findAll()
allResults.size should be_>(0)
}
"count all records" in new AutoRollback {
val count = CronLine.countAll()
count should be_>(0L)
}
"find all by where clauses" in new AutoRollback {
val results = CronLine.findAllBy(sqls.eq(cl.id, 1L))
results.size should be_>(0)
}
"count by where clauses" in new AutoRollback {
val count = CronLine.countBy(sqls.eq(cl.id, 1L))
count should be_>(0L)
}
"create new record" in new AutoRollback {
val created = CronLine.create(cronId = 1L)
created should not beNull
}
"save a record" in new AutoRollback {
val entity = CronLine.findAll().head
// TODO modify something
val modified = entity
val updated = CronLine.save(modified)
updated should not equalTo(entity)
}
"destroy a record" in new AutoRollback {
val entity = CronLine.findAll().head
CronLine.destroy(entity)
val shouldBeNone = CronLine.find(1L)
shouldBeNone.isDefined should beFalse
}
"perform batch insert" in new AutoRollback {
val entities = CronLine.findAll()
entities.foreach(e => CronLine.destroy(e))
val batchInserted = CronLine.batchInsert(entities)
batchInserted.size should be_>(0)
}
}
}
|
akyao/ketsuco
|
test/models/CronLineSpec.scala
|
Scala
|
mit
| 1,912
|
/*
* Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
import sbt._
import Keys._
object KinesisExampleScalaConsumerBuild extends Build {
import Dependencies._
import BuildSettings._
// Configure prompt to show current project
override lazy val settings = super.settings :+ {
shellPrompt := { s => Project.extract(s).currentProject.id + " > " }
}
// Define our project, with basic project information and library dependencies
lazy val project = Project("kinesis-example-scala-consumer", file("."))
.settings(buildSettings: _*)
.settings(
libraryDependencies ++= Seq(
Libraries.logging,
Libraries.httpCore,
Libraries.httpClient,
Libraries.jacksonCore,
Libraries.argot,
Libraries.config,
Libraries.scalaUtil,
Libraries.scalazon,
Libraries.specs2,
Libraries.commonsLang3,
Libraries.thrift,
Libraries.slf4j,
Libraries.awsSdk,
Libraries.kinesisClient
// Add your additional libraries here (comma-separated)...
)
)
}
|
snowplow/kinesis-example-scala-consumer
|
project/KinesisExampleScalaConsumerBuild.scala
|
Scala
|
apache-2.0
| 1,728
|
package a65.测试2
object Runner3 {
def number1gen(n: Int): Number1 = n match {
case n1 if n1 > 0 => Number1S(number1gen(n1 - 1))
case 0 => Number1T
}
def number3gen(n: Int): Number3 = n match {
case n1 if n1 > 0 => new Number3S(number3gen(n1 - 1))
case 0 => Number3T
}
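// 乘数 ("multiplier"): ties a lazy knot to build a cyclic pair of Number2
// values encoding n; main uses it as the base of the power computation.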
def 乘数(n: Int): (Number2, Number2) = {
def gen(n1: Int, zero: => Number2): Number2 = n1 match {
case n2 if n2 > 0 => Number2S(() => gen(n2 - 1, zero))
case 0 => zero
}
lazy val number2s: Number2 = gen(n, number2t)
lazy val number2t: Number2 = Number2T(() => number2s)
(number2s, number2t)
}
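// 除数 ("divisor"): the same knot-tying trick; main uses the resulting
// cyclic pair encoding n as the base of the logarithm computation.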
def 除数(n: Int): (Number2T, Number2S) = {
def gen(n1: Int, zero: => Number2S): Number2T = n1 match {
case n2 if n2 > 1 => Number2T(() => gen(n2 - 1, zero))
case 1 => Number2T(() => zero)
}
lazy val number2t: Number2T = gen(n, number2s)
lazy val number2s: Number2S = Number2S(() => number2t)
(number2t, number2s)
}
def count(number1: Number1): Int = number1 match {
case s: Number1S => count(s.tail) + 1
case Number1T => 0
}
def main(arr: Array[String]): Unit = {
for {
i <- 1 to 8
n <- 1 to 5
} yield {
val number1 = number3gen(n)
val (number2Positive, number2Zero) = 乘数(i)
val result: Number1 = number1.method3(number2Positive)
val countResult = Pow.pow(i, n)
assert(count(result) == countResult)
}
for {
i <- 1 to 2000
n <- 2 to 20
} {
val number1 = number1gen(i)
val (number2Positive, number2Zero) = 除数(n)
val result: Number1 = number2Positive.method5(number1)
val countResult = Pow.log(n, i)
assert(count(result) == countResult + 1)
}
}
}
|
djx314/ubw
|
a66-指数对数-原型/src/main/scala/a65/测试2/Runner3.scala
|
Scala
|
bsd-3-clause
| 1,909
|
package org.nisshiee.towerdefensescala
import org.specs2._, matcher.DataTables
class PointSpec extends Specification with DataTables { def is =
"Point" ^
"isIn" ^
"x, yのいずれかがleft, top未満の場合はfalse" ! e2^
"xがleft + width以上、またはyがtop + height以上の場合はfalse" ! e3^
"それ以外で、x,yがいずれもRectangleの範囲内の場合はtrue" ! e4^
end
implicit lazy val TupleRectangle = new Rectangle[(Int, Int, Int, Int)] {
def top(a: (Int, Int, Int, Int)) = a._1
def left(a: (Int, Int, Int, Int)) = a._2
def width(a: (Int, Int, Int, Int)) = a._3
def height(a: (Int, Int, Int, Int)) = a._4
}
def e2 =
"point" | "rect" |
Point(-1, 0) ! (0, 0, 3, 3) |
Point(0, -1) ! (0, 0, 3, 3) |
Point(-1, -1) ! (0, 0, 3, 3) |
Point(1, 1) ! (2, 2, 3, 3) |> { (point, rect) =>
point isIn rect must beFalse
}
def e3 =
"point" | "rect" |
Point(3, 0) ! (0, 0, 3, 3) |
Point(0, 3) ! (0, 0, 3, 3) |
Point(3, 3) ! (0, 0, 3, 3) |
Point(5, 5) ! (2, 2, 3, 3) |> { (point, rect) =>
point isIn rect must beFalse
}
def e4 =
"point" | "rect" |
Point(0, 0) ! (0, 0, 3, 3) |
Point(0, 2) ! (0, 0, 3, 3) |
Point(2, 0) ! (0, 0, 3, 3) |
Point(2, 2) ! (0, 0, 3, 3) |
Point(1, 1) ! (0, 0, 3, 3) |
Point(2, 2) ! (2, 2, 3, 3) |
Point(4, 2) ! (2, 2, 3, 3) |
Point(2, 4) ! (2, 2, 3, 3) |
Point(4, 4) ! (2, 2, 3, 3) |> { (point, rect) =>
point isIn rect must beTrue
}
}
|
nisshiee/towerdefense-scala
|
src/test/scala/model/PointSpec.scala
|
Scala
|
mit
| 1,850
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.rules.dataSet
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.flink.table.plan.nodes.FlinkConventions
import org.apache.flink.table.plan.nodes.dataset.DataSetPythonCalc
import org.apache.flink.table.plan.nodes.logical.FlinkLogicalCalc
import org.apache.flink.table.plan.util.PythonUtil.containsPythonCall
import scala.collection.JavaConverters._
class DataSetPythonCalcRule
extends ConverterRule(
classOf[FlinkLogicalCalc],
FlinkConventions.LOGICAL,
FlinkConventions.DATASET,
"DataSetPythonCalcRule") {
override def matches(call: RelOptRuleCall): Boolean = {
val calc: FlinkLogicalCalc = call.rel(0).asInstanceOf[FlinkLogicalCalc]
val program = calc.getProgram
program.getExprList.asScala.exists(containsPythonCall(_))
}
def convert(rel: RelNode): RelNode = {
val calc: FlinkLogicalCalc = rel.asInstanceOf[FlinkLogicalCalc]
val traitSet: RelTraitSet = rel.getTraitSet.replace(FlinkConventions.DATASET)
val convInput: RelNode = RelOptRule.convert(calc.getInput, FlinkConventions.DATASET)
new DataSetPythonCalc(
rel.getCluster,
traitSet,
convInput,
rel.getRowType,
calc.getProgram,
"DataSetPythonCalcRule")
}
}
object DataSetPythonCalcRule {
val INSTANCE: RelOptRule = new DataSetPythonCalcRule
}
|
hequn8128/flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/rules/dataSet/DataSetPythonCalcRule.scala
|
Scala
|
apache-2.0
| 2,270
|
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
object Root
|
benmccann/playframework
|
dev-mode/sbt-plugin/src/sbt-test/play-sbt-plugin/multiproject/app/Root.scala
|
Scala
|
apache-2.0
| 80
|
package com.jgrier.flinkstuff.metrics
import java.util.concurrent.TimeUnit
import com.codahale.metrics.{MetricFilter, ScheduledReporter}
import metrics_influxdb.{HttpInfluxdbProtocol, InfluxdbReporter}
import metrics_influxdb.api.measurements.CategoriesMetricMeasurementTransformer
import org.apache.flink.dropwizard.ScheduledDropwizardReporter
import org.apache.flink.metrics.MetricConfig
/**
* InfluxDB Metrics Reporter for Apache Flink
*
* To use this add the following configuration to your flink-conf.yaml file and place the JAR containing
* this class in your flink/lib directory:
*
* #==========================================
# Metrics
#==========================================
metrics.reporters: influxdb
metrics.reporter.influxdb.server: localhost
metrics.reporter.influxdb.port: 8086
metrics.reporter.influxdb.user: admin
metrics.reporter.influxdb.password: admin
metrics.reporter.influxdb.db: flink
metrics.reporter.influxdb.class: com.jgrier.flinkstuff.metrics.InfluxDbReporter
metrics.reporter.influxdb.interval: 10 SECONDS
# metrics format: host.process_type.tm_id.job_name.task_name.subtask_index
metrics.scope.jm: <host>.jobmanager.na.na.na.na
metrics.scope.jm.job: <host>.jobmanager.na.<job_name>.na.na
metrics.scope.tm: <host>.taskmanager.<tm_id>.na.na.na
metrics.scope.tm.job: <host>.taskmanager.<tm_id>.<job_name>.na.na
metrics.scope.tm.task: <host>.taskmanager.<tm_id>.<job_name>.<task_name>.<subtask_index>
metrics.scope.tm.operator: <host>.taskmanager.<tm_id>.<job_name>.<task_name>.<subtask_index> */
class InfluxDbReporter extends ScheduledDropwizardReporter {
override def getReporter(metricConfig: MetricConfig): ScheduledReporter = {
val server = metricConfig.getString("server", "localhost")
val port = metricConfig.getInteger("port", 8086)
val user = metricConfig.getString("user", "admin")
val password = metricConfig.getString("password", "admin")
val db = metricConfig.getString("db", "flink")
InfluxdbReporter.forRegistry(registry)
.protocol(new HttpInfluxdbProtocol(server, port, user, password, db))
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.filter(MetricFilter.ALL)
.skipIdleMetrics(false)
.transformer(new CategoriesMetricMeasurementTransformer("host", "process_type", "tm_id", "job_name", "task_name", "subtask_index"))
.build()
}
}
|
jgrier/flink-stuff
|
flink-influx-reporter/src/main/scala/com/jgrier/flinkstuff/metrics/InfluxDbReporter.scala
|
Scala
|
apache-2.0
| 2,470
|
import org.scalactic.Equality
import org.scalatest.matchers.{MatchResult, Matcher}
import org.scalatest.prop.PropertyChecks
import org.scalatest.{FlatSpec, MustMatchers}
class GallantTest extends FlatSpec with MustMatchers with PropertyChecks {
case class DummySensor(override val name: String, override val id: Long, valueFunc: () => Double) extends Sensor(name, id) {
var initialized = false
def value = Some(valueFunc())
def initialize = {initialized = true}
}
"A Temperature sensor" must "store its ID" in {
val id = 42l
val tested = new TemperatureSensor(id)
tested.id must be(id)
tested.id mustBe id //no parens
tested.id must equal(id)
tested.id mustEqual id //no parens
}
it must "the same as another sensor with the same id" in {
implicit val sensorEq = SensorEquality
val id = 7l
val tested: Sensor = new TemperatureSensor(id) //won't work without type ascription
tested must equal(DummySensor("blah", id, () => 0))
}
"A SingleSensorCollector" must "be the same reference as" in {
val sensor = DummySensor("blah", 7, () => 10)
val tested = new SingleSensorCollector(sensor)
tested.sensors.head must be theSameInstanceAs sensor
}
it must "not throw an exception when adding a sensor" in {
noException must be thrownBy {
new SingleSensorCollector(new DummySensor("blah", 7, () => 10))
}
}
it must "throw an IllegalArgumentException when attempting to add a null sensor" in {
an [IllegalArgumentException] must be thrownBy {
new SingleSensorCollector(null)
}
}
"A TemperatureSensor's value" must "be within a range of +-3 when freshly initialized" in {
val tested = new TemperatureSensor(0l)
tested.initialize
tested.value.get must be (0.0 +- 3.0)
}
"A MultiSensorCollection" must "produce a complete collection" in {
val testData = List(DummySensor("blah", 10, () => 0), DummySensor("blah", 7, () => 0), DummySensor("blah", 8, () => 0))
val tested = new MultiSensorCollector(List.empty)
testData.foreach(tested.addSensor)
tested.sensors must contain theSameElementsAs(testData)
}
it must "add sensors in order" in {
val testData = List(DummySensor("blah", 10, () => 0), DummySensor("blah", 7, () => 0), DummySensor("blah", 8, () => 0))
val tested = new MultiSensorCollector(List.empty)
testData.foreach(tested.addSensor)
tested.sensors must contain theSameElementsInOrderAs(testData)
}
it must "return an empty list when provided with an empty seed" in {
new MultiSensorCollector(List.empty).sensors must be('empty)
}
it must "remove an element provided" in {
val testData = List(DummySensor("blah", 10, () => 0), DummySensor("blah", 7, () => 0), DummySensor("blah", 8, () => 0))
val deletedItem = testData(1)
val tested = new MultiSensorCollector(List.empty)
testData.foreach(tested.addSensor)
tested.removeSensor(deletedItem)
tested.sensors must not contain deletedItem
}
it must "return a Java list with sth" in {
val testData = List(DummySensor("blah", 10, () => 0), DummySensor("blah", 7, () => 0), DummySensor("blah", 8, () => 0))
val tested = new MultiSensorCollector(List.empty)
testData.foreach(tested.addSensor)
tested.legacySensors must contain theSameElementsInOrderAs(testData)
}
it must "return the same type of a sensor as searched" in {
val searchedName = "something"
val tested = new MultiSensorCollector(List(DummySensor(searchedName, 10, () => 0)))
import org.scalatest.OptionValues._
tested.findByName(searchedName).value.name must be(searchedName)
}
it must "return the first matching sensors as searched" in {
val searchedName = "something"
val firstId = 10l
val tested = new MultiSensorCollector(List(DummySensor(searchedName, firstId, () => 0),DummySensor(searchedName, 11, () => 0)))
import org.scalatest.OptionValues._
tested.findByName(searchedName).value must have (
'name (searchedName),
'id (firstId)
)
//alternatively
tested.findByName(searchedName).value must haveId(firstId)
}
"A TemperatureSensor's text output" must "have be prefixed with the name of the sensor" in {
new TemperatureSensor(0).textValue must startWith("Temp")
}
it must "output a sensible start value in text" in {
new TemperatureSensor(30).textValue must fullyMatch regex (raw"Temp: (\d+)K" withGroup("0"))
}
"A TemperatureSensor" must "switch to initialized status when required" in {
val tested = new BadTemperatureSensor(30l)
tested.initialize
tested must be an 'initialized
}
"TheDsl" must "allow to construct a TemperatureSensor from a Double" in {
val builder = SensorCollectionDsl.start
"builder.withTemperatureSensor(3.0)" must compile
}
it must "not allow to construct a TemperatureSensor from a String" in {
val builder = SensorCollectionDsl.start
"""builder.withTemperatureSensor("3.0")""" mustNot typeCheck
"""builder.withTemperatureSensor("3.0")""" mustNot compile
}
"A SolarIrradianceSensor must cap geographically incorrect values as invalid" must "output sane data" in {
val valueCheck = Table(("Latitude", "Max value"), (90.0, 160.0), (90.0, 380.0), (52.0, 120.0))
forAll(valueCheck) { (latitude, maxValue) => {
new SolarIrradianceSensorSensor(7, latitude).value.get must be < maxValue
}
}
}
"A SensorData display" must "correctly sort a list" in {
import org.scalacheck.Gen._
val dataPointGen = zip(posNum[Long], posNum[Double]).map(SensorDataPoint.tupled)
val pointLists = listOf(dataPointGen)
forAll(pointLists) { (list) => {
noException must be thrownBy {
new SensorDataDisplay(list).sorted
}
}
}
}
//Util stuff
class HaveIdMatcher(expectedId: Long) extends Matcher[Sensor] {
override def apply(left: Sensor) = {
MatchResult(
left.id.equals(expectedId),
s"Sensor $left has a different ID than $expectedId",
s"Sensor $left has the ID $expectedId"
)
}
}
def haveId(expectedId: Long) = new HaveIdMatcher(expectedId)
object SensorEquality extends Equality[Sensor] {
def areEqual(a: Sensor, b: Any) = b match {
case other: Sensor => other.id == a.id
case _ => false
}
}
}
|
mikolak-net/scalatest_matcher_prez_demo
|
src/test/scala/GallantTest.scala
|
Scala
|
gpl-2.0
| 6,388
|
package org.broadinstitute.dsde.vault.model
import com.wordnik.swagger.annotations.ApiModelProperty
import spray.json
import scala.annotation.meta.field
object uBAMCollectionJsonProtocol extends json.DefaultJsonProtocol {
implicit val impUBamCollection = jsonFormat4(UBamCollection)
implicit val impUBamCollectionIngest = jsonFormat2(UBamCollectionIngest)
implicit val impUBamCollectionIngestResponse = jsonFormat4(UBamCollectionIngestResponse)
}
case class UBamCollection
(
@(ApiModelProperty@field)(value = "The Vault ID of this uBAM collection", required = true)
id: Option[String] = None,
@(ApiModelProperty@field)(value = "The Vault IDs of the uBAMs included in this collection.", required = true)
members: Option[Seq[String]] = None,
@(ApiModelProperty@field)(value = "The metadata key-value pairs associated with this uBAM collection.", required = true)
metadata: Map[String, String],
@(ApiModelProperty@field)(value = "The properties associated with this uBAM collection.", required = true)
properties: Map[String, String]
)
case class UBamCollectionIngest
(
@(ApiModelProperty@field)(value = "The Vault IDs of the uBAMs included in this collection.", required = true)
members: Option[Seq[String]] = None,
@(ApiModelProperty@field)(value = "The metadata key-value pairs associated with this uBAM collection.", required = true)
metadata: Map[String, String]
)
case class UBamCollectionIngestResponse
(
@(ApiModelProperty@field)(value = "The Vault ID of this uBAM collection", required = true)
id: String,
@(ApiModelProperty@field)(value = "The Vault IDs of the uBAMs included in this collection.", required = true)
members: Option[Seq[String]] = None,
@(ApiModelProperty@field)(value = "The metadata key-value pairs associated with this uBAM collection.", required = true)
metadata: Map[String, String],
@(ApiModelProperty@field)(value = "The properties associated with this uBAM collection.", required = true)
properties: Map[String, String]
)
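// A minimal serialization sketch (hypothetical values) exercising the
// formats defined in uBAMCollectionJsonProtocol above.
object UBamCollectionJsonExample {
  import uBAMCollectionJsonProtocol._
  import spray.json._

  val json: String = UBamCollection(
    id = Some("vault-123"),
    members = Some(Seq("ubam-1", "ubam-2")),
    metadata = Map("project" -> "demo"),
    properties = Map("ownerId" -> "user@example.org")
  ).toJson.compactPrint
}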
|
broadinstitute/vault-api
|
src/main/scala/org/broadinstitute/dsde/vault/model/UBamCollection.scala
|
Scala
|
bsd-3-clause
| 2,003
|
package com.github.tminglei.slickpg
import slick.driver.PostgresDriver
import slick.jdbc.{PositionedResult, JdbcType}
trait PgJson4sSupport extends json.PgJsonExtensions with utils.PgCommonJdbcTypes { driver: PostgresDriver =>
import driver.api._
import org.json4s._
type DOCType
def pgjson: String
val jsonMethods: JsonMethods[DOCType]
/// alias
trait JsonImplicits extends Json4sJsonImplicits
trait Json4sJsonImplicits {
implicit val json4sJsonTypeMapper =
new GenericJdbcType[JValue](
pgjson,
(s) => jsonMethods.parse(s),
(v) => jsonMethods.compact(jsonMethods.render(v)),
hasLiteralForm = false
)
implicit def json4sJsonColumnExtensionMethods(c: Rep[JValue])(
implicit tm: JdbcType[JValue], tm1: JdbcType[List[String]]) = {
new JsonColumnExtensionMethods[JValue, JValue](c)
}
implicit def json4sJsonOptionColumnExtensionMethods(c: Rep[Option[JValue]])(
implicit tm: JdbcType[JValue], tm1: JdbcType[List[String]]) = {
new JsonColumnExtensionMethods[JValue, Option[JValue]](c)
}
}
trait Json4sJsonPlainImplicits {
import utils.PlainSQLUtils._
implicit class PgJsonPositionedResult(r: PositionedResult) {
def nextJson() = nextJsonOption().getOrElse(JNull)
def nextJsonOption() = r.nextStringOption().map(jsonMethods.parse(_))
}
//////////////////////////////////////////////////////////
implicit val getJson = mkGetResult(_.nextJson())
implicit val getJsonOption = mkGetResult(_.nextJsonOption())
implicit val setJson = mkSetParameter[JValue](pgjson, (v) => jsonMethods.compact(jsonMethods.render(v)))
implicit val setJsonOption = mkOptionSetParameter[JValue](pgjson, (v) => jsonMethods.compact(jsonMethods.render(v)))
}
}
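// A minimal wiring sketch following the usual slick-pg pattern. The driver
// name is ours, and json4s-native is an assumed choice; the exact DOCType
// alias depends on the json4s version on the classpath.
object MyPostgresDriver extends PostgresDriver with PgJson4sSupport {
  type DOCType = org.json4s.native.Document
  def pgjson = "jsonb"
  val jsonMethods = org.json4s.native.JsonMethods

  override val api = new API with JsonImplicits {}
}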
|
btd/slick-pg
|
src/main/scala/com/github/tminglei/slickpg/addon/PgJson4sSupport.scala
|
Scala
|
bsd-2-clause
| 1,796
|
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import java.util.Map.Entry
import org.apache.accumulo.core.client.IteratorSetting
import org.apache.accumulo.core.data.{Key, Value}
import org.geotools.factory.Hints
import org.locationtech.geomesa.accumulo.AccumuloFeatureIndexType
import org.locationtech.geomesa.arrow.ArrowEncodedSft
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.index.iterators.{ArrowFileAggregate, ArrowFileScan}
import org.locationtech.geomesa.utils.geotools.GeometryUtils
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
/**
* Aggregates and returns arrow 'files'. Each value will be a full arrow file with metadata and batches.
* This allows us to build up the dictionary values as we encounter the features, instead of
* having to look them up ahead of time.
*/
class ArrowFileIterator extends BaseAggregatingIterator[ArrowFileAggregate] with ArrowFileScan
object ArrowFileIterator {
def configure(sft: SimpleFeatureType,
index: AccumuloFeatureIndexType,
filter: Option[Filter],
dictionaries: Seq[String],
hints: Hints,
deduplicate: Boolean,
priority: Int = ArrowBatchIterator.DefaultPriority): IteratorSetting = {
val is = new IteratorSetting(priority, "arrow-file-iter", classOf[ArrowFileIterator])
BaseAggregatingIterator.configure(is, deduplicate, None)
ArrowFileScan.configure(sft, index, filter, dictionaries, hints).foreach { case (k, v) => is.addOption(k, v) }
is
}
/**
* Adapts the iterator to create simple features.
* WARNING - the same feature is re-used and mutated - the iterator stream should be operated on serially.
*/
def kvsToFeatures(): (Entry[Key, Value]) => SimpleFeature = {
val sf = new ScalaSimpleFeature(ArrowEncodedSft, "")
sf.setAttribute(1, GeometryUtils.zeroPoint)
(e: Entry[Key, Value]) => {
sf.setAttribute(0, e.getValue.get())
sf
}
}
}
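// Usage sketch (hypothetical `scanner` and `consume`, shown only to
// illustrate the warning above): because kvsToFeatures() re-uses and mutates
// a single feature, extract what you need from each feature before advancing
// to the next entry.
//
//   val toFeature = ArrowFileIterator.kvsToFeatures()
//   scanner.iterator.asScala.map(toFeature).foreach { sf =>
//     consume(sf.getAttribute(0)) // the encoded arrow bytes
//   }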
|
ronq/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/iterators/ArrowFileIterator.scala
|
Scala
|
apache-2.0
| 2,535
|
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.graphite
import io.gatling.core.config.GatlingConfiguration
import io.gatling.core.stats.writer.RunMessage
import io.gatling.graphite.types._
abstract class GraphitePathPattern(runMessage: RunMessage, configuration: GatlingConfiguration) {
def allUsersPath: GraphitePath
def usersPath(scenario: String): GraphitePath
def allResponsesPath: GraphitePath
def responsePath(requestName: String, groups: List[String]): GraphitePath
def metrics(userBreakdowns: Map[GraphitePath, UserBreakdown], responseMetricsByStatus: Map[GraphitePath, MetricByStatus]): Iterator[(String, Long)] = {
val userMetrics = userBreakdowns.iterator.flatMap(byProgress)
val targetResponseMetrics =
if (configuration.data.graphite.light)
responseMetricsByStatus.get(allResponsesPath).map(m => Iterator.single(allResponsesPath -> m)).getOrElse(Iterator.empty)
else
responseMetricsByStatus.iterator
val responseMetrics = targetResponseMetrics.flatMap(byStatus).flatMap(byMetric)
(userMetrics ++ responseMetrics)
.map { case (path, value) => (metricRootPath / path).pathKey -> value }
}
private def byProgress(metricsEntry: (GraphitePath, UserBreakdown)): Seq[(GraphitePath, Long)] = {
val (path, usersBreakdown) = metricsEntry
Seq(
activeUsers(path) -> usersBreakdown.active,
waitingUsers(path) -> usersBreakdown.waiting,
doneUsers(path) -> usersBreakdown.done
)
}
private def byStatus(metricsEntry: (GraphitePath, MetricByStatus)): Seq[(GraphitePath, Option[Metrics])] = {
val (path, metricByStatus) = metricsEntry
Seq(
okResponses(path) -> metricByStatus.ok,
koResponses(path) -> metricByStatus.ko,
allResponses(path) -> metricByStatus.all
)
}
private def byMetric(metricsEntry: (GraphitePath, Option[Metrics])): Seq[(GraphitePath, Long)] =
metricsEntry match {
case (path, None) => Seq(count(path) -> 0)
case (path, Some(m)) =>
Seq(
count(path) -> m.count,
min(path) -> m.min,
max(path) -> m.max,
mean(path) -> m.mean,
stdDev(path) -> m.stdDev,
percentiles1(path) -> m.percentile1,
percentiles2(path) -> m.percentile2,
percentiles3(path) -> m.percentile3,
percentiles4(path) -> m.percentile4
)
}
protected def metricRootPath: GraphitePath
protected def activeUsers(path: GraphitePath): GraphitePath
protected def waitingUsers(path: GraphitePath): GraphitePath
protected def doneUsers(path: GraphitePath): GraphitePath
protected def okResponses(path: GraphitePath): GraphitePath
protected def koResponses(path: GraphitePath): GraphitePath
protected def allResponses(path: GraphitePath): GraphitePath
protected def count(path: GraphitePath): GraphitePath
protected def min(path: GraphitePath): GraphitePath
protected def max(path: GraphitePath): GraphitePath
protected def mean(path: GraphitePath): GraphitePath
protected def stdDev(path: GraphitePath): GraphitePath
protected def percentiles1(path: GraphitePath): GraphitePath
protected def percentiles2(path: GraphitePath): GraphitePath
protected def percentiles3(path: GraphitePath): GraphitePath
protected def percentiles4(path: GraphitePath): GraphitePath
}
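
A concrete subclass only has to supply the path vocabulary; the `metrics` method above does all the assembly. The sketch below is hypothetical: it assumes a `GraphitePath(segment)` factory (the real factory in `io.gatling.graphite.types` may be named differently) and otherwise uses only the `/` operator already seen above.

class SimplePathPattern(runMessage: RunMessage, configuration: GatlingConfiguration)
    extends GraphitePathPattern(runMessage, configuration) {
  // Root prefix for every emitted metric key; the factory name is an assumption.
  protected def metricRootPath: GraphitePath = GraphitePath("gatling")
  def allUsersPath: GraphitePath = GraphitePath("users") / "allUsers"
  def usersPath(scenario: String): GraphitePath = GraphitePath("users") / scenario
  def allResponsesPath: GraphitePath = GraphitePath("allRequests")
  def responsePath(requestName: String, groups: List[String]): GraphitePath =
    groups.foldLeft(GraphitePath(requestName))(_ / _)
  protected def activeUsers(path: GraphitePath): GraphitePath = path / "active"
  protected def waitingUsers(path: GraphitePath): GraphitePath = path / "waiting"
  protected def doneUsers(path: GraphitePath): GraphitePath = path / "done"
  protected def okResponses(path: GraphitePath): GraphitePath = path / "ok"
  protected def koResponses(path: GraphitePath): GraphitePath = path / "ko"
  protected def allResponses(path: GraphitePath): GraphitePath = path / "all"
  protected def count(path: GraphitePath): GraphitePath = path / "count"
  protected def min(path: GraphitePath): GraphitePath = path / "min"
  protected def max(path: GraphitePath): GraphitePath = path / "max"
  protected def mean(path: GraphitePath): GraphitePath = path / "mean"
  protected def stdDev(path: GraphitePath): GraphitePath = path / "stdDev"
  protected def percentiles1(path: GraphitePath): GraphitePath = path / "percentiles1"
  protected def percentiles2(path: GraphitePath): GraphitePath = path / "percentiles2"
  protected def percentiles3(path: GraphitePath): GraphitePath = path / "percentiles3"
  protected def percentiles4(path: GraphitePath): GraphitePath = path / "percentiles4"
}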
|
gatling/gatling
|
gatling-graphite/src/main/scala/io/gatling/graphite/GraphitePathPattern.scala
|
Scala
|
apache-2.0
| 3,921
|
/*
* Copyright (C) 2017 LREN CHUV for Human Brain Project
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ch.chuv.lren.woken.test
import java.util.concurrent.{Semaphore, TimeUnit}
import akka.actor.{ActorRef, ActorSystem}
import akka.cluster.Cluster
import akka.cluster.pubsub.{DistributedPubSub, DistributedPubSubMediator}
import akka.pattern.ask
import akka.stream.ActorMaterializer
import akka.util.Timeout
import ch.chuv.lren.woken.monitoring.KamonSupport
import com.typesafe.config.{Config, ConfigFactory}
import ch.chuv.lren.woken.messages.datasets._
import ch.chuv.lren.woken.messages.query._
import ch.chuv.lren.woken.messages.variables.{VariableId, VariablesForDatasetsQuery, VariablesForDatasetsResponse}
import com.typesafe.scalalogging.LazyLogging
import kamon.Kamon
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpec}
import org.scalatest.TryValues._
import org.scalatest.tagobjects.Slow
import spray.json._
import queryProtocol._
import JsonHelpers._
import ch.chuv.lren.woken.utils.ConfigurationLoader
import scala.collection.immutable.TreeSet
import scala.concurrent.{Await, ExecutionContextExecutor, Future}
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.util.Try
class WokenAkkaAPITest
extends WordSpec
with Matchers
with Queries
with BeforeAndAfterAll
with LazyLogging {
implicit val timeout: Timeout = Timeout(200 seconds)
lazy val config: Config = {
val appConfig = ConfigFactory
.parseResourcesAnySyntax("application.conf")
.withFallback(ConfigFactory.parseResourcesAnySyntax("kamon.conf"))
ConfigurationLoader.appendClusterConfiguration(appConfig).resolve()
}
implicit val system: ActorSystem = ActorSystem("woken", config)
implicit val materializer: ActorMaterializer = ActorMaterializer()
implicit val executionContext: ExecutionContextExecutor = system.dispatcher
val cluster: Cluster = Cluster(system)
val mediator: ActorRef = DistributedPubSub(system).mediator
val entryPoint = "/user/entrypoint"
val distributed: Boolean = config.getBoolean("test.distributed")
KamonSupport.startReporters(config)
override def beforeAll: Unit = {
val waitClusterUp = new Semaphore(1)
cluster.registerOnMemberUp(waitClusterUp.release())
waitClusterUp.acquire()
    // TODO: Woken should respond to a Ping message
// val future = mediator ? DistributedPubSubMediator.Send(entryPoint,
// Ping(Some("woken")),
// localAffinity = true)
// val result = waitFor[R](future)
Thread.sleep(10000)
}
override def afterAll: Unit = {
cluster.leave(cluster.selfAddress)
cluster.down(cluster.selfAddress)
Kamon.stopAllReporters()
system.terminate().onComplete { result =>
logger.debug(s"Actor system shutdown: $result")
}
}
"Woken" should {
"respond to a query for the list of algorithms" in {
val response: MethodsResponse =
timedQuery(MethodsQuery, "list of algorithms")
val expected = loadJson("/responses/list_algorithms.json")
response.methods shouldBe expected
}
"respond to a query for the list of available datasets" in {
val response: DatasetsResponse =
timedQuery(DatasetsQuery(Some("cde_features_a")), "list of datasets")
response.datasets should have size 1
val expected = Set(
Dataset(DatasetId("desd-synthdata"),
"DESD",
"Demo dataset DESD",
List(cdeFeaturesATableId, cdeFeaturesMixedTableId),
AnonymisationLevel.Anonymised,
None))
response.datasets shouldBe expected
}
// Available variables query
"respond to a query for the list of available variables" which {
"return all variables if no datasets specified" in {
val response: VariablesForDatasetsResponse =
timedQuery(VariablesForDatasetsQuery(Set(), exhaustive = false),
"list of variables for all datasets")
response.variables should have size 203
}
"return only variables for datasets if a set is passed with the query" in {
val response: VariablesForDatasetsResponse =
timedQuery(VariablesForDatasetsQuery(Set(DatasetId("churn")),
exhaustive = false),
"list of variables for Churn dataset")
response.variables should have size 21
val expected = loadJson("/responses/list_churn_variables.json")
.convertTo[VariablesForDatasetsResponse]
response shouldBe expected
}
"return only variables present in all datasets if exhaustive mode set to true" in {
val response: VariablesForDatasetsResponse =
timedQuery(VariablesForDatasetsQuery(Set(), exhaustive = true),
"list of variables common to all datasets")
response.variables should have size 0
}
}
"respond to a data mining query," which {
"uses a k-NN algorithm [PFA]" in {
val query = MiningQuery(
user = UserId("test1"),
variables = List(VariableId("cognitive_task2")),
covariables = List(VariableId("score_math_course1")),
covariablesMustExist = true,
grouping = Nil,
filters = None,
targetTable = Some(sampleTable),
algorithm = AlgorithmSpec("knn", List(CodeValue("k", "5")), None),
datasets = TreeSet(),
executionPlan = None
)
val response: QueryResult =
timedQuery(query, "mine data using k-NN algorithm")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/knn_data_mining.json")
// k-NN is not deterministic, cannot check exactly its results
val skippedTags = List("codebook")
save(approximate(json), "/responses/knn_data_mining.json")
assertResult(approximate(expected, skippedTags))(
approximate(json, skippedTags))
}
"uses a histogram [visualisation, highcharts]" in {
val query = MiningQuery(
user = UserId("test1"),
variables = List(VariableId("cognitive_task2")),
covariables =
List("score_math_course1", "score_math_course2").map(VariableId),
covariablesMustExist = true,
grouping = Nil,
filters = None,
targetTable = Some(sampleTable),
algorithm = AlgorithmSpec("histograms", Nil, None),
datasets = TreeSet(),
executionPlan = None
)
val response: QueryResult =
timedQuery(query, "mine data using a histogram")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/histograms.json")
save(approximate(json), "/responses/histograms.json")
assertResult(approximate(expected))(approximate(json))
}
"uses a summary statistics algorithm [visualisation, tabular results]" in {
val query = MiningQuery(
user = UserId("test1"),
variables = List(VariableId("cognitive_task2")),
covariables = List(),
covariablesMustExist = true,
grouping = Nil,
filters = None,
targetTable = Some(sampleTable),
algorithm = AlgorithmSpec("statisticsSummary", Nil, None),
datasets = TreeSet(),
executionPlan = None
)
val response: QueryResult =
timedQuery(query, "mine data using summary statistics algorithm")
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/summary_statistics.json")
save(approximate(json), "/responses/summary_statistics.json")
assertResult(approximate(expected))(approximate(json))
}
"uses t-SNE [visualisation, highcharts]" in {
val query = MiningQuery(
user = UserId("test1"),
variables = List(VariableId("cognitive_task2")),
covariables =
List("score_math_course1", "score_math_course2").map(VariableId),
covariablesMustExist = true,
grouping = Nil,
filters = None,
targetTable = Some(sampleTable),
algorithm = AlgorithmSpec("tSNE", Nil, None),
datasets = TreeSet(),
executionPlan = None
)
val response: QueryResult = timedQuery(query, "mine data using t-SNE")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
// t-SNE is not deterministic, cannot check exactly its results
val skippedTags = List("series")
val expected = loadJson("/responses/tsne_data_mining.json")
save(approximate(json), "/responses/tsne_data_mining.json")
assertResult(approximate(expected, skippedTags))(
approximate(json, skippedTags))
}
"uses correlation heatmap [visualisation, plotly.js]" in {
val query = MiningQuery(
user = UserId("test1"),
variables = List(VariableId("cognitive_task2")),
covariables =
List("score_math_course1", "score_math_course2").map(VariableId),
covariablesMustExist = true,
grouping = Nil,
filters = None,
targetTable = Some(sampleTable),
algorithm = AlgorithmSpec("correlationHeatmap", Nil, None),
datasets = TreeSet(),
executionPlan = None
)
val response: QueryResult =
timedQuery(query, "mine data using correlation heatmap")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected =
loadJson("/responses/correlation_heatmap_data_mining.json")
save(approximate(json),
"/responses/correlation_heatmap_data_mining.json")
assertResult(approximate(expected))(approximate(json))
}
"uses PCA [visualisation, plotly.js]" in {
val query = MiningQuery(
user = UserId("test1"),
variables = List(VariableId("cognitive_task2")),
covariables =
List("score_math_course1", "score_math_course2").map(VariableId),
covariablesMustExist = true,
grouping = Nil,
filters = None,
targetTable = Some(sampleTable),
algorithm = AlgorithmSpec("pca", Nil, None),
datasets = TreeSet(),
executionPlan = None
)
val response: QueryResult =
timedQuery(query, "mine data using PCA")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected =
loadJson("/responses/pca_data_mining.json")
save(approximate(json), "/responses/pca_data_mining.json")
assertResult(approximate(expected))(approximate(json))
}
"uses TAU ggparci [visualisation, svg]" in {
val query = MiningQuery(
user = UserId("test1"),
variables = List(VariableId("cognitive_task2")),
covariables =
List("score_math_course1", "score_math_course2").map(VariableId),
covariablesMustExist = true,
grouping = Nil,
filters = None,
targetTable = Some(sampleTable),
algorithm = AlgorithmSpec("ggparci", Nil, None),
datasets = TreeSet(),
executionPlan = None
)
val response: QueryResult =
timedQuery(query, "mine data using TAU ggparsi")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/ggparci_data_mining.json")
save(approximate(json), "/responses/ggparci_data_mining.json")
assertResult(approximate(expected))(approximate(json))
}
"uses TAU heatmaply [visualisation, plotly.js]" in {
val query = MiningQuery(
user = UserId("test1"),
variables = List(VariableId("cognitive_task2")),
covariables =
List("score_math_course1", "score_math_course2").map(VariableId),
covariablesMustExist = true,
grouping = Nil,
filters = None,
targetTable = Some(sampleTable),
algorithm = AlgorithmSpec("heatmaply", Nil, None),
datasets = TreeSet(),
executionPlan = None
)
val response: QueryResult =
timedQuery(query, "mine data using TAU heatmaply")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/heatmaply_data_mining.json")
def cleanMore(s: String): String =
s.replaceAll(""" id=\\".*?\\"""", """ id=\\"\\"""")
.replaceAll(""" data-for=\\".*?\\"""", """ data-for=\\"\\"""")
.replaceAll("""\\"attrs\\":\{.*</script>""",
"""\"attrs\":{}}]}}</script>""")
save(approximate(json), "/responses/heatmaply_data_mining.json")
assertResult(cleanMore(approximate(expected)))(
cleanMore(approximate(json)))
}
"uses JSI Hedwig [visualisation, text]" in {
val query = MiningQuery(
user = UserId("test1"),
variables = List(VariableId("cognitive_task2")),
covariables =
List("score_math_course1", "score_math_course2").map(VariableId),
covariablesMustExist = true,
grouping = Nil,
filters = None,
targetTable = Some(sampleTable),
algorithm = AlgorithmSpec("hedwig", Nil, None),
datasets = TreeSet(),
executionPlan = None
)
val response: QueryResult =
timedQuery(query, "mine data using JSI Hedwig")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/hedwig_data_mining.json")
def cleanMore(s: String): String =
s.replaceAll("""Start: .*?\\n""", """Start:\\n""")
.replaceAll("""Time taken: .*?\\n""", """Time taken:\\n""")
.replaceAll("""bk_dir=.*?\\n""", """bk_dir=\\n""")
save(approximate(json), "/responses/hedwig_data_mining.json")
assertResult(cleanMore(approximate(expected)))(
cleanMore(approximate(json)))
}
"uses JSI hinmine [feature generation, tabular results]" in {
val query = MiningQuery(
user = UserId("test1"),
variables = List(VariableId("cognitive_task2")),
covariables =
List("score_math_course1", "score_math_course2").map(VariableId),
covariablesMustExist = true,
grouping = Nil,
filters = None,
targetTable = Some(sampleTable),
algorithm = AlgorithmSpec("hinmine", Nil, None),
datasets = TreeSet(),
executionPlan = None
)
val response: QueryResult =
timedQuery(query, "mine data using JSI hinmine")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/hinmine_data_mining.json")
save(approximate(json), "/responses/hinmine_data_mining.json")
assertResult(approximate(expected))(approximate(json))
}
}
"respond to an experiment query," which {
// Test experiment query
"executes a k-NN algorithm" in {
val query =
experimentQuery("knn", parameters = List(CodeValue("k", "5")))
val response: QueryResult =
timedQuery(query, "an experiment with k-NN algorithm")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/knn_experiment.json")
save(approximate(json), "/responses/knn_experiment.json")
assertResult(approximate(expected))(approximate(json))
}
"executes Linear regression and Anova algorithms" in {
val query = multipleExperimentQuery(
List(AlgorithmSpec("linearRegression", Nil, None),
AlgorithmSpec("anova", Nil, None)))
val response: QueryResult =
timedQuery(query, "an experiment with Linear regression algorithm")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/lr_and_anova_experiment.json")
save(approximate(json), "/responses/lr_and_anova_experiment.json")
assertResult(approximate(expected))(approximate(json))
}
"executes a Naive Bayes algorithm" in {
val query = experimentQuery(
"naiveBayes",
parameters = List(),
variables = List(VariableId("alzheimerbroadcategory")),
covariables = List(VariableId("lefthippocampus")),
targetTable = Some(cdeFeaturesMixedTableId)
)
val response: QueryResult =
timedQuery(query, "an experiment with Naive Bayes algorithm")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/naive_bayes_experiment.json")
save(approximate(json), "/responses/naive_bayes_experiment.json")
assertResult(approximate(expected))(approximate(json))
}
"executes a SGD Linear Model algorithm" in {
val query = experimentQuery(
"sgdLinearModel",
parameters =
List(CodeValue("alpha", "0.25"), CodeValue("penalty", "l1")),
variables = List(VariableId("alzheimerbroadcategory")),
covariables = List(VariableId("lefthippocampus")),
targetTable = Some(cdeFeaturesMixedTableId)
)
val response: QueryResult =
timedQuery(query, "an experiment with SGD Linear Model algorithm")
response.error shouldBe empty
response.data should not be empty
// SGD Linear Model is not deterministic, cannot check exactly its results
val skippedTags = List("model", "validations", "metadata")
val json = response.toJson
val expected = loadJson("/responses/sgd_linear_model_experiment.json")
save(approximate(json), "/responses/sgd_linear_model_experiment.json")
assertResult(approximate(expected, skippedTags))(
approximate(json, skippedTags))
}
"executes a SGD Neural Network algorithm" in {
val query = experimentQuery(
"sgdNeuralNetwork",
parameters = List(CodeValue("hidden_layer_sizes", "60,30"),
CodeValue("activation", "tanh")),
variables = List(VariableId("alzheimerbroadcategory")),
covariables = List(VariableId("lefthippocampus")),
targetTable = Some(cdeFeaturesMixedTableId)
)
val response: QueryResult =
timedQuery(query, "an experiment with SGD Neural Network algorithm")
response.error shouldBe empty
response.data should not be empty
// SGD Neural Network is not deterministic, cannot check exactly its results
val skippedTags = List("neuralnet", "validations", "metadata")
val json = response.toJson
val expected = loadJson("/responses/sgd_neural_network_experiment.json")
save(approximate(json), "/responses/sgd_neural_network_experiment.json")
assertResult(approximate(expected, skippedTags))(
approximate(json, skippedTags))
}
"executes a Gradient Boosting algorithm" in {
val query = experimentQuery(
"gradientBoosting",
parameters = List(CodeValue("learning_rate", "0.15"),
CodeValue("max_depth", "4")),
variables = List(VariableId("alzheimerbroadcategory")),
covariables = List(VariableId("lefthippocampus")),
targetTable = Some(cdeFeaturesMixedTableId)
)
val response: QueryResult =
timedQuery(query, "an experiment with Gradient Boosting algorithm")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/gradient_boosting_experiment.json")
save(approximate(json), "/responses/gradient_boosting_experiment.json")
assertResult(approximate(expected))(approximate(json))
}
}
// Test resiliency
"recover from multiple failed experiments" taggedAs Slow in {
// TODO: add never_end
val failures = List("training_fails",
"invalid_json",
"invalid_pfa_syntax",
"invalid_pfa_semantics",
"no_results")
val queries = failures.map(failure =>
experimentQuery("chaos", List(CodeValue("failure", failure))))
val futures = queries.map(
query =>
mediator ? DistributedPubSubMediator
.Send(entryPoint, query, localAffinity = true))
futures.foreach { f =>
logger.info("Waiting for result from chaos algorithm...")
val result = waitFor[QueryResult](f)
if (result.isFailure) {
logger.info(s"Chaos algorithm failed with ${result.failed.get}")
} else {
logger.info(s"Chaos algorithm returned ${result.success.value}")
}
}
val knnQuery = experimentQuery("knn", List(CodeValue("k", "5")))
val response: QueryResult =
timedQuery(knnQuery, "an experiment with k-NN algorithm")
response.error shouldBe empty
response.data should not be empty
val json = response.toJson
val expected = loadJson("/responses/knn_experiment.json")
assertResult(approximate(expected))(approximate(json))
}
}
private def waitFor[T](future: Future[Any])(
implicit timeout: Timeout): Try[T] = {
Try {
Await.result(future, timeout.duration).asInstanceOf[T]
}
}
private def timedQuery[R](query: Any, description: String): R = {
val span = Kamon.buildSpan(description.replaceAll(" ", "-")).start()
val start = System.currentTimeMillis()
val future = Kamon.withSpan(span) {
mediator ? DistributedPubSubMediator.Send(entryPoint,
query,
localAffinity = false)
}
val result = waitFor[R](future)
val end = System.currentTimeMillis()
logger.info(
s"Query for $description complete in " + Duration(end - start,
TimeUnit.MILLISECONDS))
if (!result.isSuccess) {
logger.error(result.toString)
}
assert(result.isSuccess, "Query returned a failure")
span.finish()
result.success.value
}
}
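
Stripped of assertions and Kamon instrumentation, the harness above reduces to one move: ask the cluster entry point through the distributed pub-sub mediator and block on the reply. A minimal standalone sketch, assuming a reachable woken cluster with the same /user/entrypoint registration (cluster configuration and serialization setup omitted):

import akka.actor.ActorSystem
import akka.cluster.pubsub.{DistributedPubSub, DistributedPubSubMediator}
import akka.pattern.ask
import akka.util.Timeout
import ch.chuv.lren.woken.messages.query.MethodsQuery
import scala.concurrent.Await
import scala.concurrent.duration._

object MediatorAskSketch extends App {
  implicit val system: ActorSystem = ActorSystem("woken")
  implicit val timeout: Timeout = Timeout(30.seconds)
  val mediator = DistributedPubSub(system).mediator
  // Point-to-point send to the registered entry point actor, then block on
  // the reply, mirroring the waitFor/timedQuery helpers above.
  val future = mediator ? DistributedPubSubMediator.Send(
    "/user/entrypoint", MethodsQuery, localAffinity = false)
  println(Await.result(future, timeout.duration))
  system.terminate()
}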
|
HBPSP8Repo/workflow
|
tests/woken-test/src/test/scala/ch/chuv/lren/woken/test/WokenAkkaAPITest.scala
|
Scala
|
apache-2.0
| 23,976
|
package akka.ainterface.datatype.interpolation
import akka.ainterface.datatype.{ErlAtom, ErlTerm}
import scala.language.experimental.macros
final class ErlTermStringContext(val context: StringContext) extends AnyVal {
def erl(args: ErlTerm*): ErlTerm = macro ErlTermInterpolationMacro.erlImpl
}
final class ErlAtomStringContext(val context: StringContext) extends AnyVal {
def atom(args: String*): ErlAtom = {
context.checkLengths(args)
val pi = context.parts.iterator
val ai = args.iterator
val sb = StringBuilder.newBuilder.append(pi.next())
while (pi.hasNext) {
sb.append(ai.next())
sb.append(pi.next())
}
ErlAtom(sb.result())
}
}
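
A hypothetical usage sketch for the atom interpolator above. The real project presumably provides the StringContext conversion in a package object (not shown in this file), so it is spelled out explicitly here to keep the sketch self-contained.

import scala.language.implicitConversions
import akka.ainterface.datatype.ErlAtom
import akka.ainterface.datatype.interpolation.ErlAtomStringContext

object AtomInterpolationExample extends App {
  // Brings the atom"..." syntax into scope; an assumption about where the
  // project defines this conversion, shown inline for illustration.
  implicit def toErlAtomStringContext(sc: StringContext): ErlAtomStringContext =
    new ErlAtomStringContext(sc)

  val node = "node1"
  val a: ErlAtom = atom"worker_$node" // ErlAtom("worker_node1")
  println(a)
}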
|
ainterface/ainterface
|
ainterface/src/main/scala/akka/ainterface/datatype/interpolation/ErlTermStringContext.scala
|
Scala
|
apache-2.0
| 683
|
package com.blogspot.ramannanda.scala.algorithms.easy
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{FlatSpec, Matchers}
/**
* Created by Ramandeep Singh on 7/17/17.
*/
class SumIntegerPairs$Test extends FlatSpec with Matchers with LazyLogging {
val data = Array(1, 5, 4, 9, 8, 10)
s"testFindPairForSum for array ${data.toSeq}" should "find (1,4) as the pair for getting 5" in {
SumIntegerPairs.findPairForSum(data,5) should be ((1,4))
}
it should "find (1,8) as the pair for getting 9" in {
SumIntegerPairs.findPairForSum(data,9) should be ((1,8))
}
it should "throw an RuntimeException for getting 4" in {
a[RuntimeException] should be thrownBy SumIntegerPairs.findPairForSum(data,4)
}
}
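
The object under test is not part of this file. A sketch consistent with the expectations above -- pairs scanned in index order so the pair with the earliest first element wins, and a RuntimeException when no pair matches -- might look like this (an illustration, not the repository's actual code):

object SumIntegerPairs {
  // Lazily enumerate ordered index pairs (i, j) with i < j; for target 5
  // this yields (1, 4) first, and for target 9 it yields (1, 8) first.
  def findPairForSum(data: Array[Int], sum: Int): (Int, Int) = {
    val pairs = for {
      i <- data.indices.iterator
      j <- (i + 1 until data.length).iterator
      if data(i) + data(j) == sum
    } yield (data(i), data(j))
    if (pairs.hasNext) pairs.next()
    else throw new RuntimeException(s"no pair in the array sums to $sum")
  }
}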
|
ramannanda9/algorithms-in-scala
|
src/test/scala/com/blogspot/ramannanda/scala/algorithms/easy/SumIntegerPairs$Test.scala
|
Scala
|
gpl-3.0
| 754
|
package pl.pholda.malpompaaligxilo.dsl.expr.date
import pl.pholda.malpompaaligxilo.dsl.DslFormExpr
import pl.pholda.malpompaaligxilo.form.FormInstance
import pl.pholda.malpompaaligxilo.util.Date
case class DateFromString(string: DslFormExpr[Any]) extends DslFormExpr[Date] {
override def apply(formInstance: FormInstance[_]): Date = {
val stringDate = string(formInstance) match {
case Some(s: String) => s
case s: String => s
      case x => throw new Exception(s"cannot execute dateFromString on a non-string value! (passed value: $x)")
}
formInstance.dates.fromString(stringDate)
}
}
|
pholda/MalpompaAligxilo
|
dsl/shared/src/main/scala/pl/pholda/malpompaaligxilo/dsl/expr/date/DateFromString.scala
|
Scala
|
gpl-3.0
| 607
|
package net.iakovlev.dynamo.generic.test
import net.iakovlev.dynamo.generic.AwsAttributeValueDecoder
import org.specs2.mutable.Specification
import scala.collection.JavaConverters._
class DecodeMapAsMapTest
extends Specification
with AwsAttributeValueDecoder
with TestBase {
"Decode map field as a simple map, not nested class" >> {
// TODO has to be Map[String, F[_]], because of lack of Optional type class in Cats
case class MapHostString(m: Map[String, String])
val res = awsDecoder[MapHostString](
Map("m" -> attr().m(Map("hello" -> attr("world")).asJava).build()))
res must beRight(MapHostString(Map("hello" -> "world")))
case class MapHostInt(m: Map[String, Int])
val res1 = awsDecoder[MapHostInt](
Map("m" -> attr().m(Map("hello" -> attr().n("123").build()).asJava).build()))
res1 must beRight(MapHostInt(Map("hello" -> 123)))
}
}
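
The attr helpers come from the TestBase trait, which is not shown here. Judging by the builder calls above, they presumably wrap the AWS SDK v2 AttributeValue builder, roughly as below; this is an assumption about the project's helpers, not a copy of them.

import software.amazon.awssdk.services.dynamodb.model.AttributeValue

trait TestBaseSketch {
  // attr() exposes a raw builder for callers that set .m(...) or .n(...);
  // attr(s) is shorthand for a string-typed attribute value.
  def attr(): AttributeValue.Builder = AttributeValue.builder()
  def attr(s: String): AttributeValue = AttributeValue.builder().s(s).build()
}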
|
RomanIakovlev/easycodecs
|
aws-dynamodb-v2-bindings/src/test/scala/net/iakovlev/dynamo/generic/test/DecodeMapAsMapTest.scala
|
Scala
|
apache-2.0
| 898
|