code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
def greet(greeting: String, name: String, surname: String) =
greeting + " " + name + " " + surname + "!"
| grzegorzbalcerek/scala-book-examples | examples/Functions6.scala | Scala | mit | 107 |
object E4 extends Application {
var max = 0
for (x <- 999 to (100, -1); y <- 999 to (100, -1))
if ((x * y).toString == (x * y).toString.reverse)
max = Math.max(max, x * y)
println(max)
}
| rickyclarkson/euler-java | E4.scala | Scala | apache-2.0 | 204 |
package intellij.haskell.ui
import java.awt.BorderLayout
import com.intellij.openapi.ui.DialogWrapper
import javax.swing.{JComponent, JLabel, JPanel, JTextField}
class EnterNameDialog(prompt: String, suggestion: String = "") extends DialogWrapper(true) {
private val textField = if (suggestion.isEmpty) new JTextField(10) else new JTextField(suggestion)
init()
setTitle(prompt)
override def createCenterPanel(): JComponent = {
val dialogPanel: JPanel = new JPanel(new BorderLayout)
val label: JLabel = new JLabel(prompt)
dialogPanel.add(label, BorderLayout.NORTH)
dialogPanel.add(textField, BorderLayout.SOUTH)
dialogPanel
}
override def getPreferredFocusedComponent: JComponent = textField
def getName: String = textField.getText
}
| rikvdkleij/intellij-haskell | src/main/scala/intellij/haskell/ui/EnterNameDialog.scala | Scala | apache-2.0 | 776 |
package mesosphere.marathon.api.v2.json
import mesosphere.marathon.Protos.MarathonTask
import mesosphere.marathon.health.HealthCheckActor.Health
case class EnrichedTask(appId: String, task: MarathonTask, healthCheckResults : Seq[Option[Health]])
| MiLk/marathon | src/main/scala/mesosphere/marathon/api/v2/json/EnrichedTask.scala | Scala | apache-2.0 | 248 |
package reactivemongo.api.gridfs
object `package` {
private[gridfs] val logger =
reactivemongo.util.LazyLogger("reactivemongo.api.gridfs")
}
| ReactiveMongo/ReactiveMongo | driver/src/main/scala/api/gridfs/package.scala | Scala | apache-2.0 | 148 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow, analysis}
import org.apache.spark.sql.types.{StructField, StructType}
object LocalRelation {
def apply(output: Attribute*): LocalRelation = new LocalRelation(output)
def apply(output1: StructField, output: StructField*): LocalRelation = {
new LocalRelation(StructType(output1 +: output).toAttributes)
}
def fromExternalRows(output: Seq[Attribute], data: Seq[Row]): LocalRelation = {
val schema = StructType.fromAttributes(output)
val converter = CatalystTypeConverters.createToCatalystConverter(schema)
LocalRelation(output, data.map(converter(_).asInstanceOf[InternalRow]))
}
def fromProduct(output: Seq[Attribute], data: Seq[Product]): LocalRelation = {
val schema = StructType.fromAttributes(output)
val converter = CatalystTypeConverters.createToCatalystConverter(schema)
LocalRelation(output, data.map(converter(_).asInstanceOf[InternalRow]))
}
}
case class LocalRelation(output: Seq[Attribute], data: Seq[InternalRow] = Nil)
extends LeafNode with analysis.MultiInstanceRelation {
/**
* Returns an identical copy of this relation with new exprIds for all attributes. Different
* attributes are required when a relation is going to be included multiple times in the same
* query.
*
* 返回此关系的相同副本,其中包含所有属性的新exprIds,
* 当要在同一查询中多次包含关系时,需要不同的属性。
*/
override final def newInstance(): this.type = {
LocalRelation(output.map(_.newInstance()), data).asInstanceOf[this.type]
}
override protected def stringArgs = Iterator(output)
override def sameResult(plan: LogicalPlan): Boolean = plan match {
case LocalRelation(otherOutput, otherData) =>
otherOutput.map(_.dataType) == output.map(_.dataType) && otherData == data
case _ => false
}
override lazy val statistics =
Statistics(sizeInBytes = output.map(_.dataType.defaultSize).sum * data.length)
}
| tophua/spark1.52 | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala | Scala | apache-2.0 | 2,963 |
package org.opencommercesearch.api.common
/*
* Licensed to OpenCommerceSearch under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. OpenCommerceSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import play.api.mvc.{AnyContent, Request}
import play.api.i18n.Lang
import play.api.i18n.Lang.preferred
import play.api.Play.current
import org.apache.solr.client.solrj.SolrQuery
import org.apache.solr.client.solrj.request.AbstractUpdateRequest
import org.opencommercesearch.api.Global._
import org.opencommercesearch.api.service.{StorageFactory, Storage}
import org.opencommercesearch.common.Context
trait ContentPreview {
val SupportedCountries = Seq("US", "CA")
val SupportedLanguages = Seq("en", "fr")
def withSuggestCollection(query: SolrQuery, preview: Boolean) : SolrQuery = {
query.setParam("collection", SuggestCollection)
}
def withSuggestCollection[T <: AbstractUpdateRequest](request: T) : T = {
request.setParam("collection", SuggestCollection)
request
}
def withNamespace[T](factory: StorageFactory[T])(implicit context: Context) : Storage[T] = {
var namespace = "public"
if (context.isPreview) {
namespace = "preview"
}
namespace += "_" + context.lang.language
factory.getInstance(namespace)
}
def withNamespace[R, T](factory: StorageFactory[T], preview: Boolean)(implicit req: Request[R]) : Storage[T] = {
var namespace = "public"
if (preview) {
namespace = "preview"
}
namespace += "_" + language(req.acceptLanguages)
factory.getInstance(namespace)
}
def withCategoryCollection(query: SolrQuery)(implicit context: Context) : SolrQuery = {
query.setParam("collection", getCategoryCollection(context.isPreview))
}
def withCategoryCollection[T <: AbstractUpdateRequest, R](request: T)(implicit context: Context) : T = {
request.setParam("collection", getCategoryCollection(context.isPreview))
request
}
private def getCategoryCollection(preview: Boolean) : String = {
var collection = CategoryPublicCollection
if (preview) {
collection = CategoryPreviewCollection
}
collection
}
def withRuleCollection(query: SolrQuery, preview: Boolean, acceptLanguages:Seq[Lang]) : SolrQuery = {
query.setParam("collection", getRuleCollection(preview, acceptLanguages))
}
def withRuleCollection[T <: AbstractUpdateRequest](request: T, preview: Boolean, acceptLanguages:Seq[Lang]) : T = {
request.setParam("collection", getRuleCollection(preview, acceptLanguages))
request
}
private def getRuleCollection(preview: Boolean, acceptLanguages:Seq[Lang]) : String = {
var collection = RulePublicCollection
if (preview) {
collection = RulePreviewCollection
}
collection = collection + "_" + language(acceptLanguages)
collection
}
def withFacetCollection(query: SolrQuery, preview: Boolean, acceptLanguages:Seq[Lang]) : SolrQuery = {
query.setParam("collection", getFacetCollection(preview, acceptLanguages))
}
def withFacetCollection[T <: AbstractUpdateRequest](request: T, preview: Boolean, acceptLanguages:Seq[Lang]) : T = {
request.setParam("collection", getFacetCollection(preview, acceptLanguages))
request
}
private def getFacetCollection(preview: Boolean, acceptLanguages:Seq[Lang]) : String = {
var collection = FacetPublicCollection
if (preview) {
collection = FacetPreviewCollection
}
collection = collection + "_" + language(acceptLanguages)
collection
}
private def country(acceptLanguages:Seq[Lang]) : String = {
preferred(acceptLanguages).country
}
private def language(acceptLanguages:Seq[Lang]) : String = {
preferred(acceptLanguages).language
}
}
| madickson/opencommercesearch | opencommercesearch-api/app/org/opencommercesearch/api/common/ContentPreview.scala | Scala | apache-2.0 | 4,346 |
package no.nrk.samnorsk.wikiextractor
import org.scalatest.{FlatSpec, Matchers}
import scala.io.Source
class WikiIteratorTest extends FlatSpec with Matchers {
"A WikiIterator" should "return the text of all articles" in {
val source = Source.fromInputStream(getClass.getResourceAsStream("/dump-a.json"))
val it = new WikiIterator(source)
it.toList should contain inOrder ("ba", "foo")
}
"A WikiIterator" should "take a limit" in {
val source = Source.fromInputStream(getClass.getResourceAsStream("/dump-a.json"))
val it = new WikiIterator(source, limit = Some(1))
it.toList shouldBe Seq("ba")
}
}
| nrkno/samnorsk | SynonymCreator/src/test/scala/no/nrk/samnorsk/wikiextractor/WikiIteratorTest.scala | Scala | mit | 635 |
// Compile with scalac -classpath .:/path/to/javaee-jars/\\* -deprecation Credentials.scala
// To check the Java annotations, run javap -verbose Credentials.class
import scala.annotation.target._
import scala.reflect.BeanProperty
import javax.persistence.Entity
import javax.persistence.Id
import javax.validation.constraints.NotNull
@Entity class Credentials(@NotNull @BeanProperty var username: String) {
// @NotNull is only applied to the constructor parameter, not to
// the getters/setters
def check(@NotNull password: String) {}
// @NotNull is applied to the method parameter
@BeanProperty @deprecated("Use check instead", "1.5") var pwd = ""
// @deprecated is applied to the Scala and bean getters/setters
@(Id @beanGetter) @BeanProperty var id = 0
// @Id is only applied to the bean getter
}
class Main extends App {
val creds = new Credentials("Fred")
creds.pwd = "secret" // Deprecation warning for Scala setter
println(creds.getPwd()) // Deprecation warning for bean getter
}
| yeahnoob/scala-impatient-2e-code | src/ch15/sec04/Credentials.scala | Scala | gpl-3.0 | 1,025 |
/*
* Copyright (c) 2013-2014, ARM Limited
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.arm.carp.pencil
/** Clone an expression, such that no duplicate xxxVariableRefs are introduced. */
object ExpressionCloner extends ExpressionWalker {
override def walkScalarVariable(in: ScalarVariableRef) = {
(new ScalarVariableRef(in.variable), None)
}
override def walkArrayVariable(in: ArrayVariableRef) = {
(new ArrayVariableRef(in.variable), None)
}
}
| Meinersbur/pencil | src/scala/com/arm/carp/pencil/ExpressionCloner.scala | Scala | mit | 1,510 |
package Tutorial
import Chisel._
import Node._
import Literal._
import scala.collection.mutable.HashMap
import scala.collection.mutable.ArrayBuffer
class multiProtocolEngine (extCompName:String) extends gComponentLeaf (() => new NP_EthMpl3Header_t)(() => new NP_EthMpl3Header_t) (ArrayBuffer(("ipv4Lookup1", () => UFix(width = 32) , () => UFix(width = 8)), ("ipv4Lookup2", () => UFix(width = 32) , () => UFix(width = 8)), ("qosCount", () => UFix(width = 32) , () => UFix(width = 8)))) (extCompName = extCompName + "__type__engine__MT__1__") with include {
val numOfThreads = 1
val NONE_SELECTED = UFix(numOfThreads,log2Up(numOfThreads+1))
val WaitForInputValid = UFix(0, 8)
val WaitForOutputReady = UFix(255, 8)
val WaitForReady = UFix(0, 1)
val WaitForValid = UFix(1, 1)
val inputTag = Vec(numOfThreads) {Reg(UFix(width=TAGWIDTH*2))}
val State = Vec(numOfThreads) {Reg(UFix(width=8), resetVal=WaitForInputValid)}
val EmitReturnState = Vec(numOfThreads) {Reg(UFix(width=8), resetVal=WaitForInputValid)}
//val outstandingOffs = Vec(numOfThreads) {Reg(resetVal=UFix(0, 5))}
val AllOffloadsReady = Bool()
val AllOffloadsValid = Vec(numOfThreads) {Bool()}
/*******************Thread states*********************************/
val subStateTh = Vec(numOfThreads) {Reg(resetVal=WaitForReady)}
def myOff = io.elements.find(_._1 == "off").getOrElse(elseV)._2
val ipv4Input = Vec (numOfThreads) {Reg(new IPv4Header_t)} //Global variable
val ipv4Output = Vec (numOfThreads) {Reg(new IPv4Header_t)} //Global variable
val gOutPort = Vec (numOfThreads) {Reg(UFix(width = 32))} //Global variable
val inputReg = Vec(numOfThreads) {Reg(new NP_EthMpl3Header_t)}
val outputReg = Vec(numOfThreads) {Reg(new NP_EthMpl3Header_t)}
def mymyOffipv4Lookup1 = myOff.asInstanceOf[Bundle].elements.find(_._1 == "ipv4Lookup1").getOrElse(elseV)._2
val ipv4Lookup1Port = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 8))
ipv4Lookup1Port <> mymyOffipv4Lookup1
def mymyOffipv4Lookup2 = myOff.asInstanceOf[Bundle].elements.find(_._1 == "ipv4Lookup2").getOrElse(elseV)._2
val ipv4Lookup2Port = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 8))
ipv4Lookup2Port <> mymyOffipv4Lookup2
def mymyOffqosCount = myOff.asInstanceOf[Bundle].elements.find(_._1 == "qosCount").getOrElse(elseV)._2
val qosCountPort = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 8))
qosCountPort <> mymyOffqosCount
val GS_ETHERNET = UFix(1)
val GS_IPV4 = UFix(2)
val GS_LOOKUP = UFix(3)
val GS_UPDATE = UFix(4)
val GS_EXCEPTION = UFix(5)
/******************Winner threads*********************************/
val rThreadEncoder = new RREncode (numOfThreads)
val rThread = rThreadEncoder.io.chosen
Range(0, numOfThreads, 1).map(i =>
rThreadEncoder.io.valid(i) := (subStateTh(i) === WaitForReady))
rThreadEncoder.io.ready := (rThread != NONE_SELECTED)
val vThreadEncoder = new RREncode (numOfThreads)
val vThread = vThreadEncoder.io.chosen
Range(0, numOfThreads, 1).map(i =>
vThreadEncoder.io.valid(i) := (subStateTh(i) === WaitForValid) && AllOffloadsValid(i))
vThreadEncoder.io.ready := vThread != NONE_SELECTED
val sThreadEncoder = new RREncode (numOfThreads)
val sThread = sThreadEncoder.io.chosen
Range(0, numOfThreads, 1).map(i => sThreadEncoder.io.valid(i) := (subStateTh(i) === WaitForReady) && (State(i) === WaitForInputValid))
sThreadEncoder.io.ready := sThread != NONE_SELECTED
Range(0, numOfThreads, 1).foreach(i => subStateTh(i) := MuxCase(subStateTh(i), Seq((AllOffloadsReady && UFix(i) === rThread && State(i) != WaitForInputValid && State(i) != WaitForOutputReady , WaitForValid), (UFix(i) === vThread, WaitForReady))))
ipv4Lookup1Port.rep.ready := Bool(true)
ipv4Lookup2Port.rep.ready := Bool(true)
qosCountPort.rep.ready := Bool(true)
/******************Ready stage handler************************/
val ipv4Lookup1PortHadReadyRequest = Reg(resetVal=Bool(false))
val ipv4Lookup1_ready_received = Reg(resetVal=Bool(false))
val ipv4Lookup2PortHadReadyRequest = Reg(resetVal=Bool(false))
val ipv4Lookup2_ready_received = Reg(resetVal=Bool(false))
val qosCountPortHadReadyRequest = Reg(resetVal=Bool(false))
val qosCount_ready_received = Reg(resetVal=Bool(false))
AllOffloadsReady :=
(ipv4Lookup1Port.req.ready || ipv4Lookup1_ready_received || (!ipv4Lookup1PortHadReadyRequest && !ipv4Lookup1Port.req.valid)) &&
(ipv4Lookup2Port.req.ready || ipv4Lookup2_ready_received || (!ipv4Lookup2PortHadReadyRequest && !ipv4Lookup2Port.req.valid)) &&
(qosCountPort.req.ready || qosCount_ready_received || (!qosCountPortHadReadyRequest && !qosCountPort.req.valid)) &&
Bool(true)
ipv4Lookup1_ready_received := !(AllOffloadsReady) && (ipv4Lookup1_ready_received || ipv4Lookup1Port.req.ready)
ipv4Lookup1PortHadReadyRequest := !AllOffloadsReady && (ipv4Lookup1PortHadReadyRequest || ipv4Lookup1Port.req.valid)
ipv4Lookup2_ready_received := !(AllOffloadsReady) && (ipv4Lookup2_ready_received || ipv4Lookup2Port.req.ready)
ipv4Lookup2PortHadReadyRequest := !AllOffloadsReady && (ipv4Lookup2PortHadReadyRequest || ipv4Lookup2Port.req.valid)
qosCount_ready_received := !(AllOffloadsReady) && (qosCount_ready_received || qosCountPort.req.ready)
qosCountPortHadReadyRequest := !AllOffloadsReady && (qosCountPortHadReadyRequest || qosCountPort.req.valid)
/******************Valid stage handler************************/
val ipv4Lookup1PortHadValidRequest = Vec(numOfThreads) {Reg(resetVal=Bool(false))}
val ipv4Lookup1_valid_received = Vec(numOfThreads) {Reg(resetVal=Bool(false))}
val ipv4Lookup2PortHadValidRequest = Vec(numOfThreads) {Reg(resetVal=Bool(false))}
val ipv4Lookup2_valid_received = Vec(numOfThreads) {Reg(resetVal=Bool(false))}
val qosCountPortHadValidRequest = Vec(numOfThreads) {Reg(resetVal=Bool(false))}
val qosCount_valid_received = Vec(numOfThreads) {Reg(resetVal=Bool(false))}
for (i <- 0 to numOfThreads-1) {
AllOffloadsValid(i) :=
((ipv4Lookup1Port.rep.valid && (ipv4Lookup1Port.rep.tag === UFix(i, 5)))|| ipv4Lookup1_valid_received(i) || !ipv4Lookup1PortHadValidRequest(i)) &&
((ipv4Lookup2Port.rep.valid && (ipv4Lookup2Port.rep.tag === UFix(i, 5)))|| ipv4Lookup2_valid_received(i) || !ipv4Lookup2PortHadValidRequest(i)) &&
((qosCountPort.rep.valid && (qosCountPort.rep.tag === UFix(i, 5)))|| qosCount_valid_received(i) || !qosCountPortHadValidRequest(i)) &&
Bool(true)
ipv4Lookup1_valid_received(i) := !(vThread === UFix(i, 5)) && ((ipv4Lookup1_valid_received(i)) || (ipv4Lookup1Port.rep.valid && ipv4Lookup1Port.rep.tag === UFix(i, 5)))
ipv4Lookup1PortHadValidRequest(i) := !(vThread === UFix(i,5)) && (ipv4Lookup1PortHadValidRequest(i) || (UFix(i,5)===rThread && ipv4Lookup1Port.req.valid)/*(ipv4Lookup1PortHadReadyRequest && AllOffloadsReady && (UFix(i,5) === rThread))*/)
ipv4Lookup2_valid_received(i) := !(vThread === UFix(i, 5)) && ((ipv4Lookup2_valid_received(i)) || (ipv4Lookup2Port.rep.valid && ipv4Lookup2Port.rep.tag === UFix(i, 5)))
ipv4Lookup2PortHadValidRequest(i) := !(vThread === UFix(i,5)) && (ipv4Lookup2PortHadValidRequest(i) || (UFix(i,5)===rThread && ipv4Lookup2Port.req.valid)/*(ipv4Lookup2PortHadReadyRequest && AllOffloadsReady && (UFix(i,5) === rThread))*/)
qosCount_valid_received(i) := !(vThread === UFix(i, 5)) && ((qosCount_valid_received(i)) || (qosCountPort.rep.valid && qosCountPort.rep.tag === UFix(i, 5)))
qosCountPortHadValidRequest(i) := !(vThread === UFix(i,5)) && (qosCountPortHadValidRequest(i) || (UFix(i,5)===rThread && qosCountPort.req.valid)/*(qosCountPortHadReadyRequest && AllOffloadsReady && (UFix(i,5) === rThread))*/)
}
val outPort = ipv4Lookup1Port.rep.bits
val srcLookupResult = ipv4Lookup2Port.rep.bits
val qcOutput = qosCountPort.rep.bits
ipv4Lookup1Port.req.tag := rThread
ipv4Lookup1Port.req.valid := (rThread != NONE_SELECTED) && !ipv4Lookup1_valid_received(rThread) && ( (rThread != NONE_SELECTED && State(rThread) === GS_LOOKUP))
ipv4Lookup1Port.req.bits := MuxCase(UFix(0, 32),Seq( ((rThread != NONE_SELECTED && State(rThread) === GS_LOOKUP),ipv4Input(rThread).dstAddr)))
ipv4Lookup2Port.req.tag := rThread
ipv4Lookup2Port.req.valid := (rThread != NONE_SELECTED) && !ipv4Lookup2_valid_received(rThread) && ( (rThread != NONE_SELECTED && State(rThread) === GS_LOOKUP))
ipv4Lookup2Port.req.bits := MuxCase(UFix(0, 32),Seq( ((rThread != NONE_SELECTED && State(rThread) === GS_LOOKUP),ipv4Input(rThread).srcAddr)))
qosCountPort.req.tag := rThread
qosCountPort.req.valid := (rThread != NONE_SELECTED) && !qosCount_valid_received(rThread) && ( (rThread != NONE_SELECTED && State(rThread) === GS_UPDATE))
qosCountPort.req.bits := MuxCase(UFix(0, 32),Seq( ((rThread != NONE_SELECTED && State(rThread) === GS_UPDATE),ipv4Input(rThread).srcAddr)))
when (sThread != NONE_SELECTED && io.in.valid) {
inputReg(sThread) := io.in.bits
inputTag(sThread) := io.in.tag
State(sThread) := GS_ETHERNET
}
when (rThread != NONE_SELECTED && State(rThread) === WaitForOutputReady && io.out.ready) {
State(rThread) := EmitReturnState(rThread)
}
when (vThread != NONE_SELECTED &&State(vThread) === GS_ETHERNET){
ipv4Input(vThread):=(new IPv4Header_t).fromBits(Bits(0, width=(new IPv4Header_t).getWidth) | (inputReg(vThread).l3).toBits)
outputReg(vThread):=inputReg(vThread)
when (inputReg(vThread).l2Protocol===ETHERNET) {
State(vThread):=GS_IPV4
}
.otherwise {
State(vThread):=GS_EXCEPTION
}
}
when (vThread != NONE_SELECTED &&State(vThread) === GS_IPV4){
when (inputReg(vThread).eth.l3Type===IPV4) {
State(vThread):=GS_LOOKUP
ipv4Output(vThread):=ipv4Input(vThread)
}
.otherwise {
State(vThread):=GS_EXCEPTION
}
when (ipv4Input(vThread).length<UFix(20, width = 32)||ipv4Input(vThread).version!=UFix(4, width = 32)) {
State(vThread):=GS_EXCEPTION
}
}
when (vThread != NONE_SELECTED &&State(vThread) === GS_LOOKUP){
outputReg(vThread).outPort:=outPort+srcLookupResult
gOutPort(vThread):=outPort
State(vThread):=GS_UPDATE
}
when (vThread != NONE_SELECTED &&State(vThread) === GS_UPDATE){
outputReg(vThread).outPort:=gOutPort(vThread)+qcOutput
ipv4Output(vThread).ttl:=ipv4Input(vThread).ttl-UFix(1, width = 32)
ipv4Output(vThread).chksum:=ipv4Input(vThread).chksum+UFix(128, width = 32)
outputReg(vThread).l3:=(new mpl3Header_t).fromBits(Bits(0, width=(new mpl3Header_t).getWidth) | (ipv4Output(vThread)).toBits)
EmitReturnState(vThread) := WaitForInputValid
State(vThread) := WaitForOutputReady
}
when (vThread != NONE_SELECTED &&State(vThread) === GS_EXCEPTION){
outputReg(vThread).outPort:=CONTROL_PLANE
EmitReturnState(vThread) := WaitForInputValid
State(vThread) := WaitForOutputReady
}
io.out.tag := inputTag(rThread)
io.out.bits := outputReg(rThread)
io.out.valid := rThread != NONE_SELECTED && State(rThread) === WaitForOutputReady
io.in.ready := sThread != NONE_SELECTED
/******************Engine specific performance counters************************/
val IsPcReset =
io.pcIn.valid && io.pcIn.bits.request && io.pcIn.bits.pcType === Pcounters.pcReset
var portId = 3
when (IsPcReset) {
engineUtilization := UFix(0, Pcounters.PCWIDTH)
} .otherwise {
when (State(0) != WaitForInputValid) {
engineUtilization := engineUtilization +
UFix(1, Pcounters.PCWIDTH)
}
}
for ((n, i) <- ioOff.elements) {
if (n == "ipv4Lookup1") {
when (IsPcReset) {
offloadRateArray(portId-3) := UFix(0, Pcounters.PCWIDTH)
} .elsewhen (i.asInstanceOf[gOffBundle[Bundle, Bundle]].req.ready &&
(ipv4Lookup1PortHadValidRequest(0) || ipv4Lookup1Port.req.valid) && !pcPaused) {
offloadRateArray(portId-3) := offloadRateArray(portId-3) + UFix(1, Pcounters.PCWIDTH)
}
}
if (n == "ipv4Lookup2") {
when (IsPcReset) {
offloadRateArray(portId-3) := UFix(0, Pcounters.PCWIDTH)
} .elsewhen (i.asInstanceOf[gOffBundle[Bundle, Bundle]].req.ready &&
(ipv4Lookup2PortHadValidRequest(0) || ipv4Lookup2Port.req.valid) && !pcPaused) {
offloadRateArray(portId-3) := offloadRateArray(portId-3) + UFix(1, Pcounters.PCWIDTH)
}
}
if (n == "qosCount") {
when (IsPcReset) {
offloadRateArray(portId-3) := UFix(0, Pcounters.PCWIDTH)
} .elsewhen (i.asInstanceOf[gOffBundle[Bundle, Bundle]].req.ready &&
(qosCountPortHadValidRequest(0) || qosCountPort.req.valid) && !pcPaused) {
offloadRateArray(portId-3) := offloadRateArray(portId-3) + UFix(1, Pcounters.PCWIDTH)
}
}
portId = portId + 1
}
}
| seyedmaysamlavasani/GorillaPP | apps/multiProtocolNpu/build/multiProtocolEngine.scala | Scala | bsd-3-clause | 12,707 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.crud.rest.model.service
import com.bwsw.sj.common.si.model.service.ZKService
import com.bwsw.sj.common.utils.{RestLiterals, ServiceLiterals}
import com.fasterxml.jackson.annotation.JsonProperty
import scaldi.Injector
class ZKCoordServiceApi(name: String,
provider: String,
val namespace: String,
description: Option[String] = Some(RestLiterals.defaultDescription),
@JsonProperty("type") serviceType: Option[String] = Some(ServiceLiterals.zookeeperType),
creationDate: String)
extends ServiceApi(serviceType.getOrElse(ServiceLiterals.zookeeperType), name, provider, description, creationDate) {
override def to()(implicit injector: Injector): ZKService = {
val modelService =
new ZKService(
name = this.name,
provider = this.provider,
namespace = this.namespace,
description = this.description.getOrElse(RestLiterals.defaultDescription),
serviceType = this.serviceType.getOrElse(ServiceLiterals.zookeeperType),
creationDate = this.creationDate
)
modelService
}
}
| bwsw/sj-platform | core/sj-crud-rest/src/main/scala/com/bwsw/sj/crud/rest/model/service/ZKCoordServiceApi.scala | Scala | apache-2.0 | 1,988 |
// Databricks notebook source
// MAGIC %md
// MAGIC # [SDS-2.2-360-in-525-01: Intro to Apache Spark for data Scientists](https://lamastex.github.io/scalable-data-science/360-in-525/2018/01/)
// MAGIC ### [SDS-2.2, Scalable Data Science](https://lamastex.github.io/scalable-data-science/sds/2/2/)
// COMMAND ----------
// MAGIC %md
// MAGIC This is an elaboration of the [http://spark.apache.org/docs/latest/sql-programming-guide.html](http://spark.apache.org/docs/latest/sql-programming-guide.html) by Ivan Sadikov and Raazesh Sainudiin.
// MAGIC
// MAGIC # Performance Tuning
// MAGIC ## Spark Sql Programming Guide
// MAGIC
// MAGIC - Performance Tuning
// MAGIC - Caching Data In Memory
// MAGIC - Other Configuration Options
// COMMAND ----------
// MAGIC %md
// MAGIC # Performance Tuning
// MAGIC
// MAGIC For some workloads it is possible to improve performance by either
// MAGIC caching data in memory, or by turning on some experimental options.
// MAGIC
// MAGIC Caching Data In Memory
// MAGIC ----------------------
// MAGIC Spark SQL can cache tables using an in-memory columnar format by calling
// MAGIC `spark.cacheTable("tableName")` or `dataset.cache()`. Then Spark
// MAGIC SQL will scan only required columns and will automatically tune
// MAGIC compression to minimize memory usage and GC pressure. You can call
// MAGIC `spark.uncacheTable("tableName")` to remove the table from memory.
// MAGIC
// MAGIC Configuration of in-memory caching can be done using the `setConf`
// MAGIC method on `SparkSession` or by running `SET key=value` commands using SQL.
// MAGIC
// MAGIC | Property Name | Default | Meaning |
// MAGIC | --- | --- | --- |
// MAGIC | `spark.sql.inMemoryColumnarStorage.compressed` | true | When set to true Spark SQL will automatically select a compression codec for each column based on statistics of the data. |
// MAGIC | `spark.sql.inMemoryColumnarStorage.batchSize` | 10000 | Controls the size of batches for columnar caching. Larger batch sizes can improve memory utilization and compression, but risk OOMs when caching data. |
// MAGIC
// MAGIC Other Configuration Options
// MAGIC ---------------------------
// MAGIC
// MAGIC The following options can also be used to tune the performance of query
// MAGIC execution. It is possible that these options will be deprecated in
// MAGIC future release as more optimizations are performed automatically.
// MAGIC
// MAGIC | Property Name | Default | Meaning |
// MAGIC | ---|---|--- |
// MAGIC |`spark.sql.autoBroadcastJoinThreshold` | 10485760 (10 MB) | Configures the maximum size in bytes for a table that will be broadcast to all worker nodes when performing a join. By setting this value to -1 broadcasting can be disabled. Note that currently statistics are only supported for Hive Metastore tables where the command `ANALYZE TABLE <tableName> COMPUTE STATISTICS noscan` has been run. |
// MAGIC | `spark.sql.tungsten.enabled` | true | When true, use the optimized Tungsten physical execution backend which explicitly manages memory and dynamically generates bytecode for expression evaluation. |
// MAGIC | `spark.sql.shuffle.partitions` | 200 | Configures the number of partitions to use when shuffling data for joins or aggregations. | | raazesh-sainudiin/scalable-data-science | db/2/2/360-in-525-01/007e_SparkSQLProgGuide_HW.scala | Scala | unlicense | 3,266 |
/*
* Copyright (c) 2012 Roberto Tyley
*
* This file is part of 'BFG Repo-Cleaner' - a tool for removing large
* or troublesome blobs from Git repositories.
*
* BFG Repo-Cleaner is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* BFG Repo-Cleaner is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/ .
*/
package com.madgag.git.bfg
import com.madgag.git.{SizedObject, _}
import com.madgag.git.bfg.cleaner._
import org.eclipse.jgit.internal.storage.file.ObjectDirectory
import org.eclipse.jgit.lib.Constants.OBJ_BLOB
import org.eclipse.jgit.lib.ObjectReader._
import org.eclipse.jgit.lib._
import org.eclipse.jgit.revwalk.RevWalk
import org.eclipse.jgit.storage.file.WindowCacheConfig
import scala.collection.convert.wrapAsScala._
import scala.language.implicitConversions
trait CleaningMapper[V] extends Cleaner[V] {
def isDirty(v: V) = apply(v) != v
def substitution(oldId: V): Option[(V, V)] = {
val newId = apply(oldId)
if (newId == oldId) None else Some((oldId, newId))
}
def replacement(oldId: V): Option[V] = {
val newId = apply(oldId)
if (newId == oldId) None else Some(newId)
}
}
object GitUtil {
val ProbablyNoNonFileObjectsOverSizeThreshold = 1024 * 1024
def tweakStaticJGitConfig(massiveNonFileObjects: Option[Int]) {
val wcConfig: WindowCacheConfig = new WindowCacheConfig()
wcConfig.setStreamFileThreshold(massiveNonFileObjects.getOrElse(ProbablyNoNonFileObjectsOverSizeThreshold))
wcConfig.install()
}
def hasBeenProcessedByBFGBefore(repo: Repository): Boolean = {
// This method just checks the tips of all refs - a good-enough indicator for our purposes...
implicit val revWalk = new RevWalk(repo)
implicit val objectReader = revWalk.getObjectReader
repo.getAllRefs.values.map(_.getObjectId).filter(_.open.getType == Constants.OBJ_COMMIT)
.map(_.asRevCommit).exists(_.getFooterLines(FormerCommitFooter.Key).nonEmpty)
}
implicit def cleaner2CleaningMapper[V](f: Cleaner[V]): CleaningMapper[V] = new CleaningMapper[V] {
def apply(v: V) = f(v)
}
def biggestBlobs(implicit objectDB: ObjectDirectory, progressMonitor: ProgressMonitor = NullProgressMonitor.INSTANCE): Stream[SizedObject] = {
Timing.measureTask("Scanning packfile for large blobs", ProgressMonitor.UNKNOWN) {
val reader = objectDB.newReader
objectDB.packedObjects.map {
objectId =>
progressMonitor update 1
SizedObject(objectId, reader.getObjectSize(objectId, OBJ_ANY))
}.toSeq.sorted.reverse.toStream.filter { oid =>
oid.size > ProbablyNoNonFileObjectsOverSizeThreshold || reader.open(oid.objectId).getType == OBJ_BLOB
}
}
}
}
| NeilBryant/bfg-repo-cleaner | bfg-library/src/main/scala/com/madgag/git/bfg/GitUtil.scala | Scala | gpl-3.0 | 3,202 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.ann
import breeze.linalg.{*, DenseMatrix => BDM, DenseVector => BDV, Vector => BV, axpy => Baxpy,
sum => Bsum}
import breeze.numerics.{log => Blog, sigmoid => Bsigmoid}
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.mllib.optimization._
import org.apache.spark.rdd.RDD
import org.apache.spark.util.random.XORShiftRandom
/**
* Trait that holds Layer properties, that are needed to instantiate it.
* Implements Layer instantiation.
*
*/
private[ann] trait Layer extends Serializable {
/**
* Returns the instance of the layer based on weights provided
* @param weights vector with layer weights
* @param position position of weights in the vector
* @return the layer model
*/
def getInstance(weights: Vector, position: Int): LayerModel
/**
* Returns the instance of the layer with random generated weights
* @param seed seed
* @return the layer model
*/
def getInstance(seed: Long): LayerModel
}
/**
* Trait that holds Layer weights (or parameters).
* Implements functions needed for forward propagation, computing delta and gradient.
* Can return weights in Vector format.
*/
private[ann] trait LayerModel extends Serializable {
/**
* number of weights
*/
val size: Int
/**
* Evaluates the data (process the data through the layer)
* @param data data
* @return processed data
*/
def eval(data: BDM[Double]): BDM[Double]
/**
* Computes the delta for back propagation
* @param nextDelta delta of the next layer
* @param input input data
* @return delta
*/
def prevDelta(nextDelta: BDM[Double], input: BDM[Double]): BDM[Double]
/**
* Computes the gradient
* @param delta delta for this layer
* @param input input data
* @return gradient
*/
def grad(delta: BDM[Double], input: BDM[Double]): Array[Double]
/**
* Returns weights for the layer in a single vector
* @return layer weights
*/
def weights(): Vector
}
/**
* Layer properties of affine transformations, that is y=A*x+b
* @param numIn number of inputs
* @param numOut number of outputs
*/
private[ann] class AffineLayer(val numIn: Int, val numOut: Int) extends Layer {
override def getInstance(weights: Vector, position: Int): LayerModel = {
AffineLayerModel(this, weights, position)
}
override def getInstance(seed: Long = 11L): LayerModel = {
AffineLayerModel(this, seed)
}
}
/**
* Model of Affine layer y=A*x+b
* @param w weights (matrix A)
* @param b bias (vector b)
*/
private[ann] class AffineLayerModel private(w: BDM[Double], b: BDV[Double]) extends LayerModel {
val size = w.size + b.length
val gwb = new Array[Double](size)
private lazy val gw: BDM[Double] = new BDM[Double](w.rows, w.cols, gwb)
private lazy val gb: BDV[Double] = new BDV[Double](gwb, w.size)
private var z: BDM[Double] = null
private var d: BDM[Double] = null
private var ones: BDV[Double] = null
override def eval(data: BDM[Double]): BDM[Double] = {
if (z == null || z.cols != data.cols) z = new BDM[Double](w.rows, data.cols)
z(::, *) := b
BreezeUtil.dgemm(1.0, w, data, 1.0, z)
z
}
override def prevDelta(nextDelta: BDM[Double], input: BDM[Double]): BDM[Double] = {
if (d == null || d.cols != nextDelta.cols) d = new BDM[Double](w.cols, nextDelta.cols)
BreezeUtil.dgemm(1.0, w.t, nextDelta, 0.0, d)
d
}
override def grad(delta: BDM[Double], input: BDM[Double]): Array[Double] = {
BreezeUtil.dgemm(1.0 / input.cols, delta, input.t, 0.0, gw)
if (ones == null || ones.length != delta.cols) ones = BDV.ones[Double](delta.cols)
BreezeUtil.dgemv(1.0 / input.cols, delta, ones, 0.0, gb)
gwb
}
override def weights(): Vector = AffineLayerModel.roll(w, b)
}
/**
* Fabric for Affine layer models
*/
private[ann] object AffineLayerModel {
/**
* Creates a model of Affine layer
* @param layer layer properties
* @param weights vector with weights
* @param position position of weights in the vector
* @return model of Affine layer
*/
def apply(layer: AffineLayer, weights: Vector, position: Int): AffineLayerModel = {
val (w, b) = unroll(weights, position, layer.numIn, layer.numOut)
new AffineLayerModel(w, b)
}
/**
* Creates a model of Affine layer
* @param layer layer properties
* @param seed seed
* @return model of Affine layer
*/
def apply(layer: AffineLayer, seed: Long): AffineLayerModel = {
val (w, b) = randomWeights(layer.numIn, layer.numOut, seed)
new AffineLayerModel(w, b)
}
/**
* Unrolls the weights from the vector
* @param weights vector with weights
* @param position position of weights for this layer
* @param numIn number of layer inputs
* @param numOut number of layer outputs
* @return matrix A and vector b
*/
def unroll(
weights: Vector,
position: Int,
numIn: Int,
numOut: Int): (BDM[Double], BDV[Double]) = {
val weightsCopy = weights.toArray
// TODO: the array is not copied to BDMs, make sure this is OK!
val a = new BDM[Double](numOut, numIn, weightsCopy, position)
val b = new BDV[Double](weightsCopy, position + (numOut * numIn), 1, numOut)
(a, b)
}
/**
* Roll the layer weights into a vector
* @param a matrix A
* @param b vector b
* @return vector of weights
*/
def roll(a: BDM[Double], b: BDV[Double]): Vector = {
val result = new Array[Double](a.size + b.length)
// TODO: make sure that we need to copy!
System.arraycopy(a.toArray, 0, result, 0, a.size)
System.arraycopy(b.toArray, 0, result, a.size, b.length)
Vectors.dense(result)
}
/**
* Generate random weights for the layer
* @param numIn number of inputs
* @param numOut number of outputs
* @param seed seed
* @return (matrix A, vector b)
*/
def randomWeights(numIn: Int, numOut: Int, seed: Long = 11L): (BDM[Double], BDV[Double]) = {
val rand: XORShiftRandom = new XORShiftRandom(seed)
val weights = BDM.fill[Double](numOut, numIn){ (rand.nextDouble * 4.8 - 2.4) / numIn }
val bias = BDV.fill[Double](numOut){ (rand.nextDouble * 4.8 - 2.4) / numIn }
(weights, bias)
}
}
/**
* Trait for functions and their derivatives for functional layers
*/
private[ann] trait ActivationFunction extends Serializable {
/**
* Implements a function
* @param x input data
* @param y output data
*/
def eval(x: BDM[Double], y: BDM[Double]): Unit
/**
* Implements a derivative of a function (needed for the back propagation)
* @param x input data
* @param y output data
*/
def derivative(x: BDM[Double], y: BDM[Double]): Unit
/**
* Implements a cross entropy error of a function.
* Needed if the functional layer that contains this function is the output layer
* of the network.
* @param target target output
* @param output computed output
* @param result intermediate result
* @return cross-entropy
*/
def crossEntropy(target: BDM[Double], output: BDM[Double], result: BDM[Double]): Double
/**
* Implements a mean squared error of a function
* @param target target output
* @param output computed output
* @param result intermediate result
* @return mean squared error
*/
def squared(target: BDM[Double], output: BDM[Double], result: BDM[Double]): Double
}
/**
* Implements in-place application of functions
*/
private[ann] object ActivationFunction {
def apply(x: BDM[Double], y: BDM[Double], func: Double => Double): Unit = {
var i = 0
while (i < x.rows) {
var j = 0
while (j < x.cols) {
y(i, j) = func(x(i, j))
j += 1
}
i += 1
}
}
def apply(
x1: BDM[Double],
x2: BDM[Double],
y: BDM[Double],
func: (Double, Double) => Double): Unit = {
var i = 0
while (i < x1.rows) {
var j = 0
while (j < x1.cols) {
y(i, j) = func(x1(i, j), x2(i, j))
j += 1
}
i += 1
}
}
}
/**
* Implements SoftMax activation function
*/
private[ann] class SoftmaxFunction extends ActivationFunction {
override def eval(x: BDM[Double], y: BDM[Double]): Unit = {
var j = 0
// find max value to make sure later that exponent is computable
while (j < x.cols) {
var i = 0
var max = Double.MinValue
while (i < x.rows) {
if (x(i, j) > max) {
max = x(i, j)
}
i += 1
}
var sum = 0.0
i = 0
while (i < x.rows) {
val res = Math.exp(x(i, j) - max)
y(i, j) = res
sum += res
i += 1
}
i = 0
while (i < x.rows) {
y(i, j) /= sum
i += 1
}
j += 1
}
}
override def crossEntropy(
output: BDM[Double],
target: BDM[Double],
result: BDM[Double]): Double = {
def m(o: Double, t: Double): Double = o - t
ActivationFunction(output, target, result, m)
-Bsum( target :* Blog(output)) / output.cols
}
override def derivative(x: BDM[Double], y: BDM[Double]): Unit = {
def sd(z: Double): Double = (1 - z) * z
ActivationFunction(x, y, sd)
}
override def squared(output: BDM[Double], target: BDM[Double], result: BDM[Double]): Double = {
throw new UnsupportedOperationException("Sorry, squared error is not defined for SoftMax.")
}
}
/**
* Implements Sigmoid activation function
*/
private[ann] class SigmoidFunction extends ActivationFunction {
override def eval(x: BDM[Double], y: BDM[Double]): Unit = {
def s(z: Double): Double = Bsigmoid(z)
ActivationFunction(x, y, s)
}
override def crossEntropy(
output: BDM[Double],
target: BDM[Double],
result: BDM[Double]): Double = {
def m(o: Double, t: Double): Double = o - t
ActivationFunction(output, target, result, m)
-Bsum(target :* Blog(output)) / output.cols
}
override def derivative(x: BDM[Double], y: BDM[Double]): Unit = {
def sd(z: Double): Double = (1 - z) * z
ActivationFunction(x, y, sd)
}
override def squared(output: BDM[Double], target: BDM[Double], result: BDM[Double]): Double = {
// TODO: make it readable
def m(o: Double, t: Double): Double = (o - t)
ActivationFunction(output, target, result, m)
val e = Bsum(result :* result) / 2 / output.cols
def m2(x: Double, o: Double) = x * (o - o * o)
ActivationFunction(result, output, result, m2)
e
}
}
/**
* Functional layer properties, y = f(x)
* @param activationFunction activation function
*/
private[ann] class FunctionalLayer (val activationFunction: ActivationFunction) extends Layer {
override def getInstance(weights: Vector, position: Int): LayerModel = getInstance(0L)
override def getInstance(seed: Long): LayerModel =
FunctionalLayerModel(this)
}
/**
* Functional layer model. Holds no weights.
* @param activationFunction activation function
*/
private[ann] class FunctionalLayerModel private (val activationFunction: ActivationFunction)
extends LayerModel {
val size = 0
// matrices for in-place computations
// outputs
private var f: BDM[Double] = null
// delta
private var d: BDM[Double] = null
// matrix for error computation
private var e: BDM[Double] = null
// delta gradient
private lazy val dg = new Array[Double](0)
override def eval(data: BDM[Double]): BDM[Double] = {
if (f == null || f.cols != data.cols) f = new BDM[Double](data.rows, data.cols)
activationFunction.eval(data, f)
f
}
override def prevDelta(nextDelta: BDM[Double], input: BDM[Double]): BDM[Double] = {
if (d == null || d.cols != nextDelta.cols) d = new BDM[Double](nextDelta.rows, nextDelta.cols)
activationFunction.derivative(input, d)
d :*= nextDelta
d
}
override def grad(delta: BDM[Double], input: BDM[Double]): Array[Double] = dg
override def weights(): Vector = Vectors.dense(new Array[Double](0))
def crossEntropy(output: BDM[Double], target: BDM[Double]): (BDM[Double], Double) = {
if (e == null || e.cols != output.cols) e = new BDM[Double](output.rows, output.cols)
val error = activationFunction.crossEntropy(output, target, e)
(e, error)
}
def squared(output: BDM[Double], target: BDM[Double]): (BDM[Double], Double) = {
if (e == null || e.cols != output.cols) e = new BDM[Double](output.rows, output.cols)
val error = activationFunction.squared(output, target, e)
(e, error)
}
def error(output: BDM[Double], target: BDM[Double]): (BDM[Double], Double) = {
// TODO: allow user pick error
activationFunction match {
case sigmoid: SigmoidFunction => squared(output, target)
case softmax: SoftmaxFunction => crossEntropy(output, target)
}
}
}
/**
* Fabric of functional layer models
*/
private[ann] object FunctionalLayerModel {
def apply(layer: FunctionalLayer): FunctionalLayerModel =
new FunctionalLayerModel(layer.activationFunction)
}
/**
* Trait for the artificial neural network (ANN) topology properties
*/
private[ann] trait Topology extends Serializable{
def getInstance(weights: Vector): TopologyModel
def getInstance(seed: Long): TopologyModel
}
/**
* Trait for ANN topology model
*/
private[ann] trait TopologyModel extends Serializable{
/**
* Forward propagation
* @param data input data
* @return array of outputs for each of the layers
*/
def forward(data: BDM[Double]): Array[BDM[Double]]
/**
* Prediction of the model
* @param data input data
* @return prediction
*/
def predict(data: Vector): Vector
/**
* Computes gradient for the network
* @param data input data
* @param target target output
* @param cumGradient cumulative gradient
* @param blockSize block size
* @return error
*/
def computeGradient(data: BDM[Double], target: BDM[Double], cumGradient: Vector,
blockSize: Int): Double
/**
* Returns the weights of the ANN
* @return weights
*/
def weights(): Vector
}
/**
* Feed forward ANN
* @param layers
*/
private[ann] class FeedForwardTopology private(val layers: Array[Layer]) extends Topology {
override def getInstance(weights: Vector): TopologyModel = FeedForwardModel(this, weights)
override def getInstance(seed: Long): TopologyModel = FeedForwardModel(this, seed)
}
/**
* Factory for some of the frequently-used topologies
*/
private[ml] object FeedForwardTopology {
/**
* Creates a feed forward topology from the array of layers
* @param layers array of layers
* @return feed forward topology
*/
def apply(layers: Array[Layer]): FeedForwardTopology = {
new FeedForwardTopology(layers)
}
/**
* Creates a multi-layer perceptron
* @param layerSizes sizes of layers including input and output size
* @param softmax wether to use SoftMax or Sigmoid function for an output layer.
* Softmax is default
* @return multilayer perceptron topology
*/
def multiLayerPerceptron(layerSizes: Array[Int], softmax: Boolean = true): FeedForwardTopology = {
val layers = new Array[Layer]((layerSizes.length - 1) * 2)
for(i <- 0 until layerSizes.length - 1){
layers(i * 2) = new AffineLayer(layerSizes(i), layerSizes(i + 1))
layers(i * 2 + 1) =
if (softmax && i == layerSizes.length - 2) {
new FunctionalLayer(new SoftmaxFunction())
} else {
new FunctionalLayer(new SigmoidFunction())
}
}
FeedForwardTopology(layers)
}
}
/**
* Model of Feed Forward Neural Network.
* Implements forward, gradient computation and can return weights in vector format.
* @param layerModels models of layers
* @param topology topology of the network
*/
private[ml] class FeedForwardModel private(
val layerModels: Array[LayerModel],
val topology: FeedForwardTopology) extends TopologyModel {
override def forward(data: BDM[Double]): Array[BDM[Double]] = {
val outputs = new Array[BDM[Double]](layerModels.length)
outputs(0) = layerModels(0).eval(data)
for (i <- 1 until layerModels.length) {
outputs(i) = layerModels(i).eval(outputs(i-1))
}
outputs
}
override def computeGradient(
data: BDM[Double],
target: BDM[Double],
cumGradient: Vector,
realBatchSize: Int): Double = {
val outputs = forward(data)
val deltas = new Array[BDM[Double]](layerModels.length)
val L = layerModels.length - 1
val (newE, newError) = layerModels.last match {
case flm: FunctionalLayerModel => flm.error(outputs.last, target)
case _ =>
throw new UnsupportedOperationException("Non-functional layer not supported at the top")
}
deltas(L) = new BDM[Double](0, 0)
deltas(L - 1) = newE
for (i <- (L - 2) to (0, -1)) {
deltas(i) = layerModels(i + 1).prevDelta(deltas(i + 1), outputs(i + 1))
}
val grads = new Array[Array[Double]](layerModels.length)
for (i <- 0 until layerModels.length) {
val input = if (i==0) data else outputs(i - 1)
grads(i) = layerModels(i).grad(deltas(i), input)
}
// update cumGradient
val cumGradientArray = cumGradient.toArray
var offset = 0
// TODO: extract roll
for (i <- 0 until grads.length) {
val gradArray = grads(i)
var k = 0
while (k < gradArray.length) {
cumGradientArray(offset + k) += gradArray(k)
k += 1
}
offset += gradArray.length
}
newError
}
// TODO: do we really need to copy the weights? they should be read-only
override def weights(): Vector = {
// TODO: extract roll
var size = 0
for (i <- 0 until layerModels.length) {
size += layerModels(i).size
}
val array = new Array[Double](size)
var offset = 0
for (i <- 0 until layerModels.length) {
val layerWeights = layerModels(i).weights().toArray
System.arraycopy(layerWeights, 0, array, offset, layerWeights.length)
offset += layerWeights.length
}
Vectors.dense(array)
}
override def predict(data: Vector): Vector = {
val size = data.size
val result = forward(new BDM[Double](size, 1, data.toArray))
Vectors.dense(result.last.toArray)
}
}
/**
* Fabric for feed forward ANN models
*/
private[ann] object FeedForwardModel {
/**
* Creates a model from a topology and weights
* @param topology topology
* @param weights weights
* @return model
*/
def apply(topology: FeedForwardTopology, weights: Vector): FeedForwardModel = {
val layers = topology.layers
val layerModels = new Array[LayerModel](layers.length)
var offset = 0
for (i <- 0 until layers.length) {
layerModels(i) = layers(i).getInstance(weights, offset)
offset += layerModels(i).size
}
new FeedForwardModel(layerModels, topology)
}
/**
* Creates a model given a topology and seed
* @param topology topology
* @param seed seed for generating the weights
* @return model
*/
def apply(topology: FeedForwardTopology, seed: Long = 11L): FeedForwardModel = {
val layers = topology.layers
val layerModels = new Array[LayerModel](layers.length)
var offset = 0
for(i <- 0 until layers.length){
layerModels(i) = layers(i).getInstance(seed)
offset += layerModels(i).size
}
new FeedForwardModel(layerModels, topology)
}
}
/**
* Neural network gradient. Does nothing but calling Model's gradient
* @param topology topology
* @param dataStacker data stacker
*/
private[ann] class ANNGradient(topology: Topology, dataStacker: DataStacker) extends Gradient {
override def compute(data: Vector, label: Double, weights: Vector): (Vector, Double) = {
val gradient = Vectors.zeros(weights.size)
val loss = compute(data, label, weights, gradient)
(gradient, loss)
}
override def compute(
data: Vector,
label: Double,
weights: Vector,
cumGradient: Vector): Double = {
val (input, target, realBatchSize) = dataStacker.unstack(data)
val model = topology.getInstance(weights)
model.computeGradient(input, target, cumGradient, realBatchSize)
}
}
/**
* Stacks pairs of training samples (input, output) in one vector allowing them to pass
* through Optimizer/Gradient interfaces. If stackSize is more than one, makes blocks
* or matrices of inputs and outputs and then stack them in one vector.
* This can be used for further batch computations after unstacking.
* @param stackSize stack size
* @param inputSize size of the input vectors
* @param outputSize size of the output vectors
*/
private[ann] class DataStacker(stackSize: Int, inputSize: Int, outputSize: Int)
extends Serializable {
/**
* Stacks the data
* @param data RDD of vector pairs
* @return RDD of double (always zero) and vector that contains the stacked vectors
*/
def stack(data: RDD[(Vector, Vector)]): RDD[(Double, Vector)] = {
val stackedData = if (stackSize == 1) {
data.map { v =>
(0.0,
Vectors.fromBreeze(BDV.vertcat(
v._1.toBreeze.toDenseVector,
v._2.toBreeze.toDenseVector))
) }
} else {
data.mapPartitions { it =>
it.grouped(stackSize).map { seq =>
val size = seq.size
val bigVector = new Array[Double](inputSize * size + outputSize * size)
var i = 0
seq.foreach { case (in, out) =>
System.arraycopy(in.toArray, 0, bigVector, i * inputSize, inputSize)
System.arraycopy(out.toArray, 0, bigVector,
inputSize * size + i * outputSize, outputSize)
i += 1
}
(0.0, Vectors.dense(bigVector))
}
}
}
stackedData
}
/**
* Unstack the stacked vectors into matrices for batch operations
* @param data stacked vector
* @return pair of matrices holding input and output data and the real stack size
*/
def unstack(data: Vector): (BDM[Double], BDM[Double], Int) = {
val arrData = data.toArray
val realStackSize = arrData.length / (inputSize + outputSize)
val input = new BDM(inputSize, realStackSize, arrData)
val target = new BDM(outputSize, realStackSize, arrData, inputSize * realStackSize)
(input, target, realStackSize)
}
}
/**
* Simple updater
*/
private[ann] class ANNUpdater extends Updater {
override def compute(
weightsOld: Vector,
gradient: Vector,
stepSize: Double,
iter: Int,
regParam: Double): (Vector, Double) = {
val thisIterStepSize = stepSize
val brzWeights: BV[Double] = weightsOld.toBreeze.toDenseVector
Baxpy(-thisIterStepSize, gradient.toBreeze, brzWeights)
(Vectors.fromBreeze(brzWeights), 0)
}
}
/**
* MLlib-style trainer class that trains a network given the data and topology
* @param topology topology of ANN
* @param inputSize input size
* @param outputSize output size
*/
private[ml] class FeedForwardTrainer(
topology: Topology,
val inputSize: Int,
val outputSize: Int) extends Serializable {
// TODO: what if we need to pass random seed?
private var _weights = topology.getInstance(11L).weights()
private var _stackSize = 128
private var dataStacker = new DataStacker(_stackSize, inputSize, outputSize)
private var _gradient: Gradient = new ANNGradient(topology, dataStacker)
private var _updater: Updater = new ANNUpdater()
private var optimizer: Optimizer = LBFGSOptimizer.setConvergenceTol(1e-4).setNumIterations(100)
/**
* Returns weights
* @return weights
*/
def getWeights: Vector = _weights
/**
* Sets weights
* @param value weights
* @return trainer
*/
def setWeights(value: Vector): FeedForwardTrainer = {
_weights = value
this
}
/**
* Sets the stack size
* @param value stack size
* @return trainer
*/
def setStackSize(value: Int): FeedForwardTrainer = {
_stackSize = value
dataStacker = new DataStacker(value, inputSize, outputSize)
this
}
/**
* Sets the SGD optimizer
* @return SGD optimizer
*/
def SGDOptimizer: GradientDescent = {
val sgd = new GradientDescent(_gradient, _updater)
optimizer = sgd
sgd
}
/**
* Sets the LBFGS optimizer
* @return LBGS optimizer
*/
def LBFGSOptimizer: LBFGS = {
val lbfgs = new LBFGS(_gradient, _updater)
optimizer = lbfgs
lbfgs
}
/**
* Sets the updater
* @param value updater
* @return trainer
*/
def setUpdater(value: Updater): FeedForwardTrainer = {
_updater = value
updateUpdater(value)
this
}
/**
* Sets the gradient
* @param value gradient
* @return trainer
*/
def setGradient(value: Gradient): FeedForwardTrainer = {
_gradient = value
updateGradient(value)
this
}
private[this] def updateGradient(gradient: Gradient): Unit = {
optimizer match {
case lbfgs: LBFGS => lbfgs.setGradient(gradient)
case sgd: GradientDescent => sgd.setGradient(gradient)
case other => throw new UnsupportedOperationException(
s"Only LBFGS and GradientDescent are supported but got ${other.getClass}.")
}
}
private[this] def updateUpdater(updater: Updater): Unit = {
optimizer match {
case lbfgs: LBFGS => lbfgs.setUpdater(updater)
case sgd: GradientDescent => sgd.setUpdater(updater)
case other => throw new UnsupportedOperationException(
s"Only LBFGS and GradientDescent are supported but got ${other.getClass}.")
}
}
/**
* Trains the ANN
* @param data RDD of input and output vector pairs
* @return model
*/
def train(data: RDD[(Vector, Vector)]): TopologyModel = {
val newWeights = optimizer.optimize(dataStacker.stack(data), getWeights)
topology.getInstance(newWeights)
}
}
| practice-vishnoi/dev-spark-1 | mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala | Scala | apache-2.0 | 26,428 |
package com.twitter.querulous.config
import com.twitter.querulous
import com.twitter.querulous.async
import com.twitter.querulous.database.DatabaseFactory
import com.twitter.querulous.query.QueryFactory
import concurrent.ExecutionContext
abstract class AsyncQueryEvaluator {
var database = new Database
var query = new Query
var singletonFactory = false
def contextFactory :ExecutionContext
//var maxWaiters = Int.MaxValue
// Size of the work pool used by the AsyncDatabase to do all the DB query work.
// This should typically be the same size as the DB connection pool.
//var workPoolSize: Int
private var memoizedFactory: Option[async.AsyncQueryEvaluatorFactory] = None
// Optionally takes in a method to transform the QueryFactory we are going to use (typically used for stats collection).
protected def newQueryFactory(stats: querulous.StatsCollector, queryStatsFactory: Option[QueryFactory => QueryFactory]) = {
query(stats, queryStatsFactory)
}
// Optionally takes in a method to transform the DatabaseFactory we are going to use (typically used for stats collection).
protected def newDatabaseFactory(stats: querulous.StatsCollector, dbStatsFactory: Option[DatabaseFactory => DatabaseFactory]) = {
database(stats, dbStatsFactory)
}
def apply(): async.AsyncQueryEvaluatorFactory = apply(querulous.NullStatsCollector)
def apply(stats: querulous.StatsCollector): async.AsyncQueryEvaluatorFactory = apply(stats, None, None)
def apply(stats: querulous.StatsCollector, dbStatsFactory: DatabaseFactory => DatabaseFactory, queryStatsFactory: QueryFactory => QueryFactory): async.AsyncQueryEvaluatorFactory = apply(stats, Some(dbStatsFactory), Some(queryStatsFactory))
def apply(stats: querulous.StatsCollector, dbStatsFactory: Option[DatabaseFactory => DatabaseFactory], queryStatsFactory: Option[QueryFactory => QueryFactory]): async.AsyncQueryEvaluatorFactory = {
synchronized {
if (!singletonFactory) memoizedFactory = None
memoizedFactory = memoizedFactory orElse {
var dbFactory: async.AsyncDatabaseFactory = new async.BlockingDatabaseWrapperFactory(
contextFactory _,
newDatabaseFactory(stats, dbStatsFactory),
stats
)
if (database.memoize) {
// Ensure AsyncDatabase gets memoized.
dbFactory = new async.AsyncMemoizingDatabaseFactory(dbFactory)
}
Some(new async.StandardAsyncQueryEvaluatorFactory(dbFactory, newQueryFactory(stats, queryStatsFactory)))
}
memoizedFactory.get
}
}
}
| kievbs/querulous210 | src/main/scala/com/twitter/querulous/config/AsyncQueryEvaluator.scala | Scala | apache-2.0 | 2,605 |
/**
* Copyright (C) 2010-2012 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.kernel.config
import scala.collection.JavaConversions._
import net.lshift.diffa.schema.jooq.{DatabaseFacade => JooqDatabaseFacade}
import net.lshift.diffa.schema.tables.Members.MEMBERS
import net.lshift.diffa.schema.tables.Policies.POLICIES
import net.lshift.diffa.schema.tables.ConfigOptions.CONFIG_OPTIONS
import net.lshift.diffa.schema.tables.RepairActions.REPAIR_ACTIONS
import net.lshift.diffa.schema.tables.Escalations.ESCALATIONS
import net.lshift.diffa.schema.tables.EscalationRules.ESCALATION_RULES
import net.lshift.diffa.schema.tables.PairReports.PAIR_REPORTS
import net.lshift.diffa.schema.tables.PairViews.PAIR_VIEWS
import net.lshift.diffa.schema.tables.Endpoints.ENDPOINTS
import net.lshift.diffa.schema.tables.Pairs.PAIRS
import net.lshift.diffa.schema.tables.Spaces.SPACES
import net.lshift.diffa.schema.tables.EndpointViews.ENDPOINT_VIEWS
import net.lshift.diffa.schema.tables.Breakers.BREAKERS
import net.lshift.diffa.schema.tables.Extents.EXTENTS
import JooqConfigStoreCompanion._
import net.lshift.diffa.kernel.naming.CacheName._
import net.lshift.diffa.kernel.util.MissingObjectException
import net.lshift.diffa.kernel.lifecycle.{PairLifecycleAware, DomainLifecycleAware}
import net.lshift.diffa.kernel.util.cache.{KeyPredicate, CacheProvider}
import reflect.BeanProperty
import java.util
import collection.mutable.ListBuffer
import org.jooq.impl.Factory
import org.jooq._
import java.lang.{Long => LONG}
import system.{ConflictingConcurrentModificationException, PolicyKey}
import java.lang.{Integer => INT}
import java.sql.SQLIntegrityConstraintViolationException
import org.jooq.exception.DataAccessException
import net.lshift.diffa.snowflake.IdProvider
import net.lshift.diffa.kernel.frontend.DomainPairDef
import net.lshift.diffa.kernel.frontend.EndpointDef
import net.lshift.diffa.kernel.frontend.PairDef
import scala.Some
import net.lshift.diffa.kernel.frontend.DomainEndpointDef
class JooqDomainConfigStore(jooq:JooqDatabaseFacade,
cacheProvider:CacheProvider,
idProvider:IdProvider,
membershipListener:DomainMembershipAware)
extends DomainConfigStore
with DomainLifecycleAware {
private val pairEventSubscribers = new ListBuffer[PairLifecycleAware]
def registerPairEventListener(p:PairLifecycleAware) = pairEventSubscribers += p
private val cachedConfigVersions = cacheProvider.getCachedMap[Long,Int]("domain.config.versions")
private val cachedPairs = cacheProvider.getCachedMap[Long, java.util.List[DomainPairDef]]("domain.pairs")
private val cachedPairsByKey = cacheProvider.getCachedMap[DomainPairKey, DomainPairDef]("domain.pairs.by.key")
private val cachedEndpoints = cacheProvider.getCachedMap[Long, java.util.List[DomainEndpointDef]]("domain.endpoints")
private val cachedEndpointsByKey = cacheProvider.getCachedMap[DomainEndpointKey, DomainEndpointDef]("domain.endpoints.by.key")
private val cachedPairsByEndpoint = cacheProvider.getCachedMap[DomainEndpointKey, java.util.List[DomainPairDef]]("domain.pairs.by.endpoint")
private val cachedBreakers = cacheProvider.getCachedMap[BreakerKey, Boolean](DOMAIN_PAIR_BREAKERS)
// Config options
private val cachedDomainConfigOptionsMap = cacheProvider.getCachedMap[Long, java.util.Map[String,String]](DOMAIN_CONFIG_OPTIONS_MAP)
private val cachedDomainConfigOptions = cacheProvider.getCachedMap[DomainConfigKey, String](DOMAIN_CONFIG_OPTIONS)
// Members
private val cachedMembers = cacheProvider.getCachedMap[Long, java.util.List[Member]](USER_DOMAIN_MEMBERS)
private val cachedPolicies = cacheProvider.getCachedMap[SpacePolicyKey, PolicyKey](SPACE_POLICIES)
def reset {
cachedConfigVersions.evictAll()
cachedPairs.evictAll()
cachedPairsByKey.evictAll()
cachedEndpoints.evictAll()
cachedEndpointsByKey.evictAll()
cachedPairsByEndpoint.evictAll()
cachedBreakers.evictAll()
cachedDomainConfigOptionsMap.evictAll()
cachedDomainConfigOptions.evictAll()
cachedMembers.evictAll()
cachedPolicies.evictAll()
}
private def invalidateMembershipCache(space:Long) = {
cachedMembers.evict(space)
}
private def invalidateConfigCaches(space:Long) = {
cachedDomainConfigOptionsMap.evict(space)
cachedDomainConfigOptions.keySubset(ConfigOptionByDomainPredicate(space)).evictAll()
}
private def invalidateAllCaches(space:Long) = {
cachedConfigVersions.evict(space)
cachedEndpoints.evict(space)
cachedPairs.evict(space)
cachedPairsByEndpoint.keySubset(EndpointByDomainPredicate(space)).evictAll()
cachedPairsByKey.keySubset(PairByDomainPredicate(space)).evictAll()
cachedEndpointsByKey.keySubset(EndpointByDomainPredicate(space)).evictAll()
cachedBreakers.keySubset(BreakerByDomainPredicate(space)).evictAll()
invalidateConfigCaches(space)
invalidateMembershipCache(space)
}
private def invalidateEndpointCachesOnly(space:Long, endpointName: String) = {
cachedEndpoints.evict(space)
cachedPairsByEndpoint.keySubset(PairByDomainAndEndpointPredicate(space, endpointName)).evictAll()
cachedEndpointsByKey.evict(DomainEndpointKey(space,endpointName))
// TODO This is a very coarse grained invalidation of the pair caches - this could be made finer at some stage
cachedPairs.evict(space)
cachedPairsByKey.keySubset(PairByDomainPredicate(space)).evictAll()
}
private def invalidatePairCachesOnly(space:Long) = {
cachedPairs.evict(space)
cachedPairsByKey.keySubset(PairByDomainPredicate(space)).evictAll()
cachedPairsByEndpoint.keySubset(EndpointByDomainPredicate(space)).evictAll()
}
def onDomainUpdated(space: Long) = invalidateAllCaches(space)
def onDomainRemoved(space: Long) = invalidateAllCaches(space)
def createOrUpdateEndpoint(space: Long, endpointDef: EndpointDef) : DomainEndpointDef = {
val id:LONG = idProvider.getId
jooq.execute(t => {
/* mergeInto should be used when there is merge support in MySQL. */
if (jooq.resolvedDialect.equals(SQLDialect.MYSQL)) {
val insert = t.insertInto(ENDPOINTS).
set(ENDPOINTS.SPACE, space:LONG).
set(ENDPOINTS.NAME, endpointDef.name).
set(ENDPOINTS.ID, id).
set(ENDPOINTS.COLLATION_TYPE, endpointDef.collation).
set(ENDPOINTS.CONTENT_RETRIEVAL_URL, endpointDef.contentRetrievalUrl).
set(ENDPOINTS.SCAN_URL, endpointDef.scanUrl).
set(ENDPOINTS.VERSION_GENERATION_URL, endpointDef.versionGenerationUrl).
set(ENDPOINTS.INBOUND_URL, endpointDef.inboundUrl)
val update = t.update(ENDPOINTS).
set(ENDPOINTS.COLLATION_TYPE, endpointDef.collation).
set(ENDPOINTS.CONTENT_RETRIEVAL_URL, endpointDef.contentRetrievalUrl).
set(ENDPOINTS.SCAN_URL, endpointDef.scanUrl).
set(ENDPOINTS.VERSION_GENERATION_URL, endpointDef.versionGenerationUrl).
set(ENDPOINTS.INBOUND_URL, endpointDef.inboundUrl).
where(ENDPOINTS.SPACE.equal(space:LONG).and(ENDPOINTS.NAME.equal(endpointDef.name)))
// This is not a fail-safe way to create or update. It is subject to a race condition:
// if the same endpoint is created between when the update fails and the insert is attempted,
// then the unique key constraint violation will trigger a DataAccessException. In this case,
// we just let the user know it failed.
val rowsUpdated = update.execute()
if (rowsUpdated > 1) {
throw new RuntimeException("Duplicate endpoint definition found for endpoint %s".format(endpointDef.name))
} else if (rowsUpdated < 1) {
try {
insert.execute()
} catch {
case ex: DataAccessException =>
log.warn("Potential concurrent modification inserting " + endpointDef.name, ex)
throw new ConflictingConcurrentModificationException(endpointDef.name)
}
}
} else {
t.mergeInto(ENDPOINTS).
usingDual().
on(ENDPOINTS.SPACE.equal(space), ENDPOINTS.NAME.equal(endpointDef.name)).
whenMatchedThenUpdate().
set(ENDPOINTS.COLLATION_TYPE, endpointDef.collation).
set(ENDPOINTS.CONTENT_RETRIEVAL_URL, endpointDef.contentRetrievalUrl).
set(ENDPOINTS.SCAN_URL, endpointDef.scanUrl).
set(ENDPOINTS.VERSION_GENERATION_URL, endpointDef.versionGenerationUrl).
set(ENDPOINTS.INBOUND_URL, endpointDef.inboundUrl).
whenNotMatchedThenInsert().
set(ENDPOINTS.SPACE, space:LONG).
set(ENDPOINTS.NAME, endpointDef.name).
set(ENDPOINTS.ID, id).
set(ENDPOINTS.COLLATION_TYPE, endpointDef.collation).
set(ENDPOINTS.CONTENT_RETRIEVAL_URL, endpointDef.contentRetrievalUrl).
set(ENDPOINTS.SCAN_URL, endpointDef.scanUrl).
set(ENDPOINTS.VERSION_GENERATION_URL, endpointDef.versionGenerationUrl).
set(ENDPOINTS.INBOUND_URL, endpointDef.inboundUrl).execute()
}
val endpointId = endpointIdByName(t, endpointDef.name, space)
// Don't attempt to update to update any rows per se, just delete every associated
// category and re-insert the new definitions, irrespective of
// whether they are identical to the previous definitions
deleteCategories(t, space, endpointDef.name)
// Insert categories for the endpoint proper
insertCategories(t, space, endpointId, endpointDef)
// Update the view definitions
t.delete(ENDPOINT_VIEWS).
where(ENDPOINT_VIEWS.ENDPOINT.equal(endpointId)).
execute()
endpointDef.views.foreach(v => {
t.insertInto(ENDPOINT_VIEWS).
set(ENDPOINT_VIEWS.ENDPOINT, endpointId).
set(ENDPOINT_VIEWS.NAME, v.name).
execute()
// Insert categories for the endpoint view
insertCategoriesForView(t, space, endpointDef.name, endpointId, v)
})
upgradeConfigVersion(t, space)
})
invalidateEndpointCachesOnly(space, endpointDef.name)
DomainEndpointDef(
space = space,
name = endpointDef.name,
validateEntityOrder = endpointDef.validateEntityOrder,
collation = endpointDef.collation,
contentRetrievalUrl = endpointDef.contentRetrievalUrl,
scanUrl = endpointDef.scanUrl,
versionGenerationUrl = endpointDef.versionGenerationUrl,
inboundUrl = endpointDef.inboundUrl,
categories = endpointDef.categories,
views = endpointDef.views
)
}
def deleteEndpoint(space:Long, endpoint: String) = {
val upstream = ENDPOINTS.as("upstream")
val downstream = ENDPOINTS.as("downstream")
jooq.execute(t => {
// Remove all pairs that reference the endpoint
val results = t.select(PAIRS.SPACE, PAIRS.NAME).
from(PAIRS).
leftOuterJoin(upstream).on(upstream.ID.equal(PAIRS.UPSTREAM)).
leftOuterJoin(downstream).on(downstream.ID.equal(PAIRS.DOWNSTREAM)).
where(PAIRS.SPACE.equal(space)).
and(upstream.NAME.equal(endpoint).
or(downstream.NAME.equal(endpoint))).fetch()
results.iterator().foreach(r => {
val ref = PairRef(r.getValue(PAIRS.NAME), r.getValue(PAIRS.SPACE))
deletePairWithDependencies(t, ref)
})
deleteCategories(t, space, endpoint)
t.delete(ENDPOINT_VIEWS).
where(ENDPOINT_VIEWS.ENDPOINT.equal(endpointIdByNameAsField(t, endpoint, space))).
execute()
val deleted = t.delete(ENDPOINTS).
where(ENDPOINTS.SPACE.equal(space)).
and(ENDPOINTS.NAME.equal(endpoint)).
execute()
if (deleted == 0) {
throw new MissingObjectException("endpoint")
}
upgradeConfigVersion(t, space)
})
invalidatePairCachesOnly(space)
invalidateEndpointCachesOnly(space, endpoint)
}
def getEndpointDef(space:Long, endpoint: String) = {
getEndpointDefWithDomainName(space, endpoint).withoutDomain()
}
@Deprecated private def getEndpointDefWithDomainName(space:Long, endpoint: String) = {
cachedEndpointsByKey.readThrough(DomainEndpointKey(space, endpoint), () => {
val endpoints = JooqConfigStoreCompanion.listEndpoints(jooq, Some(space), Some(endpoint))
if (endpoints.isEmpty) {
throw new MissingObjectException("endpoint")
} else {
endpoints.head
}
})
}
def listEndpoints(space:Long): Seq[EndpointDef] =
cachedEndpoints.readThrough(space, () => JooqConfigStoreCompanion.listEndpoints(jooq, Some(space))).
map(_.withoutDomain())
def createOrUpdatePair(space:Long, pair: PairDef) = {
pair.validate()
jooq.execute(t => {
val upstream = endpointIdByNameAsField(t, pair.upstreamName, space)
val downstream = endpointIdByNameAsField(t, pair.downstreamName, space)
// Attempt to prevent unnecessary sequence churn when updating pairs
// TODO We should consider splitting out create and update APIs for records that use sequences
val rows = t.update(PAIRS).
set(PAIRS.UPSTREAM, upstream).
set(PAIRS.DOWNSTREAM, downstream).
set(PAIRS.ALLOW_MANUAL_SCANS, pair.allowManualScans).
set(PAIRS.MATCHING_TIMEOUT, pair.matchingTimeout.asInstanceOf[Integer]).
set(PAIRS.SCAN_CRON_SPEC, pair.scanCronSpec).
set(PAIRS.SCAN_CRON_ENABLED, boolean2Boolean(pair.scanCronEnabled)).
set(PAIRS.VERSION_POLICY_NAME, pair.versionPolicyName).
where(PAIRS.SPACE.eq(space)).
and(PAIRS.NAME.eq(pair.key)).
execute()
if (rows == 0) {
val extent = upgradeExtent(t)
t.insertInto(PAIRS).
set(PAIRS.SPACE, space:LONG).
set(PAIRS.NAME, pair.key).
set(PAIRS.EXTENT, extent:LONG).
set(PAIRS.UPSTREAM, upstream).
set(PAIRS.DOWNSTREAM, downstream).
set(PAIRS.ALLOW_MANUAL_SCANS, pair.allowManualScans).
set(PAIRS.MATCHING_TIMEOUT, pair.matchingTimeout.asInstanceOf[Integer]).
set(PAIRS.SCAN_CRON_SPEC, pair.scanCronSpec).
set(PAIRS.SCAN_CRON_ENABLED, boolean2Boolean(pair.scanCronEnabled)).
set(PAIRS.VERSION_POLICY_NAME, pair.versionPolicyName).
onDuplicateKeyUpdate().
set(PAIRS.UPSTREAM, upstream).
set(PAIRS.DOWNSTREAM, downstream).
set(PAIRS.ALLOW_MANUAL_SCANS, pair.allowManualScans).
set(PAIRS.MATCHING_TIMEOUT, pair.matchingTimeout.asInstanceOf[Integer]).
set(PAIRS.SCAN_CRON_SPEC, pair.scanCronSpec).
set(PAIRS.SCAN_CRON_ENABLED, boolean2Boolean(pair.scanCronEnabled)).
set(PAIRS.VERSION_POLICY_NAME, pair.versionPolicyName).
execute()
}
type HasName = {def name: String}
def clearUnused[R <: Record](t:Factory, table:Table[R], namesSource:Iterable[HasName], pairCondition:Condition, nameField:Field[String]) {
val names = namesSource.map(_.name).toSeq
if (names.length == 0) {
t.delete(table).
where(pairCondition).
execute()
} else {
t.delete(table).
where(nameField.notIn(names)).
and(pairCondition).
execute()
}
}
def insertOrUpdate[R <: Record](t:Factory, table:Table[R], finders:Map[_ <: Field[_], _], values:Map[_ <: Field[_], _]) {
t.insertInto(table).
set(finders).
set(values).
onDuplicateKeyUpdate().
set(values).
execute()
}
clearUnused(t, PAIR_VIEWS, pair.views,
PAIR_VIEWS.SPACE.equal(space).and(PAIR_VIEWS.PAIR.equal(pair.key)),
PAIR_VIEWS.NAME)
clearUnused(t, PAIR_REPORTS, pair.reports,
PAIR_REPORTS.SPACE.equal(space).and(PAIR_REPORTS.PAIR.equal(pair.key)),
PAIR_REPORTS.NAME)
clearUnused(t, REPAIR_ACTIONS, pair.repairActions,
REPAIR_ACTIONS.SPACE.equal(space).and(REPAIR_ACTIONS.PAIR.equal(pair.key)),
REPAIR_ACTIONS.NAME)
pair.views.foreach(v => {
insertOrUpdate(t, PAIR_VIEWS,
Map(PAIR_VIEWS.SPACE -> space, PAIR_VIEWS.PAIR -> pair.key, PAIR_VIEWS.NAME -> v.name),
Map(PAIR_VIEWS.SCAN_CRON_SPEC -> v.scanCronSpec, PAIR_VIEWS.SCAN_CRON_ENABLED -> boolean2Boolean(v.scanCronEnabled)))
})
pair.repairActions.foreach(a => {
insertOrUpdate(t, REPAIR_ACTIONS,
Map(REPAIR_ACTIONS.SPACE -> space, REPAIR_ACTIONS.PAIR -> pair.key, REPAIR_ACTIONS.NAME -> a.name),
Map(REPAIR_ACTIONS.URL -> a.url, REPAIR_ACTIONS.SCOPE -> a.scope))
})
pair.reports.foreach(r => {
insertOrUpdate(t, PAIR_REPORTS,
Map(PAIR_REPORTS.SPACE -> space, PAIR_REPORTS.PAIR -> pair.key, PAIR_REPORTS.NAME -> r.name),
Map(PAIR_REPORTS.REPORT_TYPE -> r.reportType, PAIR_REPORTS.TARGET -> r.target))
})
// Clear all previous escalations and then re-add them
t.update(ESCALATION_RULES).
set(ESCALATION_RULES.EXTENT, null:LONG).
set(ESCALATION_RULES.ESCALATION, null:String).
where(ESCALATION_RULES.EXTENT.eq(
t.select(PAIRS.EXTENT).
from(PAIRS).
where(PAIRS.SPACE.eq(space).
and(PAIRS.NAME.eq(pair.key))).
asField().
asInstanceOf[Field[LONG]]
)).execute()
t.delete(ESCALATIONS).
where(ESCALATIONS.EXTENT.eq(
t.select(PAIRS.EXTENT).
from(PAIRS).
where(PAIRS.SPACE.eq(space).
and(PAIRS.NAME.eq(pair.key))).
asField().
asInstanceOf[Field[LONG]]
)).execute()
pair.escalations.foreach(e => {
// TODO somehow factor out the sub-select to avoid repetitions
t.insertInto(ESCALATIONS).
set(ESCALATIONS.NAME, e.name).
set(ESCALATIONS.EXTENT,
t.select(PAIRS.EXTENT).
from(PAIRS).
where(PAIRS.SPACE.eq(space).
and(PAIRS.NAME.eq(pair.key))).
asField().
asInstanceOf[Field[LONG]]).
set(ESCALATIONS.ACTION, e.action).
set(ESCALATIONS.ACTION_TYPE, e.actionType).
set(ESCALATIONS.DELAY, e.delay:INT).
onDuplicateKeyUpdate().
set(ESCALATIONS.ACTION, e.action).
set(ESCALATIONS.ACTION_TYPE, e.actionType).
set(ESCALATIONS.DELAY, e.delay:INT).
execute()
val rule = if (e.rule == null) "*" else e.rule
// Attempt an update to the escalation rules table first to avoid sequence churn
// this means we'll have to potentially attempt the update twice, depending on concurrency
val updateExistingRules
= t.update(ESCALATION_RULES).
set(ESCALATION_RULES.ESCALATION, ESCALATION_RULES.PREVIOUS_ESCALATION).
set(ESCALATION_RULES.EXTENT, ESCALATION_RULES.PREVIOUS_EXTENT).
where(ESCALATION_RULES.RULE.eq(rule)).
and(ESCALATION_RULES.PREVIOUS_ESCALATION.eq(e.name)).
and(ESCALATION_RULES.PREVIOUS_EXTENT.eq(
t.select(PAIRS.EXTENT).
from(PAIRS).
where(PAIRS.SPACE.eq(space).
and(PAIRS.NAME.eq(pair.key))).
asField().
asInstanceOf[Field[LONG]]
))
val rows = updateExistingRules.execute()
if (rows == 0) {
// Has just the escalation name changed?
val updatePreviousEscalationName
= t.update(ESCALATION_RULES).
set(ESCALATION_RULES.ESCALATION, e.name).
set(ESCALATION_RULES.PREVIOUS_ESCALATION, e.name).
set(ESCALATION_RULES.EXTENT,
t.select(PAIRS.EXTENT).
from(PAIRS).
where(PAIRS.SPACE.eq(space).
and(PAIRS.NAME.eq(pair.key))).
asField().
asInstanceOf[Field[LONG]]
).
where(ESCALATION_RULES.RULE.eq(rule)).
and(ESCALATION_RULES.PREVIOUS_EXTENT.eq(
t.select(PAIRS.EXTENT).
from(PAIRS).
where(PAIRS.SPACE.eq(space).
and(PAIRS.NAME.eq(pair.key))).
asField().
asInstanceOf[Field[LONG]]
)).execute()
if (updatePreviousEscalationName == 0) {
val ruleId = idProvider.getId()
try {
t.insertInto(ESCALATION_RULES).
set(ESCALATION_RULES.ID, ruleId:LONG).
set(ESCALATION_RULES.EXTENT,
t.select(PAIRS.EXTENT).
from(PAIRS).
where(PAIRS.SPACE.eq(space).
and(PAIRS.NAME.eq(pair.key))).
asField().
asInstanceOf[Field[LONG]]).
set(ESCALATION_RULES.PREVIOUS_EXTENT,
t.select(PAIRS.EXTENT).
from(PAIRS).
where(PAIRS.SPACE.eq(space).
and(PAIRS.NAME.eq(pair.key))).
asField().
asInstanceOf[Field[LONG]]).
set(ESCALATION_RULES.ESCALATION, e.name).
set(ESCALATION_RULES.PREVIOUS_ESCALATION, e.name).
set(ESCALATION_RULES.RULE, rule).
execute()
}
catch {
case x:Exception if x.getCause.isInstanceOf[SQLIntegrityConstraintViolationException] => {
// This should happen as a result of a race condition between the first attempt to update
// and the attempt to insert a new row. If there is a genuine reason for the non-PK unique
// constraint to have caused the constraint violation, then this second attempt to
// update the rule will correctly result in the constraint beingu
updateExistingRules.execute()
}
}
}
} else {
}
})
upgradeConfigVersion(t, space)
})
invalidatePairCachesOnly(space)
val ref = PairRef(space = space, name = pair.key)
pairEventSubscribers.foreach(_.onPairUpdated(ref))
}
def deletePair(space:Long, key: String) = {
jooq.execute(t => {
val ref = PairRef(space = space, name = key)
invalidatePairCachesOnly(space)
deletePairWithDependencies(t, ref)
upgradeConfigVersion(t, space)
pairEventSubscribers.foreach(_.onPairDeleted(ref))
})
}
def listPairs(space:Long) = {
cachedPairs.readThrough(space, () => JooqConfigStoreCompanion.listPairs(jooq, space))
}
def listPairsForEndpoint(space:Long, endpoint:String) = {
cachedPairsByEndpoint.readThrough(
DomainEndpointKey(space, endpoint),
() => JooqConfigStoreCompanion.listPairs(jooq, space, endpoint = Some(endpoint))
)
}
@Deprecated def getEndpoint(space:Long, endpoint: String) = {
val endpointDef = getEndpointDefWithDomainName(space, endpoint)
val ep = Endpoint(
name = endpointDef.name,
domain = Domain(name = endpointDef.domain),
scanUrl = endpointDef.scanUrl,
versionGenerationUrl = endpointDef.versionGenerationUrl,
contentRetrievalUrl = endpointDef.contentRetrievalUrl,
validateEntityOrder = endpointDef.validateEntityOrder,
collation = endpointDef.collation,
categories = endpointDef.categories
)
val views = new util.HashSet[EndpointView]()
endpointDef.views.foreach(v => {
views.add(EndpointView(
name = v.name,
endpoint = ep,
categories = v.categories
))
})
ep.setViews(views)
ep
}
def getPairDef(space:Long, key: String) = {
cachedPairsByKey.readThrough(DomainPairKey(space, key), () => jooq.execute { t =>
val pairs = JooqConfigStoreCompanion.listPairs(jooq, space, key = Some(key))
if (pairs.length == 1) {
pairs(0)
} else {
//throw new MissingObjectException(domain + "/" + key)
// TODO Ideally this code should throw something more descriptive like the above error
// but for now, I'd like to keep this patch small
throw new MissingObjectException("pair")
}
})
}
def getConfigVersion(space:Long) = {
cachedConfigVersions.readThrough(space, () => jooq.execute(t => {
val result = t.select(SPACES.CONFIG_VERSION).
from(SPACES).
where(SPACES.ID.equal(space)).
fetchOne()
if (result == null) {
throw new MissingObjectException("domain")
}
else {
result.getValue(SPACES.CONFIG_VERSION)
}
}))
}
def allConfigOptions(space:Long) = {
cachedDomainConfigOptionsMap.readThrough(space, () => jooq.execute( t => {
val results = t.select(CONFIG_OPTIONS.OPT_KEY, CONFIG_OPTIONS.OPT_VAL).
from(CONFIG_OPTIONS).
where(CONFIG_OPTIONS.SPACE.equal(space)).fetch()
val configs = new java.util.HashMap[String,String]()
results.iterator().foreach(r => {
configs.put(r.getValue(CONFIG_OPTIONS.OPT_KEY), r.getValue(CONFIG_OPTIONS.OPT_VAL))
})
configs
})).toMap
}
def maybeConfigOption(space:Long, key:String) = {
val option = cachedDomainConfigOptions.readThrough(DomainConfigKey(space,key), () => jooq.execute( t => {
val record = t.select(CONFIG_OPTIONS.OPT_VAL).
from(CONFIG_OPTIONS).
where(CONFIG_OPTIONS.SPACE.equal(space)).
and(CONFIG_OPTIONS.OPT_KEY.equal(key)).
fetchOne()
if (record != null) {
record.getValue(CONFIG_OPTIONS.OPT_VAL)
}
else {
// Insert a null byte into as a value for this key in the cache to denote that this key does not
// exist and should not get queried for against the the underlying database
"\u0000"
}
}))
option match {
case "\u0000" => None
case value => Some(value)
}
}
def configOptionOrDefault(space:Long, key: String, defaultVal: String) =
maybeConfigOption(space, key) match {
case Some(str) => str
case None => defaultVal
}
def setConfigOption(space:Long, key:String, value:String) = {
jooq.execute(t => {
t.insertInto(CONFIG_OPTIONS).
set(CONFIG_OPTIONS.SPACE, space:LONG).
set(CONFIG_OPTIONS.OPT_KEY, key).
set(CONFIG_OPTIONS.OPT_VAL, value).
onDuplicateKeyUpdate().
set(CONFIG_OPTIONS.OPT_VAL, value).
execute()
})
invalidateConfigCaches(space)
}
def clearConfigOption(space:Long, key:String) = {
jooq.execute(t => {
t.delete(CONFIG_OPTIONS).
where(CONFIG_OPTIONS.SPACE.equal(space)).
and(CONFIG_OPTIONS.OPT_KEY.equal(key)).
execute()
})
// TODO This is a very coarse grained invalidation
invalidateConfigCaches(space)
}
/**
* Force the DB to uprev the config version column for this particular domain
*/
private def upgradeConfigVersion(t:Factory, space:Long) {
cachedConfigVersions.evict(space)
t.update(SPACES).
set(SPACES.CONFIG_VERSION, SPACES.CONFIG_VERSION.add(1)).
where(SPACES.ID.equal(space)).
execute()
}
def lookupPolicy(space:Long, policy:String) = {
cachedPolicies.readThrough(SpacePolicyKey(space, policy), () => {
jooq.execute(t => {
def searchSpace(spaceId:Long) = {
val result = t.select().
from(POLICIES).
where(POLICIES.SPACE.equal(spaceId).and(POLICIES.NAME.equal(policy))).
fetchOne()
if (result == null) {
None
} else {
Some(PolicyKey(spaceId, policy))
}
}
def searchSpaceOrParent(spaceId:Long, tree:Seq[Long]):PolicyKey = {
searchSpace(spaceId) match {
case Some(k) => k
case None =>
if (tree.length == 0) {
throw new MissingObjectException("policy " + policy)
} else {
searchSpaceOrParent(tree.head, tree.tail)
}
}
}
val tree = ancestorIdTree(t, space)
searchSpaceOrParent(space, tree)
})
})
}
def makeDomainMember(space:Long, userName:String, policy:PolicyKey) = {
jooq.execute(t => {
t.insertInto(MEMBERS).
set(MEMBERS.SPACE, space:LONG).
set(MEMBERS.USERNAME, userName).
set(MEMBERS.POLICY_SPACE, policy.space).
set(MEMBERS.POLICY, policy.name).
onDuplicateKeyIgnore().
execute()
})
invalidateMembershipCache(space)
val member = Member(userName, space, policy.space, policy.name, resolveSpaceName(space))
membershipListener.onMembershipCreated(member)
member
}
def removeDomainMembership(space:Long, userName:String, policy:String) {
jooq.execute(t => {
t.delete(MEMBERS).
where(MEMBERS.SPACE.equal(space)).
and(MEMBERS.USERNAME.equal(userName)).
and(MEMBERS.POLICY.equal(policy)).
execute()
})
invalidateMembershipCache(space)
// TODO: This should include the right space id
val member = Member(userName, space, space, policy, resolveSpaceName(space))
membershipListener.onMembershipRemoved(member)
}
/**
* TODO This should be cached and centralized
*/
@Deprecated private def resolveSpaceName(space:Long) = {
jooq.execute(t => {
val record = t.select(SPACES.NAME).
from(SPACES).
where(SPACES.ID.equal(space)).
fetchOne()
if (record == null) {
throw new MissingObjectException(space.toString)
}
else {
record.getValue(SPACES.NAME)
}
})
}
def listDomainMembers(space:Long) = {
cachedMembers.readThrough(space, () => {
jooq.execute(t => {
val results = t.select(MEMBERS.USERNAME, MEMBERS.POLICY_SPACE, MEMBERS.POLICY).
select(SPACES.NAME).
from(MEMBERS).
join(SPACES).
on(SPACES.ID.equal(MEMBERS.SPACE)).
where(MEMBERS.SPACE.equal(space)).
fetch()
val members = new java.util.ArrayList[Member]()
results.iterator().foreach(r => members.add(Member(
user = r.getValue(MEMBERS.USERNAME),
space = space,
domain = r.getValue(SPACES.NAME),
policySpace = r.getValue(MEMBERS.POLICY_SPACE),
policy = r.getValue(MEMBERS.POLICY)
))
)
members
})
}).toSeq
}
def isBreakerTripped(space:Long, pair: String, name: String) = {
cachedBreakers.readThrough(BreakerKey(space, pair, name), () => jooq.execute(t => {
val c = t.selectCount().
from(BREAKERS).
where(BREAKERS.SPACE.equal(space)).
and(BREAKERS.PAIR.equal(pair)).
and(BREAKERS.NAME.equal(name)).
fetchOne().
getValue(0).asInstanceOf[java.lang.Number]
val breakerPresent = (c != null && c.intValue() > 0)
// We consider a breaker to be tripped (ie, the feature should not be used) when there is a matching row in
// the table.
breakerPresent
}))
}
def tripBreaker(space:Long, pair: String, name: String) {
if (!isBreakerTripped(space, pair, name)) {
jooq.execute(t => {
t.insertInto(BREAKERS).
set(BREAKERS.SPACE, space:LONG).
set(BREAKERS.PAIR, pair).
set(BREAKERS.NAME, name).
onDuplicateKeyIgnore().
execute()
})
cachedBreakers.put(BreakerKey(space, pair, name), true)
}
}
def clearBreaker(space:Long, pair: String, name: String) {
if (isBreakerTripped(space, pair, name)) {
jooq.execute(t => {
t.delete(BREAKERS).
where(BREAKERS.SPACE.equal(space)).
and(BREAKERS.PAIR.equal(pair)).
and(BREAKERS.NAME.equal(name)).
execute()
})
cachedBreakers.put(BreakerKey(space, pair, name), false)
}
}
private def upgradeExtent(t:Factory) : Long = {
val extent = idProvider.getId()
t.insertInto(EXTENTS).
set(EXTENTS.ID, extent:LONG).
onDuplicateKeyIgnore().
execute()
extent
}
}
// These key classes need to be serializable .......
case class DomainEndpointKey(
@BeanProperty var space: Long,
@BeanProperty var endpoint: String = null) {
def this() = this(space = 0)
}
case class DomainPairKey(
@BeanProperty var space: Long,
@BeanProperty var pair: String = null) {
def this() = this(space = 0)
}
case class DomainConfigKey(
@BeanProperty var space: Long,
@BeanProperty var configKey: String = null) {
def this() = this(space = 0)
}
case class SpacePolicyKey(
@BeanProperty var space: Long,
@BeanProperty var policy: String = null) {
def this() = this(space = 0)
}
case class BreakerKey(
@BeanProperty var space: Long,
@BeanProperty var pair:String = null,
@BeanProperty var name:String = null) {
def this() = this(space = 0)
}
case class ConfigOptionByDomainPredicate(
@BeanProperty var space:Long) extends KeyPredicate[DomainConfigKey] {
def this() = this(space = 0)
def constrain(key: DomainConfigKey) = key.space == space
}
case class PairByDomainAndEndpointPredicate(
@BeanProperty var space:Long,
@BeanProperty endpoint:String = null) extends KeyPredicate[DomainEndpointKey] {
def this() = this(space = 0)
def constrain(key: DomainEndpointKey) = key.space == space && key.endpoint == endpoint
}
case class EndpointByDomainPredicate(@BeanProperty var space:Long) extends KeyPredicate[DomainEndpointKey] {
def this() = this(space = 0)
def constrain(key: DomainEndpointKey) = key.space == space
}
case class PairByDomainPredicate(@BeanProperty var space:Long) extends KeyPredicate[DomainPairKey] {
def this() = this(space = 0)
def constrain(key: DomainPairKey) = key.space == space
}
case class BreakerByDomainPredicate(@BeanProperty var space:Long) extends KeyPredicate[BreakerKey] {
def this() = this(space = 0)
def constrain(key: BreakerKey) = key.space == space
}
case class BreakerByPairAndDomainPredicate(
@BeanProperty var space:Long,
@BeanProperty var pair:String = null) extends KeyPredicate[BreakerKey] {
def this() = this(space = 0)
def constrain(key: BreakerKey) = key.space == space && key.pair == pair
}
| 0x6e6562/diffa | kernel/src/main/scala/net/lshift/diffa/kernel/config/JooqDomainConfigStore.scala | Scala | apache-2.0 | 35,459 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.optimize.program
import org.apache.flink.table.api.TableException
import org.apache.flink.table.planner.plan.`trait`.UpdateKindTrait.{beforeAfterOrNone, onlyAfterOrNone}
import org.apache.flink.table.planner.plan.`trait`._
import org.apache.flink.table.planner.plan.nodes.physical.stream._
import org.apache.flink.table.planner.plan.utils._
import org.apache.flink.table.planner.sinks.DataStreamTableSink
import org.apache.flink.table.runtime.operators.join.FlinkJoinType
import org.apache.flink.table.sinks.{AppendStreamTableSink, RetractStreamTableSink, StreamTableSink, UpsertStreamTableSink}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.util.ImmutableBitSet
import scala.collection.JavaConversions._
/**
* An optimize program to infer ChangelogMode for every physical node.
*/
class FlinkChangelogModeInferenceProgram extends FlinkOptimizeProgram[StreamOptimizeContext] {
private val SATISFY_MODIFY_KIND_SET_TRAIT_VISITOR = new SatisfyModifyKindSetTraitVisitor
private val SATISFY_UPDATE_KIND_TRAIT_VISITOR = new SatisfyUpdateKindTraitVisitor
override def optimize(
root: RelNode,
context: StreamOptimizeContext): RelNode = {
// step1: satisfy ModifyKindSet trait
val physicalRoot = root.asInstanceOf[StreamPhysicalRel]
val rootWithModifyKindSet = SATISFY_MODIFY_KIND_SET_TRAIT_VISITOR.visit(
physicalRoot,
// we do not propagate the ModifyKindSet requirement and requester among blocks
// set default ModifyKindSet requirement and requester for root
ModifyKindSetTrait.ALL_CHANGES,
"ROOT")
// step2: satisfy UpdateKind trait
val rootModifyKindSet = getModifyKindSet(rootWithModifyKindSet)
// use the required UpdateKindTrait from parent blocks
val requiredUpdateKindTraits = if (context.isUpdateBeforeRequired) {
Seq(UpdateKindTrait.BEFORE_AND_AFTER)
} else if (rootModifyKindSet.isInsertOnly) {
Seq(UpdateKindTrait.NONE)
} else {
// update_before is not required, and input contains updates
// try ONLY_UPDATE_AFTER first, and then BEFORE_AND_AFTER
Seq(UpdateKindTrait.ONLY_UPDATE_AFTER, UpdateKindTrait.BEFORE_AND_AFTER)
}
val finalRoot = requiredUpdateKindTraits.flatMap { requiredUpdateKindTrait =>
SATISFY_UPDATE_KIND_TRAIT_VISITOR.visit(rootWithModifyKindSet, requiredUpdateKindTrait)
}
// step3: sanity check and return non-empty root
if (finalRoot.isEmpty) {
val plan = FlinkRelOptUtil.toString(root, withChangelogTraits = true)
throw new TableException(
"Can't generate a valid execution plan for the given query:\\n" + plan)
} else {
finalRoot.head
}
}
/**
* A visitor which will try to satisfy the required [[ModifyKindSetTrait]] from root.
*
* <p>After traversed by this visitor, every node should have a correct [[ModifyKindSetTrait]]
* or an exception should be thrown if the planner doesn't support to satisfy the required
* [[ModifyKindSetTrait]].
*/
private class SatisfyModifyKindSetTraitVisitor {
/**
* Try to satisfy the required [[ModifyKindSetTrait]] from root.
*
* <p>Each node should first require a [[ModifyKindSetTrait]] to its children.
* If the trait provided by children does not satisfy the required one,
* it should throw an exception and prompt the user that plan is not supported.
* The required [[ModifyKindSetTrait]] may come from the node's parent,
* or come from the node itself, depending on whether the node will destroy
* the trait provided by children or pass the trait from children.
*
* <p>Each node should provide [[ModifyKindSetTrait]] according to current node's behavior
* and the ModifyKindSetTrait provided by children.
*
* @param rel the node who should satisfy the requiredTrait
* @param requiredTrait the required ModifyKindSetTrait
* @param requester the requester who starts the requirement, used for better exception message
* @return A converted node which satisfy required traits by inputs node of current node.
* Or throws exception if required trait can’t be satisfied.
*/
def visit(
rel: StreamPhysicalRel,
requiredTrait: ModifyKindSetTrait,
requester: String): StreamPhysicalRel = rel match {
case sink: StreamExecSink[_] =>
val (sinkRequiredTrait, name) = sink.sink match {
case _: UpsertStreamTableSink[_] =>
(ModifyKindSetTrait.ALL_CHANGES, "UpsertStreamTableSink")
case _: RetractStreamTableSink[_] =>
(ModifyKindSetTrait.ALL_CHANGES, "RetractStreamTableSink")
case _: AppendStreamTableSink[_] =>
(ModifyKindSetTrait.INSERT_ONLY, "AppendStreamTableSink")
case _: StreamTableSink[_] =>
(ModifyKindSetTrait.INSERT_ONLY, "StreamTableSink")
case ds: DataStreamTableSink[_] =>
if (ds.withChangeFlag) {
(ModifyKindSetTrait.ALL_CHANGES, "toRetractStream")
} else {
(ModifyKindSetTrait.INSERT_ONLY, "toAppendStream")
}
case _ =>
throw new UnsupportedOperationException(
s"Unsupported sink '${sink.sink.getClass.getSimpleName}'")
}
val children = visitChildren(sink, sinkRequiredTrait, name)
val sinkTrait = sink.getTraitSet.plus(ModifyKindSetTrait.EMPTY)
// ignore required trait from context, because sink is the true root
sink.copy(sinkTrait, children).asInstanceOf[StreamPhysicalRel]
case deduplicate: StreamExecDeduplicate =>
// deduplicate only support insert only as input
val children = visitChildren(deduplicate, ModifyKindSetTrait.INSERT_ONLY)
val providedTrait = if (deduplicate.keepLastRow) {
// produce updates if it keeps last row
ModifyKindSetTrait.ALL_CHANGES
} else {
ModifyKindSetTrait.INSERT_ONLY
}
createNewNode(deduplicate, children, providedTrait, requiredTrait, requester)
case agg: StreamExecGroupAggregate =>
// agg support all changes in input
val children = visitChildren(agg, ModifyKindSetTrait.ALL_CHANGES)
val inputModifyKindSet = getModifyKindSet(children.head)
val builder = ModifyKindSet.newBuilder()
.addContainedKind(ModifyKind.INSERT)
.addContainedKind(ModifyKind.UPDATE)
if (inputModifyKindSet.contains(ModifyKind.UPDATE) ||
inputModifyKindSet.contains(ModifyKind.DELETE)) {
builder.addContainedKind(ModifyKind.DELETE)
}
val providedTrait = new ModifyKindSetTrait(builder.build())
createNewNode(agg, children, providedTrait, requiredTrait, requester)
case tagg: StreamExecGroupTableAggregate =>
// table agg support all changes in input
val children = visitChildren(tagg, ModifyKindSetTrait.ALL_CHANGES)
// table aggregate will produce all changes, including deletions
createNewNode(
tagg, children, ModifyKindSetTrait.ALL_CHANGES, requiredTrait, requester)
case window: StreamExecGroupWindowAggregateBase =>
// WindowAggregate and WindowTableAggregate support insert-only in input
val children = visitChildren(window, ModifyKindSetTrait.INSERT_ONLY)
val builder = ModifyKindSet.newBuilder()
.addContainedKind(ModifyKind.INSERT)
if (window.emitStrategy.produceUpdates) {
builder.addContainedKind(ModifyKind.UPDATE)
}
val providedTrait = new ModifyKindSetTrait(builder.build())
createNewNode(window, children, providedTrait, requiredTrait, requester)
case limit: StreamExecLimit =>
// limit support all changes in input
val children = visitChildren(limit, ModifyKindSetTrait.ALL_CHANGES)
val providedTrait = if (getModifyKindSet(children.head).isInsertOnly) {
ModifyKindSetTrait.INSERT_ONLY
} else {
ModifyKindSetTrait.ALL_CHANGES
}
createNewNode(limit, children, providedTrait, requiredTrait, requester)
case _: StreamExecRank | _: StreamExecSortLimit =>
// Rank and SortLimit supports consuming all changes
val children = visitChildren(rel, ModifyKindSetTrait.ALL_CHANGES)
createNewNode(
rel, children, ModifyKindSetTrait.ALL_CHANGES, requiredTrait, requester)
case sort: StreamExecSort =>
// Sort supports consuming all changes
val children = visitChildren(rel, ModifyKindSetTrait.ALL_CHANGES)
// Sort will buffer all inputs, and produce insert-only messages when input is finished
createNewNode(
sort, children, ModifyKindSetTrait.INSERT_ONLY, requiredTrait, requester)
case cep: StreamExecMatch =>
// CEP only supports consuming insert-only and producing insert-only changes
// give a better requester name for exception message
val children = visitChildren(cep, ModifyKindSetTrait.INSERT_ONLY, "Match Recognize")
createNewNode(
cep, children, ModifyKindSetTrait.INSERT_ONLY, requiredTrait, requester)
case _: StreamExecTemporalSort | _: StreamExecOverAggregate | _: StreamExecWindowJoin =>
// TemporalSort, OverAggregate, WindowJoin only support consuming insert-only
// and producing insert-only changes
val children = visitChildren(rel, ModifyKindSetTrait.INSERT_ONLY)
createNewNode(
rel, children, ModifyKindSetTrait.INSERT_ONLY, requiredTrait, requester)
case join: StreamExecJoin =>
// join support all changes in input
val children = visitChildren(rel, ModifyKindSetTrait.ALL_CHANGES)
val leftKindSet = getModifyKindSet(children.head)
val rightKindSet = getModifyKindSet(children.last)
val innerOrSemi = join.flinkJoinType == FlinkJoinType.INNER ||
join.flinkJoinType == FlinkJoinType.SEMI
val providedTrait = if (innerOrSemi) {
// forward left and right modify operations
new ModifyKindSetTrait(leftKindSet.union(rightKindSet))
} else {
// otherwise, it may produce any kinds of changes
ModifyKindSetTrait.ALL_CHANGES
}
createNewNode(join, children, providedTrait, requiredTrait, requester)
case temporalJoin: StreamExecTemporalJoin =>
// currently, temporal join only support insert-only input streams, including right side
val children = visitChildren(temporalJoin, ModifyKindSetTrait.INSERT_ONLY)
// forward left input changes
val leftTrait = children.head.getTraitSet.getTrait(ModifyKindSetTraitDef.INSTANCE)
createNewNode(temporalJoin, children, leftTrait, requiredTrait, requester)
case _: StreamExecCalc | _: StreamExecPythonCalc | _: StreamExecCorrelate |
_: StreamExecPythonCorrelate | _: StreamExecLookupJoin | _: StreamExecExchange |
_: StreamExecExpand | _: StreamExecMiniBatchAssigner |
_: StreamExecWatermarkAssigner =>
// transparent forward requiredTrait to children
val children = visitChildren(rel, requiredTrait, requester)
val childrenTrait = children.head.getTraitSet.getTrait(ModifyKindSetTraitDef.INSTANCE)
// forward children mode
createNewNode(rel, children, childrenTrait, requiredTrait, requester)
case union: StreamExecUnion =>
// transparent forward requiredTrait to children
val children = visitChildren(rel, requiredTrait, requester)
// union provides all possible kinds of children have
val providedKindSet = ModifyKindSet.union(children.map(getModifyKindSet): _*)
createNewNode(
union, children, new ModifyKindSetTrait(providedKindSet), requiredTrait, requester)
case _: StreamExecDataStreamScan | _: StreamExecTableSourceScan | _: StreamExecValues =>
// DataStream, TableSource and Values only support producing insert-only messages
createNewNode(
rel, List(), ModifyKindSetTrait.INSERT_ONLY, requiredTrait, requester)
case scan: StreamExecIntermediateTableScan =>
val providedTrait = new ModifyKindSetTrait(scan.intermediateTable.modifyKindSet)
createNewNode(scan, List(), providedTrait, requiredTrait, requester)
case _ =>
throw new UnsupportedOperationException(
s"Unsupported visit for ${rel.getClass.getSimpleName}")
}
private def visitChildren(
parent: StreamPhysicalRel,
requiredChildrenTrait: ModifyKindSetTrait): List[StreamPhysicalRel] = {
visitChildren(parent, requiredChildrenTrait, getNodeName(parent))
}
private def visitChildren(
parent: StreamPhysicalRel,
requiredChildrenTrait: ModifyKindSetTrait,
requester: String): List[StreamPhysicalRel] = {
val newChildren = for (i <- 0 until parent.getInputs.size()) yield {
visitChild(parent, i, requiredChildrenTrait, requester)
}
newChildren.toList
}
private def visitChild(
parent: StreamPhysicalRel,
childOrdinal: Int,
requiredChildTrait: ModifyKindSetTrait,
requester: String): StreamPhysicalRel = {
val child = parent.getInput(childOrdinal).asInstanceOf[StreamPhysicalRel]
this.visit(child, requiredChildTrait, requester)
}
private def getNodeName(rel: StreamPhysicalRel): String = {
val prefix = "StreamExec"
val typeName = rel.getRelTypeName
if (typeName.startsWith(prefix)) {
typeName.substring(prefix.length)
} else {
typeName
}
}
private def createNewNode(
node: StreamPhysicalRel,
children: List[StreamPhysicalRel],
providedTrait: ModifyKindSetTrait,
requiredTrait: ModifyKindSetTrait,
requestedOwner: String): StreamPhysicalRel = {
if (!providedTrait.satisfies(requiredTrait)) {
val diff = providedTrait.modifyKindSet.minus(requiredTrait.modifyKindSet)
val diffString = diff.getContainedKinds
.toList.sorted // for deterministic error message
.map(_.toString.toLowerCase)
.mkString(" and ")
// creates a new node based on the new children, to have a more correct node description
// e.g. description of GroupAggregate is based on the ModifyKindSetTrait of children
val tempNode = node.copy(node.getTraitSet, children).asInstanceOf[StreamPhysicalRel]
val nodeString = tempNode.getRelDetailedDescription
throw new TableException(
s"$requestedOwner doesn't support consuming $diffString changes " +
s"which is produced by node $nodeString")
}
val newTraitSet = node.getTraitSet.plus(providedTrait)
node.copy(newTraitSet, children).asInstanceOf[StreamPhysicalRel]
}
}
/**
* A visitor which will try to satisfy the required [[UpdateKindTrait]] from root.
*
* <p>After traversed by this visitor, every node should have a correct [[UpdateKindTrait]]
* or returns None if the planner doesn't support to satisfy the required [[UpdateKindTrait]].
*/
private class SatisfyUpdateKindTraitVisitor {
/**
* Try to satisfy the required [[UpdateKindTrait]] from root.
*
* <p>Each node will first require a UpdateKindTrait to its children.
* The required UpdateKindTrait may come from the node's parent,
* or come from the node itself, depending on whether the node will destroy
* the trait provided by children or pass the trait from children.
*
* <p>If the node will pass the children's UpdateKindTrait without destroying it,
* then return a new node with new inputs and forwarded UpdateKindTrait.
*
* <p>If the node will destroy the children's UpdateKindTrait, then the node itself
* needs to be converted, or a new node should be generated to satisfy the required trait,
* such as marking itself not to generate UPDATE_BEFORE,
* or generating a new node to filter UPDATE_BEFORE.
*
* @param rel the node who should satisfy the requiredTrait
* @param requiredTrait the required UpdateKindTrait
* @return A converted node which satisfies required traits by input nodes of current node.
* Or None if required traits cannot be satisfied.
*/
def visit(
rel: StreamPhysicalRel,
requiredTrait: UpdateKindTrait): Option[StreamPhysicalRel] = rel match {
case sink: StreamExecSink[_] =>
val childModifyKindSet = getModifyKindSet(sink.getInput)
val onlyAfter = onlyAfterOrNone(childModifyKindSet)
val beforeAndAfter = beforeAfterOrNone(childModifyKindSet)
val sinkRequiredTraits = sink.sink match {
case _: UpsertStreamTableSink[_] =>
// support both ONLY_AFTER and BEFORE_AFTER, but prefer ONLY_AFTER
Seq(onlyAfter, beforeAndAfter)
case _: RetractStreamTableSink[_] =>
Seq(beforeAndAfter)
case _: AppendStreamTableSink[_] | _: StreamTableSink[_] =>
Seq(UpdateKindTrait.NONE)
case ds: DataStreamTableSink[_] =>
if (ds.withChangeFlag) {
if (ds.needUpdateBefore) {
Seq(beforeAndAfter)
} else {
// support both ONLY_AFTER and BEFORE_AFTER, but prefer ONLY_AFTER
Seq(onlyAfter, beforeAndAfter)
}
} else {
Seq(UpdateKindTrait.NONE)
}
}
val children = sinkRequiredTraits.flatMap(t => visitChildren(sink, t))
if (children.isEmpty) {
None
} else {
val sinkTrait = sink.getTraitSet.plus(UpdateKindTrait.NONE)
Some(sink.copy(sinkTrait, children.head).asInstanceOf[StreamPhysicalRel])
}
case _: StreamExecGroupAggregate | _: StreamExecGroupTableAggregate |
_: StreamExecLimit =>
// Aggregate, TableAggregate and Limit requires update_before if there are updates
val requiredChildTrait = beforeAfterOrNone(getModifyKindSet(rel.getInput(0)))
val children = visitChildren(rel, requiredChildTrait)
// use requiredTrait as providedTrait, because they should support all kinds of UpdateKind
createNewNode(rel, children, requiredTrait)
case _: StreamExecGroupWindowAggregate | _: StreamExecGroupWindowTableAggregate |
_: StreamExecDeduplicate | _: StreamExecTemporalSort | _: StreamExecMatch |
_: StreamExecOverAggregate | _: StreamExecWindowJoin =>
// WindowAggregate, WindowTableAggregate, Deduplicate, TemporalSort, CEP, OverAggregate
// and WindowJoin require nothing about UpdateKind.
val children = visitChildren(rel, UpdateKindTrait.NONE)
createNewNode(rel, children, requiredTrait)
case rank: StreamExecRank =>
val rankStrategies = RankProcessStrategy.analyzeRankProcessStrategies(
rank, rank.partitionKey, rank.orderKey)
visitRankStrategies(rankStrategies, requiredTrait, rankStrategy => rank.copy(rankStrategy))
case sortLimit: StreamExecSortLimit =>
val rankStrategies = RankProcessStrategy.analyzeRankProcessStrategies(
sortLimit, ImmutableBitSet.of(), sortLimit.getCollation)
visitRankStrategies(
rankStrategies,
requiredTrait,
rankStrategy => sortLimit.copy(rankStrategy))
case sort: StreamExecSort =>
val requiredChildTrait = beforeAfterOrNone(getModifyKindSet(sort.getInput))
val children = visitChildren(sort, requiredChildTrait)
createNewNode(sort, children, requiredTrait)
case join: StreamExecJoin =>
val requiredUpdateBeforeByParent = requiredTrait.updateKind == UpdateKind.BEFORE_AND_AFTER
val children = join.getInputs.zipWithIndex.map {
case (child, childOrdinal) =>
val physicalChild = child.asInstanceOf[StreamPhysicalRel]
val needUpdateBefore = !join.inputUniqueKeyContainsJoinKey(childOrdinal)
val inputModifyKindSet = getModifyKindSet(physicalChild)
val childRequiredTrait = if (needUpdateBefore || requiredUpdateBeforeByParent) {
beforeAfterOrNone(inputModifyKindSet)
} else {
onlyAfterOrNone(inputModifyKindSet)
}
this.visit(physicalChild, childRequiredTrait)
}
if (children.exists(_.isEmpty)) {
None
} else {
createNewNode(join, Some(children.flatten.toList), requiredTrait)
}
case temporalJoin: StreamExecTemporalJoin =>
// forward required mode to left input
val left = temporalJoin.getLeft.asInstanceOf[StreamPhysicalRel]
val right = temporalJoin.getRight.asInstanceOf[StreamPhysicalRel]
val newLeftOption = this.visit(left, requiredTrait)
// currently temporal join only support insert-only source as the right side
// so it requires nothing about UpdateKind
val newRightOption = this.visit(right, UpdateKindTrait.NONE)
(newLeftOption, newRightOption) match {
case (Some(newLeft), Some(newRight)) =>
val leftTrait = newLeft.getTraitSet.getTrait(UpdateKindTraitDef.INSTANCE)
createNewNode(temporalJoin, Some(List(newLeft, newRight)), leftTrait)
case _ =>
None
}
case calc: StreamExecCalcBase =>
if (requiredTrait == UpdateKindTrait.ONLY_UPDATE_AFTER &&
calc.getProgram.getCondition != null) {
// we don't expect filter to satisfy ONLY_UPDATE_AFTER update kind,
// to solve the bad case like a single 'cnt < 10' condition after aggregation.
// See FLINK-9528.
None
} else {
// otherwise, forward UpdateKind requirement
visitChildren(rel, requiredTrait) match {
case None => None
case Some(children) =>
val childTrait = children.head.getTraitSet.getTrait(UpdateKindTraitDef.INSTANCE)
createNewNode(rel, Some(children), childTrait)
}
}
case _: StreamExecCorrelate | _: StreamExecPythonCorrelate | _: StreamExecLookupJoin |
_: StreamExecExchange | _: StreamExecExpand | _: StreamExecMiniBatchAssigner |
_: StreamExecWatermarkAssigner =>
// transparent forward requiredTrait to children
visitChildren(rel, requiredTrait) match {
case None => None
case Some(children) =>
val childTrait = children.head.getTraitSet.getTrait(UpdateKindTraitDef.INSTANCE)
createNewNode(rel, Some(children), childTrait)
}
case union: StreamExecUnion =>
val children = union.getInputs.map {
case child: StreamPhysicalRel =>
val childModifyKindSet = getModifyKindSet(child)
val requiredChildTrait = if (childModifyKindSet.isInsertOnly) {
UpdateKindTrait.NONE
} else {
requiredTrait
}
this.visit(child, requiredChildTrait)
}.toList
if (children.exists(_.isEmpty)) {
None
} else {
val updateKinds = children.flatten
.map(_.getTraitSet.getTrait(UpdateKindTraitDef.INSTANCE))
// union can just forward changes, can't actively satisfy to another changelog mode
val providedTrait = if (updateKinds.forall(k => UpdateKindTrait.NONE == k)) {
// if all the children is NO_UPDATE, union is NO_UPDATE
UpdateKindTrait.NONE
} else {
// otherwise, merge update kinds.
val merged = updateKinds
.map(_.updateKind)
.reduce { (l, r) =>
(l, r) match {
case (UpdateKind.NONE, r: UpdateKind) => r
case (l: UpdateKind, UpdateKind.NONE) => l
case (l: UpdateKind, r: UpdateKind) if l == r => l
case (_, _) =>
throw new UnsupportedOperationException(
"UNION doesn't support to union ONLY_UPDATE_AFTER input " +
"and BEFORE_AND_AFTER input.")
}
}
new UpdateKindTrait(merged)
}
createNewNode(union, Some(children.flatten), providedTrait)
}
case _: StreamExecDataStreamScan | _: StreamExecTableSourceScan | _: StreamExecValues =>
createNewNode(rel, Some(List()), UpdateKindTrait.NONE)
case scan: StreamExecIntermediateTableScan =>
val providedTrait = if (scan.intermediateTable.isUpdateBeforeRequired) {
// we can't drop UPDATE_BEFORE if it is required by other parent blocks
UpdateKindTrait.BEFORE_AND_AFTER
} else {
requiredTrait
}
if (!providedTrait.satisfies(requiredTrait)) {
// require ONLY_AFTER but can only provide BEFORE_AND_AFTER
None
} else {
createNewNode(rel, Some(List()), providedTrait)
}
case _ =>
throw new UnsupportedOperationException(
s"Unsupported visit for ${rel.getClass.getSimpleName}")
}
private def visitChildren(
parent: StreamPhysicalRel,
requiredChildrenTrait: UpdateKindTrait): Option[List[StreamPhysicalRel]] = {
val newChildren = for (child <- parent.getInputs) yield {
this.visit(child.asInstanceOf[StreamPhysicalRel], requiredChildrenTrait) match {
case None =>
// return None if one of the children can't satisfy
return None
case Some(newChild) =>
val providedTrait = newChild.getTraitSet.getTrait(UpdateKindTraitDef.INSTANCE)
val childDescription = newChild.getRelDetailedDescription
if (!providedTrait.satisfies(requiredChildrenTrait)) {
throw new TableException(s"Provided trait $providedTrait can't satisfy " +
s"required trait $requiredChildrenTrait. " +
s"This is a bug in planner, please file an issue. \\n" +
s"Current node is $childDescription")
}
newChild
}
}
Some(newChildren.toList)
}
private def createNewNode(
node: StreamPhysicalRel,
childrenOption: Option[List[StreamPhysicalRel]],
providedTrait: UpdateKindTrait): Option[StreamPhysicalRel] = childrenOption match {
case None =>
None
case Some(children) =>
val modifyKindSetTrait = node.getTraitSet.getTrait(ModifyKindSetTraitDef.INSTANCE)
val nodeDescription = node.getRelDetailedDescription
val isUpdateKindValid = providedTrait.updateKind match {
case UpdateKind.NONE =>
!modifyKindSetTrait.modifyKindSet.contains(ModifyKind.UPDATE)
case UpdateKind.BEFORE_AND_AFTER | UpdateKind.ONLY_UPDATE_AFTER =>
modifyKindSetTrait.modifyKindSet.contains(ModifyKind.UPDATE)
}
if (!isUpdateKindValid) {
throw new TableException(s"UpdateKindTrait $providedTrait conflicts with " +
s"ModifyKindSetTrait $modifyKindSetTrait. " +
s"This is a bug in planner, please file an issue. \\n" +
s"Current node is $nodeDescription.")
}
val newTraitSet = node.getTraitSet.plus(providedTrait)
Some(node.copy(newTraitSet, children).asInstanceOf[StreamPhysicalRel])
}
/**
* Try all possible rank strategies and return the first viable new node.
* @param rankStrategies all possible supported rank strategy by current node
* @param requiredUpdateKindTrait the required UpdateKindTrait by parent of rank node
* @param applyRankStrategy a function to apply rank strategy to get a new copied rank node
*/
private def visitRankStrategies(
rankStrategies: Seq[RankProcessStrategy],
requiredUpdateKindTrait: UpdateKindTrait,
applyRankStrategy: RankProcessStrategy => StreamPhysicalRel): Option[StreamPhysicalRel] = {
// go pass every RankProcessStrategy, apply the rank strategy to get a new copied rank node,
// return the first satisfied converted node
for (strategy <- rankStrategies) {
val requiredChildrenTrait = strategy match {
case UpdateFastStrategy(_) => UpdateKindTrait.ONLY_UPDATE_AFTER
case RetractStrategy => UpdateKindTrait.BEFORE_AND_AFTER
case AppendFastStrategy => UpdateKindTrait.NONE
}
val node = applyRankStrategy(strategy)
val children = visitChildren(node, requiredChildrenTrait)
val newNode = createNewNode(node, children, requiredUpdateKindTrait)
if (newNode.isDefined) {
return newNode
}
}
return None
}
}
// -------------------------------------------------------------------------------------------
private def getModifyKindSet(node: RelNode): ModifyKindSet = {
val modifyKindSetTrait = node.getTraitSet.getTrait(ModifyKindSetTraitDef.INSTANCE)
modifyKindSetTrait.modifyKindSet
}
}
| bowenli86/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/optimize/program/FlinkChangelogModeInferenceProgram.scala | Scala | apache-2.0 | 29,988 |
object A {
type X = Int
}
| twitter-forks/sbt | sbt/src/sbt-test/source-dependencies/type-alias/changes/A.scala | Scala | bsd-3-clause | 28 |
/*
* Copyright 2009-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package mapper
package view
import common.Full
import util._
import Helpers._
import scala.xml.NodeSeq
/**
* Provides a number of methods that make complex Mapper-based view snippets
* easier to build.
* @author nafg
*/
object Util {
/**
* Binds all nodes whose names are names of fields on the specified mapper.
* This makes it unnecessary to write repetitious bindings like
* "field1" -> field1.toForm,
* "field2" -> field2.toform
* Instead it automates such bindings but you have to pass it a function
* that will generate a NodeSeq from the field, e.g.,
* (f: MappedField[_,_]) => f.toForm
* Usage: Pass as a Full Box to the bind overload that takes a nodeFailureXform
* argument.
*/
def bindFields[T <: Mapper[T]](mapper: T, nsfn: MappedField[_,T]=>NodeSeq): NodeSeq=>NodeSeq = {
case scala.xml.Elem(_, name, _, _, _*) =>
mapper.fieldByName(name) match {
case Full(field) => nsfn(field)
case _ => NodeSeq.Empty
}
case ns => ns
}
/**
* Iterates over the fields of the specified mapper. If the node currently being processed by bind
* has an attribute "fields" then it is taken as a whitespace-delimited list of fields to iterate
* over; otherwise all form fields are used. The specified function returns a BindParam for doing
* processing specific to that field.
* Returns a bind function (NodeSeq=>NodeSeq) that can be used to bind an xml node that should be
* repeated for each field.
* Usage: if you want to repeat xml markup for each field, the view should use the "field:" prefix
* for field-specific nodes. The snippet should bind the containing (repeating) node to the function
* returned by this method, passing this method the mapper instance whose fields should be used and
* a function that returns BindParams to process the "field:" prefixed nodes.
* This method takes an additional filter function to restrict certain fields from being
* displayed. There is an overload without it too.
*/
def eachField[T<:net.liftweb.mapper.Mapper[T]](
mapper: T,
fn:MappedField[_,T]=>CssSel,
filter: MappedField[_,T]=>Boolean
): NodeSeq=>NodeSeq = {
def fieldBindIfWanted(fieldName: String) = {
mapper.fieldByName(fieldName).filter(filter) match {
case Full(field) =>
Some(fn(field))
case _ =>
None
}
}
"^" #> { ns: NodeSeq =>
val fieldsAttribute = (ns \\ "@fields")
val bind: Seq[CssSel] =
if (fieldsAttribute.nonEmpty) {
for {
fieldName <- fieldsAttribute.text.split("\\\\s+").toIndexedSeq
// the following hackery is brought to you by the Scala compiler not
// properly typing MapperField[_, T] in the context of the for
// comprehension
fieldBind <- fieldBindIfWanted(fieldName)
} yield {
".field" #> fieldBind
}
} else {
mapper.formFields.filter(filter).map {
case field: MappedField[_, T] =>
".field" #> fn(field)
}
}
bind.map(_(ns))
}
}
def eachField[T<:net.liftweb.mapper.Mapper[T]](
mapper: T,
fn: MappedField[_,T] => CssSel
): NodeSeq => NodeSeq = eachField(mapper, fn, (_: MappedField[_,T]) => true)
}
| lift/framework | persistence/mapper/src/main/scala/net/liftweb/mapper/view/Util.scala | Scala | apache-2.0 | 3,976 |
package actors.gcounter
import java.util.UUID
import java.util.concurrent.TimeUnit
import akka.actor._
import com.typesafe.config.ConfigFactory
import crdts.GCounter
import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration
/**
*
*/
class Client(server: ActorSelection) extends Actor {
private var counter = GCounter[String]("client-" + self.path.name)
override def receive: Receive = {
case ReceiveUpdate(other) =>
counter = counter.merge(other)
case Increment =>
val increment = (Math.random() * 10).asInstanceOf[Int]
counter = counter.increment(increment)
case SendUpdate =>
server ! ReceiveUpdate(counter)
case Print =>
println(s"Counter for ${counter.id} is ${counter.get}.")
}
}
object Client {
def apply()(implicit actorSystem: ActorSystem): ActorRef = Client(FiniteDuration(100, TimeUnit.MILLISECONDS))
def apply(interval: FiniteDuration)(implicit actorSystem: ActorSystem): ActorRef = {
val server = actorSystem.actorSelection("akka.tcp://gcounterSystem@127.0.0.1:2552/user/server")
val client = actorSystem.actorOf(Props(classOf[Client], server), UUID.randomUUID().toString)
implicit val ec = actorSystem.dispatcher
actorSystem.scheduler.schedule(interval, interval, client, SendUpdate)
actorSystem.scheduler.schedule(interval, interval, client, Print)
// Keep incrementing the counter for a while
Future {
(1 to 100) foreach { _ =>
client ! Increment
Thread.sleep(100)
}
println("Stopped incrementing the counter now .. ")
}
client
}
def main(args: Array[String]): Unit = {
implicit val actorSystem = ActorSystem("gcounterSystem", ConfigFactory.load("client.conf"))
Client()
}
}
| maylencita/counters | src/main/scala/actors/gcounter/Client.scala | Scala | cc0-1.0 | 1,772 |
package deploymentzone.actor
import akka.util.ByteString
trait TestBase {
import scala.language.implicitConversions
implicit def noLengthLimit(m: MaterializedMetric): ByteString = m.render(Int.MaxValue).get
}
| edmundnoble/akka-actor-statsd | src/test/scala/deploymentzone/actor/TestBase.scala | Scala | mit | 218 |
package org.example2.usage
//
// SIMPLE
//
class Usage_Simple_1 {
import org.example2.declaration.data.U
import org.example2.declaration.{V, W}
val u: U = ???
val v: V = ???
val w: W = ???
}
class Usage_Simple_2 {
import org.example2.declaration.data.U
import org.example2.declaration.{V, W, Y}
val u: U = ???
val v: V = ???
val w: W = ???
val y: Y = ???
}
class Usage_Simple_3 {
import org.example2.declaration.data.U
import org.example2.declaration.{V, W, Y, Z}
val u: U = ???
val v: V = ???
val w: W = ???
val y: Y = ???
val z: Z = ???
}
//
// SIMPLE SEPARATE IMPORTS
//
class Usage_Simple_SeparateImports_2 {
import org.example2.declaration.{V, W}
import org.example2.declaration.Y
import org.example2.declaration.data.U
val u: U = ???
val v: V = ???
val w: W = ???
val y: Y = ???
}
class Usage_Simple_SeparateImports_3 {
import org.example2.declaration.data.U
import org.example2.declaration.{V, W}
import org.example2.declaration.{Y, Z}
val u: U = ???
val v: V = ???
val w: W = ???
val y: Y = ???
val z: Z = ???
}
//
// RENAMED
//
class Usage_Renamed_1 {
import org.example2.declaration.data.{U => U_Renamed1}
import org.example2.declaration.{V, W => W_Renamed1}
val u: U_Renamed1 = ???
val v: V = ???
val w: W_Renamed1 = ???
}
class Usage_Renamed_2 {
import org.example2.declaration.data.{U => U_Renamed2}
import org.example2.declaration.{Y, V, W => W_Renamed2}
val u: U_Renamed2 = ???
val v: V = ???
val w: W_Renamed2 = ???
val y: Y = ???
}
class Usage_Renamed_3 {
import org.example2.declaration.data.{U => U_Renamed3}
import org.example2.declaration.{Y, Z, V, W => W_Renamed3}
val u: U_Renamed3 = ???
val v: V = ???
val w: W_Renamed3 = ???
val y: Y = ???
val z: Z = ???
}
//
// RENAMED HIDDEN
//
class Usage_Renamed_Hidden_1 {
import org.example2.declaration.data.{U => _}
import org.example2.declaration.{V, W => _}
val v: V = ???
}
class Usage_Renamed_Hidden_2 {
import org.example2.declaration.data.{U => _}
import org.example2.declaration.{Y, V, W => _}
val v: V = ???
val y: Y = ???
}
class Usage_Renamed_Hidden_3 {
import org.example2.declaration.data.{U => _}
import org.example2.declaration.{Y, Z, V, W => _}
val v: V = ???
val y: Y = ???
val z: Z = ???
}
//
// RENAMED SEPARATE IMPORTS
//
class Usage_Renamed_SeparateImports_2 {
import org.example2.declaration.{V, W => W_Renamed2}
import org.example2.declaration.Y
import org.example2.declaration.data.{U => U_Renamed2}
val u: U_Renamed2 = ???
val v: V = ???
val w: W_Renamed2 = ???
val y: Y = ???
}
class Usage_Renamed_SeparateImports_3 {
import org.example2.declaration.data.{U => U_Renamed3}
import org.example2.declaration.{V, W => W_Renamed3}
import org.example2.declaration.{Y, Z}
val u: U_Renamed3 = ???
val v: V = ???
val w: W_Renamed3 = ???
val y: Y = ???
val z: Z = ???
}
//
// RENAMED HIDDEN SEPARATE IMPORTS
//
class Usage_Renamed_Hidden_SeparateImports_2 {
import org.example2.declaration.{V, W => _}
import org.example2.declaration.Y
import org.example2.declaration.data.{U => _}
val v: V = ???
val y: Y = ???
}
class Usage_Renamed_Hidden_SeparateImports_3 {
import org.example2.declaration.data.{U => _}
import org.example2.declaration.{V, W => _}
import org.example2.declaration.{Y, Z}
val v: V = ???
val y: Y = ???
val z: Z = ???
} | JetBrains/intellij-scala | scala/scala-impl/testdata/move/allInOne_LocalImports/after/org/example2/usage/Usage3_move_several.scala | Scala | apache-2.0 | 3,445 |
package org.scalaide.core.internal.project
import java.net.URLClassLoader
import java.util.Properties
import java.util.zip.ZipEntry
import java.util.zip.ZipFile
import scala.Left
import scala.Right
import scala.collection.mutable.Set
import scala.tools.nsc.settings.ScalaVersion
import scala.util.Failure
import scala.util.Success
import scala.util.Try
import org.eclipse.core.runtime.FileLocator
import org.eclipse.core.runtime.IPath
import org.eclipse.core.runtime.Path
import org.eclipse.core.runtime.Platform
import org.eclipse.jdt.core.IClasspathEntry
import org.eclipse.jdt.core.JavaCore
import org.osgi.framework.Bundle
import org.osgi.framework.Version
import org.scalaide.core.IScalaInstallation
import org.scalaide.core.IScalaInstallationChoice
import org.scalaide.core.IScalaModule
import org.scalaide.core.internal.ScalaPlugin
import org.scalaide.util.eclipse.EclipseUtils
import org.scalaide.util.eclipse.OSGiUtils
import org.scalaide.util.internal.CompilerUtils.isBinarySame
import org.scalaide.util.internal.CompilerUtils.shortString
import xsbti.compile.ScalaInstance
sealed trait ScalaInstallationLabel extends Serializable
case class BundledScalaInstallationLabel() extends ScalaInstallationLabel
case class MultiBundleScalaInstallationLabel() extends ScalaInstallationLabel
case class CustomScalaInstallationLabel(label: String) extends ScalaInstallationLabel
/**
* A type that marks the choice of a Labeled Scala Installation : either a Scala Version,
* which will dereference to the latest available bundle with the same binary version, or
* a scala installation hashcode, which will dereference to the Labeled installation which
* hashes to it, if available.
*
* @see ScalaInstallation.resolve
*/
case class ScalaInstallationChoice(marker: Either[ScalaVersion, Int]) extends Serializable with IScalaInstallationChoice {
override def toString() = marker match {
case Left(version) => shortString(version)
case Right(hash) => hash.toString
}
override def equals(o: Any) = PartialFunction.cond(o) {
case that: ScalaInstallationChoice => (marker, that.marker) match {
case (Right(h1), Right(h2)) => h1 == h2
case (Left(v1), Left(v2)) => isBinarySame(v1, v2)
case _ => false
}
}
}
object ScalaInstallationChoice {
def apply(si: LabeledScalaInstallation): ScalaInstallationChoice = ScalaInstallationChoice(Right(si.hashString.hashCode()))
def apply(sv: ScalaVersion): ScalaInstallationChoice = ScalaInstallationChoice(Left(sv))
}
/**
* This class represents a valid Scala installation. It encapsulates
* a Scala version and paths to the standard Scala jar files:
*
* - scala-library.jar
* - scala-compiler.jar
* - scala-reflect.jar
* - others (actors, swing, etc.)
*/
trait ScalaInstallation extends IScalaInstallation {
/** The version of Scala */
def version: ScalaVersion
def compiler: ScalaModule
def library: ScalaModule
def extraJars: Seq[ScalaModule]
/**
* All jars provided by Scala (including the compiler)
* @see The note in [[MultiBundleScalaInstallation]] below
*/
def allJars: Seq[ScalaModule] =
library +: compiler +: extraJars
override def toString() =
s"Scala $version: \\n\\t${allJars.mkString("\\n\\t")})"
def isValid(): Boolean = {
allJars forall (_.isValid())
}
}
/**
* A tag for serializable tagging of Scala Installations
*/
trait LabeledScalaInstallation extends ScalaInstallation {
def label: ScalaInstallationLabel
// to recover bundle-less Bundle values from de-serialized Scala Installations
// this should be relaxed for bundles : our bundles are safe, having one with just the same version should be enough
def similar(that: LabeledScalaInstallation): Boolean =
this.label == that.label && this.compiler == that.compiler && this.library == that.library && this.extraJars.toSet == that.extraJars.toSet
def getName(): Option[String] = PartialFunction.condOpt(label) { case CustomScalaInstallationLabel(tag) => tag }
def hashString: String = {
val jarSeq = allJars map (_.hashString)
getName().fold(jarSeq)(str => str +: jarSeq).mkString
}
override def hashCode() = hashString.hashCode()
override def equals(o: Any) = PartialFunction.cond(o) { case lsi: LabeledScalaInstallation => lsi.hashCode() == this.hashCode() }
}
case class ScalaModule(classJar: IPath, sourceJar: Option[IPath]) extends IScalaModule {
def isValid(): Boolean = {
sourceJar.fold(List(classJar))(List(_, classJar)) forall { path => path.toFile().isFile() }
}
def libraryEntries(): IClasspathEntry = {
JavaCore.newLibraryEntry(classJar, sourceJar.orNull, null)
}
private def relativizedString(path: IPath) = {
path.makeRelativeTo(ScalaPlugin().getStateLocation()).toPortableString()
}
def hashString: String = sourceJar.map { relativizedString }.fold(relativizedString(classJar))(s => relativizedString(classJar) + s)
}
object ScalaModule {
def apply(bundleId: String, classJar: IPath): ScalaModule = {
ScalaModule(classJar, EclipseUtils.computeSourcePath(bundleId, classJar))
}
}
/**
* Represent a version of Scala installed as a bundle containing the necessary jars.
*/
case class BundledScalaInstallation(
override val version: ScalaVersion,
bundle: Bundle,
override val library: ScalaModule,
override val compiler: ScalaModule) extends LabeledScalaInstallation {
import BundledScalaInstallation._
override val label = BundledScalaInstallationLabel()
def osgiVersion = bundle.getVersion()
override lazy val extraJars =
Seq(
findExtraJar(bundle, ScalaReflectPath, ScalaReflectSourcesPath),
findExtraJar(bundle, ScalaSwingPath, ScalaSwingSourcesPath)).flatten
private def findExtraJar(bundle: Bundle, classPath: String, sourcePath: String): Option[ScalaModule] = {
OSGiUtils.pathInBundle(bundle, classPath).map { p =>
ScalaModule(p, OSGiUtils.pathInBundle(bundle, sourcePath))
}
}
}
object BundledScalaInstallation {
val ScalaLibraryPath = "target/jars/scala-library.jar"
val ScalaLibrarySourcesPath = "target/jars/scala-library-src.jar"
val ScalaCompilerPath = "target/jars/scala-compiler.jar"
val ScalaCompilerSourcesPath = "target/jars/scala-compiler-src.jar"
val ScalaReflectPath = "target/jars/scala-reflect.jar"
val ScalaReflectSourcesPath = "target/jars/scala-reflect-src.jar"
val ScalaSwingPath = "target/jars/scala-swing.jar"
val ScalaSwingSourcesPath = "target/jars/scala-swing-src.jar"
def apply(bundle: Bundle): Option[BundledScalaInstallation] = {
for {
scalaLibrary <- OSGiUtils.pathInBundle(bundle, ScalaLibraryPath)
version <- ScalaInstallation.extractVersion(scalaLibrary)
scalaCompiler <- OSGiUtils.pathInBundle(bundle, ScalaCompilerPath)
} yield BundledScalaInstallation(
version,
bundle,
ScalaModule(scalaLibrary, OSGiUtils.pathInBundle(bundle, ScalaLibrarySourcesPath)),
ScalaModule(scalaCompiler, OSGiUtils.pathInBundle(bundle, ScalaCompilerSourcesPath)))
}
val ScalaBundleJarsRegex = "org\\\\.scala-ide\\\\.scala[0-9]{3}\\\\.jars".r
/**
* Find and return the complete bundled Scala installations.
*/
def detectBundledInstallations(): List[BundledScalaInstallation] = {
// find the bundles with the right pattern
val matchingBundles: List[Bundle] =
ScalaPlugin().getBundle().getBundleContext().getBundles().to[List]
.filter { b => ScalaBundleJarsRegex.unapplySeq(b.getSymbolicName()).isDefined }
matchingBundles.flatMap(BundledScalaInstallation(_))
}
}
/**
* Represent a version of Scala installed as a set of bundles, each bundle with an identical version.
*
* TODO: We SHOULD reuse the current class loader if this installation is the platform installation.
*
* @note We don't reuse it because of weird interactions between the OSGi classloader and the compiler-interface.jar,
* resulting in AbstractMethodErrors. The `Reporter` interface is defined in scala-reflect, but implemented in
* compiler-interface.jar (which is NOT a bundle), and `info0` is not seen.
*
* See ticket #1002175
*/
case class MultiBundleScalaInstallation(
override val version: ScalaVersion,
libraryBundleVersion: Version,
override val library: ScalaModule,
override val compiler: ScalaModule) extends LabeledScalaInstallation {
import MultiBundleScalaInstallation._
override val label = MultiBundleScalaInstallationLabel()
def osgiVersion = libraryBundleVersion
override lazy val extraJars = Seq(
findLibraryForBundle(ScalaReflectBundleId, libraryBundleVersion),
findLibraryForBundle(ScalaSwingBundleId, libraryBundleVersion)).flatten
}
object MultiBundleScalaInstallation {
val ScalaLibraryBundleId = "org.scala-lang.scala-library"
val ScalaCompilerBundleId = "org.scala-lang.scala-compiler"
val ScalaSwingBundleId = "org.scala-lang.scala-swing"
val ScalaReflectBundleId = "org.scala-lang.scala-reflect"
val ScalaXmlBundleId = "org.scala-lang.modules.scala-xml"
val ScalaParserCombinatorsBundleId = "org.scala-lang.modules.scala-parser-combinators"
private def bundlePath(bundle: Bundle) =
Path.fromOSString(FileLocator.getBundleFile(bundle).getAbsolutePath())
private def findBundle(bundleId: String, version: Version): Option[Bundle] = {
def doesBundleVersionQualifierEncloseVersionQualifier(bundleQualifier: String, qualifier: String) =
qualifier.intersect(bundleQualifier) == qualifier
Option(Platform.getBundles(bundleId, null)).getOrElse(Array()).to[List].find { bundle =>
val bundleVersion = bundle.getVersion
bundleVersion.getMajor == version.getMajor &&
bundleVersion.getMinor == version.getMinor &&
bundleVersion.getMicro == version.getMicro &&
doesBundleVersionQualifierEncloseVersionQualifier(bundleVersion.getQualifier, version.getQualifier)
}
}
private def findLibraryForBundle(bundleId: String, version: Version): Option[ScalaModule] = {
val classPath = findBundle(bundleId, version).map(bundlePath)
classPath.map(cp => ScalaModule(cp, EclipseUtils.computeSourcePath(bundleId, cp)))
}
def apply(libraryBundle: Bundle): Option[MultiBundleScalaInstallation] = {
val libraryBundleVersion = libraryBundle.getVersion()
for {
version <- ScalaInstallation.extractVersion(bundlePath(libraryBundle))
library = bundlePath(libraryBundle)
compiler <- findLibraryForBundle(ScalaCompilerBundleId, libraryBundleVersion)
} yield MultiBundleScalaInstallation(
version,
libraryBundleVersion,
ScalaModule(bundlePath(libraryBundle), EclipseUtils.computeSourcePath(ScalaLibraryBundleId, library)),
compiler)
}
def detectInstallations(): List[MultiBundleScalaInstallation] = {
val scalaLibraryBundles = Platform.getBundles(ScalaLibraryBundleId, null).to[List]
scalaLibraryBundles.flatMap(MultiBundleScalaInstallation(_))
}
}
object ScalaInstallation {
val installationsTracker = new ScalaInstallationSaver()
private def savedScalaInstallations() = Try(installationsTracker.getSavedInstallations())
lazy val initialScalaInstallations = savedScalaInstallations() match {
case Success(sis) => sis filter (_.isValid()) filter { deserial => !(bundledInstallations ++ multiBundleInstallations exists (_.similar(deserial))) }
// we need to silently fail, as this happens early in initialization
case Failure(throwable) => Nil
}
// This lets you see installs retrieved from serialized bundles as newly-defined custom installations
private def customize(install: LabeledScalaInstallation) = install.label match {
case CustomScalaInstallationLabel(tag) => install
case BundledScalaInstallationLabel() | MultiBundleScalaInstallationLabel() => new LabeledScalaInstallation() {
override def label = new CustomScalaInstallationLabel(s"Scala (legacy with hash ${ScalaInstallationChoice(install).toString()})")
override def compiler = install.compiler
override def library = install.library
override def extraJars = install.extraJars
override def version = install.version
}
}
def scalaInstanceForInstallation(si: IScalaInstallation): ScalaInstance = {
val store = ScalaPlugin().classLoaderStore
val scalaLoader: ClassLoader = store.getOrUpdate(si)(new URLClassLoader(si.allJars.map(_.classJar.toFile.toURI.toURL).toArray, ClassLoader.getSystemClassLoader))
new sbt.ScalaInstance(si.version.unparse, scalaLoader, si.library.classJar.toFile, si.compiler.classJar.toFile, si.extraJars.map(_.classJar.toFile).toList, None)
}
lazy val customInstallations: Set[LabeledScalaInstallation] = initialScalaInstallations.map(customize(_))(collection.breakOut)
/** Return the Scala installation currently running in Eclipse. */
lazy val platformInstallation: LabeledScalaInstallation =
multiBundleInstallations.find(_.version == ScalaVersion.current).get
lazy val bundledInstallations: List[LabeledScalaInstallation] =
BundledScalaInstallation.detectBundledInstallations()
lazy val multiBundleInstallations: List[LabeledScalaInstallation] =
MultiBundleScalaInstallation.detectInstallations()
def availableBundledInstallations: List[LabeledScalaInstallation] = {
multiBundleInstallations ++ bundledInstallations
}
def availableInstallations: List[LabeledScalaInstallation] = {
multiBundleInstallations ++ bundledInstallations ++ customInstallations
}
val LibraryPropertiesPath = "library.properties"
def labelInFile(scalaPath: IPath): Option[String] = {
val scalaJarRegex = """scala-(\\w+)(?:.2\\.\\d+(?:\\.\\d*)?(?:-.*)?)?.jar""".r
scalaPath.toFile().getName() match {
case scalaJarRegex(qualifier) => Some(qualifier + ".properties")
case _ => None
}
}
def extractVersion(scalaLibrary: IPath): Option[ScalaVersion] = {
val propertiesPath = labelInFile(scalaLibrary).getOrElse(LibraryPropertiesPath)
val zipFile = new ZipFile(scalaLibrary.toFile())
try {
def getVersion(propertiesEntry: ZipEntry) = {
val properties = new Properties()
properties.load(zipFile.getInputStream(propertiesEntry))
Option(properties.getProperty("version.number"))
}
for {
propertiesEntry <- Option(zipFile.getEntry(propertiesPath))
version <- getVersion(propertiesEntry)
} yield ScalaVersion(version)
} finally {
zipFile.close()
}
}
def resolve(choice: IScalaInstallationChoice): Option[LabeledScalaInstallation] = choice.marker match {
case Left(version) => availableBundledInstallations.filter { si => isBinarySame(version, si.version) }.sortBy(_.version).lastOption
case Right(hash) => availableInstallations.find(si => ScalaInstallationChoice(si).toString equals hash.toString())
}
}
| dragos/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/core/internal/project/ScalaInstallation.scala | Scala | bsd-3-clause | 14,827 |
package me.yingrui.segment.math
trait Matrix {
def +(n: Double): Matrix
def +(m: Matrix): Matrix
def +=(m: Matrix): Unit
def -(n: Double): Matrix
def -(m: Matrix): Matrix
def -=(m: Matrix): Unit
def x(n: Double): Matrix
def x(m: Matrix): Matrix
def %(m: Matrix): Matrix
def *=(n: Double): Unit
def *(m: Matrix): Double
def /(n: Double): Matrix
def /(n: Matrix): Matrix
def T: Matrix
def flatten: Array[Double]
def row(i: Int): Matrix
def col(i: Int): Matrix
val row: Int
val col: Int
def isVector: Boolean
def isColumnVector: Boolean
def clear: Unit
def apply(i: Int, j: Int): Double
def update(i: Int, j: Int, value: Double)
def :=(other: Matrix): Unit
def sum: Double
def map(compute: (Double) => Double): Matrix
}
object Matrix {
private val defaultMatrixBuilder: MatrixBuilder = new NDMatrixBuilder()
// private val defaultMatrixBuilder: MatrixBuilder = new DenseMatrixBuilder()
private var matrixBuilder: MatrixBuilder = defaultMatrixBuilder
def setMatrixBuilder(builder: MatrixBuilder) {
matrixBuilder = builder
}
def restoreMatrixBuilder {
matrixBuilder = defaultMatrixBuilder
}
def vector(d: Double*): Matrix = matrixBuilder.vector(d)
def apply(row: Int, col: Int): Matrix = matrixBuilder.apply(row, col)
def apply(size: Int, identity: Boolean = false): Matrix = matrixBuilder.apply(size, identity)
def apply(data: Array[Double]): Matrix = matrixBuilder.apply(data)
def apply(data: Array[Array[Double]]): Matrix = matrixBuilder.apply(data)
def apply(data: Seq[Double]): Matrix = matrixBuilder.apply(data)
def apply(row: Int, col: Int, data: Array[Double]): Matrix = matrixBuilder.apply(row, col, data)
def apply(row: Int, col: Int, data: Array[Boolean]): Matrix = matrixBuilder.applyBoolean(row, col, data)
def randomize(row: Int, col: Int, min: Double, max: Double) = matrixBuilder.randomize(row, col, min, max)
def randomize(row: Int, col: Int): Matrix = matrixBuilder.randomize(row, col, 1e-4D, 0.1D)
implicit class RichMatrix(matrix: Matrix) {
def equalsTo(other: Matrix): Boolean = equals(matrix, other)
private def equals(m: Matrix, n: Matrix): Boolean = {
m.row == n.row && m.col == n.col && doubleArrayEquals(m.flatten, n.flatten)
}
private def doubleArrayEquals(data: Array[Double], other: Array[Double]): Boolean = {
if (data.length == other.length) {
val index = (0 until data.length).find(i => data(i) - other(i) > 0.000000001D || data(i) - other(i) < -0.000000001D)
index match {
case None => true
case _ => false
}
} else {
false
}
}
}
}
| yingrui/mahjong | lib-segment/src/main/scala/me/yingrui/segment/math/Matrix.scala | Scala | gpl-3.0 | 2,699 |
package org.scalaide.core.scalaelements
import org.scalaide.core.internal.jdt.model.ScalaSourceTypeElement
import org.scalaide.core.testsetup.SDTTestUtils
import org.scalaide.core.testsetup.TestProjectSetup
import org.scalaide.logging.HasLogger
import org.junit.AfterClass
import org.junit.BeforeClass
import org.junit.Test
object ScalaElementsNameTest extends TestProjectSetup("scala-elements") {
@BeforeClass
def setup(): Unit = {
SDTTestUtils.enableAutoBuild(false)
}
@AfterClass
def tearDown(): Unit = {
SDTTestUtils.deleteProjects(project)
}
val mainObject = "test.a.Main$"
val extendedTrait = "test.b.c.C"
val clazz = "test.b.B"
val nestedClass = "test.b.B$BB"
val nestedObject = "test.b.B$OB$"
}
class ScalaElementsNameTest extends HasLogger {
import ScalaElementsNameTest._
@Test
def shouldCollectAllJavaTypesWithPkgNotRespodingToFoldersStructure(): Unit = {
cleanProject()
val cu = scalaCompilationUnit("test/ScalaElementExamples.scala")
waitUntilTypechecked(cu)
val allTypes = cu.getAllTypes
val actualTypes = allTypes.collect {
case e: ScalaSourceTypeElement => e
}.map { _.getFullyQualifiedName }.toSet
val expectedTypes = Set(mainObject, extendedTrait, clazz, nestedClass, nestedObject)
assert((expectedTypes & actualTypes) == expectedTypes, s"Expected all in expected types, got difference: ${(expectedTypes & actualTypes).mkString(",")}")
}
}
| scala-ide/scala-ide | org.scala-ide.sdt.core.tests/src/org/scalaide/core/scalaelements/ScalaElementsNameTest.scala | Scala | bsd-3-clause | 1,442 |
package ilc
package feature
package fixpoint
trait Derivation extends functions.Derivation with Syntax with functions.Syntax {
override def derive(t: Term): Term = t match {
case App(Fix(typ), body) =>
//Since the metalanguage doesn't do type inference for us,
//let's do it by hand.
//Situation:
//body: T => T
//t = fix(body)
//fix(body): T
//derive(t): DT
//derive(body) t: DT => DT
//So fix(derive(body) t): DT, and DT must be the type parameter to fix.
App(Fix.tapply(deltaType(typ)), App(derive(body), t))
case _ => super.derive(t)
}
}
| inc-lc/ilc-scala | src/main/scala/ilc/feature/fixpoint/Derivation.scala | Scala | mit | 616 |
/*
* FactorTest.scala
* Factor tests.
*
* Created By: Avi Pfeffer (apfeffer@cra.com)
* Creation Date: Jan 1, 2009
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.test.algorithm.factored
import org.scalatest.Matchers
import org.scalatest.PrivateMethodTester
import org.scalatest.WordSpec
import com.cra.figaro.algorithm.Values
import com.cra.figaro.algorithm.factored.factors._
import com.cra.figaro.algorithm.lazyfactored.LazyValues
import com.cra.figaro.algorithm.lazyfactored.Regular
import com.cra.figaro.algorithm.lazyfactored.ValueSet
import com.cra.figaro.algorithm.sampling.ProbEvidenceSampler
import com.cra.figaro.language.Apply
import com.cra.figaro.language.Apply3
import com.cra.figaro.language.Apply4
import com.cra.figaro.language.Apply5
import com.cra.figaro.language.CachingChain
import com.cra.figaro.language.Chain
import com.cra.figaro.language.Condition
import com.cra.figaro.language.Constant
import com.cra.figaro.language.Dist
import com.cra.figaro.language.Flip
import com.cra.figaro.language.Inject
import com.cra.figaro.language.Name.stringToName
import com.cra.figaro.language.NamedEvidence
import com.cra.figaro.language.Reference.stringToReference
import com.cra.figaro.language.Select
import com.cra.figaro.language.Universe
import com.cra.figaro.library.atomic.continuous.Normal
import com.cra.figaro.library.atomic.continuous.Uniform
import com.cra.figaro.library.compound.CPD
import com.cra.figaro.algorithm.factored.ParticleGenerator
import com.cra.figaro.library.compound.If
import com.cra.figaro.algorithm.factored.VariableElimination
class FactorTest extends WordSpec with Matchers with PrivateMethodTester {
"A variable for an element" should {
"have range equal to the element's values" in {
Universe.createNew()
val e1 = Select(0.2 -> 1, 0.3 -> 2, 0.5 -> 3)
val xs = Values()(e1)
val v1 = Variable(e1)
for { x <- v1.range } { xs should contain(x.value) }
//for { x <- xs } { v1.range should contain(x) }
}
"always be equal to another variable for the same element" in {
Universe.createNew()
val e1 = Flip(0.2)
Values()(e1)
val v1 = Variable(e1)
val v2 = Variable(e1)
v1 should equal(v2)
}
"always contain the same id even if the Variable cache is cleared" in {
Universe.createNew()
val e1 = Flip(0.2)
Values()(e1)
val v1 = Variable(e1).id
Variable.clearCache
LazyValues.clear(Universe.universe)
Values()(e1)
val v2 = Variable(e1).id
v1 should equal(v2)
}
"always be equal to a variable with the same id" in {
Universe.createNew()
val e1 = Flip(0.2)
Values()(e1)
val v1 = Variable(e1)
val v2 = new Variable(ValueSet.withStar(Set[Boolean]())) { override val id = v1.id }
v1 == v2 should equal(true)
}
"be different to a variable for a different element with the same definition" in {
Universe.createNew()
val e1 = Flip(0.2)
val e2 = Flip(0.2)
Values()(e1)
Values()(e2)
val v1 = Variable(e1)
val v2 = Variable(e2)
v1 should not equal (v2)
}
}
"A factor" when {
"get the same value for a given set of variable indices as was last set" in {
Universe.createNew()
val e1 = Flip(0.1)
val e2 = Constant(8)
val e3 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
val e4 = Flip(0.7)
Values()(e1)
Values()(e2)
Values()(e3)
Values()(e4)
val v1 = Variable(e1)
val v2 = Variable(e2)
val v3 = Variable(e3)
val v4 = Variable(e4)
val f = Factory.simpleMake[Double](List(v1, v2, v3, v4))
val indices = List(1, 0, 2, 1)
f.set(indices, 0.2)
f.set(indices, 0.3)
f.get(indices) should equal(0.3)
}
"get updated set of factors for an element when the factors have been updated" in {
Universe.createNew()
val v1 = Flip(0.5)
Values()(v1)
val f1 = Factory.make(v1)(0)
val f1mod = f1.mapTo((d: Double) => 2.0 * d)
Factory.updateFactor(v1, List(f1mod))
Factory.make(v1)(0).get(List(0)) should equal(f1mod.get(List(0)))
}
// "have the first index List be all zeros" in {
// Universe.createNew()
// val e1 = Flip(0.1)
// val e2 = Constant(8)
// val e3 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
// val e4 = Flip(0.7)
// Values()(e1)
// Values()(e2)
// Values()(e3)
// Values()(e4)
// val v1 = Variable(e1)
// val v2 = Variable(e2)
// val v3 = Variable(e3)
// val v4 = Variable(e4)
// val f = Factory.simpleMake[Double](List(v1, v2, v3, v4))
// f.firstIndices should equal(List(0, 0, 0, 0))
// }
"have the next index List carry and add correctly" in {
Universe.createNew()
val e1 = Flip(0.1)
val e2 = Constant(8)
val e3 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
val e4 = Flip(0.7)
Values()(e1)
Values()(e2)
Values()(e3)
Values()(e4)
val v1 = Variable(e1)
val v2 = Variable(e2)
val v3 = Variable(e3)
val v4 = Variable(e4)
val f = Factory.simpleMake[Double](List(v1, v2, v3, v4))
val ia = List(1, 0, 1, 1)
val indices = f.getIndices
val ar = indices.nextIndices(ia).get
ar should equal(List(1, 0, 2, 0))
}
"produce None when the index Lists are exhausted" in {
Universe.createNew()
val e1 = Flip(0.1)
val e2 = Constant(8)
val e3 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
val e4 = Flip(0.7)
Values()(e1)
Values()(e2)
Values()(e3)
Values()(e4)
val v1 = Variable(e1)
val v2 = Variable(e2)
val v3 = Variable(e3)
val v4 = Variable(e4)
val f = Factory.simpleMake[Double](List(v1, v2, v3, v4))
val ia = List(1, 0, 2, 1)
val indices = f.getIndices
indices.nextIndices(ia) should equal(None)
}
"compute the union of variables in two factors and the correct index maps when calling unionVars" in {
Universe.createNew()
val e1 = Flip(0.1)
val e2 = Constant(8)
val e3 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
val e4 = Flip(0.7)
val e5 = Constant('a)
val e6 = Select(0.1 -> 1.5, 0.9 -> 2.5)
Values()(e1)
Values()(e2)
Values()(e3)
Values()(e4)
Values()(e5)
Values()(e6)
val v1 = Variable(e1)
val v2 = Variable(e2)
val v3 = Variable(e3)
val v4 = Variable(e4)
val v5 = Variable(e5)
val v6 = Variable(e6)
val f = Factory.simpleMake[Double](List(v1, v2, v3, v4))
val g = Factory.simpleMake[Double](List(v5, v3, v2, v6))
val unionVars = PrivateMethod[(List[Variable[_]], List[Variable[_]], List[Int], List[Int])]('unionVars)
val (parents, output, indexMap1, indexMap2) = f invokePrivate unionVars(g)
val union = parents ::: output
union should equal(List(v1, v2, v3, v4, v5, v6))
indexMap1 should equal(List(0, 1, 2, 3))
indexMap2 should equal(List(4, 2, 1, 5))
}
"multiplying with another factor" should {
"return the product of the two factors" in {
Universe.createNew()
val e1 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
val e2 = Constant(8)
val e3 = Flip(0.1)
val e4 = Flip(0.6)
Values()(e1)
Values()(e2)
Values()(e3)
Values()(e4)
val v1 = Variable(e1)
val v2 = Variable(e2)
val v3 = Variable(e3)
val v4 = Variable(e4)
val f = Factory.simpleMake[Double](List(v1, v2, v3))
val g = Factory.simpleMake[Double](List(v4, v3))
f.set(List(0, 0, 0), 0.0)
f.set(List(1, 0, 0), 0.1)
f.set(List(2, 0, 0), 0.2)
f.set(List(0, 0, 1), 0.3)
f.set(List(1, 0, 1), 0.4)
f.set(List(2, 0, 1), 0.5)
g.set(List(0, 0), 0.6)
g.set(List(1, 0), 0.7)
g.set(List(0, 1), 0.8)
g.set(List(1, 1), 0.9)
val h = f.product(g)
h.variables should equal(List(v1, v2, v3, v4))
h.get(List(0, 0, 0, 0)) should be(0.0 +- 0.0001)
h.get(List(1, 0, 0, 0)) should be(0.06 +- 0.0001)
h.get(List(2, 0, 0, 0)) should be(0.12 +- 0.0001)
h.get(List(0, 0, 1, 0)) should be(0.24 +- 0.0001)
h.get(List(1, 0, 1, 0)) should be(0.32 +- 0.0001)
h.get(List(2, 0, 1, 0)) should be(0.4 +- 0.0001)
h.get(List(0, 0, 0, 1)) should be(0.0 +- 0.0001)
h.get(List(1, 0, 0, 1)) should be(0.07 +- 0.0001)
h.get(List(2, 0, 0, 1)) should be(0.14 +- 0.0001)
h.get(List(0, 0, 1, 1)) should be(0.27 +- 0.0001)
h.get(List(1, 0, 1, 1)) should be(0.36 +- 0.0001)
h.get(List(2, 0, 1, 1)) should be(0.45 +- 0.0001)
}
}
"calling sumOver on a variable" should {
"return the sum over the variable of the factor" in {
Universe.createNew()
val e1 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
val e2 = Constant(8)
val e3 = Flip(0.1)
Values()(e1)
Values()(e2)
Values()(e3)
val v1 = Variable(e1)
val v2 = Variable(e2)
val v3 = Variable(e3)
val f = Factory.simpleMake[Double](List(v1, v2, v3))
f.set(List(0, 0, 0), 0.0)
f.set(List(1, 0, 0), 0.1)
f.set(List(2, 0, 0), 0.2)
f.set(List(0, 0, 1), 0.3)
f.set(List(1, 0, 1), 0.4)
f.set(List(2, 0, 1), 0.5)
val g = f.sumOver(v3)
g.variables should equal(List(v1, v2))
g.get(List(0, 0)) should be(0.3 +- 0.0001)
g.get(List(1, 0)) should be(0.5 +- 0.0001)
g.get(List(2, 0)) should be(0.7 +- 0.0001)
}
"return itself if the variable not in the factor" in {
Universe.createNew()
val e1 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
val e2 = Constant(8)
val e3 = Flip(0.1)
Values()(e1)
Values()(e2)
Values()(e3)
val v1 = Variable(e1)
val v2 = Variable(e2)
val v3 = Variable(e3)
val f = Factory.simpleMake[Double](List(v1, v2))
f.set(List(0, 0), 0.0)
f.set(List(1, 0), 0.2)
f.set(List(2, 0), 0.4)
val g = f.sumOver(v3)
g.variables should equal(f.variables)
for { indices <- f.getIndices } {
g.get(indices) should equal(f.get(indices))
}
}
"return a factor with all columns of the variable removed, ignoring rows in which " +
"the variable has different values in different columns" in {
Universe.createNew()
val e1 = Flip(0.9)
val e2 = Select(0.2 -> 1, 0.8 -> 2)
Values()(e1)
Values()(e2)
val v1 = Variable(e1)
val v2 = Variable(e2)
val f = Factory.simpleMake[Double](List(v1, v2, v1))
f.set(List(0, 0, 0), 0.1)
f.set(List(1, 0, 0), 0.2)
f.set(List(0, 1, 0), 0.3)
f.set(List(1, 1, 0), 0.4)
f.set(List(0, 0, 1), 0.5)
f.set(List(1, 0, 1), 0.6)
f.set(List(0, 1, 1), 0.7)
f.set(List(1, 1, 1), 0.8)
val g = f.sumOver(v1)
g.variables should equal(List(v2))
g.get(List(0)) should equal(0.1 + 0.6)
g.get(List(1)) should equal(0.3 + 0.8)
}
}
"calling record on a variable" should {
"return the argmax over the values associated with the variable" in {
Universe.createNew()
val e1 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
val e2 = Constant(8)
val e3 = Flip(0.1)
Values()(e1)
Values()(e2)
Values()(e3)
val v1 = Variable(e1)
val v2 = Variable(e2)
val v3 = Variable(e3)
val f = Factory.simpleMake[Double](List(v1, v2, v3))
f.set(List(0, 0, 0), 0.6)
f.set(List(1, 0, 0), 0.1)
f.set(List(2, 0, 0), 0.2)
f.set(List(0, 0, 1), 0.3)
f.set(List(1, 0, 1), 0.4)
f.set(List(2, 0, 1), 0.5)
val g = f.recordArgMax(v3.asInstanceOf[Variable[Any]],
(x: Double, y: Double) => x < y)
g.variables should equal(List(v1, v2))
g.get(List(0, 0)) should equal(true)
g.get(List(1, 0)) should equal(false)
g.get(List(2, 0)) should equal(false)
}
}
"after marginalizing to a variable" should {
"return the marginal distribution over the variable" in {
Universe.createNew()
val e1 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
val e2 = Constant(8)
val e3 = Flip(0.1)
Values()(e1)
Values()(e2)
Values()(e3)
val v1 = Variable(e1)
val v2 = Variable(e2)
val v3 = Variable(e3)
val f = Factory.simpleMake[Double](List(v1, v2, v3))
f.set(List(0, 0, 0), 0.0)
f.set(List(1, 0, 0), 0.1)
f.set(List(2, 0, 0), 0.2)
f.set(List(0, 0, 1), 0.3)
f.set(List(1, 0, 1), 0.4)
f.set(List(2, 0, 1), 0.5)
val g = f.marginalizeTo(SumProductSemiring().asInstanceOf[Semiring[Double]], v3)
g.variables should equal(List(v3))
val p1 = 0.0 + 0.1 + 0.2
val p2 = 0.3 + 0.4 + 0.5
g.get(List(0)) should be(p1 +- 0.000001)
g.get(List(1)) should be(p2 +- 0.000001)
}
}
}
"after marginalizing to two variables" should {
"return the marginal distribution over the variables" in {
Universe.createNew()
val e1 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
val e2 = Flip(0.5)
val e3 = Flip(0.1)
Values()(e1)
Values()(e2)
Values()(e3)
val v1 = Variable(e1)
val v2 = Variable(e2)
val v3 = Variable(e3)
val f = Factory.simpleMake[Double](List(v1, v2, v3))
f.set(List(0, 0, 0), 0.0)
f.set(List(1, 0, 0), 0.05)
f.set(List(2, 0, 0), 0.1)
f.set(List(0, 0, 1), 0.15)
f.set(List(1, 0, 1), 0.2)
f.set(List(2, 0, 1), 0.25)
f.set(List(0, 1, 0), 0.0)
f.set(List(1, 1, 0), 0.05)
f.set(List(2, 1, 0), 0.1)
f.set(List(0, 1, 1), 0.15)
f.set(List(1, 1, 1), 0.2)
f.set(List(2, 1, 1), 0.25)
val g = f.marginalizeTo(SumProductSemiring().asInstanceOf[Semiring[Double]], v1, v3)
g.variables should equal(List(v1, v3))
g.get(List(0, 0)) should be(0.0 +- 0.000001)
g.get(List(1, 0)) should be(0.1 +- 0.000001)
g.get(List(2, 0)) should be(0.2 +- 0.000001)
g.get(List(0, 1)) should be(0.3 +- 0.000001)
g.get(List(1, 1)) should be(0.4 +- 0.000001)
g.get(List(2, 1)) should be(0.5 +- 0.000001)
}
}
"after deduplicating a factor" should {
"have no repeated variables" in {
Universe.createNew()
val e1 = Select(0.2 -> "a", 0.3 -> "b", 0.5 -> "c")
val e2 = Flip(0.3)
Values()(e1)
Values()(e2)
val v1 = Variable(e1)
val v2 = Variable(e2)
val f = Factory.simpleMake[Double](List(v1, v2, v2))
f.set(List(0, 0, 0), 0.06)
f.set(List(0, 0, 1), 0.25)
f.set(List(0, 1, 0), 0.44)
f.set(List(0, 1, 1), 0.25)
f.set(List(1, 0, 0), 0.15)
f.set(List(1, 0, 1), 0.2)
f.set(List(1, 1, 0), 0.15)
f.set(List(1, 1, 1), 0.5)
f.set(List(2, 0, 0), 0.1)
f.set(List(2, 0, 1), 0.25)
f.set(List(2, 1, 0), 0.4)
f.set(List(2, 1, 1), 0.25)
val g = f.deDuplicate()
g.variables.size should be(2)
g.variables.contains(v1) should be(true)
g.variables.contains(v2) should be(true)
if (g.variables.indexOf(v1) == 0) {
g.get(List(0, 0)) should be(0.06 +- 0.000001)
g.get(List(0, 1)) should be(0.25 +- 0.000001)
g.get(List(1, 0)) should be(0.15 +- 0.000001)
g.get(List(1, 1)) should be(0.5 +- 0.000001)
g.get(List(2, 0)) should be(0.1 +- 0.000001)
g.get(List(2, 1)) should be(0.25 +- 0.000001)
} else {
g.get(List(0, 0)) should be(0.06 +- 0.000001)
g.get(List(1, 0)) should be(0.25 +- 0.000001)
g.get(List(0, 1)) should be(0.15 +- 0.000001)
g.get(List(1, 1)) should be(0.5 +- 0.000001)
g.get(List(0, 2)) should be(0.1 +- 0.000001)
g.get(List(1, 2)) should be(0.25 +- 0.000001)
}
}
}
"Making factors from an element" when {
"given a constant" should {
"produce a single factor with one entry whose value is 1.0" in {
Universe.createNew()
val v1 = Constant(7)
Values()(v1)
val List(factor) = Factory.make(v1)
factor.get(List(0)) should equal(1.0)
}
}
"given a simple flip" should {
"produce a single factor in which the first entry is the probability of true " +
"and the second entry is the probability of false" in {
Universe.createNew()
val v1 = Flip(0.3)
Values()(v1)
val List(factor) = Factory.make(v1)
factor.get(List(0)) should equal(0.3)
factor.get(List(1)) should equal(0.7)
}
}
"given a complex flip" should {
"produce a single factor in which each possible value of the parent is associated with two " +
"entries, one each for true and false, with the appropriate probabilities" in {
Universe.createNew()
val v1 = Select(0.2 -> 0.1, 0.8 -> 0.3)
val v2 = Flip(v1)
Values()(v2)
val List(factor) = Factory.make(v2)
val vals = Variable(v1).range
val i1 = vals.indexOf(Regular(0.1))
val i2 = vals.toList.indexOf(Regular(0.3))
factor.get(List(i1, 0)) should equal(0.1)
factor.get(List(i1, 1)) should equal(0.9)
factor.get(List(i2, 0)) should equal(0.3)
factor.get(List(i2, 1)) should equal(0.7)
}
}
"given a simple select" should {
"produce a single factor in which each possible value is associated with the correct probability" in {
Universe.createNew()
val v1 = Select(0.2 -> 1, 0.3 -> 0, 0.1 -> 2, 0.05 -> 5, 0.35 -> 4)
Values()(v1)
val List(factor) = Factory.make(v1)
val vals = Variable(v1).range
val i1 = vals.indexOf(Regular(1))
val i0 = vals.indexOf(Regular(0))
val i2 = vals.indexOf(Regular(2))
val i5 = vals.indexOf(Regular(5))
val i4 = vals.indexOf(Regular(4))
factor.get(List(i1)) should equal(0.2)
factor.get(List(i0)) should equal(0.3)
factor.get(List(i2)) should equal(0.1)
}
}
"given a complex select" should {
"produce a single factor in which values of the parents are associated with " +
"values of the select according to the normalized parent values" in {
Universe.createNew()
val v1 = Select(0.2 -> 0.2, 0.8 -> 0.8)
val v2 = Select(0.4 -> 0.4, 0.6 -> 0.6)
val c1 = Constant(0.1)
val c2 = Constant(0.3)
val c3 = Constant(0.5)
val v3 = Select(v1 -> 1, v2 -> 2, c1 -> 4, c2 -> 5, c3 -> 3)
Values()(v3)
val List(factor) = Factory.make(v3)
val v1Vals = Variable(v1).range
val v2Vals = Variable(v2).range
val v3Vals = Variable(v3).range
val v102 = v1Vals.indexOf(Regular(0.2))
val v108 = v1Vals.indexOf(Regular(0.8))
val v204 = v2Vals.indexOf(Regular(0.4))
val v206 = v2Vals.indexOf(Regular(0.6))
val v31 = v3Vals.indexOf(Regular(1))
val v32 = v3Vals.indexOf(Regular(2))
val v34 = v3Vals.indexOf(Regular(4))
val v35 = v3Vals.indexOf(Regular(5))
val v33 = v3Vals.indexOf(Regular(3))
def makeIndices(a: List[Int]): List[Int] = {
val result: Array[Int] = Array.ofDim(a.size)
result(v31) = a(1)
result(v32) = a(2)
result(v33) = a(3)
result(v34) = a(4)
result(v35) = a(5)
result(5) = a(0)
result.toList
}
factor.get(makeIndices(List(v31, v102, v204, 0, 0, 0))) should be(0.2 / 1.5 +- 0.01)
factor.get(makeIndices(List(v32, v102, v204, 0, 0, 0))) should be(0.4 / 1.5 +- 0.01)
factor.get(makeIndices(List(v31, v108, v204, 0, 0, 0))) should be(0.8 / 2.1 +- 0.01)
factor.get(makeIndices(List(v32, v108, v204, 0, 0, 0))) should be(0.4 / 2.1 +- 0.01)
factor.get(makeIndices(List(v31, v102, v206, 0, 0, 0))) should be(0.2 / 1.7 +- 0.01)
factor.get(makeIndices(List(v32, v102, v206, 0, 0, 0))) should be(0.6 / 1.7 +- 0.01)
factor.get(makeIndices(List(v31, v108, v206, 0, 0, 0))) should be(0.8 / 2.3 +- 0.01)
factor.get(makeIndices(List(v32, v108, v206, 0, 0, 0))) should be(0.6 / 2.3 +- 0.01)
}
}
"given a simple dist" should {
"produce a list of factors, one for each outcome and one representing the choice over outcomes; " +
"the factor for an outcome matches the outcome value to the dist value" in {
Universe.createNew()
val v1 = Flip(0.2)
val v2 = Constant(false)
val v3 = Dist(0.3 -> v1, 0.7 -> v2)
Values()(v3)
val v1Vals = Variable(v1).range
val v3Vals = Variable(v3).range
val v1TrueIndex = v1Vals.indexOf(Regular(true))
val v1FalseIndex = v1Vals.indexOf(Regular(false))
val v3TrueIndex = v3Vals.indexOf(Regular(true))
val v3FalseIndex = v3Vals.indexOf(Regular(false))
val v1Index = v3.outcomes.indexOf(v1)
val v2Index = v3.outcomes.indexOf(v2)
val selectFactor :: outcomeFactors = Factory.make(v3)
outcomeFactors.size should equal(2)
val v1Factor = outcomeFactors(v1Index)
val v2Factor = outcomeFactors(v2Index)
selectFactor.get(List(v1Index)) should equal(0.3)
selectFactor.get(List(v2Index)) should equal(0.7)
v1Factor.get(List(v1Index, v1TrueIndex, v3TrueIndex)) should equal(1.0)
v1Factor.get(List(v1Index, v1FalseIndex, v3TrueIndex)) should equal(0.0)
v1Factor.get(List(v1Index, v1TrueIndex, v3FalseIndex)) should equal(0.0)
v1Factor.get(List(v1Index, v1FalseIndex, v3FalseIndex)) should equal(1.0)
for { i <- 0 to 1; j <- 0 to 1 } v1Factor.get(List(v2Index, i, j)) should equal(1.0)
v2Factor.get(List(v2Index, 0, v3FalseIndex)) should equal(1.0)
v2Factor.get(List(v2Index, 0, v3TrueIndex)) should equal(0.0)
for { i <- 0 to 1 } v2Factor.get(List(v1Index, 0, i)) should equal(1.0)
}
}
"given a complex dist" should {
"produce a list of factors, one for each outcome and one representing the choice over outcomes; " +
"the factor for an outcome matches the outcome value to the dist value" in {
Universe.createNew()
val v1 = Select(0.2 -> 0.2, 0.8 -> 0.8)
val v2 = Select(0.4 -> 0.4, 0.6 -> 0.6)
val v3 = Flip(0.2)
val v4 = Constant(false)
val v5 = Dist(v1 -> v3, v2 -> v4)
Values()(v5)
val v1Vals = Variable(v1).range
val v2Vals = Variable(v2).range
val v3Vals = Variable(v3).range
val v4Vals = Variable(v4).range
val v5Vals = Variable(v5).range
val v3Index = v5.outcomes.indexOf(v3)
val v4Index = v5.outcomes.indexOf(v4)
val v102 = v1Vals.indexOf(Regular(0.2))
val v108 = v1Vals.indexOf(Regular(0.8))
val v204 = v2Vals.indexOf(Regular(0.4))
val v206 = v2Vals.indexOf(Regular(0.6))
val v3f = v3Vals.indexOf(Regular(false))
val v3t = v3Vals.indexOf(Regular(true))
val v5f = v5Vals.indexOf(Regular(false))
val v5t = v5Vals.indexOf(Regular(true))
val selectFactor :: outcomeFactors = Factory.make(v5)
outcomeFactors.size should equal(2)
val v1Factor = outcomeFactors(v3Index)
val v2Factor = outcomeFactors(v4Index)
selectFactor.get(List(v102, v204, 0)) should be(0.2 / 0.6 +- 0.0001)
selectFactor.get(List(v102, v204, 1)) should be(0.4 / 0.6 +- 0.0001)
selectFactor.get(List(v102, v206, 0)) should be(0.2 / 0.8 +- 0.0001)
selectFactor.get(List(v102, v206, 1)) should be(0.6 / 0.8 +- 0.0001)
selectFactor.get(List(v108, v204, 0)) should be(0.8 / 1.2 +- 0.0001)
selectFactor.get(List(v108, v204, 1)) should be(0.4 / 1.2 +- 0.0001)
selectFactor.get(List(v108, v206, 0)) should be(0.8 / 1.4 +- 0.0001)
selectFactor.get(List(v108, v206, 1)) should be(0.6 / 1.4 +- 0.0001)
v1Factor.get(List(0, v3t, v5t)) should equal(1.0)
v1Factor.get(List(0, v3f, v5t)) should equal(0.0)
v1Factor.get(List(0, v3t, v5f)) should equal(0.0)
v1Factor.get(List(0, v3f, v5f)) should equal(1.0)
for { i <- 0 to 1; j <- 0 to 1 } v1Factor.get(List(1, i, j)) should equal(1.0)
v2Factor.get(List(1, 0, v5f)) should equal(1.0)
v2Factor.get(List(1, 0, v5t)) should equal(0.0)
for { i <- 0 to 0; j <- 0 to 1 } v2Factor.get(List(0, i, j)) should equal(1.0)
}
}
"given an atomic not in the factor" should {
"automatically sample the element" in {
Universe.createNew()
val v1 = Normal(0.0, 1.0)
Values()(v1)
val factor = Factory.make(v1)
factor(0).size should equal(ParticleGenerator.defaultArgSamples)
factor(0).get(List(0)) should equal(1.0 / ParticleGenerator.defaultArgSamples)
}
"correctly create factors for continuous elements through chains" in {
val uni = Universe.createNew()
val elem = If(Flip(0.3), Uniform(0.0, 1.0), Uniform(1.0, 2.0))
ParticleGenerator(uni)
val alg = VariableElimination(elem)
alg.start()
alg.distribution(elem).toList.size should be (14)
}
}
// "given a chain" should {
// "produce a conditional selector for each parent value" in {
// Universe.createNew()
// val v1 = Flip(0.2)
// val v2 = Select(0.1 -> 1, 0.9 -> 2)
// val v3 = Constant(3)
// val v4 = Chain(v1, (b: Boolean) => if (b) v2; else v3)
// Values()(v4)
// val v1Vals = Variable(v1).range
// val v2Vals = Variable(v2).range
// val v4Vals = Variable(v4).range
// val v1t = v1Vals indexOf Regular(true)
// val v1f = v1Vals indexOf Regular(false)
// val v21 = v2Vals indexOf Regular(1)
// val v22 = v2Vals indexOf Regular(2)
// val v41 = v4Vals indexOf Regular(1)
// val v42 = v4Vals indexOf Regular(2)
// val v43 = v4Vals indexOf Regular(3)
//
// val factor = Factory.make(v4)
// val List(v4Factor) = Factory.combineFactors(factor, SumProductSemiring, true)
//
// v4Factor.get(List(v1t, v21, 0, v41)) should equal(1.0)
// v4Factor.get(List(v1t, v22, 0, v41)) should equal(0.0)
// v4Factor.get(List(v1t, v21, 0, v42)) should equal(0.0)
// v4Factor.get(List(v1t, v22, 0, v42)) should equal(1.0)
// v4Factor.get(List(v1t, v21, 0, v43)) should equal(0.0)
// v4Factor.get(List(v1t, v22, 0, v43)) should equal(0.0)
// v4Factor.get(List(v1f, v21, 0, v41)) should equal(0.0)
// v4Factor.get(List(v1f, v22, 0, v41)) should equal(0.0)
// v4Factor.get(List(v1f, v21, 0, v42)) should equal(0.0)
// v4Factor.get(List(v1f, v22, 0, v42)) should equal(0.0)
// v4Factor.get(List(v1f, v21, 0, v43)) should equal(1.0)
// v4Factor.get(List(v1f, v22, 0, v43)) should equal(1.0)
//
// }
//
// "produce a conditional selector for each non-temporary parent value" in {
// Universe.createNew()
// val v1 = Flip(0.2)
// val v4 = Chain(v1, (b: Boolean) => if (b) Select(0.1 -> 1, 0.9 -> 2); else Constant(3))
// Values()(v4)
// val v1Vals = Variable(v1).range
// val v4Vals = Variable(v4).range
//
// val v1t = v1Vals indexOf Regular(true)
// val v1f = v1Vals indexOf Regular(false)
// val v41 = v4Vals indexOf Regular(1)
// val v42 = v4Vals indexOf Regular(2)
// val v43 = v4Vals indexOf Regular(3)
//
// val factor = Factory.make(v4)
// val List(v4Factor) = Factory.combineFactors(factor, SumProductSemiring, true)
//
// v4Factor.get(List(v1t, v41)) should equal(0.1)
// v4Factor.get(List(v1t, v42)) should equal(0.9)
// v4Factor.get(List(v1t, v43)) should equal(0.0)
// v4Factor.get(List(v1f, v41)) should equal(0.0)
// v4Factor.get(List(v1f, v42)) should equal(0.0)
// v4Factor.get(List(v1f, v43)) should equal(1.0)
// }
// }
// "given a CPD with one argument" should {
// "produce a single factor with a case for each parent value" in {
// Universe.createNew()
// val v1 = Flip(0.2)
//
// val v2 = CPD(v1, false -> Flip(0.1), true -> Flip(0.7))
// Values()(v2)
//
// val v1Vals = Variable(v1).range
// val v2Vals = Variable(v2).range
//
// val v1t = v1Vals indexOf Regular(true)
// val v1f = v1Vals indexOf Regular(false)
// val v2t = v2Vals indexOf Regular(true)
// val v2f = v2Vals indexOf Regular(false)
// val v3t = 0
// val v3f = 1
// val v4t = 0
// val v4f = 1
//
// val factor = Factory.make(v2)
// val List(v2Factor) = Factory.combineFactors(factor, SumProductSemiring, true)
//
// v2Factor.get(List(v1t, v3t, v4t, v2t)) should equal(1.0)
// v2Factor.get(List(v1t, v3t, v4f, v2t)) should equal(1.0)
// v2Factor.get(List(v1t, v3f, v4t, v2t)) should equal(0.0)
// v2Factor.get(List(v1t, v3f, v4f, v2t)) should equal(0.0)
// v2Factor.get(List(v1t, v3t, v4t, v2f)) should equal(0.0)
// v2Factor.get(List(v1t, v3t, v4f, v2f)) should equal(0.0)
// v2Factor.get(List(v1t, v3f, v4t, v2f)) should equal(1.0)
// v2Factor.get(List(v1t, v3f, v4f, v2f)) should equal(1.0)
// v2Factor.get(List(v1f, v3t, v4t, v2t)) should equal(1.0)
// v2Factor.get(List(v1f, v3t, v4f, v2t)) should equal(0.0)
// v2Factor.get(List(v1f, v3f, v4t, v2t)) should equal(1.0)
// v2Factor.get(List(v1f, v3f, v4f, v2t)) should equal(0.0)
// v2Factor.get(List(v1f, v3t, v4t, v2f)) should equal(0.0)
// v2Factor.get(List(v1f, v3t, v4f, v2f)) should equal(1.0)
// v2Factor.get(List(v1f, v3f, v4t, v2f)) should equal(0.0)
// v2Factor.get(List(v1f, v3f, v4f, v2f)) should equal(1.0)
// }
// }
"given an apply of one argument" should {
"produce a factor that matches the argument to the result via the function" in {
Universe.createNew()
val v1 = Select(0.3 -> 1, 0.2 -> 2, 0.5 -> 3)
val v2 = Apply(v1, (i: Int) => i % 2)
Values()(v2)
val v1Vals = Variable(v1).range
val v2Vals = Variable(v2).range
val v11 = v1Vals indexOf Regular(1)
val v12 = v1Vals indexOf Regular(2)
val v13 = v1Vals indexOf Regular(3)
val v20 = v2Vals indexOf Regular(0)
val v21 = v2Vals indexOf Regular(1)
val List(factor) = Factory.make(v2)
factor.contains(List(v11, v20)) should equal(false)
factor.get(List(v11, v21)) should equal(1.0)
factor.get(List(v12, v20)) should equal(1.0)
factor.contains(List(v12, v21)) should equal(false)
factor.contains(List(v13, v20)) should equal(false)
factor.get(List(v13, v21)) should equal(1.0)
factor.contents.size should equal(3)
}
}
"given an apply of two arguments" should {
"produce a factor that matches the arguments to the result via the function" in {
Universe.createNew()
val v1 = Select(0.3 -> 1, 0.2 -> 2, 0.5 -> 3)
val v2 = Select(0.5 -> 2, 0.5 -> 3)
val v3 = Apply(v1, v2, (i: Int, j: Int) => i % j)
Values()(v3)
val v1Vals = Variable(v1).range
val v2Vals = Variable(v2).range
val v3Vals = Variable(v3).range
val v11 = v1Vals indexOf Regular(1)
val v12 = v1Vals indexOf Regular(2)
val v13 = v1Vals indexOf Regular(3)
val v22 = v2Vals indexOf Regular(2)
val v23 = v2Vals indexOf Regular(3)
val v30 = v3Vals indexOf Regular(0)
val v31 = v3Vals indexOf Regular(1)
val v32 = v3Vals indexOf Regular(2)
val List(factor) = Factory.make(v3)
factor.contains(List(v11, v22, v30)) should equal(false)
factor.get(List(v11, v22, v31)) should equal(1.0)
factor.contains(List(v11, v22, v32)) should equal(false)
factor.contains(List(v11, v23, v30)) should equal(false)
factor.get(List(v11, v23, v31)) should equal(1.0)
factor.contains(List(v11, v23, v32)) should equal(false)
factor.get(List(v12, v22, v30)) should equal(1.0)
factor.contains(List(v12, v22, v31)) should equal(false)
factor.contains(List(v12, v22, v32)) should equal(false)
factor.contains(List(v12, v23, v30)) should equal(false)
factor.contains(List(v12, v23, v31)) should equal(false)
factor.get(List(v12, v23, v32)) should equal(1.0)
factor.contains(List(v13, v22, v30)) should equal(false)
factor.get(List(v13, v22, v31)) should equal(1.0)
factor.contains(List(v13, v22, v32)) should equal(false)
factor.get(List(v13, v23, v30)) should equal(1.0)
factor.contains(List(v13, v23, v31)) should equal(false)
factor.contains(List(v13, v23, v32)) should equal(false)
factor.contents.size should equal(6)
}
}
"given an apply of three arguments" should {
"produce a factor that matches the arguments to the result via the function" in {
Universe.createNew()
val v1 = Select(0.3 -> 1, 0.2 -> 2, 0.5 -> 3)
val v2 = Select(0.5 -> 1, 0.5 -> 2)
val v3 = Constant(1)
val v4: Apply3[Int, Int, Int, Int] = Apply(v1, v2, v3, (i: Int, j: Int, k: Int) => i % (j + k))
Values()(v4)
val v1Vals = Variable(v1).range
val v2Vals = Variable(v2).range
val v3Vals = Variable(v3).range
val v4Vals = Variable(v4).range
val v11 = v1Vals indexOf Regular(1)
val v12 = v1Vals indexOf Regular(2)
val v13 = v1Vals indexOf Regular(3)
val v21 = v2Vals indexOf Regular(1)
val v22 = v2Vals indexOf Regular(2)
val v31 = v3Vals indexOf Regular(1)
val v40 = v4Vals indexOf Regular(0)
val v41 = v4Vals indexOf Regular(1)
val v42 = v4Vals indexOf Regular(2)
val List(factor) = Factory.make(v4)
factor.contains(List(v11, v21, v31, v40)) should equal(false)
factor.get(List(v11, v21, v31, v41)) should equal(1.0)
factor.contains(List(v11, v21, v31, v42)) should equal(false)
factor.contains(List(v11, v22, v31, v40)) should equal(false)
factor.get(List(v11, v22, v31, v41)) should equal(1.0)
factor.contains(List(v11, v22, v31, v42)) should equal(false)
factor.get(List(v12, v21, v31, v40)) should equal(1.0)
factor.contains(List(v12, v21, v31, v41)) should equal(false)
factor.contains(List(v12, v21, v31, v42)) should equal(false)
factor.contains(List(v12, v22, v31, v40)) should equal(false)
factor.contains(List(v12, v22, v31, v41)) should equal(false)
factor.get(List(v12, v22, v31, v42)) should equal(1.0)
factor.contains(List(v13, v21, v31, v40)) should equal(false)
factor.get(List(v13, v21, v31, v41)) should equal(1.0)
factor.contains(List(v13, v21, v31, v42)) should equal(false)
factor.get(List(v13, v22, v31, v40)) should equal(1.0)
factor.contains(List(v13, v22, v31, v41)) should equal(false)
factor.contains(List(v13, v22, v31, v42)) should equal(false)
factor.contents.size should equal(6)
}
}
"given an apply of four arguments" should {
"produce a factor that matches the arguments to the result via the function" in {
Universe.createNew()
val v1 = Select(0.3 -> 1, 0.2 -> 2, 0.5 -> 3)
val v2 = Select(0.5 -> 1, 0.5 -> 2)
val v3 = Constant(1)
val v4 = Flip(0.7)
val v5: Apply4[Int, Int, Int, Boolean, Int] =
Apply(v1, v2, v3, v4, (i: Int, j: Int, k: Int, b: Boolean) => if (b) 0; else i % (j + k))
Values()(v5)
val v1Vals = Variable(v1).range
val v2Vals = Variable(v2).range
val v3Vals = Variable(v3).range
val v4Vals = Variable(v4).range
val v5Vals = Variable(v5).range
val v11 = v1Vals indexOf Regular(1)
val v12 = v1Vals indexOf Regular(2)
val v13 = v1Vals indexOf Regular(3)
val v21 = v2Vals indexOf Regular(1)
val v22 = v2Vals indexOf Regular(2)
val v31 = v3Vals indexOf Regular(1)
val v4true = v4Vals indexOf Regular(true)
val v4false = v4Vals indexOf Regular(false)
val v50 = v5Vals indexOf Regular(0)
val v51 = v5Vals indexOf Regular(1)
val v52 = v5Vals indexOf Regular(2)
val List(factor) = Factory.make(v5)
factor.contains(List(v11, v21, v31, v4false, v50)) should equal(false)
factor.get(List(v11, v21, v31, v4false, v51)) should equal(1.0)
factor.contains(List(v11, v21, v31, v4false, v52)) should equal(false)
factor.contains(List(v11, v22, v31, v4false, v50)) should equal(false)
factor.get(List(v11, v22, v31, v4false, v51)) should equal(1.0)
factor.contains(List(v11, v22, v31, v4false, v52)) should equal(false)
factor.get(List(v12, v21, v31, v4false, v50)) should equal(1.0)
factor.contains(List(v12, v21, v31, v4false, v51)) should equal(false)
factor.contains(List(v12, v21, v31, v4false, v52)) should equal(false)
factor.contains(List(v12, v22, v31, v4false, v50)) should equal(false)
factor.contains(List(v12, v22, v31, v4false, v51)) should equal(false)
factor.get(List(v12, v22, v31, v4false, v52)) should equal(1.0)
factor.contains(List(v13, v21, v31, v4false, v50)) should equal(false)
factor.get(List(v13, v21, v31, v4false, v51)) should equal(1.0)
factor.contains(List(v13, v21, v31, v4false, v52)) should equal(false)
factor.get(List(v13, v22, v31, v4false, v50)) should equal(1.0)
factor.contains(List(v13, v22, v31, v4false, v51)) should equal(false)
factor.contains(List(v13, v22, v31, v4false, v52)) should equal(false)
factor.get(List(v11, v21, v31, v4true, v50)) should equal(1.0)
factor.contains(List(v11, v21, v31, v4true, v51)) should equal(false)
factor.contains(List(v11, v21, v31, v4true, v52)) should equal(false)
factor.get(List(v11, v22, v31, v4true, v50)) should equal(1.0)
factor.contains(List(v11, v22, v31, v4true, v51)) should equal(false)
factor.contains(List(v11, v22, v31, v4true, v52)) should equal(false)
factor.get(List(v12, v21, v31, v4true, v50)) should equal(1.0)
factor.contains(List(v12, v21, v31, v4true, v51)) should equal(false)
factor.contains(List(v12, v21, v31, v4true, v52)) should equal(false)
factor.get(List(v12, v22, v31, v4true, v50)) should equal(1.0)
factor.contains(List(v12, v22, v31, v4true, v51)) should equal(false)
factor.contains(List(v12, v22, v31, v4true, v52)) should equal(false)
factor.get(List(v13, v21, v31, v4true, v50)) should equal(1.0)
factor.contains(List(v13, v21, v31, v4true, v51)) should equal(false)
factor.contains(List(v13, v21, v31, v4true, v52)) should equal(false)
factor.get(List(v13, v22, v31, v4true, v50)) should equal(1.0)
factor.contains(List(v13, v22, v31, v4true, v51)) should equal(false)
factor.contains(List(v13, v22, v31, v4true, v52)) should equal(false)
factor.contents.size should equal(12)
}
}
"given an apply of five arguments" should {
"produce a factor that matches the arguments to the result via the function" in {
Universe.createNew()
val v1 = Select(0.3 -> 1, 0.2 -> 2, 0.5 -> 3)
val v2 = Select(0.5 -> 1, 0.5 -> 2)
val v3 = Constant(1)
val v4 = Flip(0.7)
val v5 = Constant(false)
val v6: Apply5[Int, Int, Int, Boolean, Boolean, Int] =
Apply(v1, v2, v3, v4, v5,
(i: Int, j: Int, k: Int, b: Boolean, c: Boolean) => if (b || c) 0; else i % (j + k))
Values()(v6)
val v1Vals = Variable(v1).range
val v2Vals = Variable(v2).range
val v3Vals = Variable(v3).range
val v4Vals = Variable(v4).range
val v5Vals = Variable(v5).range
val v6Vals = Variable(v6).range
val v11 = v1Vals indexOf Regular(1)
val v12 = v1Vals indexOf Regular(2)
val v13 = v1Vals indexOf Regular(3)
val v21 = v2Vals indexOf Regular(1)
val v22 = v2Vals indexOf Regular(2)
val v31 = v3Vals indexOf Regular(1)
val v4true = v4Vals indexOf Regular(true)
val v4false = v4Vals indexOf Regular(false)
val v5false = v5Vals indexOf Regular(false)
val v60 = v6Vals indexOf Regular(0)
val v61 = v6Vals indexOf Regular(1)
val v62 = v6Vals indexOf Regular(2)
val List(factor) = Factory.make(v6)
factor.contains(List(v11, v21, v31, v4false, v5false, v60)) should equal(false)
factor.get(List(v11, v21, v31, v4false, v5false, v61)) should equal(1.0)
factor.contains(List(v11, v21, v31, v4false, v5false, v62)) should equal(false)
factor.contains(List(v11, v22, v31, v4false, v5false, v60)) should equal(false)
factor.get(List(v11, v22, v31, v4false, v5false, v61)) should equal(1.0)
factor.contains(List(v11, v22, v31, v4false, v5false, v62)) should equal(false)
factor.get(List(v12, v21, v31, v4false, v5false, v60)) should equal(1.0)
factor.contains(List(v12, v21, v31, v4false, v5false, v61)) should equal(false)
factor.contains(List(v12, v21, v31, v4false, v5false, v62)) should equal(false)
factor.contains(List(v12, v22, v31, v4false, v5false, v60)) should equal(false)
factor.contains(List(v12, v22, v31, v4false, v5false, v61)) should equal(false)
factor.get(List(v12, v22, v31, v4false, v5false, v62)) should equal(1.0)
factor.contains(List(v13, v21, v31, v4false, v5false, v60)) should equal(false)
factor.get(List(v13, v21, v31, v4false, v5false, v61)) should equal(1.0)
factor.contains(List(v13, v21, v31, v4false, v5false, v62)) should equal(false)
factor.get(List(v13, v22, v31, v4false, v5false, v60)) should equal(1.0)
factor.contains(List(v13, v22, v31, v4false, v5false, v61)) should equal(false)
factor.contains(List(v13, v22, v31, v4false, v5false, v62)) should equal(false)
factor.get(List(v11, v21, v31, v4true, v5false, v60)) should equal(1.0)
factor.contains(List(v11, v21, v31, v4true, v5false, v61)) should equal(false)
factor.contains(List(v11, v21, v31, v4true, v5false, v62)) should equal(false)
factor.get(List(v11, v22, v31, v4true, v5false, v60)) should equal(1.0)
factor.contains(List(v11, v22, v31, v4true, v5false, v61)) should equal(false)
factor.contains(List(v11, v22, v31, v4true, v5false, v62)) should equal(false)
factor.get(List(v12, v21, v31, v4true, v5false, v60)) should equal(1.0)
factor.contains(List(v12, v21, v31, v4true, v5false, v61)) should equal(false)
factor.contains(List(v12, v21, v31, v4true, v5false, v62)) should equal(false)
factor.get(List(v12, v22, v31, v4true, v5false, v60)) should equal(1.0)
factor.contains(List(v12, v22, v31, v4true, v5false, v61)) should equal(false)
factor.contains(List(v12, v22, v31, v4true, v5false, v62)) should equal(false)
factor.get(List(v13, v21, v31, v4true, v5false, v60)) should equal(1.0)
factor.contains(List(v13, v21, v31, v4true, v5false, v61)) should equal(false)
factor.contains(List(v13, v21, v31, v4true, v5false, v62)) should equal(false)
factor.get(List(v13, v22, v31, v4true, v5false, v60)) should equal(1.0)
factor.contains(List(v13, v22, v31, v4true, v5false, v61)) should equal(false)
factor.contains(List(v13, v22, v31, v4true, v5false, v62)) should equal(false)
factor.contents.size should equal(12)
}
}
"given an Inject" should {
"produce a factor that matches its inputs to the correct sequence" in {
Universe.createNew()
val v1 = Select(0.3 -> 1, 0.2 -> 2, 0.5 -> 3)
val v2 = Select(0.5 -> 4, 0.5 -> 5)
val v3 = Inject(v1, v2)
Values()(v3)
val List(factor) = Factory.make(v3)
val v1Vals = Variable(v1).range
val v2Vals = Variable(v2).range
val v3Vals = Variable(v3).range
val v11 = v1Vals indexOf Regular(1)
val v12 = v1Vals indexOf Regular(2)
val v13 = v1Vals indexOf Regular(3)
val v24 = v2Vals indexOf Regular(4)
val v25 = v2Vals indexOf Regular(5)
val v314 = v3Vals indexOf Regular(List(1, 4))
val v315 = v3Vals indexOf Regular(List(1, 5))
val v324 = v3Vals indexOf Regular(List(2, 4))
val v325 = v3Vals indexOf Regular(List(2, 5))
val v334 = v3Vals indexOf Regular(List(3, 4))
val v335 = v3Vals indexOf Regular(List(3, 5))
factor.get(List(v11, v24, v314)) should equal(1.0)
factor.get(List(v11, v25, v315)) should equal(1.0)
factor.get(List(v12, v24, v324)) should equal(1.0)
factor.get(List(v12, v25, v325)) should equal(1.0)
factor.get(List(v13, v24, v334)) should equal(1.0)
factor.get(List(v13, v25, v335)) should equal(1.0)
factor.get(List(v11, v25, v314)) should equal(0.0)
factor.get(List(v11, v24, v315)) should equal(0.0)
factor.get(List(v12, v25, v324)) should equal(0.0)
factor.get(List(v12, v24, v325)) should equal(0.0)
factor.get(List(v13, v25, v334)) should equal(0.0)
factor.get(List(v13, v24, v335)) should equal(0.0)
factor.get(List(v12, v24, v314)) should equal(0.0)
factor.get(List(v12, v25, v315)) should equal(0.0)
factor.get(List(v13, v24, v324)) should equal(0.0)
factor.get(List(v13, v25, v325)) should equal(0.0)
factor.get(List(v11, v24, v334)) should equal(0.0)
factor.get(List(v11, v25, v335)) should equal(0.0)
factor.get(List(v12, v25, v314)) should equal(0.0)
factor.get(List(v12, v24, v315)) should equal(0.0)
factor.get(List(v13, v25, v324)) should equal(0.0)
factor.get(List(v13, v24, v325)) should equal(0.0)
factor.get(List(v11, v25, v334)) should equal(0.0)
factor.get(List(v11, v24, v335)) should equal(0.0)
factor.get(List(v13, v24, v314)) should equal(0.0)
factor.get(List(v13, v25, v315)) should equal(0.0)
factor.get(List(v11, v24, v324)) should equal(0.0)
factor.get(List(v11, v25, v325)) should equal(0.0)
factor.get(List(v12, v24, v334)) should equal(0.0)
factor.get(List(v12, v25, v335)) should equal(0.0)
factor.get(List(v13, v25, v314)) should equal(0.0)
factor.get(List(v13, v24, v315)) should equal(0.0)
factor.get(List(v11, v25, v324)) should equal(0.0)
factor.get(List(v11, v24, v324)) should equal(0.0)
factor.get(List(v12, v25, v334)) should equal(0.0)
factor.get(List(v12, v24, v335)) should equal(0.0)
}
}
"given a non-trivial condition and constraint" should {
"produce the correct constraint factors" in {
Universe.createNew()
val v1 = Select(0.2 -> 1, 0.3 -> 2, 0.5 -> 3)
v1.setCondition((i: Int) => i != 2)
v1.setConstraint(((i: Int) => i.toDouble))
Values()(v1)
val List(condFactor, constrFactor, _) = Factory.make(v1)
val v1Vals = Variable(v1).range
val v11 = v1Vals indexOf Regular(1)
val v12 = v1Vals indexOf Regular(2)
val v13 = v1Vals indexOf Regular(3)
condFactor.get(List(v11)) should be(1.0 +- 0.000000001)
condFactor.get(List(v12)) should be(0.0 +- 0.000000001)
condFactor.get(List(v13)) should be(1.0 +- 0.000000001)
constrFactor.get(List(v11)) should be(1.0 +- 0.000000001)
constrFactor.get(List(v12)) should be(2.0 +- 0.000000001)
constrFactor.get(List(v13)) should be(3.0 +- 0.000000001)
}
}
"given an element whose expanded values are only *" should {
"produce no factors" in {
Universe.createNew()
val f = Flip(0.5)
val lv = LazyValues()
lv.expandAll(Set((f, -1)))
val factors = Factory.make(f)
factors should be(empty)
}
}
}
"Making a factor for a dependent universe" should {
"produce a correct dependent factor" in {
Universe.createNew()
val x = Flip(0.1)
val y = Select(0.2 -> 1, 0.3 -> 2, 0.5 -> 3)
Values()(x)
Values()(y)
val dependentUniverse = new Universe(List(x, y))
val u1 = Uniform(0.0, 1.0)("", dependentUniverse)
val u2 = Uniform(0.0, 2.0)("", dependentUniverse)
val a = CachingChain(x, y, (x: Boolean, y: Int) => if (x || y < 2) u1; else u2)("a", dependentUniverse)
Values(dependentUniverse)(a)
val evidence = List(NamedEvidence("a", Condition((d: Double) => d < 0.5)))
val factor =
Factory.makeDependentFactor(Universe.universe, dependentUniverse, () => ProbEvidenceSampler.computeProbEvidence(20000, evidence)(dependentUniverse))
val xVar = Variable(x)
val yVar = Variable(y)
val variables = factor.variables
variables.toSet should equal(Set(xVar, yVar))
val xIndex = variables indexOf xVar
val yIndex = variables indexOf yVar
val xFalse = xVar.range indexOf Regular(false)
val xTrue = xVar.range indexOf Regular(true)
val y1 = yVar.range indexOf Regular(1)
val y2 = yVar.range indexOf Regular(2)
val y3 = yVar.range indexOf Regular(3)
// If x is true or y is 1, pe is 0.5; if both false, 0.25.
if (xIndex == 0) {
factor.get(List(xFalse, y2)) should be(0.25 +- 0.01)
factor.get(List(xFalse, y3)) should be(0.25 +- 0.01)
factor.get(List(xFalse, y1)) should be(0.5 +- 0.01)
factor.get(List(xTrue, y1)) should be(0.5 +- 0.01)
factor.get(List(xTrue, y2)) should be(0.5 +- 0.01)
factor.get(List(xTrue, y3)) should be(0.5 +- 0.01)
} else {
factor.get(List(y2, xFalse)) should be(0.25 +- 0.01)
factor.get(List(y3, xFalse)) should be(0.25 +- 0.01)
factor.get(List(y1, xTrue)) should be(0.5 +- 0.01)
factor.get(List(y1, xFalse)) should be(0.5 +- 0.01)
factor.get(List(y2, xFalse)) should be(0.5 +- 0.01)
factor.get(List(y3, xFalse)) should be(0.5 +- 0.01)
}
}
}
"Making factors for multiple universes" should {
"produce the same range of values as if it were in a single universe" when {
"given a simple model with two universes" in {
Universe.createNew()
val v1u1 = Select(0.3 -> 0, 0.5 -> 1, 0.2 -> 3)
val v2u1 = Apply(v1u1, (i: Int) => i % 3)
Universe.createNew()
val v1u2 = Select(0.3 -> 0, 0.5 -> 1, 0.2 -> 3)
Universe.createNew
val v2u3 = Apply(v1u1, (i: Int) => i % 3)
(Variable(v1u1).range) should equal(Variable(v1u2).range)
(Variable(v2u1).range) should equal(Variable(v2u3).range)
}
"given a model with multiple universes" in {
Universe.createNew()
val func = (i: Int, b: Boolean) => if (b) i else i + 1
val v1u1 = Select(0.1 -> 0, 0.2 -> 2, 0.7 -> 5)
val v2u1 = Flip(0.3)
val v3u1 = Apply(v1u1, v2u1, func)
val v4u1 = Flip(0.5)
val v5u1 = Apply(v3u1, v4u1, func)
Universe.createNew()
val v1u2 = Select(0.1 -> 0, 0.2 -> 2, 0.7 -> 5)
Universe.createNew()
val v2u3 = Flip(0.3)
Universe.createNew()
val v3u4 = Apply(v1u1, v2u1, func)
Universe.createNew()
val v4u5 = Flip(0.5)
Universe.createNew()
val v5u6 = Apply(v3u1, v4u1, func)
(Variable(v5u1).range) should equal(Variable(v5u6).range)
}
"given a multi-universe model with Chains" in {
Universe.createNew()
val func1 = (i: Int) => if (i % 2 == 0) Constant(i) else Select(0.4 -> (i - 1), 0.6 -> (i + 1))
val func2 = (i: Int) => if (i % 4 == 0) Select(0.2 -> (i - 1), 0.8 -> (i + 1)) else Constant(i)
val v1u1 = Select(0.2 -> 0, 0.5 -> 3, 0.3 -> 6)
val v2u1 = Chain(v1u1, func1)
val v3u1 = Chain(v2u1, func2)
Universe.createNew()
val v1u2 = Select(0.2 -> 0, 0.5 -> 3, 0.3 -> 6)
Universe.createNew()
val v2u3 = Chain(v1u1, func1)
Universe.createNew()
val v3u4 = Chain(v2u1, func2)
(Variable(v3u1).range) should equal(Variable(v3u4).range)
}
}
"correctly produce a factor between elements in multiple universes" when {
"given a simple model in two universes" in {
Universe.createNew()
val v1 = Select(0.3 -> 0, 0.2 -> 1, 0.4 -> 2, 0.1 -> 3)
Universe.createNew()
val v2 = Apply(v1, (i: Int) => i / 2)
Values()(v2)
val v1Vals = Variable(v1).range
val v2Vals = Variable(v2).range
val v10 = v1Vals indexOf Regular(0)
val v11 = v1Vals indexOf Regular(1)
val v12 = v1Vals indexOf Regular(2)
val v13 = v1Vals indexOf Regular(3)
val v20 = v2Vals indexOf Regular(0)
val v21 = v2Vals indexOf Regular(1)
val List(factor) = Factory.make(v2)
factor.get(List(v10, v20)) should equal(1.0)
factor.contains(List(v10, v21)) should equal(false)
factor.get(List(v11, v20)) should equal(1.0)
factor.contains(List(v11, v21)) should equal(false)
factor.contains(List(v12, v20)) should equal(false)
factor.get(List(v12, v21)) should equal(1.0)
factor.contains(List(v13, v20)) should equal(false)
factor.get(List(v13, v21)) should equal(1.0)
factor.contents.size should equal(4)
}
"given a multi-universe model with Chains" in {
Universe.createNew()
val v1 = Select(0.2 -> 0, 0.7 -> 1, 0.1 -> 2)
Universe.createNew()
val v2 = Constant(2)
Universe.createNew()
val v3 = Select(0.4 -> 0, 0.6 -> 1)
Universe.createNew()
val v4 = Chain(v1, (i: Int) => if (i % 2 == 0) v2 else v3)
Values()(v4)
val v1Vals = Variable(v1).range
val v3Vals = Variable(v3).range
val v4Vals = Variable(v4).range
val v10 = v1Vals indexOf Regular(0)
val v11 = v1Vals indexOf Regular(1)
val v12 = v1Vals indexOf Regular(2)
val v30 = v3Vals indexOf Regular(0)
val v31 = v3Vals indexOf Regular(1)
val v40 = v4Vals indexOf Regular(0)
val v41 = v4Vals indexOf Regular(1)
val v42 = v4Vals indexOf Regular(2)
val factor = Factory.make(v4)
val List(v4Factor) = Factory.combineFactors(factor, SumProductSemiring().asInstanceOf[Semiring[Double]], true)
v4Factor.get(List(v10, v40, v30, 0)) should equal(0.0)
v4Factor.get(List(v10, v40, v31, 0)) should equal(0.0)
v4Factor.get(List(v10, v41, v30, 0)) should equal(0.0)
v4Factor.get(List(v10, v41, v31, 0)) should equal(0.0)
v4Factor.get(List(v10, v42, v30, 0)) should equal(1.0)
v4Factor.get(List(v10, v42, v31, 0)) should equal(1.0)
v4Factor.get(List(v11, v40, v30, 0)) should equal(1.0)
v4Factor.get(List(v11, v40, v31, 0)) should equal(0.0)
v4Factor.get(List(v11, v41, v30, 0)) should equal(0.0)
v4Factor.get(List(v11, v41, v31, 0)) should equal(1.0)
v4Factor.get(List(v11, v42, v30, 0)) should equal(0.0)
v4Factor.get(List(v11, v42, v31, 0)) should equal(0.0)
v4Factor.get(List(v12, v40, v30, 0)) should equal(0.0)
v4Factor.get(List(v12, v40, v31, 0)) should equal(0.0)
v4Factor.get(List(v12, v41, v30, 0)) should equal(0.0)
v4Factor.get(List(v12, v41, v31, 0)) should equal(0.0)
v4Factor.get(List(v12, v42, v30, 0)) should equal(1.0)
v4Factor.get(List(v12, v42, v31, 0)) should equal(1.0)
}
}
}
}
| jyuhuan/figaro | Figaro/src/test/scala/com/cra/figaro/test/algorithm/factored/FactorTest.scala | Scala | bsd-3-clause | 57,155 |
package io
/**
* Created by @davidelnunes on 06-02-2015.
*/
trait TweetWriter {
/**
* What we need to store is the tweet id, the text, and the query
* used to find that tweet.
*
* @param id Tweet ID
* @param text Tweet text
* @param keyword Query used to retrieve the tweet
*/
def write(id: Long, text:String, keyword: String)
def finish(): Unit
}
| davidelnunes/TwitterTools | src/main/scala/io/TweetWriter.scala | Scala | gpl-3.0 | 379 |
import org.scalatest._
import scala.meta._
import scala.meta.internal.{ast => impl}
import scala.meta.internal.parsers.MoreHelpers._
class ParseSuite extends FunSuite with CommonTrees {
val EOL = scala.compat.Platform.EOL
val escapedEOL = if (EOL == "\\n") """\\n""" else """\\r\\n"""
def term(code: String)(implicit dialect: Dialect) = code.parseRule(_.expr())
def pat(code: String)(implicit dialect: Dialect) = code.parseRule(_.pattern())
def tpe(code: String)(implicit dialect: Dialect) = code.parseRule(_.typ())
def topStat(code: String)(implicit dialect: Dialect) = code.parseRule(_.topStatSeq().head)
def templStat(code: String)(implicit dialect: Dialect) = code.parseRule(_.templateStats().head)
def source(code: String)(implicit dialect: Dialect) = code.parseRule(_.compilationUnit())
def tokenize(code: String)(implicit dialect: Dialect) = code.tokens
}
package scala.meta.internal.parsers {
object MoreHelpers {
implicit class XtensionCode(code: String) {
def parseRule[T <: impl.Tree](rule: Parser => T)(implicit dialect: Dialect): T = new Parser(Input.String(code)).parseRule(rule)
}
}
}
| mdemarne/scalameta | tests/src/test/scala/parser/ParseSuite.scala | Scala | bsd-3-clause | 1,134 |
package canThrowStrawman
import language.experimental.erasedDefinitions
class CanThrow[E <: Exception]
infix type throws[R, E <: Exception] = (erased CanThrow[E]) ?=> R
class Fail extends Exception
def raise[E <: Exception](e: E): Nothing throws E = throw e
def foo(x: Boolean): Int throws Fail =
if x then 1 else raise(Fail())
| dotty-staging/dotty | tests/pos/CanThrow.scala | Scala | apache-2.0 | 335 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util.Properties
import scala.collection._
import kafka.log._
import kafka.utils._
import kafka.admin.AdminUtils
import org.I0Itec.zkclient.{IZkChildListener, ZkClient}
/**
* This class initiates and carries out topic config changes.
*
* It works as follows.
*
* Config is stored under the path
* /brokers/topics/<topic_name>/config
* This znode stores the topic-overrides for this topic (but no defaults) in properties format.
*
* To avoid watching all topics for changes instead we have a notification path
* /brokers/config_changes
* The TopicConfigManager has a child watch on this path.
*
* To update a topic config we first update the topic config properties. Then we create a new sequential
* znode under the change path which contains the name of the topic that was updated, say
* /brokers/config_changes/config_change_13321
* This is just a notification--the actual config change is stored only once under the /brokers/topics/<topic_name>/config path.
*
* This will fire a watcher on all brokers. This watcher works as follows. It reads all the config change notifications.
* It keeps track of the highest config change suffix number it has applied previously. For any previously applied change it finds
* it checks if this notification is larger than a static expiration time (say 10mins) and if so it deletes this notification.
* For any new changes it reads the new configuration, combines it with the defaults, and updates the log config
* for all logs for that topic (if any) that it has.
*
* Note that config is always read from the config path in zk, the notification is just a trigger to do so. So if a broker is
* down and misses a change that is fine--when it restarts it will be loading the full config anyway. Note also that
* if there are two consecutive config changes it is possible that only the last one will be applied (since by the time the
* broker reads the config the both changes may have been made). In this case the broker would needlessly refresh the config twice,
* but that is harmless.
*
* On restart the config manager re-processes all notifications. This will usually be wasted work, but avoids any race conditions
* on startup where a change might be missed between the initial config load and registering for change notifications.
*
*/
class TopicConfigManager(private val zkClient: ZkClient,
private val logManager: LogManager,
private val changeExpirationMs: Long = 15*60*1000,
private val time: Time = SystemTime) extends Logging {
private var lastExecutedChange = -1L
/**
* Begin watching for config changes
*/
def startup() {
ZkUtils.makeSurePersistentPathExists(zkClient, ZkUtils.TopicConfigChangesPath)
zkClient.subscribeChildChanges(ZkUtils.TopicConfigChangesPath, ConfigChangeListener)
processAllConfigChanges()
}
/**
* Process all config changes
*/
private def processAllConfigChanges() {
val configChanges = zkClient.getChildren(ZkUtils.TopicConfigChangesPath)
import JavaConversions._
processConfigChanges((configChanges: mutable.Buffer[String]).sorted)
}
/**
* Process the given list of config changes
*/
private def processConfigChanges(notifications: Seq[String]) {
if (notifications.size > 0) {
info("Processing config change notification(s)...")
val now = time.milliseconds
val logs = logManager.logsByTopicPartition.toBuffer
val logsByTopic = logs.groupBy(_._1.topic).mapValues(_.map(_._2))
for (notification <- notifications) {
val changeId = changeNumber(notification)
if (changeId > lastExecutedChange) {
val changeZnode = ZkUtils.TopicConfigChangesPath + "/" + notification
val (jsonOpt, stat) = ZkUtils.readDataMaybeNull(zkClient, changeZnode)
if(jsonOpt.isDefined) {
val json = jsonOpt.get
val topic = json.substring(1, json.length - 1) // hacky way to dequote
if (logsByTopic.contains(topic)) {
/* combine the default properties with the overrides in zk to create the new LogConfig */
val props = new Properties(logManager.defaultConfig.toProps)
props.putAll(AdminUtils.fetchTopicConfig(zkClient, topic))
val logConfig = LogConfig.fromProps(props)
for (log <- logsByTopic(topic))
log.config = logConfig
info("Processed topic config change %d for topic %s, setting new config to %s.".format(changeId, topic, props))
purgeObsoleteNotifications(now, notifications)
}
}
lastExecutedChange = changeId
}
}
}
}
private def purgeObsoleteNotifications(now: Long, notifications: Seq[String]) {
for(notification <- notifications.sorted) {
val (jsonOpt, stat) = ZkUtils.readDataMaybeNull(zkClient, ZkUtils.TopicConfigChangesPath + "/" + notification)
if(jsonOpt.isDefined) {
val changeZnode = ZkUtils.TopicConfigChangesPath + "/" + notification
if (now - stat.getCtime > changeExpirationMs) {
debug("Purging config change notification " + notification)
ZkUtils.deletePath(zkClient, changeZnode)
} else {
return
}
}
}
}
/* get the change number from a change notification znode */
private def changeNumber(name: String): Long = name.substring(AdminUtils.TopicConfigChangeZnodePrefix.length).toLong
/**
* A listener that applies config changes to logs
*/
object ConfigChangeListener extends IZkChildListener {
override def handleChildChange(path: String, chillins: java.util.List[String]) {
try {
import JavaConversions._
processConfigChanges(chillins: mutable.Buffer[String])
} catch {
case e: Exception => error("Error processing config change:", e)
}
}
}
} | unix1986/universe | tool/kafka-0.8.1.1-src/core/src/main/scala/kafka/server/TopicConfigManager.scala | Scala | bsd-2-clause | 6,795 |
import javax.inject._
import javax.inject.Inject
import play.api.http.DefaultHttpErrorHandler
import play.api._
import play.api.mvc._
import play.api.mvc.Results._
import play.api.routing.Router
import scala.concurrent._
class ErrorHandler @Inject() (env: Environment,
config: Configuration,
sourceMapper: OptionalSourceMapper,
router: Provider[Router]) extends DefaultHttpErrorHandler(env, config, sourceMapper, router) {
override protected def onNotFound(
request: RequestHeader, message: String): Future[Result] = {
Future.successful {
NotFound("Could not find " + request)
}
}
} | juanux/ReactiveWebAppBookExamples | vocabulary-teacher/app/ErrorHandler.scala | Scala | apache-2.0 | 621 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.zk
import java.lang.Iterable
import javax.security.auth.login.Configuration
import scala.collection.JavaConverters._
import kafka.consumer.ConsumerConfig
import kafka.utils.ZkUtils
import kafka.utils.ZKCheckedEphemeral
import kafka.utils.TestUtils
import org.apache.kafka.common.security.JaasUtils
import org.apache.zookeeper.CreateMode
import org.apache.zookeeper.WatchedEvent
import org.apache.zookeeper.Watcher
import org.apache.zookeeper.ZooDefs.Ids
import org.I0Itec.zkclient.exception.ZkNodeExistsException
import org.junit.{After, Before, Test, Assert}
import org.junit.runners.Parameterized
import org.junit.runners.Parameterized.Parameters
import org.junit.runner.RunWith
object ZKEphemeralTest {
@Parameters
def enableSecurityOptions: Iterable[Array[java.lang.Boolean]] =
Seq[Array[java.lang.Boolean]](Array(true), Array(false)).asJava
}
@RunWith(value = classOf[Parameterized])
class ZKEphemeralTest(val secure: Boolean) extends ZooKeeperTestHarness {
val jaasFile = kafka.utils.JaasTestUtils.writeJaasContextsToFile(kafka.utils.JaasTestUtils.zkSections)
val authProvider = "zookeeper.authProvider.1"
var zkSessionTimeoutMs = 1000
@Before
override def setUp() {
if (secure) {
System.setProperty(JaasUtils.JAVA_LOGIN_CONFIG_PARAM, jaasFile.getAbsolutePath)
Configuration.setConfiguration(null)
System.setProperty(authProvider, "org.apache.zookeeper.server.auth.SASLAuthenticationProvider")
if (!JaasUtils.isZkSecurityEnabled)
fail("Secure access not enabled")
}
super.setUp
}
@After
override def tearDown() {
super.tearDown
System.clearProperty(JaasUtils.JAVA_LOGIN_CONFIG_PARAM)
System.clearProperty(authProvider)
Configuration.setConfiguration(null)
}
@Test
def testEphemeralNodeCleanup = {
val config = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, "test", "1"))
var zkUtils = ZkUtils(zkConnect, zkSessionTimeoutMs, config.zkConnectionTimeoutMs, JaasUtils.isZkSecurityEnabled())
try {
zkUtils.createEphemeralPathExpectConflict("/tmp/zktest", "node created")
} catch {
case _: Exception =>
}
var testData: String = null
testData = zkUtils.readData("/tmp/zktest")._1
Assert.assertNotNull(testData)
zkUtils.close
zkUtils = ZkUtils(zkConnect, zkSessionTimeoutMs, config.zkConnectionTimeoutMs, JaasUtils.isZkSecurityEnabled())
val nodeExists = zkUtils.pathExists("/tmp/zktest")
Assert.assertFalse(nodeExists)
zkUtils.close()
}
/*****
***** Tests for ZkWatchedEphemeral
*****/
/**
* Tests basic creation
*/
@Test
def testZkWatchedEphemeral = {
testCreation("/zwe-test")
testCreation("/zwe-test-parent/zwe-test")
}
private def testCreation(path: String) {
val zk = zkUtils.zkConnection.getZookeeper
val zwe = new ZKCheckedEphemeral(path, "", zk, JaasUtils.isZkSecurityEnabled())
var created = false
zk.exists(path, new Watcher() {
def process(event: WatchedEvent) {
if(event.getType == Watcher.Event.EventType.NodeCreated) {
created = true
}
}
})
zwe.create()
// Waits until the znode is created
TestUtils.waitUntilTrue(() => zkUtils.pathExists(path),
s"Znode $path wasn't created")
}
/**
* Tests that it fails in the presence of an overlapping
* session.
*/
@Test
def testOverlappingSessions = {
val path = "/zwe-test"
val zk1 = zkUtils.zkConnection.getZookeeper
//Creates a second session
val (zkClient2, zkConnection2) = ZkUtils.createZkClientAndConnection(zkConnect, zkSessionTimeoutMs, zkConnectionTimeout)
val zk2 = zkConnection2.getZookeeper
val zwe = new ZKCheckedEphemeral(path, "", zk2, JaasUtils.isZkSecurityEnabled())
// Creates znode for path in the first session
zk1.create(path, Array[Byte](), Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL)
//Bootstraps the ZKWatchedEphemeral object
val gotException =
try {
zwe.create()
false
} catch {
case _: ZkNodeExistsException => true
}
Assert.assertTrue(gotException)
zkClient2.close()
}
/**
* Tests if succeeds with znode from the same session
*/
@Test
def testSameSession = {
val path = "/zwe-test"
val zk = zkUtils.zkConnection.getZookeeper
// Creates znode for path in the first session
zk.create(path, Array[Byte](), Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL)
val zwe = new ZKCheckedEphemeral(path, "", zk, JaasUtils.isZkSecurityEnabled())
//Bootstraps the ZKWatchedEphemeral object
val gotException =
try {
zwe.create()
false
} catch {
case _: ZkNodeExistsException => true
}
Assert.assertFalse(gotException)
}
}
| wangcy6/storm_app | frame/kafka-0.11.0/kafka-0.11.0.1-src/core/src/test/scala/unit/kafka/zk/ZKEphemeralTest.scala | Scala | apache-2.0 | 5,661 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package base
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
/**
* @author Alexander Podkhalyuzin
* Date: 11.02.2008
*/
object ImportSelector {
def parse(builder: ScalaPsiBuilder): Boolean = {
val importSelectorMarker = builder.mark
builder.getTokenType match {
case ScalaTokenTypes.tIDENTIFIER =>
val sel = builder.mark()
builder.advanceLexer // Ate identifier
sel.done(ScalaElementTypes.REFERENCE)
case _ =>
importSelectorMarker.drop
return false
}
builder.getTokenType match {
case ScalaTokenTypes.tFUNTYPE =>
builder.advanceLexer //Ate =>
builder.getTokenType match {
case ScalaTokenTypes.tUNDER | ScalaTokenTypes.tIDENTIFIER => {
builder.advanceLexer //Ate _ | identifier
importSelectorMarker.done(ScalaElementTypes.IMPORT_SELECTOR)
return true
}
case _ => {
builder error ErrMsg("identifier.or.wild.sign.expected")
importSelectorMarker.done(ScalaElementTypes.IMPORT_SELECTOR)
return true
}
}
case _ =>
importSelectorMarker.done(ScalaElementTypes.IMPORT_SELECTOR)
return true
}
}
} | ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/parser/parsing/base/ImportSelector.scala | Scala | apache-2.0 | 1,402 |
/*
* ObjNodeViewImpl.scala
* (Cord)
*
* Copyright (c) 2015-2020 Hanns Holger Rutz.
*
* This software is published under the GNU Lesser General Public License v2.1+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.cord
package view
package impl
class ObjNodeViewImpl(val parentView: PatcherView, val elem: ObjNode, elemText: String)
extends RectNodeViewImpl {
init()
protected def boxWidth: Int = elemText.length * 7 + 8
override def toString = s"View of $elem"
override protected def init(): Unit = {
super.init()
import scalatags.JsDom.all._
import scalatags.JsDom.svgAttrs._
import scalatags.JsDom.svgTags._
val textTree = text(cls := "cord-node-name", x := 4, y := 15, elemText).render
peer.appendChild(textTree)
}
def dispose(): Unit = ()
}
| Sciss/Cord | src/main/scala/de/sciss/cord/view/impl/ObjNodeViewImpl.scala | Scala | lgpl-2.1 | 863 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate.scuery.support
import xml.{ Elem, Node, NodeSeq }
import org.fusesource.scalate.scuery.XmlHelper._
import org.fusesource.scalate.util.Log
object Rule extends Log {
/**
* Combines multiple rules to a single rule
*/
def apply(a: Rule, b: Rule): Rule = {
if (a.order <= b.order) {
CompositeRule(a, b)
} else {
CompositeRule(b, a)
}
}
def apply(rules: Iterator[Rule]): Rule = apply(rules.toSeq)
def apply(rules: Seq[Rule]): Rule = {
if (rules.size < 2) {
rules(0)
} else {
val list = rules.sortWith(_.order < _.order)
list.tail.foldLeft(list.head)(Rule(_, _))
}
}
}
import Rule._
/**
* Represents manipuluation rules
*
* @version $Revision : 1.1 $
*/
trait Rule {
def apply(node: Node): NodeSeq
/**
* Lets do simple rules first (like setting attributes, removing attributes), then changing contents
* then finally completely transforming the node last
*/
def order: Int = 0
}
case class CompositeRule(first: Rule, second: Rule) extends Rule {
def apply(node: Node) = {
first(node).flatMap { second(_) }
}
def toList: List[Rule] = toList(first) ::: toList(second)
protected def toList(rule: Rule): List[Rule] = rule match {
case c: CompositeRule => c.toList
case _ => rule :: Nil
}
}
case class ReplaceRule(fn: Node => NodeSeq) extends Rule {
def apply(node: Node) = fn(node)
override def order: Int = 100
}
case class ReplaceContentRule(fn: Node => NodeSeq) extends Rule {
def apply(node: Node) = node match {
case e: Elem =>
val contents = fn(e)
debug("Replacing content = " + contents)
replaceContent(e, contents)
case n => n
}
}
case class SetAttributeRule(name: String, fn: (Node) => String) extends Rule {
def apply(node: Node) = node match {
case e: Elem =>
val value = fn(e)
debug("Setting attribute %s to %s", name, value)
setAttribute(e, name, value)
case n => n
}
override def order = -1
}
case class SetSelectiveAttributeRule(name: String, fn: (Node) => String) extends Rule {
def apply(node: Node) = node match {
case e: Elem =>
val value = fn(e)
debug("Selectively setting attribute %s to %s", name, value)
if (e.attribute(name).isDefined) setAttribute(e, name, value) else e
case n => n
}
override def order = -1
}
| scalate/scalate | scalate-core/src/main/scala/org/fusesource/scalate/scuery/support/Rule.scala | Scala | apache-2.0 | 3,124 |
package org.jetbrains.plugins.scala.lang.breadcrumbs
import java.awt.Color
import com.intellij.openapi.editor.colors.{EditorColors, EditorColorsManager, TextAttributesKey}
import com.intellij.psi.PsiElement
import com.intellij.xml.breadcrumbs.{BreadcrumbsPresentationProvider, CrumbPresentation}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScFunctionExpr
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
/**
* User: Dmitry.Naydanov
* Date: 24.06.16.
*/
class ScalaBreadcrumbsPresentationProvider extends BreadcrumbsPresentationProvider {
override def getCrumbPresentations(element: Array[PsiElement]): Array[CrumbPresentation] = {
import ScalaBreadcrumbsPresentationProvider._
if (element.headOption.exists(!_.isInstanceOf[ScalaPsiElement])) return null
element.map {
element => MyCrumbPresentation(getColorKeyFor(element))
}
}
}
/**
* Problem: current API doesn't allow setting color for font or border, only for background. The other colors are taken
* from Java settings, so if a user changes java colors, scala colors might got messed (e.g. if user sets font color to background
* and vice versa). We could take all colors from java if all our elements were equal, but they are not.
* So we have to
* 1) get somehow different colors from Java, so if use changes them, our colors will be changed as well
* 2) correct them slightly if they are too similar to font color
*/
object ScalaBreadcrumbsPresentationProvider {
private val JAVA_HOVERED = EditorColors.BREADCRUMBS_HOVERED
private val JAVA_CURRENT = EditorColors.BREADCRUMBS_CURRENT
private val JAVA_DEFAULT = EditorColors.BREADCRUMBS_DEFAULT
case class MyCrumbPresentation(colorKey: TextAttributesKey) extends CrumbPresentation {
override def getBackgroundColor(selected: Boolean, hovered: Boolean, light: Boolean): Color = {
import com.intellij.ui.ColorUtil._
val c = getColorByKey(colorKey)
val color = if (selected) brighter(c, 1) else if (hovered) darker(c, 1) else c
val fontColor = getAttributesByKey(colorKey).getForegroundColor
val backgroundLum = calculateColorLuminance(color)
val fontLum = calculateColorLuminance(fontColor)
val contrastRatio = (Math.max(backgroundLum, fontLum) + 0.05) / (Math.min(fontLum, backgroundLum) + 0.05)
if (contrastRatio < THRESHOLD) if (backgroundLum < fontLum) darker(color, 1) else brighter(color, 1) else color
}
}
def getColorKeyFor(el: PsiElement): TextAttributesKey = el match {
case _: ScTemplateDefinition => getClassColorKey
case _: ScFunction => getFunctionColorKey
case _: ScFunctionExpr => getFunctionColorKey
case _: ScalaPsiElement => getOtherColorKey
case _ => getFunctionColorKey // why not
}
private def getClassColorKey = JAVA_CURRENT
private def getFunctionColorKey = JAVA_DEFAULT
private def getOtherColorKey = JAVA_HOVERED
@inline private def getAttributesByKey(attributesKey: TextAttributesKey) =
EditorColorsManager.getInstance.getGlobalScheme.getAttributes(attributesKey)
@inline private def getColorByKey(attributesKey: TextAttributesKey) =
getAttributesByKey(attributesKey).getBackgroundColor
//Good idea from Android plugin
private val THRESHOLD = 1.9
def calculateContrastRatio(color1: Color, color2: Color): Double = {
val color1Luminance = calculateColorLuminance(color1)
val color2Luminance = calculateColorLuminance(color2)
(Math.max(color1Luminance, color2Luminance) + 0.05) / (Math.min(color2Luminance, color1Luminance) + 0.05)
}
private def calculateColorLuminance(color: Color) =
calculateLuminanceContribution(color.getRed / 255.0) * 0.2126 +
calculateLuminanceContribution(color.getGreen / 255.0) * 0.7152 +
calculateLuminanceContribution(color.getBlue / 255.0) * 0.0722
private def calculateLuminanceContribution(colorValue: Double) =
if (colorValue <= 0.03928) colorValue / 12.92 else Math.pow((colorValue + 0.055) / 1.055, 2.4)
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/breadcrumbs/ScalaBreadcrumbsPresentationProvider.scala | Scala | apache-2.0 | 4,232 |
package com.github.play2war.plugin.it
import java.net.URL
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import org.scalatest.matchers._
import org.scalatest._
import org.codehaus.cargo.container.InstalledLocalContainer
import org.codehaus.cargo.container.installer.ZipURLInstaller
import org.codehaus.cargo.generic.configuration.DefaultConfigurationFactory
import org.codehaus.cargo.container.ContainerType
import org.codehaus.cargo.container.configuration.ConfigurationType
import org.codehaus.cargo.generic.DefaultContainerFactory
import org.codehaus.cargo.container.configuration.LocalConfiguration
import com.gargoylesoftware.htmlunit._
import com.gargoylesoftware.htmlunit.html._
import com.gargoylesoftware.htmlunit.util._
import org.codehaus.cargo.container.deployable.WAR
import org.codehaus.cargo.container.property._
import org.codehaus.cargo.util.log._
import scala.collection.immutable._
import scala.collection.JavaConverters._
import org.apache.commons.io.FileUtils
import java.io.File
trait WarContext {
def context = "/p2wsample"
}
trait ServletContainer {
protected val WAR_KEY = "war.servlet"
def keyServletContainer: String
def keyWarPath: String = WAR_KEY + keyServletContainer
}
trait Servlet31Container extends ServletContainer {
def keyServletContainer = "31"
}
trait Servlet30Container extends ServletContainer {
def keyServletContainer = "30"
}
trait Servlet25Container extends ServletContainer {
def keyServletContainer = "25"
}
trait CargoContainerManager extends WarContext {
var container: InstalledLocalContainer = _
def getContainer = container
def setContainer(container: InstalledLocalContainer) = this.container = container
def containerUrl: String
def containerFileNameInCloudbeesCache: Option[String] = None
def containerName: String
def getJavaVersion: String
def startContainer(warPath: String, stopOnExit: Boolean) {
println("WAR file to deploy: " + warPath)
val containerUrlToDownload: String = containerFileNameInCloudbeesCache.flatMap { c =>
val path = "/private/play-war/cargo-containers/" + c
if (new File(path).exists) {
println("Local container found: " + path)
Option("file://" + path)
} else {
println("Local container not found: " + path)
None
}
}.getOrElse(containerUrl)
println("Download container " + containerName + " from " + containerUrlToDownload + " ...")
val installer = new ZipURLInstaller(new URL(containerUrlToDownload))
println("Download container done")
Option(System.getenv("http_proxy")).foreach { systemProxy =>
println(s"Using system proxy '$systemProxy'")
val uri = new java.net.URI(systemProxy)
val proxy = new org.codehaus.cargo.container.installer.Proxy()
proxy.setHost(uri.getHost)
proxy.setPort(uri.getPort)
installer.setProxy(proxy)
}
println("Install container ...")
installer.install()
println("Install container done")
val configuration: LocalConfiguration = new DefaultConfigurationFactory().createConfiguration(
containerName, ContainerType.INSTALLED, ConfigurationType.STANDALONE).asInstanceOf[LocalConfiguration]
configuration.setProperty(GeneralPropertySet.LOGGING, LoggingLevel.MEDIUM.getLevel)
getJavaVersion match {
case "java8" => {
// Try to set java_home from config property
Option(System.getProperty("java8.home")).foreach { home =>
configuration.setProperty(GeneralPropertySet.JAVA_HOME, home)
}
// Use current JVM otherwise
}
case _ => throw new RuntimeException("Play 2.4 only supports java8")
}
val container =
new DefaultContainerFactory().createContainer(
containerName, ContainerType.INSTALLED, configuration).asInstanceOf[InstalledLocalContainer]
println("Configure container")
container.setHome(installer.getHome)
container.setLogger(new SimpleLogger)
val war = new WAR(warPath.toString)
war.setContext(context)
configuration.addDeployable(war)
println("Start the container " + containerName)
setContainer(container)
container.start()
if (stopOnExit) {
Runtime.getRuntime.addShutdownHook(new Thread() {
override def run() {
stopContainer()
}
})
}
}
def stopContainer() {
val maybeContainer = Option(getContainer)
maybeContainer map { container =>
println("Stop the container " + container.getHome)
container.stop()
} getOrElse {
println("Container already stopped")
}
}
}
trait CargoContainerManagerFixture extends BeforeAndAfterAll with CargoContainerManager {
self: Suite =>
def keyWarPath: String
abstract override def beforeAll(configMap: Map[String, Any]) {
val warPath = configMap.getOrElse(keyWarPath, throw new Exception("no war path defined")).asInstanceOf[String]
startContainer(warPath, stopOnExit = false)
}
abstract override def afterAll() {
stopContainer()
}
}
trait JavaVersion {
def getJavaVersion: String
}
trait Java8 extends JavaVersion {
override def getJavaVersion = "java8"
}
| swatikiran123/play2-war-plugin | project-code/integration-tests/src/test/scala/com/github/play2war/plugin/it/ServerHelpers.scala | Scala | apache-2.0 | 5,186 |
/*
* Copyright (c) 2013-2014 Telefónica Investigación y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.servicemanager.ambari.services
import es.tid.cosmos.servicemanager.services.MapReduce2
object AmbariMapReduce2 extends AmbariService with FileConfiguration {
override val service = MapReduce2
override val components: Seq[ComponentDescription] = Seq(
ComponentDescription.masterComponent("HISTORYSERVER"),
ComponentDescription.masterComponent("MAPREDUCE2_CLIENT").makeClient
)
}
| telefonicaid/fiware-cosmos-platform | ambari-service-manager/src/main/scala/es/tid/cosmos/servicemanager/ambari/services/AmbariMapReduce2.scala | Scala | apache-2.0 | 1,056 |
package org.randi3.model
final object Role extends Enumeration {
val principleInvestigator = Value("Principle Investigator")
val trialAdministrator = Value("Trial Administrator")
val investigator = Value("Investigator")
val monitor = Value("Monitor")
val statistician = Value("Statistician")
} | dschrimpf/randi3-core | src/main/scala/org/randi3/model/Role.scala | Scala | gpl-3.0 | 304 |
package djinni
import djinni.ast._
import djinni.generatorTools._
import djinni.meta._
class ObjcMarshal(spec: Spec) extends Marshal(spec) {
override def typename(tm: MExpr): String = {
val (name, _) = toObjcType(tm)
name
}
def typename(name: String, ty: TypeDef): String = idObjc.ty(name)
override def fqTypename(tm: MExpr): String = typename(tm)
def fqTypename(name: String, ty: TypeDef): String = typename(name, ty)
def nullability(tm: MExpr): Option[String] = {
val nonnull = Some("nonnull")
val nullable = Some("nullable")
tm.base match {
case MOptional => nullable
case MPrimitive(_,_,_,_,_,_,_,_,_,_) => None
case d: MDef => d.defType match {
case DEnum => None
case DInterface => nullable
case DRecord => nonnull
}
case e: MExtern => e.defType match {
case DEnum => None
case DInterface => nullable
case DRecord => if(e.objc.pointer) nonnull else None
}
case _ => nonnull
}
}
override def paramType(tm: MExpr): String = {
nullability(tm).fold("")(_ + " ") + toObjcParamType(tm)
}
override def fqParamType(tm: MExpr): String = paramType(tm)
override def returnType(ret: Option[TypeRef]): String = ret.fold("void")((t: TypeRef) => nullability(t.resolved).fold("")(_ + " ") + toObjcParamType(t.resolved))
override def fqReturnType(ret: Option[TypeRef]): String = returnType(ret)
override def fieldType(tm: MExpr): String = toObjcParamType(tm)
override def toCpp(tm: MExpr, expr: String): String = throw new AssertionError("direct objc to cpp conversion not possible")
override def fqFieldType(tm: MExpr): String = toObjcParamType(tm)
override def fromCpp(tm: MExpr, expr: String): String = throw new AssertionError("direct cpp to objc conversion not possible")
def references(m: Meta, exclude: String = ""): Seq[SymbolReference] = m match {
case o: MOpaque =>
List(ImportRef("<Foundation/Foundation.h>"))
case d: MDef => d.defType match {
case DEnum =>
List(ImportRef(include(d.name)))
case DInterface =>
val ext = d.body.asInstanceOf[Interface].ext
if (ext.cpp && !ext.objc) {
List(ImportRef("<Foundation/Foundation.h>"), DeclRef(s"@class ${typename(d.name, d.body)};", None))
}
else {
List(ImportRef("<Foundation/Foundation.h>"), DeclRef(s"@protocol ${typename(d.name, d.body)};", None))
}
case DRecord =>
val r = d.body.asInstanceOf[Record]
val prefix = if (r.ext.objc) "../" else ""
List(ImportRef(q(spec.objcIncludePrefix + prefix + headerName(d.name))))
}
case e: MExtern => List(ImportRef(e.objc.header))
case p: MParam => List()
}
def headerName(ident: String) = idObjc.ty(ident) + "." + spec.objcHeaderExt
def include(ident: String) = q(spec.objcIncludePrefix + headerName(ident))
def isPointer(td: TypeDecl) = td.body match {
case i: Interface => true
case r: Record => true
case e: Enum => false
}
def boxedTypename(td: TypeDecl) = td.body match {
case i: Interface => typename(td.ident, i)
case r: Record => typename(td.ident, r)
case e: Enum => "NSNumber"
}
// Return value: (Type_Name, Is_Class_Or_Not)
def toObjcType(ty: TypeRef): (String, Boolean) = toObjcType(ty.resolved, false)
def toObjcType(ty: TypeRef, needRef: Boolean): (String, Boolean) = toObjcType(ty.resolved, needRef)
def toObjcType(tm: MExpr): (String, Boolean) = toObjcType(tm, false)
def toObjcType(tm: MExpr, needRef: Boolean): (String, Boolean) = {
def f(tm: MExpr, needRef: Boolean): (String, Boolean) = {
tm.base match {
case MOptional =>
// We use "nil" for the empty optional.
assert(tm.args.size == 1)
val arg = tm.args.head
arg.base match {
case MOptional => throw new AssertionError("nested optional?")
case m => f(arg, true)
}
case o =>
val base = o match {
case p: MPrimitive => if (needRef) (p.objcBoxed, true) else (p.objcName, false)
case MString => ("NSString", true)
case MDate => ("NSDate", true)
case MBinary => ("NSData", true)
case MOptional => throw new AssertionError("optional should have been special cased")
case MList => ("NSArray", true)
case MSet => ("NSSet", true)
case MMap => ("NSDictionary", true)
case d: MDef => d.defType match {
case DEnum => if (needRef) ("NSNumber", true) else (idObjc.ty(d.name), false)
case DRecord => (idObjc.ty(d.name), true)
case DInterface =>
val ext = d.body.asInstanceOf[Interface].ext
if (ext.cpp && !ext.objc)
(idObjc.ty(d.name), true)
else
(s"id<${idObjc.ty(d.name)}>", false)
}
case e: MExtern => e.body match {
case i: Interface => if(i.ext.objc) (s"id<${e.objc.typename}>", false) else (e.objc.typename, true)
case _ => if(needRef) (e.objc.boxed, true) else (e.objc.typename, e.objc.pointer)
}
case p: MParam => throw new AssertionError("Parameter should not happen at Obj-C top level")
}
base
}
}
f(tm, needRef)
}
def toObjcParamType(tm: MExpr): String = {
val (name, needRef) = toObjcType(tm)
name + (if(needRef) " *" else "")
}
}
| eastonhou/djinni_with_cx | src/source/ObjcMarshal.scala | Scala | apache-2.0 | 5,508 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package expr
import com.intellij.lang.ASTNode
import com.intellij.psi.{PsiField, ResolveState}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScVariable}
import org.jetbrains.plugins.scala.lang.psi.types.Compatibility.Expression
import org.jetbrains.plugins.scala.lang.psi.types.api.Unit
import org.jetbrains.plugins.scala.lang.psi.types.result._
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
import org.jetbrains.plugins.scala.lang.resolve.processor.MethodResolveProcessor
import org.jetbrains.plugins.scala.lang.resolve.{ScalaResolveResult, StdKinds}
import org.jetbrains.plugins.scala.macroAnnotations.{Cached, ModCount}
/**
* @author Alexander Podkhalyuzin
*/
class ScAssignStmtImpl(node: ASTNode) extends ScExpressionImplBase(node) with ScAssignStmt {
protected override def innerType: TypeResult = {
getLExpression match {
case call: ScMethodCall => call.`type`()
case _ =>
resolveAssignment match {
case Some(_) =>
mirrorMethodCall match {
case Some(call) => call.`type`()
case None => Right(Unit)
}
case _ => Right(Unit)
}
}
}
@Cached(ModCount.getBlockModificationCount, this)
def resolveAssignment: Option[ScalaResolveResult] = resolveAssignmentInner(shapeResolve = false)
@Cached(ModCount.getBlockModificationCount, this)
def shapeResolveAssignment: Option[ScalaResolveResult] = resolveAssignmentInner(shapeResolve = true)
@Cached(ModCount.getBlockModificationCount, this)
def mirrorMethodCall: Option[ScMethodCall] = {
getLExpression match {
case ref: ScReferenceExpression =>
val text = s"${ref.refName}_=(${getRExpression.map(_.getText).getOrElse("")})"
val mirrorExpr = ScalaPsiElementFactory.createExpressionWithContextFromText(text, getContext, this)
mirrorExpr match {
case call: ScMethodCall =>
call.getInvokedExpr.asInstanceOf[ScReferenceExpression].setupResolveFunctions(
() => resolveAssignment.toArray, () => shapeResolveAssignment.toArray
)
Some(call)
case _ => None
}
case methodCall: ScMethodCall =>
val invokedExpr = methodCall.getInvokedExpr
val text = s"${invokedExpr.getText}.update(${methodCall.args.exprs.map(_.getText).mkString(",")}," +
s" ${getRExpression.map(_.getText).getOrElse("")}"
val mirrorExpr = ScalaPsiElementFactory.createExpressionWithContextFromText(text, getContext, this)
//todo: improve performance: do not re-evaluate resolve to "update" method
mirrorExpr match {
case call: ScMethodCall => Some(call)
case _ => None
}
case _ => None
}
}
private def resolveAssignmentInner(shapeResolve: Boolean): Option[ScalaResolveResult] = {
getLExpression match {
case ref: ScReferenceExpression =>
ref.bind() match {
case Some(r) =>
ScalaPsiUtil.nameContext(r.element) match {
case _: ScVariable => None
case c: ScClassParameter if c.isVar => None
case _: PsiField => None
case fun: ScFunction if ScalaPsiUtil.isViableForAssignmentFunction(fun) =>
val processor = new MethodResolveProcessor(ref, ScalaNamesUtil.clean(fun.name) + "_=",
getRExpression.map(expr => List(Seq(new Expression(expr)))).getOrElse(Nil), Nil, ref.getPrevTypeInfoParams,
isShapeResolve = shapeResolve, kinds = StdKinds.methodsOnly)
r.fromType match {
case Some(tp) => processor.processType(tp, ref)
case None =>
fun.getContext match {
case d: ScDeclarationSequenceHolder =>
d.processDeclarations(processor, ResolveState.initial(), fun, ref)
case _ =>
}
}
val candidates = processor.candidatesS
if (candidates.size == 1) Some(candidates.toArray.apply(0))
else None
case _ => None
}
case _ => None
}
case _ => None
}
}
override def toString: String = "AssignStatement"
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScAssignStmtImpl.scala | Scala | apache-2.0 | 4,518 |
package views.html
import play.templates._
import play.templates.TemplateMagic._
import play.api.templates._
import play.api.templates.PlayMagic._
import models._
import controllers._
import java.lang._
import java.util._
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import play.api.i18n._
import play.core.j.PlayMagicForJava._
import play.mvc._
import play.data._
import play.api.data.Field
import play.mvc.Http.Context.Implicit._
import views.html._
/**/
object Index extends BaseScalaTemplate[play.api.templates.HtmlFormat.Appendable,Format[play.api.templates.HtmlFormat.Appendable]](play.api.templates.HtmlFormat) with play.api.templates.Template1[String,play.api.templates.HtmlFormat.Appendable] {
/**/
def apply/*1.2*/(message: String):play.api.templates.HtmlFormat.Appendable = {
_display_ {
Seq[Any](format.raw/*1.19*/("""
"""),_display_(Seq[Any](/*3.2*/Main("Home")/*3.14*/ {_display_(Seq[Any](format.raw/*3.16*/("""
<div class="container">
<p>"""),_display_(Seq[Any](/*5.10*/message)),format.raw/*5.17*/("""</p>
</div>
""")))})),format.raw/*7.2*/("""
"""))}
}
def render(message:String): play.api.templates.HtmlFormat.Appendable = apply(message)
def f:((String) => play.api.templates.HtmlFormat.Appendable) = (message) => apply(message)
def ref: this.type = this
}
/*
-- GENERATED --
DATE: Mon Oct 14 10:46:57 HST 2013
SOURCE: /home/matt/workspace/digits/app/views/Index.scala.html
HASH: 7f4ddebc1e6947453e5715666f0ed17bc78cf9bb
MATRIX: 774->1|885->18|922->21|942->33|981->35|1053->72|1081->79|1128->96
LINES: 26->1|29->1|31->3|31->3|31->3|33->5|33->5|35->7
-- GENERATED --
*/
| MattCCieslak/digits | target/scala-2.10/src_managed/main/views/html/Index.template.scala | Scala | mit | 1,875 |
package org.gDanix.podapp_ng_server.controllers.v0.common
import org.gDanix.podapp_ng_server.dao.AccesoDAO
trait Accesses {
val accesses: AccesoDAO
} | gDanix/PODApp-ng-server | app/org/gDanix/podapp_ng_server/controllers/v0/common/Accesses.scala | Scala | apache-2.0 | 153 |
package cromwell.engine.backend.runtimeattributes
/**
* Decides is a call/job continues upon a specific return code.
*/
sealed trait ContinueOnReturnCode {
/**
* Returns true if the call is a success based on the return code.
*
* @param returnCode Return code from the process / script.
* @return True if the call is a success.
*/
final def continueFor(returnCode: Int): Boolean = {
this match {
case ContinueOnReturnCodeFlag(continue) => continue || returnCode == 0
case ContinueOnReturnCodeSet(returnCodes) => returnCodes.contains(returnCode)
}
}
}
/**
* Continues based on a generic true / false flag, that when false, only zero return codes continue.
* @param continue If true, all return codes are valid for continuing.
*/
case class ContinueOnReturnCodeFlag(continue: Boolean) extends ContinueOnReturnCode
/**
* Continues only if the call/job return code is found in returnCodes.
* @param returnCodes Inclusive set of return codes that specify a job success.
*/
case class ContinueOnReturnCodeSet(returnCodes: Set[Int]) extends ContinueOnReturnCode | dgtester/cromwell | src/main/scala/cromwell/engine/backend/runtimeattributes/ContinueOnReturnCode.scala | Scala | bsd-3-clause | 1,119 |
import scala.concurrent.duration._
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import io.gatling.jdbc.Predef._
class TDCSimulation2 extends Simulation {
val httpProtocol = http
.baseURL("http://localhost:8765")
.inferHtmlResources()
.acceptHeader("application/json, text/plain, */*")
.acceptEncodingHeader("gzip, deflate")
.acceptLanguageHeader("en-US,en;q=0.5")
.userAgentHeader("Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:50.0) Gecko/20100101 Firefox/50.0")
.disableFollowRedirect
val scn = scenario("Todas as funcionalidades")
.exec(http("Foo Bar Request")
.get("/foo/call")
.check(status.is(200)))
.exec(http("Foo Request")
.get("/foo/info")
.check(status.is(200)))
.exec(http("Bar Request")
.get("/bar/info")
.check(status.is(200)))
setUp(
scn.inject(
atOnceUsers(10),
constantUsersPerSec(40) during(30 seconds),
constantUsersPerSec(70) during(30 seconds),
constantUsersPerSec(100) during(30 seconds),
constantUsersPerSec(130) during(30 seconds)
).protocols(httpProtocol)
)
} | rsdomingues/netflixmicroservices-tdc2017 | gatling/TDCSimulation2.scala | Scala | gpl-3.0 | 1,120 |
package pl.touk.nussknacker.ui.initialization
import akka.http.scaladsl.testkit.ScalatestRouteTest
import org.scalatest.tags.Slow
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FlatSpec, Matchers}
import pl.touk.nussknacker.engine.api.process.ProcessName
import pl.touk.nussknacker.test.PatientScalaFutures
import pl.touk.nussknacker.ui.api.helpers.TestFactory.mapProcessingTypeDataProvider
import pl.touk.nussknacker.ui.api.helpers._
import pl.touk.nussknacker.ui.process.migrate.TestMigrations
import pl.touk.nussknacker.ui.process.repository.ProcessRepository.CreateProcessAction
import scala.concurrent.ExecutionContextExecutor
@Slow
class InitializationOnPostgresItSpec
extends FlatSpec
with ScalatestRouteTest
with Matchers
with PatientScalaFutures
with BeforeAndAfterEach
with BeforeAndAfterAll
with WithPostgresDbTesting {
import Initialization.nussknackerUser
private implicit val ds: ExecutionContextExecutor = system.dispatcher
private val processId = "proc1"
private val migrations = mapProcessingTypeDataProvider(TestProcessingTypes.Streaming -> new TestMigrations(1, 2))
private lazy val repository = TestFactory.newFetchingProcessRepository(db, Some(1))
private lazy val repositoryManager = TestFactory.newDBRepositoryManager(db)
private lazy val writeRepository = TestFactory.newWriteProcessRepository(db)
private def sampleDeploymentData(processId: String) =
ProcessTestData.validProcessWithId(processId).toCanonicalProcess
it should "migrate processes" in {
saveSampleProcess()
Initialization.init(migrations, db, "env1")
repository.fetchProcessesDetails[Unit]().futureValue.map(d => (d.name, d.modelVersion)) shouldBe List(("proc1", Some(2)))
}
it should "migrate processes when fragments present" in {
saveSampleProcess("sub1", subprocess = true)
saveSampleProcess("id1")
Initialization.init(migrations, db, "env1")
repository.fetchProcessesDetails[Unit]().futureValue.map(d => (d.name, d.modelVersion)) shouldBe List(("id1", Some(2)))
}
private def saveSampleProcess(processName: String = processId, subprocess: Boolean = false): Unit = {
val action = CreateProcessAction(ProcessName(processName), "RTM", sampleDeploymentData(processId), TestProcessingTypes.Streaming, subprocess)
repositoryManager
.runInTransaction(writeRepository.saveNewProcess(action))
.futureValue
}
it should "run initialization transactionally" in {
saveSampleProcess()
val exception = intercept[RuntimeException](
Initialization.init(mapProcessingTypeDataProvider(TestProcessingTypes.Streaming -> new TestMigrations(1, 2, 5)), db, "env1"))
exception.getMessage shouldBe "made to fail.."
repository.fetchProcessesDetails[Unit]().futureValue.map(d => (d.name, d.modelVersion)) shouldBe List(("proc1", Some(1)))
}
}
| TouK/nussknacker | ui/server/src/test/scala/pl/touk/nussknacker/ui/initialization/InitializationOnPostgresItSpec.scala | Scala | apache-2.0 | 2,879 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar.compression
import java.nio.{ByteBuffer, ByteOrder}
import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.execution.columnar.{ColumnBuilder, NativeColumnBuilder}
import org.apache.spark.sql.types.AtomicType
/**
* A stackable trait that builds optionally compressed byte buffer for a column. Memory layout of
* the final byte buffer is:
* {{{
* .----------------------- Null count N (4 bytes)
* | .------------------- Null positions (4 x N bytes, empty if null count is zero)
* | | .------------- Compression scheme ID (4 bytes)
* | | | .--------- Compressed non-null elements
* V V V V
* +---+-----+---+---------+
* | | ... | | ... ... |
* +---+-----+---+---------+
* \\-------/ \\-----------/
* header body
* }}}
*/
private[columnar] trait CompressibleColumnBuilder[T <: AtomicType]
extends ColumnBuilder with Logging {
this: NativeColumnBuilder[T] with WithCompressionSchemes =>
var compressionEncoders: Seq[Encoder[T]] = _
abstract override def initialize(
initialSize: Int,
columnName: String,
useCompression: Boolean): Unit = {
compressionEncoders =
if (useCompression) {
schemes.filter(_.supports(columnType)).map(_.encoder[T](columnType))
} else {
Seq(PassThrough.encoder(columnType))
}
super.initialize(initialSize, columnName, useCompression)
}
protected def isWorthCompressing(encoder: Encoder[T]) = {
encoder.compressionRatio < 0.8
}
private def gatherCompressibilityStats(row: InternalRow, ordinal: Int): Unit = {
var i = 0
while (i < compressionEncoders.length) {
compressionEncoders(i).gatherCompressibilityStats(row, ordinal)
i += 1
}
}
abstract override def appendFrom(row: InternalRow, ordinal: Int): Unit = {
super.appendFrom(row, ordinal)
if (!row.isNullAt(ordinal)) {
gatherCompressibilityStats(row, ordinal)
}
}
override def build(): ByteBuffer = {
val nonNullBuffer = buildNonNulls()
val encoder: Encoder[T] = {
val candidate = compressionEncoders.minBy(_.compressionRatio)
if (isWorthCompressing(candidate)) candidate else PassThrough.encoder(columnType)
}
// Header = null count + null positions
val headerSize = 4 + nulls.limit()
val compressedSize = if (encoder.compressedSize == 0) {
nonNullBuffer.remaining()
} else {
encoder.compressedSize
}
val compressedBuffer = ByteBuffer
// Reserves 4 bytes for compression scheme ID
.allocate(headerSize + 4 + compressedSize)
.order(ByteOrder.nativeOrder)
// Write the header
.putInt(nullCount)
.put(nulls)
logDebug(s"Compressor for [$columnName]: $encoder, ratio: ${encoder.compressionRatio}")
encoder.compress(nonNullBuffer, compressedBuffer)
}
}
| chenc10/Spark-PAF | sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnBuilder.scala | Scala | apache-2.0 | 3,750 |
/*
* Copyright 2013 agwlvssainokuni
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import models.Admin
import play.api.data._
import play.api.data.Forms._
object AdminForm {
val LOGIN_ID = "loginId"
val LOGIN_ID_MIN = 1
val LOGIN_ID_MAX = 32
val NICKNAME = "nickname"
val NICKNAME_MIN = 1
val NICKNAME_MAX = 256
val PASSWORD = "password"
val PASSWORD_MIN = 1
val PASSWORD_MAX = 32
val CONFIRM = "confirm"
val CONFIRM_MIN = 1
val CONFIRM_MAX = 32
val adminForm: Form[Admin] = Form(mapping(
LOGIN_ID -> nonEmptyText(LOGIN_ID_MIN, LOGIN_ID_MAX),
NICKNAME -> nonEmptyText(NICKNAME_MIN, NICKNAME_MAX))(Admin.apply)(Admin.unapply))
val passwdForm: Form[(String, String)] = Form(tuple(
PASSWORD -> nonEmptyText(PASSWORD_MIN, PASSWORD_MAX),
CONFIRM -> nonEmptyText(CONFIRM_MIN, CONFIRM_MAX)))
}
| agwlvssainokuni/lifelog | lifelog-admin/app/controllers/AdminForm.scala | Scala | apache-2.0 | 1,376 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*************************************************************************************
*/
package com.normation.utils
import scala.xml.{Node,NodeSeq}
import net.liftweb.common._
import scala.xml.XML
import scala.xml.Elem
import org.xml.sax.SAXParseException
import java.io.InputStream
import com.normation.exceptions.TechnicalException
object XmlUtils {
/**
* Retrieve exactly one element of the given name in
* the given node children.
* If subtree is set to true (default to false), look
* for all tree elements, not only direct children.
*/
def getUniqueNode(root:Node, nodeName:String, subtree:Boolean = false) : Box[Node] = {
def checkCardinality(nodes:NodeSeq) : Box[Node] = {
if(nodes.size < 1) Failure("No node found for name %s in %s children with scope %s".format(nodeName, root, if(subtree) "subtree" else "one level"))
else if(nodes.size > 1 ) Failure("More than one node found for name %s in %s children with scope %s".format(nodeName, root, if(subtree) "subtree" else "one level"))
else Full(nodes.head)
}
if(subtree) {
checkCardinality((root.child:NodeSeq) \\\\ nodeName)
} else {
checkCardinality(root \\ nodeName)
}
}
//text version of XmlUtils.getUniqueNode with a default value
//The default value is also applied if node exists but its text is empty
def getUniqueNodeText(root: Node, nodeName: String, default: String) =
getUniqueNode(root, nodeName).map(_.text.trim) match {
case x: EmptyBox => default
case Full(x) => x match {
case "" | null => default
case text => text
}
}
def getAttributeText(node: Node, name: String, default: String) = {
val seq = node \\ ("@" + name)
if (seq.isEmpty) default else seq.head.text
}
/**
* Parse the file denoted by input stream (filePath is only
* for explicit error messages)
*/
def parseXml(is: InputStream, filePath : Option[String] = None) : Box[Elem] = {
val name = filePath.getOrElse("[unknown]")
for {
doc <- try {
Full(XML.load(is))
} catch {
case e: SAXParseException =>0
Failure("Unexpected issue with the XML file %s: %s".format(name, e.getMessage), Full(e), Empty)
case e: java.net.MalformedURLException =>
Failure("XML file not found: " + name, Full(e), Empty)
}
nonEmpty <- if (doc.isEmpty) {
Failure("Error when parsing XML file: '%s': the parsed document is empty".format(name))
} else {
Full("ok")
}
} yield {
doc
}
}
/**
* Trim spaces from an XML elem
*/def trim(elt:Elem) = scala.xml.Utility.trim(elt) match {
case e:Elem => e
case x => throw new TechnicalException("Bad returned type for xml.trim. Awaiting an Elem, got: " + x)
}
}
| fanf/rudder-commons | utils/src/main/scala/com/normation/utils/XmlUtils.scala | Scala | apache-2.0 | 3,669 |
/*
* Copyright (C) 2017 Michael Dippery <michael@monkey-robot.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mipadi.jupiter.io
/** Provides mechanisms for working with the terminal. */
package object terminal
| mdippery/jupiter | src/main/scala/com/mipadi/jupiter/io/terminal/package.scala | Scala | apache-2.0 | 745 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.random
import org.apache.spark.annotation.DeveloperApi
/**
* :: DeveloperApi ::
* A class with pseudorandom behavior.
*/
@DeveloperApi
trait Pseudorandom {
/** Set random seed. */
def setSeed(seed: Long)
}
| yelshater/hadoop-2.3.0 | spark-core_2.10-1.0.0-cdh5.1.0/src/main/scala/org/apache/spark/util/random/Pseudorandom.scala | Scala | apache-2.0 | 1,047 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.exceptions.TestFailedException
import matchers.BePropertyMatcher
import matchers.BePropertyMatchResult
import Matchers._
class ShouldBePropertyMatcherSpec extends FunSpec with ReturnsNormallyThrowsAssertion with BookPropertyMatchers {
// Checking for a specific size
describe("The be (BePropertyMatcher) syntax") {
case class MyFile(
val name: String,
val file: Boolean,
val isDirectory: Boolean
)
class FileBePropertyMatcher extends BePropertyMatcher[MyFile] {
def apply(file: MyFile) = {
new BePropertyMatchResult(file.file, "file")
}
}
class DirectoryBePropertyMatcher extends BePropertyMatcher[MyFile] {
def apply(file: MyFile) = {
new BePropertyMatchResult(file.isDirectory, "directory")
}
}
def file = new FileBePropertyMatcher
def directory = new DirectoryBePropertyMatcher
val myFile = new MyFile("temp.txt", true, false)
val book = new Book("A Tale of Two Cities", "Dickens", 1859, 45, true)
val badBook = new Book("A Tale of Two Cities", "Dickens", 1859, 45, false)
it("should do nothing if the property is true") {
book should be (goodRead)
book should be a (goodRead)
book should be an (goodRead)
// book shouldBe goodRead
// book shouldBe a (goodRead)
// book shouldBe an (goodRead)
}
it("should throw TestFailedException if the property is false") {
val caught1 = intercept[TestFailedException] {
badBook should be (goodRead)
}
assert(caught1.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,false) was not goodRead")
val caught2 = intercept[TestFailedException] {
badBook should be a (goodRead)
}
assert(caught2.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,false) was not a goodRead")
val caught3 = intercept[TestFailedException] {
badBook should be an (goodRead)
}
assert(caught3.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,false) was not an goodRead")
/*
val caught4 = intercept[TestFailedException] {
badBook shouldBe goodRead
}
assert(caught4.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,false) was not goodRead")
val caught5 = intercept[TestFailedException] {
badBook shouldBe a (goodRead)
}
assert(caught5.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,false) was not a goodRead")
val caught6 = intercept[TestFailedException] {
badBook shouldBe an (goodRead)
}
assert(caught6.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,false) was not an goodRead")
*/
}
it("should do nothing if the property is false, when used with not") {
badBook should not be (goodRead)
badBook should not be a (goodRead)
badBook should not be an (goodRead)
}
it("should throw TestFailedException if the property is true, when used with not") {
val caught1 = intercept[TestFailedException] {
book should not be (goodRead)
}
assert(caught1.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,true) was goodRead")
val caught2 = intercept[TestFailedException] {
book should not be a (goodRead)
}
assert(caught2.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,true) was a goodRead")
val caught3 = intercept[TestFailedException] {
book should not be an (goodRead)
}
assert(caught3.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,true) was an goodRead")
val caught4 = intercept[TestFailedException] {
book should not (be (goodRead))
}
assert(caught4.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,true) was goodRead")
val caught5 = intercept[TestFailedException] {
book should not (be a (goodRead))
}
assert(caught5.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,true) was a goodRead")
val caught6 = intercept[TestFailedException] {
book should not (be an (goodRead))
}
assert(caught6.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,true) was an goodRead")
val caught7 = intercept[TestFailedException] {
book should (not (be (goodRead)))
}
assert(caught7.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,true) was goodRead")
val caught8 = intercept[TestFailedException] {
book should (not (be a (goodRead)))
}
assert(caught8.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,true) was a goodRead")
val caught9 = intercept[TestFailedException] {
book should (not (be an (goodRead)))
}
assert(caught9.getMessage === "Book(A Tale of Two Cities,Dickens,1859,45,true) was an goodRead")
}
it("should do nothing if the the property returns true, when used in a logical-and expression") {
myFile should ((be (file)) and (be (file)))
myFile should (be (file) and (be (file)))
myFile should (be (file) and be (file))
myFile should ((be a (file)) and (be a (file)))
myFile should (be a (file) and (be a (file)))
myFile should (be a (file) and be a (file))
myFile should ((be an (file)) and (be an (file)))
myFile should (be an (file) and (be an (file)))
myFile should (be an (file) and be an (file))
}
it("should throw TestFailedException if at least one of the properties returns false, when used in a logical-and expression") {
// first false
val caught1 = intercept[TestFailedException] {
myFile should ((be (directory)) and (be (file)))
}
assert(caught1.getMessage === "MyFile(temp.txt,true,false) was not directory")
val caught2 = intercept[TestFailedException] {
myFile should (be (directory) and (be (file)))
}
assert(caught2.getMessage === "MyFile(temp.txt,true,false) was not directory")
val caught3 = intercept[TestFailedException] {
myFile should (be (directory) and be (file))
}
assert(caught3.getMessage === "MyFile(temp.txt,true,false) was not directory")
val caught4 = intercept[TestFailedException] {
myFile should ((be a (directory)) and (be a (file)))
}
assert(caught4.getMessage === "MyFile(temp.txt,true,false) was not a directory")
val caught5 = intercept[TestFailedException] {
myFile should (be a (directory) and (be a (file)))
}
assert(caught5.getMessage === "MyFile(temp.txt,true,false) was not a directory")
val caught6 = intercept[TestFailedException] {
myFile should (be a (directory) and be a (file))
}
assert(caught6.getMessage === "MyFile(temp.txt,true,false) was not a directory")
val caught7 = intercept[TestFailedException] {
myFile should ((be an (directory)) and (be an (file)))
}
assert(caught7.getMessage === "MyFile(temp.txt,true,false) was not an directory")
val caught8 = intercept[TestFailedException] {
myFile should (be an (directory) and (be an (file)))
}
assert(caught8.getMessage === "MyFile(temp.txt,true,false) was not an directory")
val caught9 = intercept[TestFailedException] {
myFile should (be an (directory) and be an (file))
}
assert(caught9.getMessage === "MyFile(temp.txt,true,false) was not an directory")
// second false
val caught10 = intercept[TestFailedException] {
myFile should ((be (file)) and (be (directory)))
}
assert(caught10.getMessage === "MyFile(temp.txt,true,false) was file, but MyFile(temp.txt,true,false) was not directory")
val caught11 = intercept[TestFailedException] {
myFile should (be (file) and (be (directory)))
}
assert(caught11.getMessage === "MyFile(temp.txt,true,false) was file, but MyFile(temp.txt,true,false) was not directory")
val caught12 = intercept[TestFailedException] {
myFile should (be (file) and be (directory))
}
assert(caught12.getMessage === "MyFile(temp.txt,true,false) was file, but MyFile(temp.txt,true,false) was not directory")
val caught13 = intercept[TestFailedException] {
myFile should ((be a (file)) and (be a (directory)))
}
assert(caught13.getMessage === "MyFile(temp.txt,true,false) was a file, but MyFile(temp.txt,true,false) was not a directory")
val caught14 = intercept[TestFailedException] {
myFile should (be a (file) and (be a (directory)))
}
assert(caught14.getMessage === "MyFile(temp.txt,true,false) was a file, but MyFile(temp.txt,true,false) was not a directory")
val caught15 = intercept[TestFailedException] {
myFile should (be a (file) and be a (directory))
}
assert(caught15.getMessage === "MyFile(temp.txt,true,false) was a file, but MyFile(temp.txt,true,false) was not a directory")
val caught16 = intercept[TestFailedException] {
myFile should ((be an (file)) and (be an (directory)))
}
assert(caught16.getMessage === "MyFile(temp.txt,true,false) was an file, but MyFile(temp.txt,true,false) was not an directory")
val caught17 = intercept[TestFailedException] {
myFile should (be an (file) and (be an (directory)))
}
assert(caught17.getMessage === "MyFile(temp.txt,true,false) was an file, but MyFile(temp.txt,true,false) was not an directory")
val caught18 = intercept[TestFailedException] {
myFile should (be an (file) and be an (directory))
}
assert(caught18.getMessage === "MyFile(temp.txt,true,false) was an file, but MyFile(temp.txt,true,false) was not an directory")
// both false
val caught19 = intercept[TestFailedException] {
myFile should ((be (directory)) and (be (directory)))
}
assert(caught19.getMessage === "MyFile(temp.txt,true,false) was not directory")
val caught20 = intercept[TestFailedException] {
myFile should (be (directory) and (be (directory)))
}
assert(caught20.getMessage === "MyFile(temp.txt,true,false) was not directory")
val caught21 = intercept[TestFailedException] {
myFile should (be (directory) and be (directory))
}
assert(caught21.getMessage === "MyFile(temp.txt,true,false) was not directory")
val caught22 = intercept[TestFailedException] {
myFile should ((be a (directory)) and (be a (directory)))
}
assert(caught22.getMessage === "MyFile(temp.txt,true,false) was not a directory")
val caught23 = intercept[TestFailedException] {
myFile should (be a (directory) and (be a (directory)))
}
assert(caught23.getMessage === "MyFile(temp.txt,true,false) was not a directory")
val caught24 = intercept[TestFailedException] {
myFile should (be a (directory) and be a (directory))
}
assert(caught24.getMessage === "MyFile(temp.txt,true,false) was not a directory")
val caught25 = intercept[TestFailedException] {
myFile should ((be an (directory)) and (be an (directory)))
}
assert(caught25.getMessage === "MyFile(temp.txt,true,false) was not an directory")
val caught26 = intercept[TestFailedException] {
myFile should (be an (directory) and (be an (directory)))
}
assert(caught26.getMessage === "MyFile(temp.txt,true,false) was not an directory")
val caught27 = intercept[TestFailedException] {
myFile should (be an (directory) and be an (directory))
}
assert(caught27.getMessage === "MyFile(temp.txt,true,false) was not an directory")
}
it("should do nothing if the property returns true, when used in a logical-or expression") {
// second true
myFile should ((be (directory)) or (be (file)))
myFile should (be (directory) or (be (file)))
myFile should (be (directory) or be (file))
myFile should ((be a (directory)) or (be a (file)))
myFile should (be a (directory) or (be a (file)))
myFile should (be a (directory) or be a (file))
myFile should ((be an (directory)) or (be an (file)))
myFile should (be an (directory) or (be an (file)))
myFile should (be an (directory) or be an (file))
// first true
myFile should ((be (file)) or (be (directory)))
myFile should (be (file) or (be (directory)))
myFile should (be (file) or be (directory))
myFile should ((be a (file)) or (be a (directory)))
myFile should (be a (file) or (be a (directory)))
myFile should (be a (file) or be a (directory))
myFile should ((be an (file)) or (be an (directory)))
myFile should (be an (file) or (be an (directory)))
myFile should (be an (file) or be an (directory))
// both true
myFile should ((be (file)) or (be (file)))
myFile should (be (file) or (be (file)))
myFile should (be (file) or be (file))
myFile should ((be a (file)) or (be a (file)))
myFile should (be a (file) or (be a (file)))
myFile should (be a (file) or be a (file))
myFile should ((be an (file)) or (be an (file)))
myFile should (be an (file) or (be an (file)))
myFile should (be an (file) or be an (file))
}
it("should throw TestFailedException if the both properties return false, when used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
myFile should ((be (directory)) or (be (directory)))
}
assert(caught1.getMessage === "MyFile(temp.txt,true,false) was not directory, and MyFile(temp.txt,true,false) was not directory")
val caught2 = intercept[TestFailedException] {
myFile should (be (directory) or (be (directory)))
}
assert(caught2.getMessage === "MyFile(temp.txt,true,false) was not directory, and MyFile(temp.txt,true,false) was not directory")
val caught3 = intercept[TestFailedException] {
myFile should (be (directory) or be (directory))
}
assert(caught3.getMessage === "MyFile(temp.txt,true,false) was not directory, and MyFile(temp.txt,true,false) was not directory")
val caught4 = intercept[TestFailedException] {
myFile should ((be a (directory)) or (be a (directory)))
}
assert(caught4.getMessage === "MyFile(temp.txt,true,false) was not a directory, and MyFile(temp.txt,true,false) was not a directory")
val caught5 = intercept[TestFailedException] {
myFile should (be a (directory) or (be a (directory)))
}
assert(caught5.getMessage === "MyFile(temp.txt,true,false) was not a directory, and MyFile(temp.txt,true,false) was not a directory")
val caught6 = intercept[TestFailedException] {
myFile should (be a (directory) or be a (directory))
}
assert(caught6.getMessage === "MyFile(temp.txt,true,false) was not a directory, and MyFile(temp.txt,true,false) was not a directory")
val caught7 = intercept[TestFailedException] {
myFile should ((be an (directory)) or (be an (directory)))
}
assert(caught7.getMessage === "MyFile(temp.txt,true,false) was not an directory, and MyFile(temp.txt,true,false) was not an directory")
val caught8 = intercept[TestFailedException] {
myFile should (be an (directory) or (be an (directory)))
}
assert(caught8.getMessage === "MyFile(temp.txt,true,false) was not an directory, and MyFile(temp.txt,true,false) was not an directory")
val caught9 = intercept[TestFailedException] {
myFile should (be an (directory) or be an (directory))
}
assert(caught9.getMessage === "MyFile(temp.txt,true,false) was not an directory, and MyFile(temp.txt,true,false) was not an directory")
}
it("should do nothing if the property returns false, when used in a logical-and expression with not") {
myFile should (not (be (directory)) and not (be (directory)))
myFile should ((not be (directory)) and (not be (directory)))
myFile should (not be (directory) and not be (directory))
myFile should (not (be a (directory)) and not (be a (directory)))
myFile should ((not be a (directory)) and (not be a (directory)))
myFile should (not be a (directory) and not be a (directory))
myFile should (not (be an (directory)) and not (be an (directory)))
myFile should ((not be an (directory)) and (not be an (directory)))
myFile should (not be an (directory) and not be an (directory))
}
it("should throw TestFailedException if at least one property returns false, when used in a logical-and expression with not") {
// second false
val caught1 = intercept[TestFailedException] {
myFile should (not (be (directory)) and not (be (file)))
}
assert(caught1.getMessage === "MyFile(temp.txt,true,false) was not directory, but MyFile(temp.txt,true,false) was file")
val caught2 = intercept[TestFailedException] {
myFile should ((not be (directory)) and (not be (file)))
}
assert(caught2.getMessage === "MyFile(temp.txt,true,false) was not directory, but MyFile(temp.txt,true,false) was file")
val caught3 = intercept[TestFailedException] {
myFile should (not be (directory) and not be (file))
}
assert(caught3.getMessage === "MyFile(temp.txt,true,false) was not directory, but MyFile(temp.txt,true,false) was file")
val caught4 = intercept[TestFailedException] {
myFile should (not (be a (directory)) and not (be a (file)))
}
assert(caught4.getMessage === "MyFile(temp.txt,true,false) was not a directory, but MyFile(temp.txt,true,false) was a file")
val caught5 = intercept[TestFailedException] {
myFile should ((not be a (directory)) and (not be a (file)))
}
assert(caught5.getMessage === "MyFile(temp.txt,true,false) was not a directory, but MyFile(temp.txt,true,false) was a file")
val caught6 = intercept[TestFailedException] {
myFile should (not be a (directory) and not be a (file))
}
assert(caught6.getMessage === "MyFile(temp.txt,true,false) was not a directory, but MyFile(temp.txt,true,false) was a file")
val caught7 = intercept[TestFailedException] {
myFile should (not (be an (directory)) and not (be an (file)))
}
assert(caught7.getMessage === "MyFile(temp.txt,true,false) was not an directory, but MyFile(temp.txt,true,false) was an file")
val caught8 = intercept[TestFailedException] {
myFile should ((not be an (directory)) and (not be an (file)))
}
assert(caught8.getMessage === "MyFile(temp.txt,true,false) was not an directory, but MyFile(temp.txt,true,false) was an file")
val caught9 = intercept[TestFailedException] {
myFile should (not be an (directory) and not be an (file))
}
assert(caught9.getMessage === "MyFile(temp.txt,true,false) was not an directory, but MyFile(temp.txt,true,false) was an file")
// first false
val caught10 = intercept[TestFailedException] {
myFile should (not (be (file)) and not (be (directory)))
}
assert(caught10.getMessage === "MyFile(temp.txt,true,false) was file")
val caught11 = intercept[TestFailedException] {
myFile should ((not be (file)) and (not be (directory)))
}
assert(caught11.getMessage === "MyFile(temp.txt,true,false) was file")
val caught12 = intercept[TestFailedException] {
myFile should (not be (file) and not be (directory))
}
assert(caught12.getMessage === "MyFile(temp.txt,true,false) was file")
val caught13 = intercept[TestFailedException] {
myFile should (not (be a (file)) and not (be a (directory)))
}
assert(caught13.getMessage === "MyFile(temp.txt,true,false) was a file")
val caught14 = intercept[TestFailedException] {
myFile should ((not be a (file)) and (not be a (directory)))
}
assert(caught14.getMessage === "MyFile(temp.txt,true,false) was a file")
val caught15 = intercept[TestFailedException] {
myFile should (not be a (file) and not be a (directory))
}
assert(caught15.getMessage === "MyFile(temp.txt,true,false) was a file")
val caught16 = intercept[TestFailedException] {
myFile should (not (be an (file)) and not (be an (directory)))
}
assert(caught16.getMessage === "MyFile(temp.txt,true,false) was an file")
val caught17 = intercept[TestFailedException] {
myFile should ((not be an (file)) and (not be an (directory)))
}
assert(caught17.getMessage === "MyFile(temp.txt,true,false) was an file")
val caught18 = intercept[TestFailedException] {
myFile should (not be an (file) and not be an (directory))
}
assert(caught18.getMessage === "MyFile(temp.txt,true,false) was an file")
// both false
val caught19 = intercept[TestFailedException] {
myFile should (not (be (file)) and not (be (file)))
}
assert(caught19.getMessage === "MyFile(temp.txt,true,false) was file")
val caught20 = intercept[TestFailedException] {
myFile should ((not be (file)) and (not be (file)))
}
assert(caught20.getMessage === "MyFile(temp.txt,true,false) was file")
val caught21 = intercept[TestFailedException] {
myFile should (not be (file) and not be (file))
}
assert(caught21.getMessage === "MyFile(temp.txt,true,false) was file")
val caught22 = intercept[TestFailedException] {
myFile should (not (be a (file)) and not (be a (file)))
}
assert(caught22.getMessage === "MyFile(temp.txt,true,false) was a file")
val caught23 = intercept[TestFailedException] {
myFile should ((not be a (file)) and (not be a (file)))
}
assert(caught23.getMessage === "MyFile(temp.txt,true,false) was a file")
val caught24 = intercept[TestFailedException] {
myFile should (not be a (file) and not be a (file))
}
assert(caught24.getMessage === "MyFile(temp.txt,true,false) was a file")
val caught25 = intercept[TestFailedException] {
myFile should (not (be an (file)) and not (be an (file)))
}
assert(caught25.getMessage === "MyFile(temp.txt,true,false) was an file")
val caught26 = intercept[TestFailedException] {
myFile should ((not be an (file)) and (not be an (file)))
}
assert(caught26.getMessage === "MyFile(temp.txt,true,false) was an file")
val caught27 = intercept[TestFailedException] {
myFile should (not be an (file) and not be an (file))
}
assert(caught27.getMessage === "MyFile(temp.txt,true,false) was an file")
}
it("should do nothing if the property returns false, when used in a logical-or expression with not") {
// first true
myFile should (not (be (directory)) or not (be (file)))
myFile should ((not be (directory)) or (not be (file)))
myFile should (not be (directory) or not be (file))
myFile should (not (be a (directory)) or not (be a (file)))
myFile should ((not be a (directory)) or (not be a (file)))
myFile should (not be a (directory) or not be a (file))
myFile should (not (be an (directory)) or not (be an (file)))
myFile should ((not be an (directory)) or (not be an (file)))
myFile should (not be an (directory) or not be an (file))
// second true
myFile should (not (be (file)) or not (be (directory)))
myFile should ((not be (file)) or (not be (directory)))
myFile should (not be (file) or not be (directory))
myFile should (not (be a (file)) or not (be a (directory)))
myFile should ((not be a (file)) or (not be a (directory)))
myFile should (not be a (file) or not be a (directory))
myFile should (not (be an (file)) or not (be an (directory)))
myFile should ((not be an (file)) or (not be an (directory)))
myFile should (not be an (file) or not be an (directory))
// both true
myFile should (not (be (directory)) or not (be (directory)))
myFile should ((not be (directory)) or (not be (directory)))
myFile should (not be (directory) or not be (directory))
myFile should (not (be a (directory)) or not (be a (directory)))
myFile should ((not be a (directory)) or (not be a (directory)))
myFile should (not be a (directory) or not be a (directory))
myFile should (not (be an (directory)) or not (be an (directory)))
myFile should ((not be an (directory)) or (not be an (directory)))
myFile should (not be an (directory) or not be an (directory))
}
it("should throw TestFailedException if both properties return false, when used in a logical-or expression with not") {
val caught1 = intercept[TestFailedException] {
myFile should (not (be (file)) or not (be (file)))
}
assert(caught1.getMessage === "MyFile(temp.txt,true,false) was file, and MyFile(temp.txt,true,false) was file")
val caught2 = intercept[TestFailedException] {
myFile should ((not be (file)) or (not be (file)))
}
assert(caught2.getMessage === "MyFile(temp.txt,true,false) was file, and MyFile(temp.txt,true,false) was file")
val caught3 = intercept[TestFailedException] {
myFile should (not be (file) or not be (file))
}
assert(caught3.getMessage === "MyFile(temp.txt,true,false) was file, and MyFile(temp.txt,true,false) was file")
val caught4 = intercept[TestFailedException] {
myFile should (not (be a (file)) or not (be a (file)))
}
assert(caught4.getMessage === "MyFile(temp.txt,true,false) was a file, and MyFile(temp.txt,true,false) was a file")
val caught5 = intercept[TestFailedException] {
myFile should ((not be a (file)) or (not be a (file)))
}
assert(caught5.getMessage === "MyFile(temp.txt,true,false) was a file, and MyFile(temp.txt,true,false) was a file")
val caught6 = intercept[TestFailedException] {
myFile should (not be a (file) or not be a (file))
}
assert(caught6.getMessage === "MyFile(temp.txt,true,false) was a file, and MyFile(temp.txt,true,false) was a file")
val caught7 = intercept[TestFailedException] {
myFile should (not (be an (file)) or not (be an (file)))
}
assert(caught7.getMessage === "MyFile(temp.txt,true,false) was an file, and MyFile(temp.txt,true,false) was an file")
val caught8 = intercept[TestFailedException] {
myFile should ((not be an (file)) or (not be an (file)))
}
assert(caught8.getMessage === "MyFile(temp.txt,true,false) was an file, and MyFile(temp.txt,true,false) was an file")
val caught9 = intercept[TestFailedException] {
myFile should (not be an (file) or not be an (file))
}
assert(caught9.getMessage === "MyFile(temp.txt,true,false) was an file, and MyFile(temp.txt,true,false) was an file")
}
}
describe("the compose method on BePropertyMatcher") {
it("should return another BePropertyMatcher") {
val book1 = new Book("A Tale of Two Cities", "Dickens", 1859, 45, true)
val book2 = new Book("The Handmaid's Tail", "Atwood", 1985, 200, true)
val badBook = new Book("Some Bad Book", "Bad Author", 1999, 150, false)
case class Library(books: List[Book])
val goodLibrary = Library(List(book1, book2))
val badLibrary = Library(List(badBook, book1, book2))
val filledWithGoodReads = goodRead compose { (lib: Library) => lib.books.head }
goodLibrary should be (filledWithGoodReads)
badLibrary should not be (filledWithGoodReads)
// goodLibrary shouldBe filledWithGoodReads
}
}
describe("A factory method on BePropertyMatcher's companion object") {
it("should produce a be-property-matcher that executes the passed function when its apply is called") {
val f = { (s: String) => BePropertyMatchResult(s.isEmpty, "empty") }
val empty = BePropertyMatcher(f)
"" should be (empty)
"x" should not be (empty)
"xx" should not be (empty)
"xxx" should not be (empty)
"xxxx" should not be (empty)
// "" shouldBe empty
}
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/ShouldBePropertyMatcherSpec.scala | Scala | apache-2.0 | 29,007 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.enhancement
trait WarSoul extends FavoredSoulEnhancement
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/enhancement/WarSoul.scala | Scala | apache-2.0 | 755 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2.json
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.json.{JacksonParser, JSONOptionsInRead}
import org.apache.spark.sql.connector.read.PartitionReader
import org.apache.spark.sql.execution.datasources.PartitionedFile
import org.apache.spark.sql.execution.datasources.json.JsonDataSource
import org.apache.spark.sql.execution.datasources.v2._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.Filter
import org.apache.spark.sql.types.StructType
import org.apache.spark.util.SerializableConfiguration
/**
* A factory used to create JSON readers.
*
* @param sqlConf SQL configuration.
* @param broadcastedConf Broadcast serializable Hadoop Configuration.
* @param dataSchema Schema of JSON files.
* @param readDataSchema Required schema of JSON files.
* @param partitionSchema Schema of partitions.
* @param parsedOptions Options for parsing JSON files.
* @param filters The filters pushed down to JSON datasource.
*/
case class JsonPartitionReaderFactory(
sqlConf: SQLConf,
broadcastedConf: Broadcast[SerializableConfiguration],
dataSchema: StructType,
readDataSchema: StructType,
partitionSchema: StructType,
parsedOptions: JSONOptionsInRead,
filters: Seq[Filter]) extends FilePartitionReaderFactory {
override def buildReader(partitionedFile: PartitionedFile): PartitionReader[InternalRow] = {
val actualSchema =
StructType(readDataSchema.filterNot(_.name == parsedOptions.columnNameOfCorruptRecord))
val parser = new JacksonParser(
actualSchema,
parsedOptions,
allowArrayAsStructs = true,
filters)
val iter = JsonDataSource(parsedOptions).readFile(
broadcastedConf.value.value,
partitionedFile,
parser,
readDataSchema)
val fileReader = new PartitionReaderFromIterator[InternalRow](iter)
new PartitionReaderWithPartitionValues(fileReader, readDataSchema,
partitionSchema, partitionedFile.partitionValues)
}
}
| maropu/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonPartitionReaderFactory.scala | Scala | apache-2.0 | 2,903 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.scala
import org.apache.flink.api.common.ExecutionConfig
import org.apache.flink.api.common.functions._
import org.apache.flink.api.common.state.{AggregatingStateDescriptor, ListStateDescriptor, ReducingStateDescriptor}
import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation}
import org.apache.flink.api.java.functions.KeySelector
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.operators.{OneInputStreamOperator, OutputTypeConfigurable}
import org.apache.flink.streaming.api.scala.function.{ProcessWindowFunction, WindowFunction}
import org.apache.flink.streaming.api.transformations.OneInputTransformation
import org.apache.flink.streaming.api.windowing.assigners._
import org.apache.flink.streaming.api.windowing.evictors.CountEvictor
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.triggers.{CountTrigger, EventTimeTrigger, ProcessingTimeTrigger, Trigger}
import org.apache.flink.streaming.api.windowing.windows.{TimeWindow, Window}
import org.apache.flink.streaming.runtime.operators.windowing._
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord
import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness
import org.apache.flink.util.Collector
import org.junit.Assert._
import org.junit.Test
/**
* These tests verify that the api calls on [[WindowedStream]] instantiate the correct
* window operator.
*
* We also create a test harness and push one element into the operator to verify
* that we get some output.
*/
class WindowTranslationTest {
// --------------------------------------------------------------------------
// rich function tests
// --------------------------------------------------------------------------
/**
* .reduce() does not support [[RichReduceFunction]], since the reduce function is used
* internally in a [[org.apache.flink.api.common.state.ReducingState]].
*/
@Test(expected = classOf[UnsupportedOperationException])
def testReduceWithRichReducerFails() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
val source = env.fromElements(("hello", 1), ("hello", 2))
env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
source
.keyBy(0)
.window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.reduce(new RichReduceFunction[(String, Int)] {
override def reduce(value1: (String, Int), value2: (String, Int)) = null
})
fail("exception was not thrown")
}
/**
* .reduce() does not support [[RichReduceFunction]], since the reduce function is used
* internally in a [[org.apache.flink.api.common.state.ReducingState]].
*/
@Test(expected = classOf[UnsupportedOperationException])
def testAggregateWithRichFunctionFails() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
val source = env.fromElements(("hello", 1), ("hello", 2))
env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
source
.keyBy(0)
.window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.aggregate(new DummyRichAggregator())
fail("exception was not thrown")
}
// --------------------------------------------------------------------------
// merging window checks
// --------------------------------------------------------------------------
@Test
def testMergingAssignerWithNonMergingTriggerFails() {
// verify that we check for trigger compatibility
val env = StreamExecutionEnvironment.getExecutionEnvironment
val windowedStream = env.fromElements("Hello", "Ciao")
.keyBy(x => x)
.window(EventTimeSessionWindows.withGap(Time.seconds(5)))
try
windowedStream.trigger(new Trigger[String, TimeWindow]() {
def onElement(
element: String,
timestamp: Long,
window: TimeWindow,
ctx: Trigger.TriggerContext) = null
def onProcessingTime(time: Long, window: TimeWindow, ctx: Trigger.TriggerContext) = null
def onEventTime(time: Long, window: TimeWindow, ctx: Trigger.TriggerContext) = null
override def canMerge = false
def clear(window: TimeWindow, ctx: Trigger.TriggerContext) {}
})
catch {
case _: UnsupportedOperationException =>
// expected
// use a catch to ensure that the exception is thrown by the fold
return
}
fail("The trigger call should fail.")
}
@Test
def testMergingWindowsWithEvictor() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(EventTimeSessionWindows.withGap(Time.seconds(1)))
.evictor(CountEvictor.of(2))
.process(new TestProcessWindowFunction)
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[EventTimeSessionWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
// --------------------------------------------------------------------------
// reduce() tests
// --------------------------------------------------------------------------
@Test
def testReduceEventTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.reduce(new DummyReducer)
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testReduceProcessingTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(SlidingProcessingTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.reduce(new DummyReducer)
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingProcessingTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testReduceEventTimeWithScalaFunction() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.reduce( (x, _) => x )
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testReduceWithWindowFunctionEventTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1)))
.reduce(
new DummyReducer, new WindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def apply(
key: String,
window: TimeWindow,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x))
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testReduceWithWindowFunctionProcessingTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingProcessingTimeWindows.of(Time.seconds(1)))
.reduce(
new DummyReducer, new WindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def apply(
key: String,
window: TimeWindow,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x))
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testReduceWithProcessWindowFunctionEventTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1)))
.reduce(
new DummyReducer,
new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def process(
key: String,
window: Context,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x))
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testReduceWithProcessWindowFunctionProcessingTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingProcessingTimeWindows.of(Time.seconds(1)))
.reduce(
new DummyReducer,
new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def process(
key: String,
window: Context,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x))
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testApplyWithPreReducerEventTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1)))
.apply(
new DummyReducer, new WindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def apply(
key: String,
window: TimeWindow,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x))
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testApplyWithPreReducerAndEvictor() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1)))
.evictor(CountEvictor.of(100))
.apply(
new DummyReducer,
new WindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def apply(
key: String,
window: TimeWindow,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach ( x => out.collect(x))
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testReduceWithWindowFunctionEventTimeWithScalaFunction() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1)))
.reduce(
{ (x, _) => x },
{
(_: String, _: TimeWindow, in: Iterable[(String, Int)], out: Collector[(String, Int)]) =>
in foreach { x => out.collect(x)}
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
// --------------------------------------------------------------------------
// aggregate() tests
// --------------------------------------------------------------------------
@Test
def testAggregateEventTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.aggregate(new DummyAggregator())
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testAggregateProcessingTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(SlidingProcessingTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.aggregate(new DummyAggregator())
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingProcessingTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testAggregateWithWindowFunctionEventTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1)))
.aggregate(new DummyAggregator(), new TestWindowFunction())
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testAggregateWithWindowFunctionProcessingTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingProcessingTimeWindows.of(Time.seconds(1)))
.aggregate(new DummyAggregator(), new TestWindowFunction())
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testAggregateWithProcessWindowFunctionEventTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1)))
.aggregate(new DummyAggregator(), new TestProcessWindowFunction())
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testAggregateWithProcessWindowFunctionProcessingTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingProcessingTimeWindows.of(Time.seconds(1)))
.aggregate(new DummyAggregator(), new TestProcessWindowFunction())
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testAggregateWithWindowFunctionEventTimeWithScalaFunction() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1)))
.aggregate(
new DummyAggregator(),
{ (_: String, _: TimeWindow, in: Iterable[(String, Int)], out: Collector[(String, Int)]) =>
in foreach { x => out.collect(x)}
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[AggregatingStateDescriptor[_, _, _]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
// --------------------------------------------------------------------------
// apply() tests
// --------------------------------------------------------------------------
@Test
def testApplyEventTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.apply(
new WindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def apply(
key: String,
window: TimeWindow,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))}
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testApplyProcessingTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingProcessingTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.apply(
new WindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def apply(
key: String,
window: TimeWindow,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))}
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testProcessEventTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.process(
new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def process(
key: String,
window: Context,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))}
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testProcessProcessingTime() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingProcessingTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.process(
new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def process(
key: String,
window: Context,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))}
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[ProcessingTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingProcessingTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testApplyEventTimeWithScalaFunction() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.apply { (key, window, in, out: Collector[(String, Int)]) =>
in foreach { x => out.collect(x)}
}
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testReduceWithCustomTrigger() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.trigger(CountTrigger.of(1))
.reduce(new DummyReducer)
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[CountTrigger[_]])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ReducingStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testApplyWithCustomTrigger() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.trigger(CountTrigger.of(1))
.apply(
new WindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def apply(
key: String,
window: TimeWindow,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))}
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[CountTrigger[_]])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testProcessWithCustomTrigger() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.trigger(CountTrigger.of(1))
.process(
new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def process(
key: String,
window: Context,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))}
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[CountTrigger[_]])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testReduceWithEvictor() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.evictor(CountEvictor.of(100))
.reduce(new DummyReducer)
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[EvictingWindowOperator[_, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[
EvictingWindowOperator[String, (String, Int), (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getEvictor.isInstanceOf[CountEvictor[_]])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testReduceWithEvictorAndProcessFunction() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.evictor(CountEvictor.of(100))
.reduce(new DummyReducer, new TestProcessWindowFunction)
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[EvictingWindowOperator[_, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[
EvictingWindowOperator[String, (String, Int), (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getEvictor.isInstanceOf[CountEvictor[_]])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testAggregateWithEvictor() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.evictor(CountEvictor.of(100))
.aggregate(new DummyAggregator())
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testAggregateWithEvictorAndProcessFunction() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(SlidingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.evictor(CountEvictor.of(100))
.aggregate(new DummyAggregator(), new TestProcessWindowFunction)
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[WindowOperator[_, _, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[WindowOperator[String, (String, Int), _, (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[SlidingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testApplyWithEvictor() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.evictor(CountEvictor.of(100))
.apply(
new WindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def apply(
key: String,
window: TimeWindow,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))}
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[EvictingWindowOperator[_, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[EvictingWindowOperator[String, (String, Int), (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getEvictor.isInstanceOf[CountEvictor[_]])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
@Test
def testProcessWithEvictor() {
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
val source = env.fromElements(("hello", 1), ("hello", 2))
val window1 = source
.keyBy(_._1)
.window(TumblingEventTimeWindows.of(Time.seconds(1), Time.milliseconds(100)))
.evictor(CountEvictor.of(100))
.process(
new ProcessWindowFunction[(String, Int), (String, Int), String, TimeWindow] {
override def process(
key: String,
window: Context,
input: Iterable[(String, Int)],
out: Collector[(String, Int)]): Unit = input foreach {x => out.collect((x._1, x._2))}
})
val transform = window1
.javaStream
.getTransformation
.asInstanceOf[OneInputTransformation[(String, Int), (String, Int)]]
val operator = transform.getOperator
assertTrue(operator.isInstanceOf[EvictingWindowOperator[_, _, _, _ <: Window]])
val winOperator = operator
.asInstanceOf[EvictingWindowOperator[String, (String, Int), (String, Int), _ <: Window]]
assertTrue(winOperator.getTrigger.isInstanceOf[EventTimeTrigger])
assertTrue(winOperator.getEvictor.isInstanceOf[CountEvictor[_]])
assertTrue(winOperator.getWindowAssigner.isInstanceOf[TumblingEventTimeWindows])
assertTrue(winOperator.getStateDescriptor.isInstanceOf[ListStateDescriptor[_]])
processElementAndEnsureOutput[String, (String, Int), (String, Int)](
winOperator,
winOperator.getKeySelector,
BasicTypeInfo.STRING_TYPE_INFO,
("hello", 1))
}
/**
* Ensure that we get some output from the given operator when pushing in an element and
* setting watermark and processing time to `Long.MaxValue`.
*/
@throws[Exception]
private def processElementAndEnsureOutput[K, IN, OUT](
operator: OneInputStreamOperator[IN, OUT],
keySelector: KeySelector[IN, K],
keyType: TypeInformation[K],
element: IN) {
val testHarness =
new KeyedOneInputStreamOperatorTestHarness[K, IN, OUT](operator, keySelector, keyType)
if (operator.isInstanceOf[OutputTypeConfigurable[String]]) {
// use a dummy type since window functions just need the ExecutionConfig
// this is also only needed for Fold, which we're getting rid off soon.
operator.asInstanceOf[OutputTypeConfigurable[String]]
.setOutputType(BasicTypeInfo.STRING_TYPE_INFO, new ExecutionConfig)
}
testHarness.open()
testHarness.setProcessingTime(0)
testHarness.processWatermark(Long.MinValue)
testHarness.processElement(new StreamRecord[IN](element, 0))
// provoke any processing-time/event-time triggers
testHarness.setProcessingTime(Long.MaxValue)
testHarness.processWatermark(Long.MaxValue)
// we at least get the two watermarks and should also see an output element
assertTrue(testHarness.getOutput.size >= 3)
testHarness.close()
}
}
class DummyReducer extends ReduceFunction[(String, Int)] {
override def reduce(value1: (String, Int), value2: (String, Int)): (String, Int) = {
value1
}
}
class DummyAggregator extends AggregateFunction[(String, Int), (String, Int), (String, Int)] {
override def createAccumulator(): (String, Int) = ("", 0)
override def merge(a: (String, Int), b: (String, Int)): (String, Int) = a
override def getResult(accumulator: (String, Int)): (String, Int) = accumulator
override def add(value: (String, Int), accumulator: (String, Int)): (String, Int) = accumulator
}
class DummyRichAggregator extends RichAggregateFunction[(String, Int), (String, Int), (String, Int)]
{
override def createAccumulator(): (String, Int) = ("", 0)
override def merge(a: (String, Int), b: (String, Int)): (String, Int) = a
override def getResult(accumulator: (String, Int)): (String, Int) = accumulator
override def add(value: (String, Int), accumulator: (String, Int)): (String, Int) = accumulator
}
class TestWindowFunction
extends WindowFunction[(String, Int), (String, String, Int), String, TimeWindow] {
override def apply(
key: String,
window: TimeWindow,
input: Iterable[(String, Int)],
out: Collector[(String, String, Int)]): Unit = {
input.foreach(e => out.collect((e._1, e._1, e._2)))
}
}
class TestProcessWindowFunction
extends ProcessWindowFunction[(String, Int), (String, String, Int), String, TimeWindow] {
override def process(
key: String,
window: Context,
input: Iterable[(String, Int)],
out: Collector[(String, String, Int)]): Unit = {
input.foreach(e => out.collect((e._1, e._1, e._2)))
}
}
| darionyaphet/flink | flink-streaming-scala/src/test/scala/org/apache/flink/streaming/api/scala/WindowTranslationTest.scala | Scala | apache-2.0 | 54,150 |
package com.rklaehn.radixtree
import algebra.Eq
private object Opt {
def apply[A](a: A): Opt[A] = new Opt(a)
def empty[A]: Opt[A] = new Opt[A](null.asInstanceOf[A])
def fromOption[A](a: Option[A]) = a match {
case Some(x) => Opt(x)
case None => Opt.empty[A]
}
}
private class Opt[+A](val ref: A) extends AnyVal {
def isDefined: Boolean = ref != null
def isEmpty: Boolean = ref == null
def get: A = if (ref == null) throw new NoSuchElementException("Opt.empty.get") else ref
def map[B](f: A => B): Opt[B] =
if (ref == null) Opt.empty else Opt(f(ref))
def toOption: Option[A] = if (ref == null) None else Some(ref)
override def toString: String =
if (ref == null) "Opt.empty" else s"Opt($ref)"
}
| rklaehn/radixtree | src/main/scala/com/rklaehn/radixtree/Opt.scala | Scala | apache-2.0 | 737 |
package org.elasticsearch.spark.sql
import scala.collection.Map
import org.apache.commons.logging.Log
import org.apache.commons.logging.LogFactory
import org.apache.spark.Partition
import org.apache.spark.SparkContext
import org.apache.spark.TaskContext
import org.apache.spark.sql.catalyst.expressions.Row
import org.elasticsearch.hadoop.cfg.Settings
import org.elasticsearch.hadoop.rest.InitializationUtils
import org.elasticsearch.hadoop.rest.RestService.PartitionDefinition
import org.elasticsearch.spark.rdd.AbstractEsRDD
import org.elasticsearch.spark.rdd.AbstractEsRDDIterator
import org.elasticsearch.spark.rdd.EsPartition
// while we could have just wrapped the ScalaEsRDD and unpack the top-level data into a Row the issue is the underlying Maps are StructTypes
// and as such need to be mapped as Row resulting in either nested wrapping or using a ValueReader and which point wrapping becomes unyielding since the class signatures clash
private[spark] class ScalaEsRowRDD(
@transient sc: SparkContext,
params: Map[String, String] = Map.empty,
schema: MappingUtils.Schema)
extends AbstractEsRDD[Row](sc, params) {
override def compute(split: Partition, context: TaskContext): ScalaEsRowRDDIterator = {
new ScalaEsRowRDDIterator(context, split.asInstanceOf[EsPartition].esPartition, schema)
}
}
private[spark] class ScalaEsRowRDDIterator(
context: TaskContext,
partition: PartitionDefinition,
schema: MappingUtils.Schema)
extends AbstractEsRDDIterator[Row](context, partition) {
override def getLogger() = LogFactory.getLog(classOf[ScalaEsRowRDD])
override def initReader(settings: Settings, log: Log) = {
InitializationUtils.setValueReaderIfNotSet(settings, classOf[ScalaRowValueReader], log)
// parse the structure and save the order (requested by Spark) for each Row (root and nested)
// since the data returned from Elastic is likely to not be in the same order
MappingUtils.setRowOrder(settings, schema.struct)
}
override def createValue(value: Array[Object]): Row = {
// drop the ID
value(1).asInstanceOf[ScalaEsRow]
}
} | holdenk/elasticsearch-hadoop | spark/sql-12/src/main/scala/org/elasticsearch/spark/sql/ScalaEsRowRDD.scala | Scala | apache-2.0 | 2,104 |
package com.sksamuel.elastic4s.http.search.aggs
import com.sksamuel.elastic4s.http.ScriptBuilderFn
import com.sksamuel.elastic4s.json.{XContentBuilder, XContentFactory}
import com.sksamuel.elastic4s.searches.aggs.GeoCentroidAggregation
object GeoCentroidAggregationBuilder {
def apply(agg: GeoCentroidAggregation): XContentBuilder = {
val builder = XContentFactory.obj.startObject("geo_centroid")
agg.field.foreach(builder.field("field", _))
agg.format.foreach(builder.field("format", _))
agg.missing.foreach(builder.autofield("missing", _))
agg.script.foreach { script =>
builder.rawField("script", ScriptBuilderFn(script))
}
builder.endObject()
AggMetaDataFn(agg, builder)
builder.endObject()
}
}
| Tecsisa/elastic4s | elastic4s-http/src/main/scala/com/sksamuel/elastic4s/http/search/aggs/GeoCentroidAggregationBuilder.scala | Scala | apache-2.0 | 751 |
/**
* This file is part of mycollab-web.
*
* mycollab-web is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-web is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-web. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.shell.view
import com.esofthead.mycollab.core.utils.ScalaUtils
import com.esofthead.mycollab.eventmanager.ApplicationEventListener
import com.esofthead.mycollab.module.crm.view.{CrmModulePresenter, CrmModuleScreenData}
import com.esofthead.mycollab.module.file.view.{FileModuleScreenData, IFileModulePresenter}
import com.esofthead.mycollab.module.project.view.ProjectModulePresenter
import com.esofthead.mycollab.module.project.view.parameters.ProjectModuleScreenData
import com.esofthead.mycollab.module.user.accountsettings.view.AccountModulePresenter
import com.esofthead.mycollab.premium.module.user.accountsettings.view.AccountModuleScreenData
import com.esofthead.mycollab.shell.events.ShellEvent
import com.esofthead.mycollab.vaadin.mvp.{AbstractController, PresenterResolver}
import com.google.common.eventbus.Subscribe
/**
* @author MyCollab Ltd
* @since 5.0.9
*/
class MainViewController(val container: MainView) extends AbstractController {
bind()
private def bind(): Unit = {
this.register(new ApplicationEventListener[ShellEvent.GotoCrmModule]() {
@Subscribe def handle(event: ShellEvent.GotoCrmModule) {
val crmModulePresenter: CrmModulePresenter = PresenterResolver.getPresenter(classOf[CrmModulePresenter])
val screenData: CrmModuleScreenData.GotoModule = new CrmModuleScreenData.GotoModule(ScalaUtils.stringConvertSeqToArray(event.getData))
crmModulePresenter.go(container, screenData)
}
})
this.register(new ApplicationEventListener[ShellEvent.GotoProjectModule]() {
@Subscribe def handle(event: ShellEvent.GotoProjectModule) {
val prjPresenter: ProjectModulePresenter = PresenterResolver.getPresenter(classOf[ProjectModulePresenter])
val screenData: ProjectModuleScreenData.GotoModule = new ProjectModuleScreenData.GotoModule(ScalaUtils.stringConvertSeqToArray(event.getData))
prjPresenter.go(container, screenData)
}
})
this.register(new ApplicationEventListener[ShellEvent.GotoUserAccountModule]() {
@Subscribe def handle(event: ShellEvent.GotoUserAccountModule) {
val presenter: AccountModulePresenter = PresenterResolver.getPresenter(classOf[AccountModulePresenter])
presenter.go(container, new AccountModuleScreenData.GotoModule(ScalaUtils.stringConvertSeqToArray(event.getData)))
}
})
this.register(new ApplicationEventListener[ShellEvent.GotoFileModule]() {
@Subscribe def handle(event: ShellEvent.GotoFileModule) {
val fileModulePresenter: IFileModulePresenter = PresenterResolver.getPresenter(classOf[IFileModulePresenter])
val screenData: FileModuleScreenData.GotoModule = new FileModuleScreenData.GotoModule(ScalaUtils.stringConvertSeqToArray(event.getData))
fileModulePresenter.go(container, screenData)
}
})
}
}
| maduhu/mycollab | mycollab-web/src/main/scala/com/esofthead/mycollab/shell/view/MainViewController.scala | Scala | agpl-3.0 | 3,725 |
def +[T >: A](e: T)(implicit comparator: Comparator[T]): Tree[T] = this match {
case EmptyTree => Node(EmptyTree, e, EmptyTree)
case Node(left, elem, right) if comparator.compare(e,elem) < 0 => Node(left+e, elem, right)
case Node(left, elem, right) => Node(left, elem, right+e)
}
| lkuczera/scalatypeclasses | steps/Treeplus.scala | Scala | mit | 287 |
package ruc.irm.extractor.keyword
// Copyright 2013 trananh
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import java.io._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/** A simple binary file reader.
*
* @constructor Create a binary file reader.
* @param file The binary file to be read.
* @author trananh
*/
class VecBinaryReader(val file: File) {
/** Overloaded constructor */
def this(filename: String) = this(new File(filename))
/** ASCII values for common delimiter characters */
private val SPACE = 32
private val LF = 10
/** Open input streams */
private val fis = new FileInputStream(file)
private val bis = new BufferedInputStream(fis)
private val dis = new DataInputStream(bis)
/** Close the stream. */
def close(): Unit = {
dis.close()
bis.close()
fis.close()
}
/** Read the next byte.
*
* @return The next byte from the file.
*/
def read(): Byte = dis.readByte()
/** Read the next token as a string, using the provided delimiters as breaking points.
*
* @param delimiters ASCII code of delimiter characters (default to SPACE and LINE-FEED).
* @return String representation of the next token.
*/
def readToken(delimiters: Set[Int] = Set(SPACE, LF)): String = {
val bytes = new ArrayBuffer[Byte]()
val sb = new StringBuilder()
var byte = dis.readByte()
while (!delimiters.contains(byte)) {
bytes.append(byte)
byte = dis.readByte()
}
sb.append(new String(bytes.toArray[Byte])).toString()
}
/** Read next 4 bytes as a floating-point number.
*
* @return The floating-point value of the next 4 bytes.
*/
def readFloat(): Float = {
// We need to reverse the byte order here due to endian-compatibility.
java.lang.Float.intBitsToFloat(java.lang.Integer.reverseBytes(dis.readInt()))
}
}
/** A Scala port of the word2vec model. This interface allows the user to access the vector representations
* output by the word2vec tool, as well as perform some common operations on those vectors. It does NOT
* implement the actual continuous bag-of-words and skip-gram architectures for computing the vectors.
*
* More information on word2vec can be found here: https://code.google.com/p/word2vec/
*
* Example usage:
* {{{
* val model = new ruc.irm.extractor.keyword.Word2Vec()
* model.load("vectors.bin")
* val results = model.distance(List("france"), N = 10)
*
* model.pprint(results)
* }}}
*
* @constructor Create a word2vec model.
* @author trananh
*/
class Word2Vec {
/** Map of words and their associated vector representations */
private val vocab = new mutable.HashMap[String, Array[Float]]()
/** Number of words */
private var numWords = 0
/** Number of floating-point values associated with each word (i.e., length of the vectors) */
private var vecSize = 0
/** Load data from a binary file.
*
* @param filename Path to file containing word projections in the BINARY FORMAT.
* @param limit Maximum number of words to load from file (a.k.a. max vocab size).
* @param normalize Normalize the loaded vectors if true (default to true).
*/
def load(filename: String, limit: Integer = Int.MaxValue, normalize: Boolean = true): Word2Vec = {
// Check edge case
val file = new File(filename)
if (!file.exists()) {
throw new FileNotFoundException("Binary vector file not found <" + file.toString + ">")
}
// Create new reader to read data
val reader = new VecBinaryReader(file)
// Read header info
numWords = Integer.parseInt(reader.readToken())
vecSize = Integer.parseInt(reader.readToken())
println("\\nFile contains " + numWords + " words with vector size " + vecSize)
// Read the vocab words and their associated vector representations
var word = ""
val vector = new Array[Float](vecSize)
var normFactor = 1f
for (_ <- 0 until math.min(numWords, limit)) {
// Read the word
word = reader.readToken()
// Read the vector representation (each vector contains vecSize number of floats)
for (i <- 0 until vector.length) vector(i) = reader.readFloat()
// Store the normalized vector representation, keyed by the word
normFactor = if (normalize) magnitude(vector).toFloat else 1f
vocab.put(word, vector.map(_ / normFactor))
// Eat up the next delimiter character
reader.read()
}
println("Loaded " + math.min(numWords, limit) + " words.\\n")
// Finally, close the reader
reader.close()
this
}
/** Return the number of words in the vocab.
*
* @return Number of words in the vocab.
*/
def wordsCount: Int = numWords
/** Size of the vectors.
*
* @return Size of the vectors.
*/
def vectorSize: Int = vecSize
/** Clear internal data. */
def clear(): Unit = {
vocab.clear()
numWords = 0
vecSize = 0
}
/** Check if the word is present in the vocab map.
*
* @param word Word to be checked.
* @return True if the word is in the vocab map.
*/
def contains(word: String): Boolean = {
vocab.get(word).isDefined
}
/** Get the vector representation for the word.
*
* @param word Word to retrieve vector for.
* @return The vector representation of the word.
*/
def vector(word: String): Array[Float] = {
vocab.getOrElse(word, Array[Float]())
}
val vectorInDouble = (word: String) => vector(word).map(_.toDouble)
/** Compute the Euclidean distance between two vectors.
*
* @param vec1 The first vector.
* @param vec2 The other vector.
* @return The Euclidean distance between the two vectors.
*/
def euclidean(vec1: Array[Float], vec2: Array[Float]): Double = {
assert(vec1.length == vec2.length, "Uneven vectors!")
var sum = 0.0
for (i <- 0 until vec1.length) sum += math.pow(vec1(i) - vec2(i), 2)
math.sqrt(sum)
}
/** Compute the Euclidean distance between the vector representations of the words.
*
* @param word1 The first word.
* @param word2 The other word.
* @return The Euclidean distance between the vector representations of the words.
*/
def euclidean(word1: String, word2: String): Double = {
assert(contains(word1) && contains(word2), "Out of dictionary word! " + word1 + " or " + word2)
euclidean(vocab.get(word1).get, vocab.get(word2).get)
}
/** Compute the cosine similarity score between two vectors.
*
* @param vec1 The first vector.
* @param vec2 The other vector.
* @return The cosine similarity score of the two vectors.
*/
def cosine(vec1: Array[Float], vec2: Array[Float]): Double = {
assert(vec1.length == vec2.length, "Uneven vectors!")
var dot, sum1, sum2 = 0.0
for (i <- 0 until vec1.length) {
dot += (vec1(i) * vec2(i))
sum1 += (vec1(i) * vec1(i))
sum2 += (vec2(i) * vec2(i))
}
dot / (math.sqrt(sum1) * math.sqrt(sum2))
}
/** Compute the cosine similarity score between the vector representations of the words.
*
* @param word1 The first word.
* @param word2 The other word.
* @return The cosine similarity score between the vector representations of the words.
*/
def cosine(word1: String, word2: String): Double = {
assert(contains(word1) && contains(word2), "Out of dictionary word! " + word1 + " or " + word2)
cosine(vocab.get(word1).get, vocab.get(word2).get)
}
/** Compute the magnitude of the vector.
*
* @param vec The vector.
* @return The magnitude of the vector.
*/
def magnitude(vec: Array[Float]): Double = {
math.sqrt(vec.foldLeft(0.0) { (sum, x) => sum + (x * x) })
}
/** Normalize the vector.
*
* @param vec The vector.
* @return A normalized vector.
*/
def normalize(vec: Array[Float]): Array[Float] = {
val mag = magnitude(vec).toFloat
vec.map(_ / mag)
}
/** Find the vector representation for the given list of word(s) by aggregating (summing) the
* vector for each word.
*
* @param input The input word(s).
* @return The sum vector (aggregated from the input vectors).
*/
def sumVector(input: List[String]): Array[Float] = {
// Find the vector representation for the input. If multiple words, then aggregate (sum) their vectors.
input.foreach(w => assert(contains(w), "Out of dictionary word! " + w))
val vector = new Array[Float](vecSize)
input.foreach(w => for (j <- 0 until vector.length) vector(j) += vocab.get(w).get(j))
vector
}
/** Find N closest terms in the vocab to the given vector, using only words from the in-set (if defined)
* and excluding all words from the out-set (if non-empty). Although you can, it doesn't make much
* sense to define both in and out sets.
*
* @param vector The vector.
* @param inSet Set of words to consider. Specify None to use all words in the vocab (default behavior).
* @param outSet Set of words to exclude (default to empty).
* @param N The maximum number of terms to return (default to 40).
* @return The N closest terms in the vocab to the given vector and their associated cosine similarity scores.
*/
def nearestNeighbors(vector: Array[Float], inSet: Option[Set[String]] = None,
outSet: Set[String] = Set[String](), N: Integer = 40)
: List[(String, Float)] = {
// For performance efficiency, we maintain the top/closest terms using a priority queue.
// Note: We invert the distance here because a priority queue will dequeue the highest priority element,
// but we would like it to dequeue the lowest scoring element instead.
val top = new mutable.PriorityQueue[(String, Float)]()(Ordering.by(-_._2))
// Iterate over each token in the vocab and compute its cosine score to the input.
var dist = 0f
val iterator = if (inSet.isDefined) vocab.filterKeys(k => inSet.get.contains(k)).iterator else vocab.iterator
iterator.foreach(entry => {
// Skip tokens in the out set
if (!outSet.contains(entry._1)) {
dist = cosine(vector, entry._2).toFloat
if (top.size < N || top.head._2 < dist) {
top.enqueue((entry._1, dist))
if (top.length > N) {
// If the queue contains over N elements, then dequeue the highest priority element
// (which will be the element with the lowest cosine score).
top.dequeue()
}
}
}
})
// Return the top N results as a sorted list.
assert(top.length <= N)
top.toList.sortWith(_._2 > _._2)
}
/** Find the N closest terms in the vocab to the input word(s).
*
* @param input The input word(s).
* @param N The maximum number of terms to return (default to 40).
* @return The N closest terms in the vocab to the input word(s) and their associated cosine similarity scores.
*/
def distance(input: List[String], N: Integer = 40): List[(String, Float)] = {
// Check for edge cases
if (input.size == 0) return List[(String, Float)]()
input.foreach(w => {
if (!contains(w)) {
println("Out of dictionary word! " + w)
return List[(String, Float)]()
}
})
// Find the vector representation for the input. If multiple words, then aggregate (sum) their vectors.
val vector = sumVector(input)
nearestNeighbors(normalize(vector), outSet = input.toSet, N = N)
}
/** Find the N closest terms in the vocab to the analogy:
* - [word1] is to [word2] as [word3] is to ???
*
* The algorithm operates as follow:
* - Find a vector approximation of the missing word = vec([word2]) - vec([word1]) + vec([word3]).
* - Return words closest to the approximated vector.
*
* @param word1 First word in the analogy [word1] is to [word2] as [word3] is to ???.
* @param word2 Second word in the analogy [word1] is to [word2] as [word3] is to ???
* @param word3 Third word in the analogy [word1] is to [word2] as [word3] is to ???.
* @param N The maximum number of terms to return (default to 40).
* @return The N closest terms in the vocab to the analogy and their associated cosine similarity scores.
*/
def analogy(word1: String, word2: String, word3: String, N: Integer = 40): List[(String, Float)] = {
// Check for edge cases
if (!contains(word1) || !contains(word2) || !contains(word3)) {
println("Out of dictionary word! " + Array(word1, word2, word3).mkString(" or "))
return List[(String, Float)]()
}
// Find the vector approximation for the missing analogy.
val vector = new Array[Float](vecSize)
for (j <- 0 until vector.length)
vector(j) = vocab.get(word2).get(j) - vocab.get(word1).get(j) + vocab.get(word3).get(j)
nearestNeighbors(normalize(vector), outSet = Set(word1, word2, word3), N = N)
}
/** Rank a set of words by their respective distance to some central term.
*
* @param word The central word.
* @param set Set of words to rank.
* @return Ordered list of words and their associated scores.
*/
def rank(word: String, set: Set[String]): List[(String, Float)] = {
// Check for edge cases
if (set.size == 0) return List[(String, Float)]()
(set + word).foreach(w => {
if (!contains(w)) {
println("Out of dictionary word! " + w)
return List[(String, Float)]()
}
})
nearestNeighbors(vocab.get(word).get, inSet = Option(set), N = set.size)
}
/** Pretty print the list of words and their associated scores.
*
* @param words List of (word, score) pairs to be printed.
*/
def pprint(words: List[(String, Float)]) = {
println("\\n%50s".format("Word") + (" " * 7) + "Cosine distance\\n" + ("-" * 72))
println(words.map(s => "%50s".format(s._1) + (" " * 7) + "%15f".format(s._2)).mkString("\\n"))
println()
}
}
object Word2Vec {
def apply(modelFile: String) = {
new Word2Vec().load(modelFile)
}
}
/** ********************************************************************************
* Demo of the Scala ported word2vec model.
* ********************************************************************************
*/
object RunWord2Vec {
/** Demo. */
def main(args: Array[String]): Unit = {
// Load word2vec model from binary file.
val model = new Word2Vec()
//model.load("../word2vec-scala/vectors.bin")
model.load("./data/wiki/word2vec.bin")
// distance: Find N closest words
println("`中国`最近的词语:")
model.pprint(model.distance(List("中国"), N = 10))
println("\\n`北京`和`首都`都距离比较近的词语列表:")
model.pprint(model.distance(List("北京", "首都")))
println("\\n`姚明`,`体育`,`篮球`")
model.pprint(model.distance(List("姚明", "体育", "篮球")))
// analogy: "king" is to "queen", as "man" is to ?
// model.pprint(model.analogy("king", "queen", "man", N = 10))
// rank: Rank a set of words by their respective distance to the central term
model.pprint(model.rank("苹果", Set("手机", "水果", "公司", "环境")))
}
}
| iamxiatian/x-extractor | src/main/scala/ruc/irm/extractor/keyword/Word2Vec.scala | Scala | gpl-3.0 | 15,680 |
package io.iohk.ethereum.rlp
import akka.util.ByteString
import io.iohk.ethereum.rlp.RLPImplicitConversions._
import io.iohk.ethereum.rlp.RLPImplicits._
import io.iohk.ethereum.utils.Logger
import org.scalacheck.{Arbitrary, Gen}
import org.bouncycastle.util.encoders.Hex
import org.scalatestplus.scalacheck.{ScalaCheckDrivenPropertyChecks, ScalaCheckPropertyChecks}
import scala.language.implicitConversions
import scala.util.Try
import org.scalatest.funsuite.AnyFunSuite
class RLPSuite extends AnyFunSuite with ScalaCheckPropertyChecks with ScalaCheckDrivenPropertyChecks with Logger {
test("nextElementIndex of empty data") {
val maybeIndex = Try { nextElementIndex(Array.emptyByteArray, 0) }
assert(maybeIndex.isFailure)
}
test("Decoding of empty data") {
val maybeDecoded = Try { decode[Array[Byte]](Array.emptyByteArray) }
assert(maybeDecoded.isFailure)
}
test("Decoding failure: Passing RLPValue when RLPList is expected") {
val data = encode(0.toLong)
val maybeSeqObtained = Try { decode[Seq[Long]](data)(seqEncDec()) }
assert(maybeSeqObtained.isFailure)
}
test("Decoding failure: Passing RLPList when RLPValue is expected") {
val data = RLP.encode(RLPList("cat", "dog"))
val maybeByteObtained = Try { decode[Byte](data) }
val maybeShortObtained = Try { decode[Short](data) }
val maybeIntObtained = Try { decode[Int](data) }
val maybeLongObtained = Try { decode[Long](data) }
val maybeBigIntObtained = Try { decode[BigInt](data) }
val maybeStringObtained = Try { decode[String](data) }
val maybeByteArrayObtained = Try { decode[Array[Byte]](data) }
assert(maybeByteObtained.isFailure)
assert(maybeShortObtained.isFailure)
assert(maybeIntObtained.isFailure)
assert(maybeLongObtained.isFailure)
assert(maybeStringObtained.isFailure)
assert(maybeByteArrayObtained.isFailure)
assert(maybeBigIntObtained.isFailure)
}
test("Decoding failure: Passing an RLPValue larger than expected") {
val num: BigInt = 16 * BigInt(Long.MaxValue)
val data = encode(num)
val maybeByteObtained = Try { decode[Byte](data) }
val maybeShortObtained = Try { decode[Short](data) }
val maybeIntObtained = Try { decode[Int](data) }
val maybeLongObtained = Try { decode[Long](data) }
assert(maybeByteObtained.isFailure)
assert(maybeShortObtained.isFailure)
assert(maybeIntObtained.isFailure)
assert(maybeLongObtained.isFailure)
}
test("Byte Encoding") {
val expected = Array[Byte](0x80.toByte)
val data = encode(0: Byte)
assert(expected sameElements data)
val dataObtained = decode[Byte](data)
val obtained: Byte = dataObtained
assert((0: Byte) == obtained)
val expected2 = Array[Byte](0x78.toByte)
val data2 = encode(120: Byte)
assert(expected2 sameElements data2)
val dataObtained2 = decode[Byte](data2)
val obtained2: Byte = dataObtained2
assert((120: Byte) == obtained2)
val expected3 = Array[Byte](0x7f.toByte)
val data3 = encode(127: Byte)
assert(expected3 sameElements data3)
val dataObtained3 = decode[Byte](data3)
val obtained3: Byte = dataObtained3
assert((127: Byte) == obtained3)
forAll(Gen.choose[Byte](Byte.MinValue, Byte.MaxValue)) { (aByte: Byte) =>
{
val data = encode(aByte)
val dataObtained = decode[Byte](data)
val obtained: Byte = dataObtained
assert(aByte == obtained)
}
}
}
test("Short Encoding") {
val expected4 = Array[Byte](0x82.toByte, 0x76.toByte, 0x5f.toByte)
val data4 = encode(30303.toShort)
assert(expected4 sameElements data4)
val dataObtained4 = decode[Short](data4)
val obtained4: Short = dataObtained4
assert((30303: Short) == obtained4)
val expected5 = Array[Byte](0x82.toByte, 0x4e.toByte, 0xea.toByte)
val data5 = encode(20202.toShort)
assert(expected5 sameElements data5)
val dataObtained5 = decode[Short](data5)
val obtained5: Short = dataObtained5
assert((20202: Short) == obtained5)
val expected6 = Array[Byte](0x82.toByte, 0x9d.toByte, 0x0a.toByte)
val data6 = encode(40202.toShort)
assert(expected6 sameElements data6)
val dataObtained6 = decode[Short](data6)
val obtained6: Short = dataObtained6
assert(40202.toShort == obtained6)
val expected7 = Array[Byte](0x7f.toByte)
val data7 = encode(127.toShort)
assert(expected7 sameElements data7)
val dataObtained7 = decode[Short](data7)
val obtained7: Short = dataObtained7
assert(127.toShort == obtained7)
val expected8 = Array[Byte](0x80.toByte)
val data8 = encode(0.toShort)
assert(expected8 sameElements data8)
val dataObtained8 = decode[Short](data8)
val obtained8: Short = dataObtained8
assert(0.toShort == obtained8)
forAll(Gen.choose[Short](Short.MinValue, Short.MaxValue)) { (aShort: Short) =>
{
val data = encode(aShort)
val dataObtained = decode[Short](data)
val obtained: Short = dataObtained
assert(aShort == obtained)
}
}
}
test("String encoding") {
val expected = Array[Byte](0x80.toByte)
val data = encode("")
assert(expected sameElements data)
val dataObtained = decode[String](data)
val obtained: String = dataObtained
assert("" == obtained)
val expected2 = Array[Byte](
0x90.toByte,
0x45.toByte,
0x74.toByte,
0x68.toByte,
0x65.toByte,
0x72.toByte,
0x65.toByte,
0x75.toByte,
0x6d.toByte,
0x4a.toByte,
0x20.toByte,
0x43.toByte,
0x6c.toByte,
0x69.toByte,
0x65.toByte,
0x6e.toByte,
0x74.toByte
)
val data2 = encode("EthereumJ Client")
assert(expected2 sameElements data2)
val dataObtained2 = decode[String](data2)
val obtained2: String = dataObtained2
assert("EthereumJ Client" == obtained2)
val expected3 = Array[Byte](
0xad.toByte,
0x45.toByte,
0x74.toByte,
0x68.toByte,
0x65.toByte,
0x72.toByte,
0x65.toByte,
0x75.toByte,
0x6d.toByte,
0x28.toByte,
0x2b.toByte,
0x2b.toByte,
0x29.toByte,
0x2f.toByte,
0x5a.toByte,
0x65.toByte,
0x72.toByte,
0x6f.toByte,
0x47.toByte,
0x6f.toByte,
0x78.toByte,
0x2f.toByte,
0x76.toByte,
0x30.toByte,
0x2e.toByte,
0x35.toByte,
0x2e.toByte,
0x30.toByte,
0x2f.toByte,
0x6e.toByte,
0x63.toByte,
0x75.toByte,
0x72.toByte,
0x73.toByte,
0x65.toByte,
0x73.toByte,
0x2f.toByte,
0x4c.toByte,
0x69.toByte,
0x6e.toByte,
0x75.toByte,
0x78.toByte,
0x2f.toByte,
0x67.toByte,
0x2b.toByte,
0x2b.toByte
)
val data3 = encode("Ethereum(++)/ZeroGox/v0.5.0/ncurses/Linux/g++")
assert(expected3 sameElements data3)
val dataObtained3 = decode[String](data3)
val obtained3: String = dataObtained3
assert("Ethereum(++)/ZeroGox/v0.5.0/ncurses/Linux/g++" == obtained3)
val expected4 = Array[Byte](
0xb8.toByte,
0x5a.toByte,
0x45.toByte,
0x74.toByte,
0x68.toByte,
0x65.toByte,
0x72.toByte,
0x65.toByte,
0x75.toByte,
0x6d.toByte,
0x28.toByte,
0x2b.toByte,
0x2b.toByte,
0x29.toByte,
0x2f.toByte,
0x5a.toByte,
0x65.toByte,
0x72.toByte,
0x6f.toByte,
0x47.toByte,
0x6f.toByte,
0x78.toByte,
0x2f.toByte,
0x76.toByte,
0x30.toByte,
0x2e.toByte,
0x35.toByte,
0x2e.toByte,
0x30.toByte,
0x2f.toByte,
0x6e.toByte,
0x63.toByte,
0x75.toByte,
0x72.toByte,
0x73.toByte,
0x65.toByte,
0x73.toByte,
0x2f.toByte,
0x4c.toByte,
0x69.toByte,
0x6e.toByte,
0x75.toByte,
0x78.toByte,
0x2f.toByte,
0x67.toByte,
0x2b.toByte,
0x2b.toByte,
0x45.toByte,
0x74.toByte,
0x68.toByte,
0x65.toByte,
0x72.toByte,
0x65.toByte,
0x75.toByte,
0x6d.toByte,
0x28.toByte,
0x2b.toByte,
0x2b.toByte,
0x29.toByte,
0x2f.toByte,
0x5a.toByte,
0x65.toByte,
0x72.toByte,
0x6f.toByte,
0x47.toByte,
0x6f.toByte,
0x78.toByte,
0x2f.toByte,
0x76.toByte,
0x30.toByte,
0x2e.toByte,
0x35.toByte,
0x2e.toByte,
0x30.toByte,
0x2f.toByte,
0x6e.toByte,
0x63.toByte,
0x75.toByte,
0x72.toByte,
0x73.toByte,
0x65.toByte,
0x73.toByte,
0x2f.toByte,
0x4c.toByte,
0x69.toByte,
0x6e.toByte,
0x75.toByte,
0x78.toByte,
0x2f.toByte,
0x67.toByte,
0x2b.toByte,
0x2b.toByte
)
val data4 = encode("Ethereum(++)/ZeroGox/v0.5.0/ncurses/Linux/g++Ethereum(++)/ZeroGox/v0.5.0/ncurses/Linux/g++")
assert(expected4 sameElements data4)
val dataObtained4 = decode[String](data4)
val obtained4: String = dataObtained4
assert("Ethereum(++)/ZeroGox/v0.5.0/ncurses/Linux/g++Ethereum(++)/ZeroGox/v0.5.0/ncurses/Linux/g++" == obtained4)
val strGen = (n: Int) => Gen.choose(0, n).flatMap(long => Gen.listOfN(long, Gen.alphaChar).map(_.mkString))
forAll(strGen(10000)) { (aString: String) =>
{
val data = encode(aString)
val dataObtained = decode[String](data)
val obtained: String = dataObtained
assert(aString == obtained)
}
}
}
test("Int Encoding") {
val expected = Array[Byte](0x80.toByte)
val data = encode(0)
assert(expected sameElements data)
val dataObtained = decode[Int](data)
val obtained: Int = dataObtained
assert(0 == obtained)
val expected2 = Array(0x78.toByte)
val data2 = encode(120)
assert(expected2 sameElements data2)
val dataObtained2 = decode[Int](data2)
val obtained2: Int = dataObtained2
assert(120 == obtained2)
val expected3 = Array(0x7f.toByte)
val data3 = encode(127)
assert(expected3 sameElements data3)
val dataObtained3 = decode[Int](data3)
val obtained3: Int = dataObtained3
assert(127 == obtained3)
val expected4 = Array(0x82.toByte, 0x76.toByte, 0x5f.toByte)
val data4 = encode(30303)
assert(expected4 sameElements data4)
val dataObtained4 = decode[Int](data4)
val obtained4: Int = dataObtained4
assert(30303 == obtained4)
val expected5 = Array(0x82.toByte, 0x4e.toByte, 0xea.toByte)
val data5 = encode(20202)
assert(expected5 sameElements data5)
val dataObtained5 = decode[Int](data5)
val obtained5: Int = dataObtained5
assert(20202 == obtained5)
val expected6 = Array(0x83.toByte, 1.toByte, 0.toByte, 0.toByte)
val data6 = encode(65536)
assert(expected6 sameElements data6)
val dataObtained6 = decode[Int](data6)
val obtained6: Int = dataObtained6
assert(65536 == obtained6)
val expected7 = Array(0x84.toByte, 0x80.toByte, 0x00.toByte, 0x00.toByte, 0x00.toByte)
val data7 = encode(Integer.MIN_VALUE)
assert(expected7 sameElements data7)
val dataObtained7 = decode[Int](data7)
val obtained7: Int = dataObtained7
assert(Integer.MIN_VALUE == obtained7)
val expected8 = Array(0x84.toByte, 0x7f.toByte, 0xff.toByte, 0xff.toByte, 0xff.toByte)
val data8 = encode(Integer.MAX_VALUE)
assert(expected8 sameElements data8)
val dataObtained8 = decode[Int](data8)
val obtained8: Int = dataObtained8
assert(Integer.MAX_VALUE == obtained8)
val expected9 = Array(0x84.toByte, 0xff.toByte, 0xff.toByte, 0xff.toByte, 0xff.toByte)
val data9 = encode(0xffffffff)
assert(expected9 sameElements data9)
val dataObtained9 = decode[Int](data9)
val obtained9: Int = dataObtained9
assert(0xffffffff == obtained9)
forAll(Gen.choose[Int](Int.MinValue, Int.MaxValue)) { (anInt: Int) =>
{
val data = encode(anInt)
val dataObtained = decode[Int](data)
val obtained: Int = dataObtained
assert(anInt == obtained)
}
}
}
test("Long Encoding") {
forAll(Gen.choose[Long](0, Long.MaxValue)) { (aLong: Long) =>
{
val data = encode(aLong)
val dataObtained = decode[Long](data)
val obtained: Long = dataObtained
assert(aLong == obtained)
}
}
}
test("BigInt Encoding") {
val expected = Array[Byte](0x80.toByte)
val data = encode(BigInt(0))
assert(expected sameElements data)
val dataObtained = decode[BigInt](data)
val obtained: BigInt = dataObtained
assert(BigInt(0) == obtained)
val bigInt = BigInt("100102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f", 16)
val expected2 = "a0100102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"
val data2 = encode(bigInt)
assert(expected2 equals Hex.toHexString(data2))
val dataObtained2 = decode[BigInt](data2)
val obtained2: BigInt = dataObtained2
assert(bigInt == obtained2)
forAll(Arbitrary.arbitrary[BigInt]) { (aBigIntSigned: BigInt) =>
{
val aBigInt = aBigIntSigned.abs
val data = encode(aBigInt)
val dataObtained = decode[BigInt](data)
val obtained: BigInt = dataObtained
assert(aBigInt == obtained)
}
}
}
test("Byte Array Encoding") {
val byteArr =
"ce73660a06626c1b3fda7b18ef7ba3ce17b6bf604f9541d3c6c654b7ae88b239407f659c78f419025d785727ed017b6add21952d7e12007373e321dbc31824ba"
val byteArray: Array[Byte] = Hex.decode(byteArr)
val expected = "b840" + byteArr
val data = encode(byteArray)
assert(expected equals Hex.toHexString(data))
val dataObtained = decode[Array[Byte]](data)
val obtained: Array[Byte] = dataObtained
assert(byteArray sameElements obtained)
val shouldBeError = Try {
val byteArray255Elements = Array.fill(255)(0x1.toByte)
encode(byteArray255Elements)
}
assert(shouldBeError.isSuccess)
forAll(Gen.nonEmptyListOf(Arbitrary.arbitrary[Byte])) { (aByteList: List[Byte]) =>
{
val data = encode(aByteList.toArray)
val dataObtained = decode[Array[Byte]](data)
val obtained: Array[Byte] = dataObtained
assert(aByteList.toArray sameElements obtained)
}
}
}
test("Encode ByteString") {
forAll(Gen.nonEmptyListOf(Arbitrary.arbitrary[Byte])) { (aByteList: List[Byte]) =>
{
val byteString = ByteString(aByteList.toArray)
val data = encode(byteString)
val dataObtained = decode[ByteString](data)
val obtained: ByteString = dataObtained
assert(byteString equals obtained)
}
}
}
test("Encode Seq") {
forAll(Gen.nonEmptyListOf(Gen.choose[Long](0, Long.MaxValue))) { (aLongList: List[Long]) =>
{
val aLongSeq: Seq[Long] = aLongList
val data = encode(aLongSeq)(seqEncDec())
val dataObtained: Seq[Long] = decode[Seq[Long]](data)(seqEncDec())
assert(aLongSeq equals dataObtained)
}
}
}
test("Encode Empty List") {
val expected = "c0"
val data = encode(Seq[Any]())
assert(expected == Hex.toHexString(data))
val dataObtained = decode[Seq[Any]](data)
val obtained: Seq[Any] = dataObtained
assert(obtained.isEmpty)
}
test("Encode Short List") {
val expected = "c88363617483646f67"
val data = RLP.encode(RLPList("cat", "dog"))
assert(expected == Hex.toHexString(data))
val dataObtained = decode[Seq[String]](data)(stringSeqEncDec)
val obtained = dataObtained
assert(Seq("cat", "dog") equals obtained)
val expected2 = "cc83646f6783676f6483636174"
val data2 = RLP.encode(RLPList("dog", "god", "cat"))
assert(expected2 == Hex.toHexString(data2))
val dataObtained2 = decode[Seq[String]](data2)(stringSeqEncDec)
val obtained2 = dataObtained2
assert(Seq("dog", "god", "cat") equals obtained2)
}
test("Encode Long List") {
val list = Seq("cat", "Lorem ipsum dolor sit amet, consectetur adipisicing elit")
val expected =
"f83e83636174b8384c6f72656d20697073756d20646f6c6f722073697420616d65742c20636f6e7365637465747572206164697069736963696e6720656c6974"
val data = RLP.encode(RLPList(list.map(i => toEncodeable(i)): _*))
assert(expected == Hex.toHexString(data))
val dataObtained = decode[Seq[String]](data)(stringSeqEncDec)
val obtained = dataObtained
assert(list equals obtained)
}
test("Encode multilist") {
val expected = "cc01c48363617483646f67c102"
val multilist1 = MultiList1(1, Seq("cat"), "dog", Seq(2))
val data = encode(multilist1)(MultiList1.encDec)
assert(expected == Hex.toHexString(data))
val dataObtained = decode[MultiList1](data)
val obtained = dataObtained
assert(multilist1 equals obtained)
val multilist2 = MultiList2(Seq("cat", "dog"), Seq(1, 2))
val expected2 = "cdc88363617483646f67c20102c0"
val data2 = encode(multilist2)(MultiList2.encDec)
assert(expected2 == Hex.toHexString(data2))
val dataObtained2 = decode[MultiList2](data2)
val obtained2 = dataObtained2
assert(multilist2 equals obtained2)
}
test("Encode Empty List Of List") {
val emptyListOfList = EmptyListOfList()
val expected = "c4c2c0c0c0"
val data = encode(emptyListOfList)(EmptyListOfList.encDec)
assert(expected == Hex.toHexString(data))
val dataObtained = decode[EmptyListOfList](data)
val obtained = dataObtained
assert(emptyListOfList equals obtained)
}
test("Encode Rep Of Two List Of List") {
val twoListOfList = RepOfTwoListOfList()
val expected = "c7c0c1c0c3c0c1c0"
val data = encode(twoListOfList)(RepOfTwoListOfList.encDec)
assert(expected == Hex.toHexString(data))
val dataObtained = decode[RepOfTwoListOfList](data)
val obtained = dataObtained
assert(twoListOfList equals obtained)
}
test("https://github.com/ethereum/tests/blob/master/rlptest.txt") {
for (input: (RLPEncodeable, String) <- rlpTestData) {
val data = RLP.encode(input._1)
assert(input._2 == Hex.toHexString(data))
val dataObtained = RLP.rawDecode(data)
val obtained: RLPEncodeable = dataObtained
val encodedAgain = RLP.encode(obtained)
assert(data sameElements encodedAgain)
}
}
test("SimpleBlock encoding") {
val tx0 = TestSimpleTransaction(1, "cat")
val tx1 = TestSimpleTransaction(2, "dog")
val block = TestSimpleBlock(127, -127: Short, "horse", 1000, Seq(tx0, tx1), Seq(1, 2))
val data = encode(block)(TestSimpleBlock.encDec)
val dataObtained = decode[TestSimpleBlock](data)
val obtained: TestSimpleBlock = dataObtained
assert(block equals obtained)
}
test("Partial Data Parse Test") {
val hex: String = "000080c180000000000000000000000042699b1104e93abf0008be55f912c2ff"
val data = Hex.decode(hex)
val decoded: Seq[Int] = decode[Seq[Int]](data.splitAt(3)._2)
assert(1 == decoded.length)
assert(0 == decoded.head)
}
test("Multiple partial decode") {
val seq1 = RLPList("cat", "dog")
val seq2 = RLPList(23, 10, 1986)
val seq3 = RLPList("cat", "Lorem ipsum dolor sit amet, consectetur adipisicing elit")
val data = Seq(RLP.encode(seq1), RLP.encode(seq2), RLP.encode(seq3)).reduce(_ ++ _)
val decoded1 = decode[Seq[String]](data)
assert(decoded1 equals "cat" :: "dog" :: Nil)
val secondItemIndex = nextElementIndex(data, 0)
val decoded2 = decode[Seq[Int]](data.drop(secondItemIndex))
assert(decoded2 equals 23 :: 10 :: 1986 :: Nil)
val thirdItemIndex = nextElementIndex(data, secondItemIndex)
val decoded3 = decode[Seq[String]](data.drop(thirdItemIndex))
assert(decoded3 equals Seq("cat", "Lorem ipsum dolor sit amet, consectetur adipisicing elit"))
}
implicit def emptySeqEncDec: RLPEncoder[Seq[Any]] with RLPDecoder[Seq[Any]] = new RLPEncoder[Seq[Any]]
with RLPDecoder[Seq[Any]] {
override def encode(obj: Seq[Any]): RLPEncodeable = RLPList()
override def decode(rlp: RLPEncodeable): Seq[Any] = rlp match {
case l: RLPList if l.items.isEmpty => Seq()
case _ => throw new Exception("src is not an empty Seq")
}
}
implicit val stringSeqEncDec = new RLPEncoder[Seq[String]] with RLPDecoder[Seq[String]] {
override def encode(strings: Seq[String]): RLPEncodeable = RLPList(strings.map(stringEncDec.encode): _*)
override def decode(rlp: RLPEncodeable): Seq[String] = rlp match {
case l: RLPList => l.items.map(item => item: String)
case _ => throw new RuntimeException("Invalid String Seq Decoder")
}
}
implicit def stringSeqFromEncodeable(rlp: RLPEncodeable)(implicit dec: RLPDecoder[Seq[String]]): Seq[String] =
dec.decode(rlp)
implicit val intSeqEncDec = new RLPEncoder[Seq[Int]] with RLPDecoder[Seq[Int]] {
override def encode(ints: Seq[Int]): RLPEncodeable = ints: RLPList
override def decode(rlp: RLPEncodeable): Seq[Int] = rlp match {
case l: RLPList => l.items.map(item => item: Int)
case _ => throw new RuntimeException("Invalid Int Seq Decoder")
}
}
implicit def intSeqFromEncodeable(rlp: RLPEncodeable)(implicit dec: RLPDecoder[Seq[Int]]): Seq[Int] = dec.decode(rlp)
case class MultiList1(number: Int, seq1: Seq[String], string: String, seq2: Seq[Int])
object MultiList1 {
implicit val encDec = new RLPEncoder[MultiList1] with RLPDecoder[MultiList1] {
override def encode(obj: MultiList1): RLPEncodeable = {
import obj._
RLPList(number, seq1, string, seq2)
}
override def decode(rlp: RLPEncodeable): MultiList1 = rlp match {
case l: RLPList => MultiList1(l.items.head, l.items(1), l.items(2), l.items(3))
case _ => throw new RuntimeException("Invalid Int Seq Decoder")
}
}
}
case class MultiList2(seq1: Seq[String], seq2: Seq[Int], seq3: Seq[Any] = Seq())
object MultiList2 {
implicit val encDec = new RLPEncoder[MultiList2] with RLPDecoder[MultiList2] {
override def encode(obj: MultiList2): RLPEncodeable = {
import obj._
RLPList(seq1, seq2, seq3)
}
override def decode(rlp: RLPEncodeable): MultiList2 = rlp match {
case l: RLPList => MultiList2(l.items.head, l.items(1), emptySeqEncDec.decode(l.items(2)))
case _ => throw new RuntimeException("Invalid Int Seq Decoder")
}
}
}
case class EmptyListOfList()
object EmptyListOfList {
val instance = Seq(RLPList(RLPList(), RLPList()), RLPList())
implicit val encDec = new RLPEncoder[EmptyListOfList] with RLPDecoder[EmptyListOfList] {
override def encode(obj: EmptyListOfList): RLPEncodeable = RLPList(instance: _*)
override def decode(rlp: RLPEncodeable): EmptyListOfList = rlp match {
case l: RLPList =>
l.items match {
case items if items == instance => EmptyListOfList()
case _ => throw new RuntimeException("Invalid EmptyListOfList Decoder")
}
case _ => throw new RuntimeException("Invalid EmptyListOfList Decoder")
}
}
}
case class RepOfTwoListOfList()
object RepOfTwoListOfList {
val instance = Seq(RLPList(), RLPList(RLPList()), RLPList(RLPList(), RLPList(RLPList())))
implicit val encDec = new RLPEncoder[RepOfTwoListOfList] with RLPDecoder[RepOfTwoListOfList] {
override def encode(obj: RepOfTwoListOfList): RLPEncodeable = RLPList(instance: _*)
override def decode(rlp: RLPEncodeable): RepOfTwoListOfList = rlp match {
case l: RLPList =>
l.items match {
case items if items == instance => RepOfTwoListOfList()
case _ => throw new RuntimeException("Invalid RepOfTwoListOfList Decoder")
}
case _ => throw new RuntimeException("Invalid RepOfTwoListOfList Decoder")
}
}
}
val rlpTestData: Seq[(RLPEncodeable, String)] = Seq(
intEncDec.encode(0) -> "80",
stringEncDec.encode("") -> "80",
stringEncDec.encode("d") -> "64",
stringEncDec.encode("cat") -> "83636174",
stringEncDec.encode("dog") -> "83646f67",
stringSeqEncDec.encode(Seq("cat", "dog")) -> "c88363617483646f67",
stringSeqEncDec.encode(Seq("dog", "god", "cat")) -> "cc83646f6783676f6483636174",
intEncDec.encode(1) -> "01",
intEncDec.encode(10) -> "0a",
intEncDec.encode(100) -> "64",
intEncDec.encode(1000) -> "8203e8",
bigIntEncDec.encode(BigInt("115792089237316195423570985008687907853269984665640564039457584007913129639935"))
-> "a0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
bigIntEncDec.encode(BigInt("115792089237316195423570985008687907853269984665640564039457584007913129639936"))
-> "a1010000000000000000000000000000000000000000000000000000000000000000"
)
//The following classes are used for a simplifying testing for nested objects (allowing using simple RLPEncoder and RLPDecoder)
private case class TestSimpleTransaction(id: Int, name: String)
private object TestSimpleTransaction {
implicit val encDec = new RLPEncoder[TestSimpleTransaction] with RLPDecoder[TestSimpleTransaction] {
override def encode(obj: TestSimpleTransaction): RLPEncodeable = {
import obj._
RLPList(id, name)
}
override def decode(rlp: RLPEncodeable): TestSimpleTransaction = rlp match {
case RLPList(id, name) => TestSimpleTransaction(id, name)
case _ => throw new RuntimeException("Invalid Simple Transaction")
}
}
implicit def fromEncodeable(rlp: RLPEncodeable)(implicit
dec: RLPDecoder[TestSimpleTransaction]
): TestSimpleTransaction = dec.decode(rlp)
}
private case class TestSimpleBlock(
id: Byte,
parentId: Short,
owner: String,
nonce: Int,
txs: Seq[TestSimpleTransaction],
unclesIds: Seq[Int]
)
private object TestSimpleBlock {
implicit val encDec = new RLPEncoder[TestSimpleBlock] with RLPDecoder[TestSimpleBlock] {
override def encode(obj: TestSimpleBlock): RLPEncodeable = {
import obj._
RLPList(
id,
parentId,
owner,
nonce,
RLPList(txs.map(TestSimpleTransaction.encDec.encode): _*),
RLPList(unclesIds.map(id => id: RLPEncodeable): _*)
)
}
override def decode(rlp: RLPEncodeable): TestSimpleBlock = rlp match {
case RLPList(id, parentId, owner, nonce, (txs: RLPList), (unclesIds: RLPList)) =>
TestSimpleBlock(
id,
parentId,
owner,
nonce,
txs.items.map(TestSimpleTransaction.encDec.decode),
unclesIds.items.map(intEncDec.decode)
)
case _ => throw new Exception("Can't transform RLPEncodeable to block")
}
}
}
}
| input-output-hk/etc-client | src/test/scala/io/iohk/ethereum/rlp/RLPSuite.scala | Scala | mit | 26,833 |
package com.weibo.datasys.rest.data
/**
* Created by tuoyu on 25/01/2017.
*/
trait Resource {
def disk: Long
def cpu: Double
def mem: Long
}
| batizty/wolong | src/main/scala/com/weibo/datasys/rest/data/Resource.scala | Scala | apache-2.0 | 150 |
package doodle
package jvm
import doodle.core._
import doodle.core.transform.Transform
import doodle.backend.Metrics
import java.awt.{Color => AwtColor, BasicStroke, Graphics2D, RenderingHints}
import java.awt.image.BufferedImage
import java.awt.geom.{AffineTransform, Path2D}
/** Various utilities for using Java2D */
object Java2D {
def setup(graphics: Graphics2D): Graphics2D = {
graphics.setRenderingHints(
new RenderingHints(
RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON
)
)
graphics
}
def fontMetrics(graphics: Graphics2D): Metrics =
FontMetrics(graphics.getFontRenderContext()).boundingBox _
val bufferFontMetrics: Metrics = {
val buffer = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB)
val graphics = this.setup(buffer.createGraphics())
fontMetrics(graphics)
}
def toAwtColor(color: Color): AwtColor = {
val RGBA(r, g, b, a) = color.toRGBA
new AwtColor(r.get, g.get, b.get, a.toUnsignedByte.get)
}
def setStroke(graphics: Graphics2D, stroke: Stroke) = {
val width = stroke.width.toFloat
val cap = stroke.cap match {
case Line.Cap.Butt => BasicStroke.CAP_BUTT
case Line.Cap.Round => BasicStroke.CAP_ROUND
case Line.Cap.Square => BasicStroke.CAP_SQUARE
}
val join = stroke.join match {
case Line.Join.Bevel => BasicStroke.JOIN_BEVEL
case Line.Join.Miter => BasicStroke.JOIN_MITER
case Line.Join.Round => BasicStroke.JOIN_ROUND
}
val jStroke = new BasicStroke(width, cap, join)
val jColor = this.toAwtColor(stroke.color)
graphics.setStroke(jStroke)
graphics.setPaint(jColor)
}
def setFill(graphics: Graphics2D, fill: Color) = {
graphics.setPaint(this.toAwtColor(fill))
}
/** Converts to an *open* `Path2D` */
def toPath2D(elements: List[PathElement]): Path2D = {
import PathElement._
import Point.extractors.Cartesian
val path = new Path2D.Double()
path.moveTo(0, 0)
elements.foreach {
case MoveTo(Cartesian(x, y)) =>
path.moveTo(x, y)
case LineTo(Cartesian(x, y)) =>
path.lineTo(x, y)
case BezierCurveTo(Cartesian(cp1x, cp1y), Cartesian(cp2x, cp2y), Cartesian(endX, endY)) =>
path.curveTo(
cp1x , cp1y,
cp2x , cp2y,
endX , endY
)
}
path
}
def toAffineTransform(transform: Transform): AffineTransform = {
val elts = transform.elements
new AffineTransform(elts(0), elts(3), elts(1), elts(4), elts(2), elts(5))
}
def strokeAndFill(graphics: Graphics2D,
path: Path2D,
current: DrawingContext): Unit = {
current.stroke.foreach { s =>
setStroke(graphics, s)
graphics.draw(path)
}
current.fillColor.foreach { f =>
setFill(graphics, f)
graphics.fill(path)
}
}
}
| Angeldude/doodle | jvm/src/main/scala/doodle/jvm/Java2D.scala | Scala | apache-2.0 | 2,877 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.encoders
import scala.util.Random
import org.apache.spark.sql.{RandomDataGenerator, Row}
import org.apache.spark.sql.catalyst.plans.CodegenInterpretedPlanTest
import org.apache.spark.sql.catalyst.util.{ArrayData, DateTimeUtils, GenericArrayData}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
@SQLUserDefinedType(udt = classOf[ExamplePointUDT])
class ExamplePoint(val x: Double, val y: Double) extends Serializable {
override def hashCode: Int = 41 * (41 + x.toInt) + y.toInt
override def equals(that: Any): Boolean = {
if (that.isInstanceOf[ExamplePoint]) {
val e = that.asInstanceOf[ExamplePoint]
(this.x == e.x || (this.x.isNaN && e.x.isNaN) || (this.x.isInfinity && e.x.isInfinity)) &&
(this.y == e.y || (this.y.isNaN && e.y.isNaN) || (this.y.isInfinity && e.y.isInfinity))
} else {
false
}
}
}
/**
* User-defined type for [[ExamplePoint]].
*/
class ExamplePointUDT extends UserDefinedType[ExamplePoint] {
override def sqlType: DataType = ArrayType(DoubleType, false)
override def pyUDT: String = "pyspark.sql.tests.ExamplePointUDT"
override def serialize(p: ExamplePoint): GenericArrayData = {
val output = new Array[Any](2)
output(0) = p.x
output(1) = p.y
new GenericArrayData(output)
}
override def deserialize(datum: Any): ExamplePoint = {
datum match {
case values: ArrayData =>
if (values.numElements() > 1) {
new ExamplePoint(values.getDouble(0), values.getDouble(1))
} else {
val random = new Random()
new ExamplePoint(random.nextDouble(), random.nextDouble())
}
}
}
override def userClass: Class[ExamplePoint] = classOf[ExamplePoint]
private[spark] override def asNullable: ExamplePointUDT = this
}
class RowEncoderSuite extends CodegenInterpretedPlanTest {
private val structOfString = new StructType().add("str", StringType)
private val structOfUDT = new StructType().add("udt", new ExamplePointUDT, false)
private val arrayOfString = ArrayType(StringType)
private val arrayOfNull = ArrayType(NullType)
private val mapOfString = MapType(StringType, StringType)
private val arrayOfUDT = ArrayType(new ExamplePointUDT, false)
encodeDecodeTest(
new StructType()
.add("null", NullType)
.add("boolean", BooleanType)
.add("byte", ByteType)
.add("short", ShortType)
.add("int", IntegerType)
.add("long", LongType)
.add("float", FloatType)
.add("double", DoubleType)
.add("decimal", DecimalType.SYSTEM_DEFAULT)
.add("string", StringType)
.add("binary", BinaryType)
.add("date", DateType)
.add("timestamp", TimestampType)
.add("udt", new ExamplePointUDT))
encodeDecodeTest(
new StructType()
.add("arrayOfNull", arrayOfNull)
.add("arrayOfString", arrayOfString)
.add("arrayOfArrayOfString", ArrayType(arrayOfString))
.add("arrayOfArrayOfInt", ArrayType(ArrayType(IntegerType)))
.add("arrayOfMap", ArrayType(mapOfString))
.add("arrayOfStruct", ArrayType(structOfString))
.add("arrayOfUDT", arrayOfUDT))
encodeDecodeTest(
new StructType()
.add("mapOfIntAndString", MapType(IntegerType, StringType))
.add("mapOfStringAndArray", MapType(StringType, arrayOfString))
.add("mapOfArrayAndInt", MapType(arrayOfString, IntegerType))
.add("mapOfArray", MapType(arrayOfString, arrayOfString))
.add("mapOfStringAndStruct", MapType(StringType, structOfString))
.add("mapOfStructAndString", MapType(structOfString, StringType))
.add("mapOfStruct", MapType(structOfString, structOfString)))
encodeDecodeTest(
new StructType()
.add("structOfString", structOfString)
.add("structOfStructOfString", new StructType().add("struct", structOfString))
.add("structOfArray", new StructType().add("array", arrayOfString))
.add("structOfMap", new StructType().add("map", mapOfString))
.add("structOfArrayAndMap",
new StructType().add("array", arrayOfString).add("map", mapOfString))
.add("structOfUDT", structOfUDT))
test("encode/decode decimal type") {
val schema = new StructType()
.add("int", IntegerType)
.add("string", StringType)
.add("double", DoubleType)
.add("java_decimal", DecimalType.SYSTEM_DEFAULT)
.add("scala_decimal", DecimalType.SYSTEM_DEFAULT)
.add("catalyst_decimal", DecimalType.SYSTEM_DEFAULT)
val encoder = RowEncoder(schema).resolveAndBind()
val javaDecimal = new java.math.BigDecimal("1234.5678")
val scalaDecimal = BigDecimal("1234.5678")
val catalystDecimal = Decimal("1234.5678")
val input = Row(100, "test", 0.123, javaDecimal, scalaDecimal, catalystDecimal)
val row = encoder.toRow(input)
val convertedBack = encoder.fromRow(row)
// Decimal will be converted back to Java BigDecimal when decoding.
assert(convertedBack.getDecimal(3).compareTo(javaDecimal) == 0)
assert(convertedBack.getDecimal(4).compareTo(scalaDecimal.bigDecimal) == 0)
assert(convertedBack.getDecimal(5).compareTo(catalystDecimal.toJavaBigDecimal) == 0)
}
test("RowEncoder should preserve decimal precision and scale") {
val schema = new StructType().add("decimal", DecimalType(10, 5), false)
val encoder = RowEncoder(schema).resolveAndBind()
val decimal = Decimal("67123.45")
val input = Row(decimal)
val row = encoder.toRow(input)
assert(row.toSeq(schema).head == decimal)
}
test("RowEncoder should preserve schema nullability") {
val schema = new StructType().add("int", IntegerType, nullable = false)
val encoder = RowEncoder(schema).resolveAndBind()
assert(encoder.serializer.length == 1)
assert(encoder.serializer.head.dataType == IntegerType)
assert(encoder.serializer.head.nullable == false)
}
test("RowEncoder should preserve nested column name") {
val schema = new StructType().add(
"struct",
new StructType()
.add("i", IntegerType, nullable = false)
.add(
"s",
new StructType().add("int", IntegerType, nullable = false),
nullable = false),
nullable = false)
val encoder = RowEncoder(schema).resolveAndBind()
assert(encoder.serializer.length == 1)
assert(encoder.serializer.head.dataType ==
new StructType()
.add("i", IntegerType, nullable = false)
.add(
"s",
new StructType().add("int", IntegerType, nullable = false),
nullable = false))
assert(encoder.serializer.head.nullable == false)
}
test("RowEncoder should support primitive arrays") {
val schema = new StructType()
.add("booleanPrimitiveArray", ArrayType(BooleanType, false))
.add("bytePrimitiveArray", ArrayType(ByteType, false))
.add("shortPrimitiveArray", ArrayType(ShortType, false))
.add("intPrimitiveArray", ArrayType(IntegerType, false))
.add("longPrimitiveArray", ArrayType(LongType, false))
.add("floatPrimitiveArray", ArrayType(FloatType, false))
.add("doublePrimitiveArray", ArrayType(DoubleType, false))
val encoder = RowEncoder(schema).resolveAndBind()
val input = Seq(
Array(true, false),
Array(1.toByte, 64.toByte, Byte.MaxValue),
Array(1.toShort, 255.toShort, Short.MaxValue),
Array(1, 10000, Int.MaxValue),
Array(1.toLong, 1000000.toLong, Long.MaxValue),
Array(1.1.toFloat, 123.456.toFloat, Float.MaxValue),
Array(11.1111, 123456.7890123, Double.MaxValue)
)
val row = encoder.toRow(Row.fromSeq(input))
val convertedBack = encoder.fromRow(row)
input.zipWithIndex.map { case (array, index) =>
assert(convertedBack.getSeq(index) === array)
}
}
test("RowEncoder should support array as the external type for ArrayType") {
val schema = new StructType()
.add("array", ArrayType(IntegerType))
.add("nestedArray", ArrayType(ArrayType(StringType)))
.add("deepNestedArray", ArrayType(ArrayType(ArrayType(LongType))))
val encoder = RowEncoder(schema).resolveAndBind()
val input = Row(
Array(1, 2, null),
Array(Array("abc", null), null),
Array(Seq(Array(0L, null), null), null))
val row = encoder.toRow(input)
val convertedBack = encoder.fromRow(row)
assert(convertedBack.getSeq(0) == Seq(1, 2, null))
assert(convertedBack.getSeq(1) == Seq(Seq("abc", null), null))
assert(convertedBack.getSeq(2) == Seq(Seq(Seq(0L, null), null), null))
}
test("RowEncoder should throw RuntimeException if input row object is null") {
val schema = new StructType().add("int", IntegerType)
val encoder = RowEncoder(schema)
val e = intercept[RuntimeException](encoder.toRow(null))
assert(e.getMessage.contains("Null value appeared in non-nullable field"))
assert(e.getMessage.contains("top level Product or row object"))
}
test("RowEncoder should validate external type") {
val e1 = intercept[RuntimeException] {
val schema = new StructType().add("a", IntegerType)
val encoder = RowEncoder(schema)
encoder.toRow(Row(1.toShort))
}
assert(e1.getMessage.contains("java.lang.Short is not a valid external type"))
val e2 = intercept[RuntimeException] {
val schema = new StructType().add("a", StringType)
val encoder = RowEncoder(schema)
encoder.toRow(Row(1))
}
assert(e2.getMessage.contains("java.lang.Integer is not a valid external type"))
val e3 = intercept[RuntimeException] {
val schema = new StructType().add("a",
new StructType().add("b", IntegerType).add("c", StringType))
val encoder = RowEncoder(schema)
encoder.toRow(Row(1 -> "a"))
}
assert(e3.getMessage.contains("scala.Tuple2 is not a valid external type"))
val e4 = intercept[RuntimeException] {
val schema = new StructType().add("a", ArrayType(TimestampType))
val encoder = RowEncoder(schema)
encoder.toRow(Row(Array("a")))
}
assert(e4.getMessage.contains("java.lang.String is not a valid external type"))
}
test("SPARK-25791: Datatype of serializers should be accessible") {
val udtSQLType = new StructType().add("a", IntegerType)
val pythonUDT = new PythonUserDefinedType(udtSQLType, "pyUDT", "serializedPyClass")
val schema = new StructType().add("pythonUDT", pythonUDT, true)
val encoder = RowEncoder(schema)
assert(encoder.serializer(0).dataType == pythonUDT.sqlType)
}
test("encoding/decoding TimestampType to/from java.time.Instant") {
withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
val schema = new StructType().add("t", TimestampType)
val encoder = RowEncoder(schema).resolveAndBind()
val instant = java.time.Instant.parse("2019-02-26T16:56:00Z")
val row = encoder.toRow(Row(instant))
assert(row.getLong(0) === DateTimeUtils.instantToMicros(instant))
val readback = encoder.fromRow(row)
assert(readback.get(0) === instant)
}
}
test("encoding/decoding DateType to/from java.time.LocalDate") {
withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
val schema = new StructType().add("d", DateType)
val encoder = RowEncoder(schema).resolveAndBind()
val localDate = java.time.LocalDate.parse("2019-02-27")
val row = encoder.toRow(Row(localDate))
assert(row.getLong(0) === DateTimeUtils.localDateToDays(localDate))
val readback = encoder.fromRow(row)
assert(readback.get(0).equals(localDate))
}
}
for {
elementType <- Seq(IntegerType, StringType)
containsNull <- Seq(true, false)
nullable <- Seq(true, false)
} {
test("RowEncoder should preserve array nullability: " +
s"ArrayType($elementType, containsNull = $containsNull), nullable = $nullable") {
val schema = new StructType().add("array", ArrayType(elementType, containsNull), nullable)
val encoder = RowEncoder(schema).resolveAndBind()
assert(encoder.serializer.length == 1)
assert(encoder.serializer.head.dataType == ArrayType(elementType, containsNull))
assert(encoder.serializer.head.nullable == nullable)
}
}
for {
keyType <- Seq(IntegerType, StringType)
valueType <- Seq(IntegerType, StringType)
valueContainsNull <- Seq(true, false)
nullable <- Seq(true, false)
} {
test("RowEncoder should preserve map nullability: " +
s"MapType($keyType, $valueType, valueContainsNull = $valueContainsNull), " +
s"nullable = $nullable") {
val schema = new StructType().add(
"map", MapType(keyType, valueType, valueContainsNull), nullable)
val encoder = RowEncoder(schema).resolveAndBind()
assert(encoder.serializer.length == 1)
assert(encoder.serializer.head.dataType == MapType(keyType, valueType, valueContainsNull))
assert(encoder.serializer.head.nullable == nullable)
}
}
private def encodeDecodeTest(schema: StructType): Unit = {
test(s"encode/decode: ${schema.simpleString}") {
val encoder = RowEncoder(schema).resolveAndBind()
val inputGenerator = RandomDataGenerator.forType(schema, nullable = false).get
var input: Row = null
try {
for (_ <- 1 to 5) {
input = inputGenerator.apply().asInstanceOf[Row]
val row = encoder.toRow(input)
val convertedBack = encoder.fromRow(row)
assert(input == convertedBack)
}
} catch {
case e: Exception =>
fail(
s"""
|schema: ${schema.simpleString}
|input: ${input}
""".stripMargin, e)
}
}
}
}
| aosagie/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala | Scala | apache-2.0 | 14,480 |
package by.bsuir.verpav.misoi.clustering.steps
import java.awt.image.BufferedImage
import javax.swing.{JFrame, JOptionPane}
import by.bsuir.verpav.misoi.util.ImageUtils
/**
* Created by Pavel_Verkhovtsov on 10/5/16.
*/
object BrightnessCorrection extends ClusteringStep {
override def perform(baseImage: BufferedImage): BufferedImage = {
val isUseCorrection = params.getOrElse("isUseCorrection", 1)
if (isUseCorrection == 1) {
val colors = ImageUtils.extractColors(baseImage)
val red = colors.map(_._1)
val redMin = red.min
val redMax = red.max
val green = colors.map(_._2)
val greenMin = green.min
val greenMax = green.max
val blue = colors.map(_._3)
val blueMin = blue.min
val blueMax = blue.max
def newPixelValue(pixel: Int, max: Int, min: Int) = ((pixel - min).toDouble / (max - min).toDouble * 255).toInt
ImageUtils.binaryImageTransformation(baseImage,
(r: Int, g: Int, b: Int) => true,
(r: Int, g: Int, b: Int) => Array(newPixelValue(r, redMax, redMin), newPixelValue(g, greenMax, greenMin), newPixelValue(b, blueMax, blueMin)),
(r: Int, g: Int, b: Int) => Array(0, 0, 0))
}
else
baseImage
}
override def requestParameters(frame: JFrame): Unit = {
val isUseCorrection = JOptionPane.showInputDialog(frame, "Use correction 0 or 1: ")
params.put("isUseCorrection", isUseCorrection.toInt)
}
}
| VerkhovtsovPavel/BSUIR_Labs | Labs/MISOI/MISOI-2/src/by/bsuir/verpav/misoi/clustering/steps/BrightnessCorrection.scala | Scala | mit | 1,440 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive
import cats.Eq
import minitest.TestSuite
import minitest.laws.Checkers
import monix.eval.Task
import monix.execution.Scheduler
import monix.execution.schedulers.SchedulerService
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.util.{Failure, Success}
trait BaseConcurrencySuite extends TestSuite[SchedulerService] with Checkers with ArbitraryInstancesBase {
def setup(): SchedulerService = {
Scheduler.computation(parallelism = 4, name = "concurrency-tests", daemonic = true)
}
def tearDown(env: SchedulerService): Unit = {
env.shutdown()
assert(env.awaitTermination(1.minute), "scheduler.awaitTermination")
}
implicit def equalityObservable[A](implicit A: Eq[A], ec: Scheduler): Eq[Observable[A]] =
new Eq[Observable[A]] {
def eqv(lh: Observable[A], rh: Observable[A]): Boolean = {
val eqList = implicitly[Eq[Option[List[A]]]]
val fa = lh.foldLeft(List.empty[A])((acc, e) => e :: acc).firstOptionL.runToFuture
val fb = rh.foldLeft(List.empty[A])((acc, e) => e :: acc).firstOptionL.runToFuture
equalityFuture(eqList, ec).eqv(fa, fb)
}
}
implicit def equalityTask[A](implicit A: Eq[A], ec: Scheduler): Eq[Task[A]] =
new Eq[Task[A]] {
def eqv(lh: Task[A], rh: Task[A]): Boolean =
equalityFuture(A, ec).eqv(lh.runToFuture, rh.runToFuture)
}
implicit def equalityFuture[A](implicit A: Eq[A], ec: Scheduler): Eq[Future[A]] =
new Eq[Future[A]] {
def eqv(x: Future[A], y: Future[A]): Boolean = {
Await.ready(for (_ <- x; _ <- y) yield (), 5.minutes)
x.value match {
case None =>
y.value.isEmpty
case Some(Success(a)) =>
y.value match {
case Some(Success(b)) => A.eqv(a, b)
case _ => false
}
case Some(Failure(ex1)) =>
y.value match {
case Some(Failure(ex2)) =>
equalityThrowable.eqv(ex1, ex2)
case _ =>
false
}
}
}
}
}
| alexandru/monifu | monix-reactive/jvm/src/test/scala/monix/reactive/BaseConcurrencySuite.scala | Scala | apache-2.0 | 2,780 |
package benchmarks
import java.nio.file.{Files, Path, Paths}
object Main extends App {
val startTime = System.currentTimeMillis()
val rootPath: Path = Paths.get(args.headOption.getOrElse("."))
val compilerSetup = new CompilerSetup(rootPath, args.drop(3).toList)
val N = args.drop(1).headOption.map(_.toInt).getOrElse(2) // TODO change it!
val M = args.drop(2).headOption.map(_.toInt).getOrElse(15)
val sources = IO.listSourcesIn(rootPath.resolve("sources")).map(_.toString)
val removeAt = N - M
val profileFile = compilerSetup.outputDir.resolve("profile.txt") // TODO always add this!
def runCompilation(n: Int): Long = {
val run = new compilerSetup.global.Run
val start = System.currentTimeMillis()
run.compile(sources)
val duration = System.currentTimeMillis() - start
Files.move(compilerSetup.currentOutput, compilerSetup.currentOutput.resolveSibling(s"classes_$n"))
if (n == removeAt && Files.exists(profileFile)) {
Files.move(profileFile, profileFile.resolveSibling("initial-profile.txt"))
}
duration
}
println(s"Running benchmark with (N=$N, M=$M) in $rootPath with scalac options: ${compilerSetup.scalacOptions}")
val times = (1 to N).map(runCompilation)
val total = System.currentTimeMillis() - startTime
def asSec(long: Long) = long / 1000.0
def asSec(d: Double) = d / 1000
val overhead = asSec(total - times.sum)
val lastMAvg = asSec(times.takeRight(M).sum / M.toDouble) // TODO support cases where M > N
val allAvg = asSec(times.sum / N.toDouble)
// TODO proper output format
println(s"Run $N compilations in ${asSec(total)} with overhead: $overhead.")
println(s"Avgs. Last ($M): $lastMAvg, all $allAvg")
println(s"Times: ${times.map(asSec)}")
} | rorygraves/perf_tester | light/benchmark/src/main/scala/benchmarks/Main.scala | Scala | apache-2.0 | 1,748 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.thriftserver
import java.security.PrivilegedExceptionAction
import java.sql.{Date, Timestamp}
import java.util.{Arrays, Map => JMap, UUID}
import java.util.concurrent.RejectedExecutionException
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal
import org.apache.hadoop.hive.metastore.api.FieldSchema
import org.apache.hadoop.hive.shims.Utils
import org.apache.hive.service.cli._
import org.apache.hive.service.cli.operation.ExecuteStatementOperation
import org.apache.hive.service.cli.session.HiveSession
import org.apache.spark.SparkContext
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, Row => SparkRow, SQLContext}
import org.apache.spark.sql.execution.HiveResult
import org.apache.spark.sql.execution.command.SetCommand
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.util.{Utils => SparkUtils}
private[hive] class SparkExecuteStatementOperation(
parentSession: HiveSession,
statement: String,
confOverlay: JMap[String, String],
runInBackground: Boolean = true)
(sqlContext: SQLContext, sessionToActivePool: JMap[SessionHandle, String])
extends ExecuteStatementOperation(parentSession, statement, confOverlay, runInBackground)
with Logging {
private var result: DataFrame = _
// We cache the returned rows to get iterators again in case the user wants to use FETCH_FIRST.
// This is only used when `spark.sql.thriftServer.incrementalCollect` is set to `false`.
// In case of `true`, this will be `None` and FETCH_FIRST will trigger re-execution.
private var resultList: Option[Array[SparkRow]] = _
private var iter: Iterator[SparkRow] = _
private var dataTypes: Array[DataType] = _
private var statementId: String = _
private lazy val resultSchema: TableSchema = {
if (result == null || result.schema.isEmpty) {
new TableSchema(Arrays.asList(new FieldSchema("Result", "string", "")))
} else {
logInfo(s"Result Schema: ${result.schema}")
SparkExecuteStatementOperation.getTableSchema(result.schema)
}
}
def close(): Unit = {
// RDDs will be cleaned automatically upon garbage collection.
logDebug(s"CLOSING $statementId")
cleanup(OperationState.CLOSED)
sqlContext.sparkContext.clearJobGroup()
}
def addNonNullColumnValue(from: SparkRow, to: ArrayBuffer[Any], ordinal: Int) {
dataTypes(ordinal) match {
case StringType =>
to += from.getString(ordinal)
case IntegerType =>
to += from.getInt(ordinal)
case BooleanType =>
to += from.getBoolean(ordinal)
case DoubleType =>
to += from.getDouble(ordinal)
case FloatType =>
to += from.getFloat(ordinal)
case DecimalType() =>
to += from.getDecimal(ordinal)
case LongType =>
to += from.getLong(ordinal)
case ByteType =>
to += from.getByte(ordinal)
case ShortType =>
to += from.getShort(ordinal)
case DateType =>
to += from.getAs[Date](ordinal)
case TimestampType =>
to += from.getAs[Timestamp](ordinal)
case BinaryType =>
to += from.getAs[Array[Byte]](ordinal)
case _: ArrayType | _: StructType | _: MapType | _: UserDefinedType[_] =>
val hiveString = HiveResult.toHiveString((from.get(ordinal), dataTypes(ordinal)))
to += hiveString
}
}
def getNextRowSet(order: FetchOrientation, maxRowsL: Long): RowSet = {
validateDefaultFetchOrientation(order)
assertState(OperationState.FINISHED)
setHasResultSet(true)
val resultRowSet: RowSet = RowSetFactory.create(getResultSetSchema, getProtocolVersion)
// Reset iter to header when fetching start from first row
if (order.equals(FetchOrientation.FETCH_FIRST)) {
iter = if (sqlContext.getConf(SQLConf.THRIFTSERVER_INCREMENTAL_COLLECT.key).toBoolean) {
resultList = None
result.toLocalIterator.asScala
} else {
if (resultList.isEmpty) {
resultList = Some(result.collect())
}
resultList.get.iterator
}
}
if (!iter.hasNext) {
resultRowSet
} else {
// maxRowsL here typically maps to java.sql.Statement.getFetchSize, which is an int
val maxRows = maxRowsL.toInt
var curRow = 0
while (curRow < maxRows && iter.hasNext) {
val sparkRow = iter.next()
val row = ArrayBuffer[Any]()
var curCol = 0
while (curCol < sparkRow.length) {
if (sparkRow.isNullAt(curCol)) {
row += null
} else {
addNonNullColumnValue(sparkRow, row, curCol)
}
curCol += 1
}
resultRowSet.addRow(row.toArray.asInstanceOf[Array[Object]])
curRow += 1
}
resultRowSet
}
}
def getResultSetSchema: TableSchema = resultSchema
override def runInternal(): Unit = {
setState(OperationState.PENDING)
setHasResultSet(true) // avoid no resultset for async run
if (!runInBackground) {
execute()
} else {
val sparkServiceUGI = Utils.getUGI()
// Runnable impl to call runInternal asynchronously,
// from a different thread
val backgroundOperation = new Runnable() {
override def run(): Unit = {
val doAsAction = new PrivilegedExceptionAction[Unit]() {
override def run(): Unit = {
registerCurrentOperationLog()
try {
execute()
} catch {
case e: HiveSQLException =>
setOperationException(e)
log.error("Error running hive query: ", e)
}
}
}
try {
sparkServiceUGI.doAs(doAsAction)
} catch {
case e: Exception =>
setOperationException(new HiveSQLException(e))
logError("Error running hive query as user : " +
sparkServiceUGI.getShortUserName(), e)
}
}
}
try {
// This submit blocks if no background threads are available to run this operation
val backgroundHandle =
parentSession.getSessionManager().submitBackgroundOperation(backgroundOperation)
setBackgroundHandle(backgroundHandle)
} catch {
case rejected: RejectedExecutionException =>
setState(OperationState.ERROR)
throw new HiveSQLException("The background threadpool cannot accept" +
" new task for execution, please retry the operation", rejected)
case NonFatal(e) =>
logError(s"Error executing query in background", e)
setState(OperationState.ERROR)
throw e
}
}
}
private def execute(): Unit = {
statementId = UUID.randomUUID().toString
logInfo(s"Running query '$statement' with $statementId")
setState(OperationState.RUNNING)
// Always use the latest class loader provided by executionHive's state.
val executionHiveClassLoader = sqlContext.sharedState.jarClassLoader
Thread.currentThread().setContextClassLoader(executionHiveClassLoader)
HiveThriftServer2.listener.onStatementStart(
statementId,
parentSession.getSessionHandle.getSessionId.toString,
statement,
statementId,
parentSession.getUsername)
sqlContext.sparkContext.setJobGroup(statementId, statement)
val pool = sessionToActivePool.get(parentSession.getSessionHandle)
if (pool != null) {
sqlContext.sparkContext.setLocalProperty(SparkContext.SPARK_SCHEDULER_POOL, pool)
}
try {
result = sqlContext.sql(statement)
logDebug(result.queryExecution.toString())
result.queryExecution.logical match {
case SetCommand(Some((SQLConf.THRIFTSERVER_POOL.key, Some(value)))) =>
sessionToActivePool.put(parentSession.getSessionHandle, value)
logInfo(s"Setting ${SparkContext.SPARK_SCHEDULER_POOL}=$value for future statements " +
"in this session.")
case _ =>
}
HiveThriftServer2.listener.onStatementParsed(statementId, result.queryExecution.toString())
iter = {
if (sqlContext.getConf(SQLConf.THRIFTSERVER_INCREMENTAL_COLLECT.key).toBoolean) {
resultList = None
result.toLocalIterator.asScala
} else {
resultList = Some(result.collect())
resultList.get.iterator
}
}
dataTypes = result.queryExecution.analyzed.output.map(_.dataType).toArray
} catch {
case e: HiveSQLException =>
if (getStatus().getState() == OperationState.CANCELED) {
return
} else {
setState(OperationState.ERROR)
HiveThriftServer2.listener.onStatementError(
statementId, e.getMessage, SparkUtils.exceptionString(e))
throw e
}
// Actually do need to catch Throwable as some failures don't inherit from Exception and
// HiveServer will silently swallow them.
case e: Throwable =>
val currentState = getStatus().getState()
logError(s"Error executing query, currentState $currentState, ", e)
setState(OperationState.ERROR)
HiveThriftServer2.listener.onStatementError(
statementId, e.getMessage, SparkUtils.exceptionString(e))
throw new HiveSQLException(e.toString)
}
setState(OperationState.FINISHED)
HiveThriftServer2.listener.onStatementFinish(statementId)
}
override def cancel(): Unit = {
logInfo(s"Cancel '$statement' with $statementId")
cleanup(OperationState.CANCELED)
}
private def cleanup(state: OperationState) {
setState(state)
if (runInBackground) {
val backgroundHandle = getBackgroundHandle()
if (backgroundHandle != null) {
backgroundHandle.cancel(true)
}
}
if (statementId != null) {
sqlContext.sparkContext.cancelJobGroup(statementId)
}
}
}
object SparkExecuteStatementOperation {
def getTableSchema(structType: StructType): TableSchema = {
val schema = structType.map { field =>
val attrTypeString = if (field.dataType == NullType) "void" else field.dataType.catalogString
new FieldSchema(field.name, attrTypeString, field.getComment.getOrElse(""))
}
new TableSchema(schema.asJava)
}
}
| hhbyyh/spark | sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala | Scala | apache-2.0 | 11,179 |
package com.aesireanempire.eplus.network
class CommonProxy {
def init() {}
}
| darkhax/EnchantingPlus-Scala | src/main/scala/com/aesireanempire/eplus/network/CommonProxy.scala | Scala | lgpl-3.0 | 82 |
package akka.persistence.couchbase.journal
import java.util.concurrent.TimeUnit
import akka.actor.{Actor, ActorLogging}
import akka.persistence.couchbase.CouchbaseJournalConfig
import com.couchbase.client.java.Bucket
import com.couchbase.client.java.document.JsonDocument
import com.couchbase.client.java.document.json.JsonArray
import com.couchbase.client.java.view._
import rx.Observable
import rx.functions.Func1
import scala.collection.immutable.Seq
import scala.concurrent.ExecutionContext
import scala.util.{Failure, Try}
trait CouchbaseStatements extends Actor with ActorLogging {
def config: CouchbaseJournalConfig
def bucket: Bucket
implicit def executionContext: ExecutionContext
def bySequenceNr(persistenceId: String, from: Long, to: Long): ViewQuery = {
ViewQuery
.from("journal", "by_sequenceNr")
.stale(config.stale)
.startKey(JsonArray.from(persistenceId, from.asInstanceOf[AnyRef]))
.endKey(JsonArray.from(persistenceId, to.asInstanceOf[AnyRef]))
}
/**
* Adds all messages in a single atomically updated batch.
*/
def executeBatch(messages: Seq[JournalMessage]): Try[Unit] = {
nextKey(JournalMessageBatch.name).flatMap { key =>
Try {
val batch = JournalMessageBatch.create(messages)
val jsonObject = JournalMessageBatch.serialize(batch)
val jsonDocument = JsonDocument.create(key, jsonObject)
bucket.insert(
jsonDocument,
config.persistTo,
config.replicateTo,
config.timeout.toSeconds,
TimeUnit.SECONDS
)
log.debug("Wrote batch: {}", key)
} recoverWith {
case e =>
log.error(e, "Writing batch: {}", key)
Failure(e)
}
}
}
/**
* removes a batch of journal messages
*/
def deleteBatch(journalIds: Seq[String]): Try[Unit] = {
Try {
val keyBatch = journalIds.toArray
//batching mutation to remove data from the bucket
Observable.from(keyBatch)
.flatMap(new Func1[String, Observable[JsonDocument]] {
override def call(id: String): Observable[JsonDocument] = {
bucket.async().remove(id,
config.persistTo,
config.replicateTo)
}
}).last.toBlocking.single
log.debug("A batch of entries was removed!")
} recoverWith {
case e =>
log.error(e, "Deleting batch")
Failure(e)
}
}
/**
* Generates a new key with the given base name.
*
* Couchbase guarantees the key is unique within the cluster.
*/
def nextKey(name: String): Try[String] = {
Try {
val counterKey = s"counter::$name"
val counter = bucket.counter(counterKey, 1L, 0L).content()
s"$name-$counter"
}
}
}
| Product-Foundry/akka-persistence-couchbase | src/main/scala/akka/persistence/couchbase/journal/CouchbaseStatements.scala | Scala | apache-2.0 | 2,890 |
package io.argos.agent.sentinels
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}
import com.typesafe.config.ConfigFactory
import io.argos.agent.Constants._
import io.argos.agent.bean.ActorProtocol._
import io.argos.agent.bean._
import io.argos.agent.{Constants, Messages, SentinelConfiguration}
import org.scalatest._
/**
* Created by eric on 05/07/16.
*/
class TestConsistencySentinel extends TestKit(ActorSystem("TestConsistencySentinel")) with FlatSpecLike with Matchers with BeforeAndAfterAll {
val globalConfig = ConfigFactory.load()
val configJmx = globalConfig.getConfig(CONF_OBJECT_ENTRY_METRICS)
val metricsProviderProbe = TestProbe()
val notificationProbe = TestProbe()
system.eventStream.subscribe(
notificationProbe.ref,
classOf[Notification])
val rrBlockingActor = system.actorOf(Props(classOf[ReadRepairBlockingSentinel], metricsProviderProbe.ref, SentinelConfiguration("test", globalConfig.getConfig(CONF_OBJECT_ENTRY_SENTINEL_CONSISTENCY_RR_BLOCKING))))
val rrBackgroundActor = system.actorOf(Props(classOf[ReadRepairBackgroundSentinel], metricsProviderProbe.ref, SentinelConfiguration("test", globalConfig.getConfig(CONF_OBJECT_ENTRY_SENTINEL_CONSISTENCY_RR_BACKGROUND))))
override def afterAll() {
system.terminate()
}
"A notification " should " be sent on ReadrepairedBlocking tasks" in {
executeTest(rrBlockingActor, Messages.READ_REPAIR_BLOCKING)
}
"A notification " should " be sent on ReadrepairedBackground tasks" in {
executeTest(rrBackgroundActor, Messages.READ_REPAIR_BACKGROUND)
}
private def executeTest(testableActorRef : ActorRef, dmType: String): Unit = {
testableActorRef ! CheckMetrics()
metricsProviderProbe.expectMsg(MetricsRequest(ActorProtocol.ACTION_CHECK_READ_REPAIR, dmType))
metricsProviderProbe.reply(MetricsResponse(ACTION_CHECK_READ_REPAIR, Some(ReadRepairStats(dmType, 0, 0, 0, 0, 0))))
notificationProbe.expectNoMsg()
testableActorRef ! CheckMetrics()
metricsProviderProbe.expectMsg(MetricsRequest(ActorProtocol.ACTION_CHECK_READ_REPAIR, dmType))
// non zero one minute rate should trigger a notification
metricsProviderProbe.reply(MetricsResponse(ACTION_CHECK_READ_REPAIR, Some(ReadRepairStats(dmType, 1, 0.0, 0.0, 0.0, 0.1))))
val notif = notificationProbe.expectMsgAnyClassOf(classOf[Notification])
assert(notif.message.contains(dmType))
}
}
| leleueri/argos | argos-agent/src/test/scala/io/argos/agent/sentinels/TestConsistencySentinel.scala | Scala | apache-2.0 | 2,442 |
class Complex(real: Double, imaginary: Double){
//O retorno será definido automaticamente pelo compilador.
def re() = real
def im() = imaginary
override def toString() = "" + re() + (if (im() > 0) "re" else "+") + im() + "i"
/*
Métodos sem argumentos, para chamar é apenas usar c.re
def re = real
def im = imaginary
*/
}
object ComplexNumbers{
def main(args: Array[String]){
val c = new Complex(1.5,1.6)
println("imaginary part: " + c.im())
println(c.toString())
}
} | diogenesfilho/first-scala-project | Classe.scala | Scala | gpl-2.0 | 496 |
package edu.gemini.itc.baseline.util
import edu.gemini.itc.shared._
// TEMPORARY helper
// All input objects will become immutable data only objects (probably Scala case classes).
// For now we need a workaround for missing hash functions on the existing Java objects.
object Hash {
def calc(ip: InstrumentDetails): Int = ip match {
case p: AcquisitionCamParameters => calc(p)
case p: Flamingos2Parameters => calc(p)
case p: GmosParameters => calc(p)
case p: GnirsParameters => calc(p)
case p: GsaoiParameters => calc(p)
case p: MichelleParameters => calc(p)
case p: NifsParameters => calc(p)
case p: NiriParameters => calc(p)
case p: TRecsParameters => calc(p)
case _ => throw new Exception("no hash function available")
}
def calc(p: GmosParameters): Int =
hash(
p.filter.name,
p.ccdType.name,
p.fpMask.name,
p.grating.name,
f"${p.centralWavelength.toNanometers}%.0f",
p.site.name,
p.spatialBinning,
p.spectralBinning
)
def calc(p: GnirsParameters): Int =
hash(
p.grating.name,
p.pixelScale.name,
p.crossDispersed.name,
p.readMode.name,
f"${p.centralWavelength.toNanometers}%.0f",
p.slitWidth.name
)
def calc(p: GsaoiParameters): Int =
hash(
p.filter.name,
p.readMode.name,
calc(p.gems)
)
def calc(p: MichelleParameters): Int =
hash(
p.filter.name,
p.mask.name,
p.grating.name,
f"${p.centralWavelength.toNanometers}%.0f",
p.polarimetry.name
)
def calc(p: NifsParameters): Int =
hash(
p.filter.name,
p.grating.name,
p.readMode.name,
f"${p.centralWavelength.toNanometers}%.0f",
calc(p.altair)
)
def calc(p: NiriParameters): Int =
hash(
p.camera.name,
p.filter.name,
p.mask.name,
p.grism.name,
p.readMode.name,
p.wellDepth.name,
calc(p.altair)
)
def calc(p: TRecsParameters): Int =
hash(
p.filter.name,
p.mask.name,
p.grating.name,
f"${p.centralWavelength.toNanometers}%.0f",
p.instrumentWindow.name
)
def calc(p: AcquisitionCamParameters): Int =
hash(
p.colorFilter.name,
p.ndFilter.name
)
def calc(p: Flamingos2Parameters): Int =
hash(
p.filter.name,
p.mask.name,
p.grism.name,
p.readMode.name
)
def calc(odp: ObservationDetails): Int =
hash(
odp.calculationMethod,
odp.analysisMethod
)
def calc(src: SourceDefinition): Int =
hash(
src.profile,
src.distribution,
src.norm, // this is the magnitude value
src.normBand.name, // this is the magnitude band name
src.redshift.z
)
def calc(tp: TelescopeDetails): Int =
hash(
tp.getInstrumentPort.name,
tp.getMirrorCoating.name,
tp.getWFS.name
)
def calc(ocp: ObservingConditions): Int =
hash(
ocp.airmass,
ocp.iq.name,
ocp.cc.name,
ocp.wv.name,
ocp.sb.name
)
def calc(alt: Option[AltairParameters]): Int = alt match {
case Some(altair) => calc(altair)
case None => 0
}
def calc(alt: AltairParameters): Int =
hash (
f"${alt.guideStarMagnitude}%.2f",
f"${alt.guideStarSeparation}%.2f",
alt.fieldLens.name,
alt.wfsMode.name
)
def calc(alt: GemsParameters): Int =
hash(
f"${alt.avgStrehl}%.2f",
alt.strehlBand
)
def calc(pdp: PlottingDetails): Int =
hash(
f"${pdp.getPlotWaveL}%.2f",
f"${pdp.getPlotWaveU}%.2f"
)
private def hash(values: Any*) =
values.
filter(_ != null).
map(_.hashCode).
foldLeft(17)((acc, h) => 37*acc + h)
}
| arturog8m/ocs | bundle/edu.gemini.itc/src/test/scala/edu/gemini/itc/baseline/util/Hash.scala | Scala | bsd-3-clause | 3,871 |
//
// Copyright (c) 2014 Ole Krause-Sparmann
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package com.postwall.db
// Salat makes it easy to save case class objects into MongoDB using casbah
import com.novus.salat._
import com.novus.salat.global._
import com.novus.salat.dao._
// MongoDB driver
import com.mongodb.casbah.Imports._
// Contains DAO definitions for all types in the Postwall model
trait PostwallDAO {
import EntityCreation._
import Helpers._
// Get db connection (get connection string from Heroku env or use localhost)
val mongoURI = MongoClientURI(Option(System.getenv("MONGOHQ_URL")).getOrElse("mongodb://localhost:27017/postwall"))
val mongoClient = MongoClient(mongoURI)
val db = mongoClient(mongoURI.database.get)
// Collection DAOs ///////////////////////////////////////////////////////////////////////////////////////
object WallPostDAO extends SalatDAO[WallPost, ObjectId](collection = db("wallposts"))
// Actual db methods ////////////////////////////////////////////////////////////////////////////////////
// Returns all posts
def getAllPosts(): Option[List[WallPost]] = {
// Get all wall posts
return Some(WallPostDAO.find(ref = MongoDBObject()).toList)
}
// Posts to wall
def postToWall(params: WallPostParams): Option[WallPost] = {
val newPost = createPostFromParams(params)
WallPostDAO.insert(newPost)
return Some(newPost)
}
}
| pixelogik/postwall | postwall/common/src/main/scala/com/postwall/db/PostwallDAO.scala | Scala | mit | 2,450 |
package twine.macwire.config
import com.typesafe.config.{ConfigFactory, ConfigValue}
import com.typesafe.config.ConfigValueType._
import scala.collection.JavaConversions._
import scala.reflect.internal.util.ScalaClassLoader
import scala.reflect.runtime.universe._
/**
*/
object ConfigReader
{
/**
* Flattens the configuration and returns a list of (key, type) tuples representing
* all of the configuration properties and their associated types. Types are inferred
* from property values. This means that every
* by the the most specific type that can be inferred from reading the configuration.
*/
def readConfig(cl: ClassLoader): Seq[(String, TypeTag[_ <: Any])] =
{
//System.setProperty("config.trace", "loads")
// Set the ctx classloader for ConfigFactory to find the conf file in the classpath
val origLoader = ScalaClassLoader.contextLoader
ScalaClassLoader.setContext(this.getClass.getClassLoader)
val config = ConfigFactory.load
ScalaClassLoader.setContext(origLoader)
val set = for (
entry <- config.entrySet
) yield {
val cv = entry.getValue
cv.valueType match {
case STRING | NUMBER | BOOLEAN =>
(entry.getKey, primitiveType(entry.getValue))
case LIST =>
(entry.getKey, listType(entry.getValue))
case NULL =>
throw new AssertionError(
s"Did not expect NULL entry in ConfigValue.entrySet: ${cv.origin}"
)
case OBJECT =>
throw new AssertionError(
s"Did not expect OBJECT entry in ConfigValue.entrySet: ${cv.origin}"
)
}
}
set.toSeq
}
/**
* Convert the given config value to a String, Boolean, Int or Double
*/
private def primitiveType(valueHolder: ConfigValue): TypeTag[_ <: Any]
= {
val value: TypeTag[_] = valueHolder.valueType match {
case STRING => typeTag[String]
case BOOLEAN => typeTag[Boolean]
case NUMBER => valueHolder.unwrapped match {
case i: java.lang.Integer => typeTag[Int]
case d: java.lang.Double => typeTag[Double]
case _ => throw new AssertionError("Unsupported type " + valueHolder.unwrapped.getClass.getName + ": " + valueHolder)
}
case OBJECT => throw new AssertionError("Unexpected type OBJECT: " + valueHolder)
case NULL => throw new AssertionError("Unsupported type NULL: " + valueHolder)
case LIST => throw new AssertionError("Unexpected type LIST: " + valueHolder)
}
value
}
/**
* Convert the given config value list to an Array[String|Boolean|Int|Double]
*/
private def listType(value: ConfigValue): TypeTag[_ <: Seq[_ <: Any]]
= {
val list = value.unwrapped.asInstanceOf[java.util.List[Any]]
if (list.size == 0) {
typeTag[Seq[String]]
} else {
list.get(0) match {
case x: String => typeTag[Seq[String]]
case x: Boolean => typeTag[Seq[Boolean]]
case x: java.lang.Integer => typeTag[Seq[Int]]
case x: Double => typeTag[Seq[Double]]
case x =>
throw new AssertionError("Unsupported list type " + x.getClass)
}
}
}
}
| ehalpern/macwire-config | macros/src/main/scala/twine/macwire/config/ConfigReader.scala | Scala | mit | 3,156 |
import org.scalatest.{Matchers, FunSuite}
/** @version 1.1.0 */
class SpiralMatrixTest extends FunSuite with Matchers {
test("empty spiral") {
SpiralMatrix.spiralMatrix(0) should be(List())
}
test("trivial spiral") {
pending
SpiralMatrix.spiralMatrix(1) should be(List(List(1)))
}
test("spiral of size 2") {
pending
SpiralMatrix.spiralMatrix(2) should be(List(List(1, 2),
List(4, 3)))
}
test("spiral of size 3") {
pending
SpiralMatrix.spiralMatrix(3) should be(
List(List(1, 2, 3),
List(8, 9, 4),
List(7, 6, 5)))
}
test("spiral of size 4") {
pending
SpiralMatrix.spiralMatrix(4) should be(
List(List(1, 2, 3, 4),
List(12, 13, 14, 5),
List(11, 16, 15, 6),
List(10, 9, 8, 7)))
}
test("spiral of size 5") {
pending
SpiralMatrix.spiralMatrix(5) should be(
List(List(1, 2, 3, 4, 5),
List(16, 17, 18, 19, 6),
List(15, 24, 25, 20, 7),
List(14, 23, 22, 21, 8),
List(13, 12, 11, 10, 9)))
}
}
| ricemery/xscala | exercises/spiral-matrix/src/test/scala/SpiralMatrixTest.scala | Scala | mit | 1,125 |
/*
* Copyright 2015 Foundational Development
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package pro.foundev.commons.benchmarking
import org.apache.commons.lang.time.StopWatch
class BenchmarkRunner (benchmarks: Benchmark*){
def exec: Map[String, Seq[BenchmarkReport]] = {
if (benchmarks.length==0) {
throw new IllegalArgumentException("need to have some benchmarks to actually run")
}
benchmarks.map(b => {
val watch = new StopWatch()
watch.start()
b.callback.apply()
watch.stop()
val milliseconds = watch.getTime
val seconds: Double = milliseconds / 1000.0
new Tuple2[String, BenchmarkReport](b.tag, new BenchmarkReport(seconds, b.name))
}).groupBy(_._1)
.map { case (k, v) => (k, v.map(_._2)) }
}
}
| rssvihla/datastax_work | spark_commons/commons/src/main/scala/pro/foundev/commons/benchmarking/BenchmarkRunner.scala | Scala | apache-2.0 | 1,320 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package matryoshka.patterns
import matryoshka._
import scalaz._, Scalaz._
/** The pattern functor for Free. */
final case class CoEnv[E, F[_], A](run: E \/ F[A])
object CoEnv extends CoEnvInstances {
def coEnv[E, F[_], A](v: E \/ F[A]): CoEnv[E, F, A] = CoEnv(v)
def hmap[F[_], G[_], A](f: F ~> G): CoEnv[A, F, ?] ~> CoEnv[A, G, ?] =
λ[CoEnv[A, F, ?] ~> CoEnv[A, G, ?]](fa => CoEnv(fa.run.map(f(_))))
def htraverse[G[_]: Applicative, F[_], H[_], A](f: F ~> (G ∘ H)#λ): CoEnv[A, F, ?] ~> (G ∘ CoEnv[A, H, ?])#λ =
λ[CoEnv[A, F, ?] ~> (G ∘ CoEnv[A, H, ?])#λ](
_.run.traverse(f(_)).map(CoEnv(_)))
def freeIso[E, F[_]: Functor]: AlgebraIso[CoEnv[E, F, ?], Free[F, E]] =
AlgebraIso[CoEnv[E, F, ?], Free[F, E]](
coe => coe.run.fold(_.point[Free[F, ?]], Free.roll))(
fr => CoEnv(fr.fold(_.left, _.right)))
}
sealed abstract class CoEnvInstances extends CoEnvInstances0 {
implicit def equal[E: Equal, F[_]](implicit F: Delay[Equal, F]):
Delay[Equal, CoEnv[E, F, ?]] =
new Delay[Equal, CoEnv[E, F, ?]] {
def apply[α](arb: Equal[α]) = {
Equal.equal((a, b) => (a.run, b.run) match {
case (-\/(e1), -\/(e2)) => e1 ≟ e2
case (\/-(f1), \/-(f2)) => F(arb).equal(f1, f2)
case (_, _) => false
})
}
}
implicit def show[E: Show, F[_]](implicit F: Delay[Show, F]): Delay[Show, CoEnv[E, F, ?]] =
new Delay[Show, CoEnv[E, F, ?]] {
def apply[A](sh: Show[A]) =
Show.show(
_.run.fold(
e => Cord("-\\/(") ++ e.show,
fa => Cord("\\/-(") ++ F(sh).show(fa)) ++
Cord(")"))
}
// TODO: Need to have lower-prio instances of Bifoldable, with
// corresponding constraint on F.
implicit def bitraverse[F[_]: Traverse]: Bitraverse[CoEnv[?, F, ?]] =
new Bitraverse[CoEnv[?, F, ?]] {
def bitraverseImpl[G[_]: Applicative, A, B, C, D](
fab: CoEnv[A, F, B])(
f: A ⇒ G[C], g: B ⇒ G[D]) =
fab.run.bitraverse(f, _.traverse(g)).map(CoEnv(_))
}
implicit def traverse[F[_]: Traverse, E]: Traverse[CoEnv[E, F, ?]] =
bitraverse[F].rightTraverse
// TODO: write a test to ensure the two monad instances are identical
// implicit def monadCo[F[_]: Applicative: Comonad, A]: Monad[CoEnv[A, F, ?]] =
// new Monad[CoEnv[A, F, ?]] {
// def bind[B, C](fa: CoEnv[A, F, B])(f: (B) ⇒ CoEnv[A, F, C]) =
// CoEnv(fa.run >>= (fb => f(fb.copoint).run))
// def point[B](x: => B) = CoEnv(x.point[F].right)
// }
}
sealed abstract class CoEnvInstances0 {
implicit def bifunctor[F[_]: Functor]: Bifunctor[CoEnv[?, F, ?]] =
new Bifunctor[CoEnv[?, F, ?]] {
def bimap[A, B, C, D](fab: CoEnv[A, F, B])(f: A ⇒ C, g: B ⇒ D) =
CoEnv(fab.run.bimap(f, _.map(g)))
}
implicit def functor[F[_]: Functor, E]: Functor[CoEnv[E, F, ?]] =
bifunctor[F].rightFunctor
implicit def bifoldable[F[_]: Foldable]: Bifoldable[CoEnv[?, F, ?]] =
new Bifoldable[CoEnv[?, F, ?]] {
def bifoldMap[A, B, M: Monoid](fa: CoEnv[A, F, B])(f: (A) ⇒ M)(g: (B) ⇒ M) =
fa.run.fold(f, _.foldMap(g))
def bifoldRight[A, B, C](
fa: CoEnv[A, F, B], z: ⇒ C)(
f: (A, ⇒ C) ⇒ C)(
g: (B, ⇒ C) ⇒ C) =
fa.run.fold(f(_, z), _.foldRight(z)(g))
}
implicit def foldable[F[_]: Foldable, E]: Foldable[CoEnv[E, F, ?]] =
bifoldable[F].rightFoldable
// implicit def monad[F[_]: Monad: Traverse, A]: Monad[CoEnv[A, F, ?]] =
// new Monad[CoEnv[A, F, ?]] {
// def bind[B, C](fa: CoEnv[A, F, B])(f: (B) ⇒ CoEnv[A, F, C]) =
// CoEnv(fa.run >>= (_.traverse[CoEnv[A, F, ?], C](f).run.map(_.join)))
// def point[B](x: => B) = CoEnv(x.point[F].right)
// }
}
| slamdata/matryoshka | core/shared/src/main/scala/matryoshka/patterns/CoEnv.scala | Scala | apache-2.0 | 4,391 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.net.URI
private[spark] case class ApplicationDescription(
name: String,
maxCores: Option[Int],
memoryPerExecutorMB: Int,
command: Command,
appUiUrl: String,
eventLogDir: Option[URI] = None,
// short name of compression codec used when writing event logs, if any (e.g. lzf)
eventLogCodec: Option[String] = None,
coresPerExecutor: Option[Int] = None,
user: String = System.getProperty("user.name", "<unknown>")) {
override def toString: String = "ApplicationDescription(" + name + ")"
}
| chenc10/Spark-PAF | core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala | Scala | apache-2.0 | 1,376 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs105.boxes
import uk.gov.hmrc.ct.accounts.frs105.calculations.TotalNetAssetsLiabilitiesCalculator
import uk.gov.hmrc.ct.accounts.frs105.retriever.Frs105AccountsBoxRetriever
import uk.gov.hmrc.ct.accounts.validation.AssetsEqualToSharesValidator
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.box.retriever.FilingAttributesBoxValueRetriever
case class AC69(value: Option[Int]) extends CtBoxIdentifier(name = "Total net assets or liabilities (previous PoA)")
with CtOptionalInteger
with ValidatableBox[Frs105AccountsBoxRetriever with FilingAttributesBoxValueRetriever]
with AssetsEqualToSharesValidator {
override def validate(boxRetriever: Frs105AccountsBoxRetriever with FilingAttributesBoxValueRetriever): Set[CtValidation] = {
validateAssetsEqualToShares("AC69", boxRetriever.ac491(), boxRetriever.companyType().isLimitedByGuarantee)
}
}
object AC69 extends Calculated[AC69, Frs105AccountsBoxRetriever with FilingAttributesBoxValueRetriever] with TotalNetAssetsLiabilitiesCalculator {
override def calculate(boxRetriever: Frs105AccountsBoxRetriever with FilingAttributesBoxValueRetriever): AC69 = {
import boxRetriever._
calculatePreviousTotalNetAssetsLiabilities(ac63(), ac65(), ac67(), ac471())
}
}
| pncampbell/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs105/boxes/AC69.scala | Scala | apache-2.0 | 1,876 |
package com.eclipsesource.schema
import com.eclipsesource.schema.drafts.{Version4, Version7}
import com.eclipsesource.schema.test.JsonSpec
import org.specs2.mutable.Specification
class MinItemsSpec extends Specification with JsonSpec {
"minItems draft4" in {
import Version4._
implicit val validator: SchemaValidator = SchemaValidator(Some(Version4))
validate("minItems", "draft4")
}
"minItems draft7" in {
import Version7._
implicit val validator: SchemaValidator = SchemaValidator(Some(Version7))
validate("minItems", "draft7")
}
}
| eclipsesource/play-json-schema-validator | src/test/scala/com/eclipsesource/schema/MinItemsSpec.scala | Scala | apache-2.0 | 570 |
package fpinscala.answers.testing
import fpinscala.answers.state.*
import fpinscala.answers.parallelism.*
import fpinscala.answers.parallelism.Par
import java.util.concurrent.{Executors, ExecutorService}
import annotation.targetName
import Gen.*
import Prop.*
import Prop.Result.{Passed, Falsified, Proved}
opaque type Prop = (MaxSize, TestCases, RNG) => Result
object Prop:
opaque type SuccessCount = Int
object SuccessCount:
extension (x: SuccessCount) def toInt: Int = x
def fromInt(x: Int): SuccessCount = x
opaque type TestCases = Int
object TestCases:
extension (x: TestCases) def toInt: Int = x
def fromInt(x: Int): TestCases = x
opaque type MaxSize = Int
object MaxSize:
extension (x: MaxSize) def toInt: Int = x
def fromInt(x: Int): MaxSize = x
opaque type FailedCase = String
object FailedCase:
extension (f: FailedCase) def string: String = f
def fromString(s: String): FailedCase = s
enum Result:
case Passed
case Falsified(failure: FailedCase, successes: SuccessCount)
case Proved
def isFalsified: Boolean = this match
case Passed => false
case Falsified(_, _) => true
case Proved => false
/* Produce an infinite random lazy list from a `Gen` and a starting `RNG`. */
def randomLazyList[A](g: Gen[A])(rng: RNG): LazyList[A] =
LazyList.unfold(rng)(rng => Some(g.run(rng)))
def forAll[A](as: Gen[A])(f: A => Boolean): Prop = Prop {
(n, rng) => randomLazyList(as)(rng).zip(LazyList.from(0)).take(n).map {
case (a, i) =>
try
if f(a) then Passed else Falsified(a.toString, i)
catch
case e: Exception => Falsified(buildMsg(a, e), i)
}.find(_.isFalsified).getOrElse(Passed)
}
@targetName("forAllSized")
def forAll[A](g: SGen[A])(f: A => Boolean): Prop =
(max, n, rng) =>
val casesPerSize = (n.toInt - 1) / max.toInt + 1
val props: LazyList[Prop] =
LazyList.from(0).take((n.toInt min max.toInt) + 1).map(i => forAll(g(i))(f))
val prop: Prop =
props.map[Prop](p => (max, n, rng) => p(max, casesPerSize, rng)).toList.reduce(_ && _)
prop(max, n, rng)
// String interpolation syntax. A string starting with `s"` can refer to
// a Scala value `v` as `$v` or `${v}` in the string.
// This will be expanded to `v.toString` by the Scala compiler.
def buildMsg[A](s: A, e: Exception): String =
s"test case: $s\n" +
s"generated an exception: ${e.getMessage}\n" +
s"stack trace:\n ${e.getStackTrace.mkString("\n")}"
def apply(f: (TestCases, RNG) => Result): Prop =
(_, n, rng) => f(n, rng)
extension (self: Prop)
def &&(that: Prop): Prop =
(max, n, rng) => self.tag("and-left")(max, n, rng) match
case Passed | Proved => that.tag("and-right")(max, n, rng)
case x => x
def ||(that: Prop): Prop =
(max, n, rng) => self.tag("or-left")(max, n, rng) match
// In case of failure, run the other prop.
case Falsified(msg, _) => that.tag("or-right").tag(msg.string)(max, n, rng)
case x => x
/* This is rather simplistic - in the event of failure, we simply wrap
* the failure message with the given message.
*/
def tag(msg: String): Prop =
(max, n, rng) => self(max, n, rng) match
case Falsified(e, c) => Falsified(FailedCase.fromString(s"$msg($e)"), c)
case x => x
def run(maxSize: MaxSize = 100,
testCases: TestCases = 100,
rng: RNG = RNG.Simple(System.currentTimeMillis)): Unit =
self(maxSize, testCases, rng) match
case Falsified(msg, n) =>
println(s"! Falsified after $n passed tests:\n $msg")
case Passed =>
println(s"+ OK, passed $testCases tests.")
case Proved =>
println(s"+ OK, proved property.")
val executor: ExecutorService = Executors.newCachedThreadPool
val p1 = Prop.forAll(Gen.unit(Par.unit(1)))(pi =>
pi.map(_ + 1).run(executor).get == Par.unit(2).run(executor).get)
def check(p: => Boolean): Prop =
(_, _, _) => if p then Passed else Falsified("()", 0)
val p2 = check {
val p = Par.unit(1).map(_ + 1)
val p2 = Par.unit(2)
p.run(executor).get == p2.run(executor).get
}
def equal[A](p: Par[A], p2: Par[A]): Par[Boolean] =
p.map2(p2)(_ == _)
val p3 = check {
equal(
Par.unit(1).map(_ + 1),
Par.unit(2)
).run(executor).get
}
val executors: Gen[ExecutorService] = weighted(
choose(1,4).map(Executors.newFixedThreadPool) -> .75,
unit(Executors.newCachedThreadPool) -> .25) // `a -> b` is syntax sugar for `(a, b)`
def forAllPar[A](g: Gen[A])(f: A => Par[Boolean]): Prop =
forAll(executors ** g)((s, a) => f(a).run(s).get)
def checkPar(p: Par[Boolean]): Prop =
forAllPar(Gen.unit(()))(_ => p)
def forAllPar2[A](g: Gen[A])(f: A => Par[Boolean]): Prop =
forAll(executors ** g)((s, a) => f(a).run(s).get)
def forAllPar3[A](g: Gen[A])(f: A => Par[Boolean]): Prop =
forAll(executors ** g) { case s ** a => f(a).run(s).get }
val gpy: Gen[Par[Int]] = Gen.choose(0, 10).map(Par.unit(_))
val p4 = forAllPar(gpy)(py => equal(py.map(y => y), py))
val gpy2: Gen[Par[Int]] = choose(-100, 100).listOfN(choose(0, 20)).map(ys =>
ys.foldLeft(Par.unit(0))((p, y) =>
Par.fork(p.map2(Par.unit(y))(_ + _))))
extension [A](self: List[A]) def parTraverse[B](f: A => Par[B]): Par[List[B]] =
self.foldRight(Par.unit(Nil: List[B]))((a, pacc) => Par.fork(f(a).map2(pacc)(_ :: _)))
val gpy3: Gen[Par[Int]] =
choose(-100, 100).listOfN(choose(0, 20)).map(ys =>
ys.parTraverse(Par.unit).map(_.sum))
val forkProp = Prop.forAllPar(gpy2)(y => equal(Par.fork(y), y))
end Prop
opaque type Gen[+A] = State[RNG, A]
object Gen:
extension [A](self: Gen[A])
def map[B](f: A => B): Gen[B] =
State.map(self)(f)
def map2[B,C](that: Gen[B])(f: (A, B) => C): Gen[C] =
State.map2(self)(that)(f)
def flatMap[B](f: A => Gen[B]): Gen[B] =
State.flatMap(self)(f)
/* A method alias for the function we wrote earlier. */
def listOfN(size: Int): Gen[List[A]] =
Gen.listOfN(size, self)
/* A version of `listOfN` that generates the size to use dynamically. */
def listOfN(size: Gen[Int]): Gen[List[A]] =
size.flatMap(listOfN)
def list: SGen[List[A]] =
n => listOfN(n)
def nonEmptyList: SGen[List[A]] =
n => listOfN(n.max(1))
def unsized: SGen[A] = _ => self
@targetName("product")
def **[B](gb: Gen[B]): Gen[(A, B)] =
map2(gb)((_, _))
def apply[A](s: State[RNG, A]): Gen[A] = s
def unit[A](a: => A): Gen[A] =
State.unit(a)
val boolean: Gen[Boolean] =
State(RNG.boolean)
def choose(start: Int, stopExclusive: Int): Gen[Int] =
State(RNG.nonNegativeInt).map(n => start + n % (stopExclusive - start))
def listOfN[A](n: Int, g: Gen[A]): Gen[List[A]] =
State.sequence(List.fill(n)(g))
def listOfN_1[A](n: Int, g: Gen[A]): Gen[List[A]] =
List.fill(n)(g).foldRight(unit(List[A]()))((a, b) => a.map2(b)(_ :: _))
val double: Gen[Double] = Gen(State(RNG.double))
val int: Gen[Int] = Gen(State(RNG.int))
def choose(i: Double, j: Double): Gen[Double] =
State(RNG.double).map(d => i + d * (j - i))
/* Basic idea is to add 1 to the result of `choose` if it is of the wrong
* parity, but we require some special handling to deal with the maximum
* integer in the range.
*/
def even(start: Int, stopExclusive: Int): Gen[Int] =
choose(start, if stopExclusive % 2 == 0 then stopExclusive - 1 else stopExclusive).
map(n => if n % 2 != 0 then n + 1 else n)
def odd(start: Int, stopExclusive: Int): Gen[Int] =
choose(start, if stopExclusive % 2 != 0 then stopExclusive - 1 else stopExclusive).
map(n => if n % 2 == 0 then n + 1 else n)
def sameParity(from: Int, to: Int): Gen[(Int, Int)] =
for
i <- choose(from, to)
j <- if i % 2 == 0 then even(from, to) else odd(from, to)
yield (i, j)
def union[A](g1: Gen[A], g2: Gen[A]): Gen[A] =
boolean.flatMap(b => if b then g1 else g2)
def weighted[A](g1: (Gen[A], Double), g2: (Gen[A], Double)): Gen[A] =
/* The probability we should pull from `g1`. */
val g1Threshold = g1._2.abs / (g1._2.abs + g2._2.abs)
State(RNG.double).flatMap(d => if d < g1Threshold then g1._1 else g2._1)
/* Not the most efficient implementation, but it's simple.
* This generates ASCII strings.
*/
def stringN(n: Int): Gen[String] =
listOfN(n, choose(0, 127)).map(_.map(_.toChar).mkString)
val string: SGen[String] = SGen(stringN)
val smallInt = Gen.choose(-10, 10)
val maxProp = Prop.forAll(smallInt.list) { l =>
val max = l.max
l.forall(_ <= max)
}
val maxProp1 = Prop.forAll(smallInt.nonEmptyList) { l =>
val max = l.max
l.forall(_ <= max)
}
val sortedProp = Prop.forAll(smallInt.list) { l =>
val ls = l.sorted
val ordered = l.isEmpty || ls.zip(ls.tail).forall { (a, b) => a <= b }
ordered && l.forall(ls.contains) && ls.forall(l.contains)
}
object ** :
def unapply[A, B](p: (A, B)) = Some(p)
def genStringIntFn(g: Gen[Int]): Gen[String => Int] =
g.map(i => s => i)
def genStringFn[A](g: Gen[A]): Gen[String => A] =
State[RNG, String => A] { rng =>
val (seed, rng2) = rng.nextInt // we still use `rng` to produce a seed, so we get a new function each time
val f = (s: String) => g.run(RNG.Simple(seed.toLong ^ s.hashCode.toLong))._1
(f, rng2)
}
end Gen
opaque type SGen[+A] = Int => Gen[A]
object SGen:
extension [A](self: SGen[A])
def apply(n: Int): Gen[A] = self(n)
def map[B](f: A => B): SGen[B] =
self(_).map(f)
def flatMap[B](f: A => SGen[B]): SGen[B] =
n => self(n).flatMap(f(_)(n))
def **[B](s2: SGen[B]): SGen[(A, B)] =
n => Gen.**(apply(n))(s2(n))
def apply[A](f: Int => Gen[A]): SGen[A] = f
opaque type Cogen[-A] = (A, RNG) => RNG
object Cogen:
def fn[A, B](in: Cogen[A], out: Gen[B]): Gen[A => B] =
State[RNG, A => B] { rng =>
val (seed, rng2) = rng.nextInt
val f = (a: A) => out.run(in(a, rng2))._1
(f, rng2)
}
def cogenInt: Cogen[Int] = (i, rng) =>
val (seed, rng2) = rng.nextInt
RNG.Simple(seed.toLong ^ i.toLong)
// We can now write properties that depend on arbitrary functions
def takeWhilePropInt =
forAll(Gen.int.list ** fn(cogenInt, Gen.boolean).unsized)((ys, f) => ys.takeWhile(f).forall(f))
// And we can further generalize those properties to be parameterized by types which are not relevant
def takeWhileProp[A](ga: Gen[A], ca: Cogen[A]) =
forAll(ga.list ** fn(ca, Gen.boolean).unsized)((ys, f) => ys.takeWhile(f).forall(f))
| fpinscala/fpinscala | src/main/scala/fpinscala/answers/testing/Gen.scala | Scala | mit | 10,724 |
// Databricks notebook source
// MAGIC %md
// MAGIC ScaDaMaLe Course [site](https://lamastex.github.io/scalable-data-science/sds/3/x/) and [book](https://lamastex.github.io/ScaDaMaLe/index.html)
// COMMAND ----------
// MAGIC %md
// MAGIC # Extending spark.graphx.lib.ShortestPaths to GraphXShortestWeightedPaths
// MAGIC
// MAGIC ### 2016-2020, Ivan Sadikov and Raazesh Sainudiin
// MAGIC
// MAGIC We extend Shortest Paths algorithm in Spark's GraphX Library to allow for user-specified edge-weights as an edge attribute.
// MAGIC
// MAGIC This is part of *Project MEP: Meme Evolution Programme* and supported by databricks academic partners program.
// MAGIC
// MAGIC The analysis is available in the following databricks notebook:
// MAGIC * [http://lamastex.org/lmse/mep/src/GraphXShortestWeightedPaths.html](http://lamastex.org/lmse/mep/src/GraphXShortestWeightedPaths.html)
// MAGIC
// MAGIC
// MAGIC ```
// MAGIC Copyright 2016 Ivan Sadikov and Raazesh Sainudiin
// MAGIC
// MAGIC Licensed under the Apache License, Version 2.0 (the "License");
// MAGIC you may not use this file except in compliance with the License.
// MAGIC You may obtain a copy of the License at
// MAGIC
// MAGIC http://www.apache.org/licenses/LICENSE-2.0
// MAGIC
// MAGIC Unless required by applicable law or agreed to in writing, software
// MAGIC distributed under the License is distributed on an "AS IS" BASIS,
// MAGIC WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// MAGIC See the License for the specific language governing permissions and
// MAGIC limitations under the License.
// MAGIC ```
// COMMAND ----------
// MAGIC %md
// MAGIC ### Let's modify shortest paths algorithm to allow for user-specified edge-weights
// MAGIC Update shortest paths algorithm to work over edge attribute of edge-weights as Double, key concepts are:
// MAGIC - we increment map with delta, which is `edge.attr`
// MAGIC - edge attribute is anything numeric, tested on Double
// MAGIC - infinity value is not infinity, but `Integer.MAX_VALUE`
// MAGIC
// MAGIC Modifying the following code:
// MAGIC * https://github.com/apache/spark/blob/master/graphx/src/main/scala/org/apache/spark/graphx/lib/ShortestPaths.scala
// MAGIC
// MAGIC Explained here:
// MAGIC * http://note.yuhc.me/2015/03/graphx-pregel-shortest-path/
// COMMAND ----------
import scala.reflect.ClassTag
import org.apache.spark.graphx._
/**
* Computes shortest weighted paths to the given set of landmark vertices, returning a graph where each
* vertex attribute is a map containing the shortest-path distance to each reachable landmark.
* Currently supports only Graph of [VD, Double], where VD is an arbitrary vertex type.
*/
object GraphXShortestWeightedPaths extends Serializable {
/** Stores a map from the vertex id of a landmark to the distance to that landmark. */
type SPMap = Map[VertexId, Double]
// initial and infinity values, use to relax edges
private val INITIAL = 0.0
private val INFINITY = Int.MaxValue.toDouble
private def makeMap(x: (VertexId, Double)*) = Map(x: _*)
private def incrementMap(spmap: SPMap, delta: Double): SPMap = {
spmap.map { case (v, d) => v -> (d + delta) }
}
private def addMaps(spmap1: SPMap, spmap2: SPMap): SPMap = {
(spmap1.keySet ++ spmap2.keySet).map {
k => k -> math.min(spmap1.getOrElse(k, INFINITY), spmap2.getOrElse(k, INFINITY))
}.toMap
}
// at this point it does not really matter what vertex type is
def run[VD](graph: Graph[VD, Double], landmarks: Seq[VertexId]): Graph[SPMap, Double] = {
val spGraph = graph.mapVertices { (vid, attr) =>
// initial value for itself is 0.0 as Double
if (landmarks.contains(vid)) makeMap(vid -> INITIAL) else makeMap()
}
val initialMessage = makeMap()
def vertexProgram(id: VertexId, attr: SPMap, msg: SPMap): SPMap = {
addMaps(attr, msg)
}
def sendMessage(edge: EdgeTriplet[SPMap, Double]): Iterator[(VertexId, SPMap)] = {
val newAttr = incrementMap(edge.dstAttr, edge.attr)
if (edge.srcAttr != addMaps(newAttr, edge.srcAttr)) Iterator((edge.srcId, newAttr))
else Iterator.empty
}
Pregel(spGraph, initialMessage)(vertexProgram, sendMessage, addMaps)
}
}
println("Usage: val result = GraphXShortestWeightedPaths.run(graph, Seq(4L, 0L, 9L))")
// COMMAND ----------
// MAGIC %md
// MAGIC ### Generate test graph
// MAGIC Generate simple graph with double weights for edges
// COMMAND ----------
import scala.util.Random
import org.apache.spark.graphx.{Graph, VertexId}
import org.apache.spark.graphx.util.GraphGenerators
// A graph with edge attributes containing distances
val graph: Graph[Long, Double] = GraphGenerators.logNormalGraph(sc, numVertices = 10, seed=123L).mapEdges { e =>
// to make things nicer we assign 0 distance to itself
if (e.srcId == e.dstId) 0.0 else Random.nextDouble()
}
// COMMAND ----------
val landMarkVertexIds = Seq(4L, 0L, 9L)
val result = GraphXShortestWeightedPaths.run(graph, landMarkVertexIds)
// COMMAND ----------
// Found shortest paths
println(result.vertices.collect.mkString("\\n"))
// COMMAND ----------
// edges with weights, make sure to check couple of shortest paths from above
display(result.edges.toDF)
// COMMAND ----------
display(graph.edges.toDF) // this is the directed weighted edge of the graph
// COMMAND ----------
// now let us collect the shortest distance between every vertex and every landmark vertex
// to manipulate scala maps that are vertices of the result see: http://docs.scala-lang.org/overviews/collections/maps.html
// a quick point: http://stackoverflow.com/questions/28769367/scala-map-a-map-to-list-of-tuples
val shortestDistsVertex2Landmark = result.vertices.flatMap(GxSwpSPMap => {
GxSwpSPMap._2.toSeq.map(x => (GxSwpSPMap._1, x._1, x._2)) // to get triples: vertex, landmarkVertex, shortest_distance
})
// COMMAND ----------
shortestDistsVertex2Landmark.collect.mkString("\\n")
// COMMAND ----------
// MAGIC %md
// MAGIC #### Let's make a DataFrame for visualizing pairwise matrix plots
// MAGIC
// MAGIC We want to make 4 columns in this example as follows (note actual values change for each realisation of graph!):
// MAGIC
// MAGIC ```
// MAGIC landmark_Id1 ("0"), landmarkID2 ("4"), landmarkId3 ("9"), srcVertexId
// MAGIC ------------------------------------------------------------------------
// MAGIC 0.0, 0.7425.., 0.8718, 0
// MAGIC 0.924..., 1.2464.., 1.0472, 1
// MAGIC ...
// MAGIC ```
// COMMAND ----------
// http://alvinalexander.com/scala/how-to-sort-map-in-scala-key-value-sortby-sortwith
// we need this to make sure that the maps are ordered by the keys for ensuring unique column values
import scala.collection.immutable.ListMap
import sqlContext.implicits._
// COMMAND ----------
// recall our landmark vertices in landMarkVertexIds. let's use their Strings for names
val unorderedNamedLandmarkVertices = landMarkVertexIds.map(id => (id, id.toString) )
val orderedNamedLandmarkVertices = ListMap(unorderedNamedLandmarkVertices.sortBy(_._1):_*)
val orderedLandmarkVertexNames = orderedNamedLandmarkVertices.toSeq.map(x => x._2)
orderedLandmarkVertexNames.mkString(", ")
// COMMAND ----------
// this is going to be our column names
val columnNames:Seq[String] = orderedLandmarkVertexNames :+ "srcVertexId"
// COMMAND ----------
// a case class to make a data-frame quickly from the result
case class SeqOfDoublesAndsrcVertexId(shortestDistances: Seq[Double], srcVertexId: VertexId)
// COMMAND ----------
val shortestDistsSeqFromVertex2Landmark2DF = result.vertices.map(GxSwpSPMap => {
//GxSwpSPMap._2.toSeq.map(x => (GxSwpSPMap._1, x._1, x._2)) // from before to get triples: vertex, landmarkVertex, shortest_distance
val v = GxSwpSPMap._1
val a = ListMap(GxSwpSPMap._2.toSeq.sortBy(_._1):_*).toSeq.map(x => x._2)
val d = (a,v)
d
}).map(x => SeqOfDoublesAndsrcVertexId(x._1, x._2)).toDF()
// COMMAND ----------
display(shortestDistsSeqFromVertex2Landmark2DF) // but this dataframe needs the first column exploded into 3 columns
// COMMAND ----------
// MAGIC %md
// MAGIC Now we want to make separate columns for each distance in the Sequence in column 'shortestDistances'.
// MAGIC
// MAGIC Let us use the following ideas for this:
// MAGIC * https://databricks-prod-cloudfront.cloud.databricks.com/public/4027ec902e239c93eaaa8714f173bcfc/3741049972324885/2662535171379268/4413065072037724/latest.html
// COMMAND ----------
// this is from https://databricks-prod-cloudfront.cloud.databricks.com/public/4027ec902e239c93eaaa8714f173bcfc/3741049972324885/2662535171379268/4413065072037724/latest.html
import org.apache.spark.sql.{Column, DataFrame}
import org.apache.spark.sql.functions.{lit, udf}
// UDF to extract i-th element from array column
//val elem = udf((x: Seq[Int], y: Int) => x(y))
val elem = udf((x: Seq[Double], y: Int) => x(y)) // modified for Sequence of Doubles
// Method to apply 'elem' UDF on each element, requires knowing length of sequence in advance
def split(col: Column, len: Int): Seq[Column] = {
for (i <- 0 until len) yield { elem(col, lit(i)).as(s"$col($i)") }
}
// Implicit conversion to make things nicer to use, e.g.
// select(Column, Seq[Column], Column) is converted into select(Column*) flattening sequences
implicit class DataFrameSupport(df: DataFrame) {
def select(cols: Any*): DataFrame = {
var buffer: Seq[Column] = Seq.empty
for (col <- cols) {
if (col.isInstanceOf[Seq[_]]) {
buffer = buffer ++ col.asInstanceOf[Seq[Column]]
} else {
buffer = buffer :+ col.asInstanceOf[Column]
}
}
df.select(buffer:_*)
}
}
// COMMAND ----------
val shortestDistsFromVertex2Landmark2DF = shortestDistsSeqFromVertex2Landmark2DF.select(split($"shortestDistances", 3), $"srcVertexId")
// COMMAND ----------
display(shortestDistsFromVertex2Landmark2DF)
// COMMAND ----------
// now let's give it our names based on the landmark vertex Ids
val shortestDistsFromVertex2Landmark2DF = shortestDistsSeqFromVertex2Landmark2DF.select(split($"shortestDistances", 3), $"srcVertexId").toDF(columnNames:_*)
// COMMAND ----------
display(shortestDistsFromVertex2Landmark2DF)
// COMMAND ----------
display(shortestDistsFromVertex2Landmark2DF.select($"0",$"4",$"9"))
// COMMAND ----------
| lamastex/scalable-data-science | _sds/3/x/db/000_2-sds-3-x-ml/998_EX_01_GraphXShortestWeightedPaths.scala | Scala | unlicense | 10,403 |
/*
* Copyright 2013 Michael Krolikowski
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mkroli.dss
import com.typesafe.config.ConfigFactory
trait ConfigurationComponent {
lazy val config = ConfigFactory.load("dss").withFallback(ConfigFactory.load("default"))
}
| mkroli/domain-search-system | src/main/scala/com/github/mkroli/dss/ConfigurationComponent.scala | Scala | apache-2.0 | 799 |
package com.programmaticallyspeaking.ncd.chrome.domains
import com.programmaticallyspeaking.ncd.host.ScriptHost
object Network {
// Network.enable takes parameters (optional), so needs to be a custom class.
case class enable()
// Exists to make VSCode happy.
case class setCacheDisabled(cacheDisabled: Boolean)
}
/**
* Exists only to satisfy VSCode. Without it, VSCode fails to attach.
*/
class Network(scriptHost: ScriptHost, eventEmitHook: EventEmitHook) extends DomainActor(scriptHost, eventEmitHook) {
override protected def isEnable = {
case Network.enable() => // ok
}
override protected def handle = {
case Network.setCacheDisabled(_) => // noop
}
}
| provegard/ncdbg | src/main/scala/com/programmaticallyspeaking/ncd/chrome/domains/Network.scala | Scala | bsd-3-clause | 690 |
import dstructures._
import org.scalatest._
class FeatureMapSpec extends FunSpec with Matchers {
describe("FeatureMap") {
it("should have default length of 0") {
val ri = new FeatureMap("ssa")
ri.length() should be (0)
}
it("should return key") {
val ri = new FeatureMap("ssa")
ri.key should be ("ssa")
}
it("should allow adding strings") {
val ri = new FeatureMap("ssa")
ri.addString("Kissa")
ri.addString("Kassa")
ri.length() should be (2)
}
it("should return added strings") {
val ri = new FeatureMap("ssa")
ri.addString("Kissa")
ri.addString("Kassa")
ri.getStrings().length should be (2)
}
it("should calculate feature count") {
val ri = new FeatureMap("ssa")
ri.featureCount("kissa") should be (7)
ri.featureCount("hiilihappo") should be (12)
ri.featureCount("p") should be (3)
}
it("should return added strings of length n") {
val ri = new FeatureMap("ssa")
ri.addString("Kissa")
ri.addString("Kassa")
ri.addString("Puuhassa")
ri.getStringsOfLength(7).length should be (2)
}
}
describe("CountMap") {
it("should have default count of 0") {
val ri = new CountMap("kissa")
ri.count should be (0)
ri.value should be (0)
}
it("should have allow setting initial count") {
val ri = new CountMap("kissa", 8)
ri.count should be (8)
ri.value should be (8)
}
it("should return key") {
val ri = new CountMap("kissa")
ri.key should be ("kissa")
}
it("should allow increasing counter") {
val ri = new CountMap("kissa")
ri.count should be (0)
ri ++;
ri.count should be (1)
ri ++;
ri.count should be (2)
}
it("should return true if two CountMaps are same") {
val ri = new CountMap("kissa", 3)
val rj = new CountMap("kissa", 3)
ri == rj should be (true)
rj ++;
ri ++;
ri == rj should be (true)
}
it("should return false if two CountMaps are different") {
val ri = new CountMap("kissa", 3)
val rj = new CountMap("kissa", 3)
val rk = new CountMap("koira", 3)
ri == rj should be (true)
rj ++;
ri == rj should be (false)
ri == rk should be (false)
}
}
} | theikkila/fuzzydb | src/test/scala/FeatureMapSpec.scala | Scala | apache-2.0 | 2,596 |
package scwebapp.header
import scwebapp.HeaderType
import scwebapp.data.*
import scwebapp.format.*
import scparse.ng.text.*
object ContentRange extends HeaderType[ContentRange] {
val key = "Content-Range"
def parse(it:String):Option[ContentRange] =
parsers.finished.parseString(it).toOption
def unparse(it:ContentRange):String =
ContentRangeValue unparse it.value
private object parsers {
import HttpParsers.*
val value:TextParser[ContentRange] = ContentRangeValue.parser map ContentRange.apply
val finished:TextParser[ContentRange] = value finishRight LWSP
}
}
final case class ContentRange(value:ContentRangeValue)
| ritschwumm/scwebapp | modules/core/src/main/scala/scwebapp/header/ContentRange.scala | Scala | bsd-2-clause | 640 |
package com.blackboxsociety.util
import java.util.concurrent._
import scalaz.concurrent._
import scalaz.concurrent.Task._
import scalaz.effect._
object Concurrency {
val pool: ExecutorService = Executors.newFixedThreadPool(32)
def forkIO(n: IO[Unit]): IO[Unit] = IO {
pool.execute(new Runnable {
def run(): Unit = n.unsafePerformIO()
})
}
def forkIO(n: Task[Unit]): Task[Unit] = now {
n.runAsync({ _ => Unit })
}
def forkForever(n: Task[Unit]): Task[Unit] = now {
n.runAsync({ _ =>
forkForever(n)
})
}
}
| blackboxsociety/blackbox-core | src/main/scala/com/blackboxsociety/util/Concurrency.scala | Scala | mit | 557 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.job.local;
import org.apache.samza.coordinator.JobCoordinator
import org.junit.Assert._
import org.junit.Test
import org.apache.samza.job.ApplicationStatus
import org.apache.samza.job.CommandBuilder
import scala.collection.JavaConversions._
class TestProcessJob {
@Test
def testProcessJobShouldFinishOnItsOwn {
val commandBuilder = new CommandBuilder {
override def buildCommand = "sleep 1"
override def buildEnvironment = Map[String, String]()
}
val coordinator = new MockJobCoordinator()
val job = new ProcessJob(commandBuilder, coordinator)
job.submit
job.waitForFinish(999999)
}
@Test
def testProcessJobKillShouldWork {
val commandBuilder = new CommandBuilder {
override def buildCommand = "sleep 999999999"
override def buildEnvironment = Map[String, String]()
}
val coordinator = new MockJobCoordinator()
val job = new ProcessJob(commandBuilder, coordinator)
job.submit
job.waitForFinish(500)
job.kill
job.waitForFinish(999999)
assertTrue(coordinator.stopped)
assertEquals(ApplicationStatus.UnsuccessfulFinish, job.waitForFinish(999999999))
}
}
class MockJobCoordinator extends JobCoordinator(null, null) {
var stopped: Boolean = false
override def start: Unit = { }
override def stop: Unit = {
stopped = true;
}
}
| guozhangwang/samza | samza-core/src/test/scala/org/apache/samza/job/local/TestProcessJob.scala | Scala | apache-2.0 | 2,173 |
package controllers
import akka.actor.ActorSystem
import akka.util.Timeout
import play.modules.reactivemongo.MongoController
import reactivemongo.api.DB
import utils.QueryStringParser
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import akka.pattern.ask
import play.api.libs.ws.WS
import play.api.mvc._
import play.api.libs.json.{Json, JsValue, JsSuccess, JsError}
import play.api.Play.current
import models.{User, FacebookProfile}
import services.{UserService, JWTService}
import services.JWTService.Generate
import services.UserService.CreateOrMergeUser
object Authentication extends Controller with MongoController {
val system: ActorSystem = ActorSystem("authentication")
implicit val timeout = Timeout.intToTimeout(60 * 1000) // Long timeout because we do two operation over internet
val userService = system.actorOf(UserService.props(db))
val jwtService = system.actorOf(JWTService.props)
case class AuthData(redirectUri: String, code: String)
object AuthData {
implicit val fmt = Json.format[AuthData]
def fromRequest(implicit request: Request[JsValue]) =
request.body.validate[AuthData]
}
def index = Action {
Ok(views.html.index("Your new application is ready."))
}
def facebook() = Action.async(parse.json) { implicit request =>
val clientId = "your-facebook-app-id"
val clientSecret = "your-facebook-app-secret"
val accessTokenUrl = "https://graph.facebook.com/oauth/access_token"
val graphApiUrl = "https://graph.facebook.com/me"
AuthData.fromRequest match {
case JsSuccess(data, _) =>
// Step 1. Exchange authorization code for access token.
val xx =
WS.url(accessTokenUrl)
.withQueryString(
"redirect_uri" -> data.redirectUri,
"code" -> data.code,
"client_id" -> clientId,
"client_secret" -> clientSecret
).get
xx onComplete {
case x =>
println(s"xx= $x")
println(s"xx.body= ${x.get.body}")
}
val accessTokenData =
xx.map(_.body)
.map(QueryStringParser.parse)
.map(_.get)
accessTokenData onComplete {
case x => println(s"accessTokenData= $x")
}
val accessToken = accessTokenData.map(_("access_token"))
// Step 2. Retrieve information about the current user.
val profile = accessToken.flatMap{ t =>
WS.url(graphApiUrl)
.withQueryString("access_token" -> t)
.get
}.map(_.json.validate[FacebookProfile]).map(_.get)
profile onComplete {
case x => println(s"profile= $x")
}
// Step 3. update/merge/create our data and fetch user
val user = profile.flatMap(userService ? CreateOrMergeUser(_)).mapTo[User]
user onComplete {
case x => println(s"user= $x")
}
// Step 4. Generate JWT and send it back to client
val token = user.flatMap(jwtService ? Generate(_)).mapTo[String]
token onComplete {
case x => println(s"token= $x")
}
token map { t =>
Created(Json.obj("token" -> t))
} recover {
case e =>
println(e)
InternalServerError
}
case e: JsError =>
Future successful BadRequest(JsError.toFlatJson(e))
}
}
def foursquare() = Action.async(parse.json) { implicit request =>
???
}
def google() = Action.async(parse.json) { implicit request =>
???
}
def github() = Action.async(parse.json) { implicit request =>
???
}
def linkedin() = Action.async(parse.json) { implicit request =>
???
}
def twitter() = Action.async(parse.json) { implicit request =>
???
}
}
| tabdulradi/satellizer-play-example | app/controllers/Authentication.scala | Scala | mit | 3,815 |
package controllers
import enums.RoleEnum
import play.api.mvc._
import scala.concurrent.Future
import play.api.cache.Cache
import models.conf.{UserHelper, User}
import enums.RoleEnum._
case class RequestWithUser[A](val user: User, request: Request[A]) extends WrappedRequest[A](request)
/**
* 基础controller
*
* @author of546
*/
trait BaseController extends Controller with Security {
val ALogger = play.Logger.of("ActionLog")
def AuthAction[A](implicit role: Role = RoleEnum.user) = new ActionBuilder[({ type R[A] = RequestWithUser[A] })#R] {
def invokeBlock[A](request: Request[A], block: (RequestWithUser[A]) => Future[Result]) = {
authenticate(request, block)
}
}
private def authenticate[A](request: Request[A], block: (RequestWithUser[A]) => Future[Result])(implicit role: Role) = {
val maybeToken = request.headers.get(AuthTokenHeader).orElse(request.getQueryString(AuthTokenUrlKey))
maybeToken flatMap { token =>
Cache.getAs[String](token) map { jobNo =>
UserHelper.findByJobNo(jobNo) match {
case Some(user) if (user.role == RoleEnum.admin || role == user.role) => block(new RequestWithUser[A](user, request))
case _ => Future.successful(Forbidden)
}
}
} getOrElse Future.successful(Unauthorized)
}
// 页面返回
import play.api.libs.json.Json
val _Success = Json.obj("r" -> "ok")
val _Fail = Json.obj("r" -> "error")
val _Exist = Json.obj("r" -> "exist")
val _None = Json.obj("r" -> "none")
def resultUnique(data: String) = _Exist.+("u", Json.toJson(data))
}
| fengshao0907/bugatti | app/controllers/BaseController.scala | Scala | bsd-2-clause | 1,587 |
package sorm.test.features
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.matchers.ShouldMatchers
import sorm._
import sorm.test.MultiInstanceSuite
@RunWith(classOf[JUnitRunner])
class InFilterTest extends FunSuite with ShouldMatchers with MultiInstanceSuite {
import InFilterTest._
def entities = Set() + Entity[A]()
instancesAndIds foreach { case (db, dbId) =>
val a1 = db.save(A(1))
val a2 = db.save(A(2))
val a3 = db.save(A(3))
test(dbId + " - empty value"){
db.query[A].whereIn("a", Seq()).fetchOne().should(equal(None))
}
test(dbId + " - valid value"){
db.query[A].whereIn("a", Seq(2)).fetchOne().should(equal(Some(a2)))
}
}
}
object InFilterTest {
case class A ( a : Int ) extends Persistable
} | cllu/sorm2 | src/test/scala/sorm/test/features/InFilterTest.scala | Scala | mit | 836 |
package com.stulsoft.scala.chart
import de.sciss.chart.module.Charting
/**
* @author Yuriy Stul.
*/
object MyChartApp extends App with Charting {
val data = for (i <- 1 to 5) yield (i, i)
val chart = XYLineChart(data)
chart.title = "The title"
chart.show("Test")
chart.saveAsPNG("/tmp/chart.png")
}
| ysden123/poc | scala-chart/src/main/scala/com/stulsoft/scala/chart/MyChartApp.scala | Scala | mit | 315 |
package spire.math
import scala.{specialized => spec}
import spire.macrosk.Ops
import spire.algebra.{ Trig, IsReal }
trait ConvertableTo[@spec A] {
def fromByte(n: Byte): A
def fromShort(n: Short): A
def fromInt(n: Int): A
def fromLong(n: Long): A
def fromFloat(n: Float): A
def fromDouble(n: Double): A
def fromBigInt(n: BigInt): A
def fromBigDecimal(n: BigDecimal): A
def fromRational(n: Rational): A
def fromAlgebraic(n: Algebraic): A
def fromReal(n: Real): A
def fromType[B: ConvertableFrom](b: B): A
}
private[math] trait ConvertableToByte extends ConvertableTo[Byte] {
def fromByte(a: Byte): Byte = a
def fromShort(a: Short): Byte = a.toByte
def fromInt(a: Int): Byte = a.toByte
def fromLong(a: Long): Byte = a.toByte
def fromFloat(a: Float): Byte = a.toByte
def fromDouble(a: Double): Byte = a.toByte
def fromBigInt(a: BigInt): Byte = a.toByte
def fromBigDecimal(a: BigDecimal): Byte = a.toByte
def fromRational(a: Rational): Byte = a.toBigInt.toByte
def fromAlgebraic(a: Algebraic): Byte = a.toByte
def fromReal(a: Real): Byte = a.toByte
def fromType[B: ConvertableFrom](b: B): Byte = ConvertableFrom[B].toByte(b)
}
private[math] trait ConvertableToShort extends ConvertableTo[Short] {
def fromByte(a: Byte): Short = a.toShort
def fromShort(a: Short): Short = a
def fromInt(a: Int): Short = a.toShort
def fromLong(a: Long): Short = a.toShort
def fromFloat(a: Float): Short = a.toShort
def fromDouble(a: Double): Short = a.toShort
def fromBigInt(a: BigInt): Short = a.toShort
def fromBigDecimal(a: BigDecimal): Short = a.toShort
def fromRational(a: Rational): Short = a.toBigInt.toShort
def fromAlgebraic(a: Algebraic): Short = a.toShort
def fromReal(a: Real): Short = a.toShort
def fromType[B: ConvertableFrom](b: B): Short = ConvertableFrom[B].toShort(b)
}
private[math] trait ConvertableToInt extends ConvertableTo[Int] {
def fromByte(a: Byte): Int = a.toInt
def fromShort(a: Short): Int = a.toInt
def fromInt(a: Int): Int = a
def fromLong(a: Long): Int = a.toInt
def fromFloat(a: Float): Int = a.toInt
def fromDouble(a: Double): Int = a.toInt
def fromBigInt(a: BigInt): Int = a.toInt
def fromBigDecimal(a: BigDecimal): Int = a.toInt
def fromRational(a: Rational): Int = a.toBigInt.toInt
def fromAlgebraic(a: Algebraic): Int = a.toInt
def fromReal(a: Real): Int = a.toInt
def fromType[B: ConvertableFrom](b: B): Int = ConvertableFrom[B].toInt(b)
}
private[math] trait ConvertableToLong extends ConvertableTo[Long] {
def fromByte(a: Byte): Long = a.toLong
def fromShort(a: Short): Long = a.toLong
def fromInt(a: Int): Long = a.toLong
def fromLong(a: Long): Long = a
def fromFloat(a: Float): Long = a.toLong
def fromDouble(a: Double): Long = a.toLong
def fromBigInt(a: BigInt): Long = a.toLong
def fromBigDecimal(a: BigDecimal): Long = a.toLong
def fromRational(a: Rational): Long = a.toBigInt.toLong
def fromAlgebraic(a: Algebraic): Long = a.toLong
def fromReal(a: Real): Long = a.toLong
def fromType[B: ConvertableFrom](b: B): Long = ConvertableFrom[B].toLong(b)
}
private[math] trait ConvertableToFloat extends ConvertableTo[Float] {
def fromByte(a: Byte): Float = a.toFloat
def fromShort(a: Short): Float = a.toFloat
def fromInt(a: Int): Float = a.toFloat
def fromLong(a: Long): Float = a.toFloat
def fromFloat(a: Float): Float = a
def fromDouble(a: Double): Float = a.toFloat
def fromBigInt(a: BigInt): Float = a.toFloat
def fromBigDecimal(a: BigDecimal): Float = a.toFloat
def fromRational(a: Rational): Float = a.toBigDecimal.toFloat
def fromAlgebraic(a: Algebraic): Float = a.toFloat
def fromReal(a: Real): Float = a.toFloat
def fromType[B: ConvertableFrom](b: B): Float = ConvertableFrom[B].toFloat(b)
}
private[math] trait ConvertableToDouble extends ConvertableTo[Double] {
def fromByte(a: Byte): Double = a.toDouble
def fromShort(a: Short): Double = a.toDouble
def fromInt(a: Int): Double = a.toDouble
def fromLong(a: Long): Double = a.toDouble
def fromFloat(a: Float): Double = a.toDouble
def fromDouble(a: Double): Double = a
def fromBigInt(a: BigInt): Double = a.toDouble
def fromBigDecimal(a: BigDecimal): Double = a.toDouble
def fromRational(a: Rational): Double = a.toBigDecimal.toDouble
def fromAlgebraic(a: Algebraic): Double = a.toDouble
def fromReal(a: Real): Double = a.toDouble
def fromType[B: ConvertableFrom](b: B): Double = ConvertableFrom[B].toDouble(b)
}
private[math] trait ConvertableToBigInt extends ConvertableTo[BigInt] {
def fromByte(a: Byte): BigInt = BigInt(a)
def fromShort(a: Short): BigInt = BigInt(a)
def fromInt(a: Int): BigInt = BigInt(a)
def fromLong(a: Long): BigInt = BigInt(a)
def fromFloat(a: Float): BigInt = BigInt(a.toLong)
def fromDouble(a: Double): BigInt = BigInt(a.toLong)
def fromBigInt(a: BigInt): BigInt = a
def fromBigDecimal(a: BigDecimal): BigInt = a.toBigInt
def fromRational(a: Rational): BigInt = a.toBigInt
def fromAlgebraic(a: Algebraic): BigInt = a.toBigInt
def fromReal(a: Real): BigInt = fromRational(a.toRational)
def fromType[B: ConvertableFrom](b: B): BigInt = ConvertableFrom[B].toBigInt(b)
}
private[math] trait ConvertableToBigDecimal extends ConvertableTo[BigDecimal] {
def fromByte(a: Byte): BigDecimal = BigDecimal(a)
def fromShort(a: Short): BigDecimal = BigDecimal(a)
def fromInt(a: Int): BigDecimal = BigDecimal(a)
def fromLong(a: Long): BigDecimal = BigDecimal(a)
def fromFloat(a: Float): BigDecimal = BigDecimal(a)
def fromDouble(a: Double): BigDecimal = BigDecimal(a)
def fromBigInt(a: BigInt): BigDecimal = BigDecimal(a)
def fromBigDecimal(a: BigDecimal): BigDecimal = a
def fromRational(a: Rational): BigDecimal = a.toBigDecimal
def fromAlgebraic(a: Algebraic): BigDecimal = a.toBigDecimal
def fromReal(a: Real): BigDecimal = fromRational(a.toRational)
def fromType[B: ConvertableFrom](b: B): BigDecimal = ConvertableFrom[B].toBigDecimal(b)
}
private[math] trait ConvertableToRational extends ConvertableTo[Rational] {
def fromByte(a: Byte): Rational = Rational(a)
def fromShort(a: Short): Rational = Rational(a)
def fromInt(a: Int): Rational = Rational(a)
def fromLong(a: Long): Rational = Rational(a)
def fromFloat(a: Float): Rational = Rational(a)
def fromDouble(a: Double): Rational = Rational(a)
def fromBigInt(a: BigInt): Rational = Rational(a)
def fromBigDecimal(a: BigDecimal): Rational = Rational(a)
def fromRational(a: Rational): Rational = a
def fromAlgebraic(a: Algebraic): Rational = a.toRational
def fromReal(a: Real): Rational = a.toRational
def fromType[B: ConvertableFrom](b: B): Rational = ConvertableFrom[B].toRational(b)
}
private[math] trait ConvertableToAlgebraic extends ConvertableTo[Algebraic] {
def fromByte(a: Byte): Algebraic = Algebraic(a)
def fromShort(a: Short): Algebraic = Algebraic(a)
def fromInt(a: Int): Algebraic = Algebraic(a)
def fromLong(a: Long): Algebraic = Algebraic(a)
def fromFloat(a: Float): Algebraic = Algebraic(a)
def fromDouble(a: Double): Algebraic = Algebraic(a)
def fromBigInt(a: BigInt): Algebraic = Algebraic(a)
def fromBigDecimal(a: BigDecimal): Algebraic = Algebraic(a)
def fromRational(a: Rational) = Algebraic(a)
def fromAlgebraic(a: Algebraic): Algebraic = a
def fromReal(a: Real): Algebraic = Algebraic(a.toRational) //FIXME
def fromType[B: ConvertableFrom](b: B): Algebraic = ConvertableFrom[B].toAlgebraic(b)
}
private[math] trait ConvertableToComplex[A] extends ConvertableTo[Complex[A]] {
implicit def algebra: Integral[A]
def fromByte(a: Byte): Complex[A] = Complex(algebra.fromByte(a), algebra.zero)
def fromShort(a: Short): Complex[A] = Complex(algebra.fromShort(a), algebra.zero)
def fromInt(a: Int): Complex[A] = Complex(algebra.fromInt(a), algebra.zero)
def fromLong(a: Long): Complex[A] = Complex(algebra.fromLong(a), algebra.zero)
def fromFloat(a: Float): Complex[A] = Complex(algebra.fromFloat(a), algebra.zero)
def fromDouble(a: Double): Complex[A] = Complex(algebra.fromDouble(a), algebra.zero)
def fromBigInt(a: BigInt): Complex[A] = Complex(algebra.fromBigInt(a), algebra.zero)
def fromBigDecimal(a: BigDecimal): Complex[A] = Complex(algebra.fromBigDecimal(a), algebra.zero)
def fromRational(a: Rational): Complex[A] = Complex(algebra.fromRational(a), algebra.zero)
def fromAlgebraic(a: Algebraic): Complex[A] = Complex(algebra.fromAlgebraic(a), algebra.zero)
def fromReal(a: Real): Complex[A] = Complex(algebra.fromReal(a), algebra.zero)
def fromType[B: ConvertableFrom](b: B): Complex[A] = Complex(algebra.fromType(b), algebra.zero)
}
private[math] trait ConvertableToSafeLong extends ConvertableTo[SafeLong] {
def fromByte(a: Byte): SafeLong = SafeLong(a)
def fromShort(a: Short): SafeLong = SafeLong(a)
def fromInt(a: Int): SafeLong = SafeLong(a)
def fromLong(a: Long): SafeLong = SafeLong(a)
def fromFloat(a: Float): SafeLong = SafeLong(a.toLong)
def fromDouble(a: Double): SafeLong = SafeLong(a.toLong)
def fromBigInt(a: BigInt): SafeLong = SafeLong(a)
def fromBigDecimal(a: BigDecimal): SafeLong = SafeLong(a.toBigInt)
def fromRational(a: Rational): SafeLong = if (a.isValidInt) SafeLong(a.toInt) else SafeLong(a.toBigInt)
def fromAlgebraic(a: Algebraic): SafeLong = if (a.isValidInt) SafeLong(a.toInt) else SafeLong(a.toBigInt)
def fromReal(a: Real): SafeLong = if (a.isValidInt) SafeLong(a.toInt) else fromRational(a.toRational)
def fromType[B: ConvertableFrom](b: B): SafeLong = SafeLong(ConvertableFrom[B].toBigInt(b))
}
private[math] trait ConvertableToNumber extends ConvertableTo[Number] {
def fromByte(a: Byte): Number = Number(a)
def fromShort(a: Short): Number = Number(a)
def fromInt(a: Int): Number = Number(a)
def fromLong(a: Long): Number = Number(a)
def fromFloat(a: Float): Number = Number(a)
def fromDouble(a: Double): Number = Number(a)
def fromBigInt(a: BigInt): Number = Number(a)
def fromBigDecimal(a: BigDecimal): Number = Number(a)
def fromRational(a: Rational): Number = Number(a)
def fromAlgebraic(a: Algebraic): Number = Number(a.toRational)
def fromReal(a: Real): Number = Number(a.toRational)
def fromType[B: ConvertableFrom](b: B): Number = Number(ConvertableFrom[B].toDouble(b))
}
private[math] trait ConvertableToNatural extends ConvertableTo[Natural] {
def fromByte(a: Byte): Natural = Natural(a)
def fromShort(a: Short): Natural = Natural(a)
def fromInt(a: Int): Natural = Natural(a)
def fromLong(a: Long): Natural = Natural(a)
def fromFloat(a: Float): Natural = fromRational(Rational(a))
def fromDouble(a: Double): Natural = fromRational(Rational(a))
def fromBigInt(a: BigInt): Natural = Natural(a)
def fromBigDecimal(a: BigDecimal): Natural = Natural(a.toBigInt)
def fromRational(a: Rational): Natural = if (a.isValidInt) Natural(a.toInt) else Natural(a.toBigInt)
def fromAlgebraic(a: Algebraic): Natural = if (a.isValidInt) Natural(a.toInt) else Natural(a.toBigInt)
def fromReal(a: Real): Natural = if (a.isValidInt) Natural(a.toInt) else fromRational(a.toRational)
def fromType[B: ConvertableFrom](b: B): Natural = Natural(ConvertableFrom[B].toBigInt(b))
}
object ConvertableTo {
@inline final def apply[A](implicit ev: ConvertableTo[A]) = ev
implicit final val ConvertableToByte = new ConvertableToByte {}
implicit final val ConvertableToShort = new ConvertableToShort {}
implicit final val ConvertableToInt = new ConvertableToInt {}
implicit final val ConvertableToLong = new ConvertableToLong {}
implicit final val ConvertableToBigInt = new ConvertableToBigInt {}
implicit final val ConvertableToFloat = new ConvertableToFloat {}
implicit final val ConvertableToDouble = new ConvertableToDouble {}
implicit final val ConvertableToBigDecimal = new ConvertableToBigDecimal {}
implicit final val ConvertableToRational = new ConvertableToRational {}
implicit final val ConvertableToAlgebraic = new ConvertableToAlgebraic {}
implicit final val ConvertableToSafeLong = new ConvertableToSafeLong {}
implicit final val ConvertableToNumber = new ConvertableToNumber {}
implicit final val ConvertableToNatural = new ConvertableToNatural {}
implicit def convertableToComplex[A: Integral] =
new ConvertableToComplex[A] { val algebra = Integral[A] }
}
trait ConvertableFrom[@spec A] {
def toByte(a: A): Byte
def toShort(a: A): Short
def toInt(a: A): Int
def toLong(a: A): Long
def toFloat(a: A): Float
def toDouble(a: A): Double
def toBigInt(a: A): BigInt
def toBigDecimal(a: A): BigDecimal
def toRational(a: A): Rational
def toAlgebraic(a: A): Algebraic
def toReal(a: A): Real
def toNumber(a: A): Number
def toType[B: ConvertableTo](a: A): B
def toString(a: A): String
}
private[math] trait ConvertableFromByte extends ConvertableFrom[Byte] {
def toByte(a: Byte): Byte = a
def toShort(a: Byte): Short = a.toShort
def toInt(a: Byte): Int = a.toInt
def toLong(a: Byte): Long = a.toLong
def toFloat(a: Byte): Float = a.toFloat
def toDouble(a: Byte): Double = a.toDouble
def toBigInt(a: Byte): BigInt = BigInt(a)
def toBigDecimal(a: Byte): BigDecimal = BigDecimal(a)
def toRational(a: Byte): Rational = Rational(a)
def toAlgebraic(a: Byte): Algebraic = Algebraic(a)
def toReal(a: Byte): Real = Real(a)
def toNumber(a: Byte): Number = Number(a)
def toType[B: ConvertableTo](a: Byte): B = ConvertableTo[B].fromByte(a)
def toString(a: Byte): String = a.toString
}
private[math] trait ConvertableFromShort extends ConvertableFrom[Short] {
def toByte(a: Short): Byte = a.toByte
def toShort(a: Short): Short = a
def toInt(a: Short): Int = a.toInt
def toLong(a: Short): Long = a.toLong
def toFloat(a: Short): Float = a.toFloat
def toDouble(a: Short): Double = a.toDouble
def toBigInt(a: Short): BigInt = BigInt(a)
def toBigDecimal(a: Short): BigDecimal = BigDecimal(a)
def toRational(a: Short): Rational = Rational(a)
def toAlgebraic(a: Short): Algebraic = Algebraic(a)
def toReal(a: Short): Real = Real(a)
def toNumber(a: Short): Number = Number(a)
def toType[B: ConvertableTo](a: Short): B = ConvertableTo[B].fromShort(a)
def toString(a: Short): String = a.toString
}
private[math] trait ConvertableFromInt extends ConvertableFrom[Int] {
def toByte(a: Int): Byte = a.toByte
def toShort(a: Int): Short = a.toShort
def toInt(a: Int): Int = a
def toLong(a: Int): Long = a.toLong
def toFloat(a: Int): Float = a.toFloat
def toDouble(a: Int): Double = a.toDouble
def toBigInt(a: Int): BigInt = BigInt(a)
def toBigDecimal(a: Int): BigDecimal = BigDecimal(a)
def toRational(a: Int): Rational = Rational(a)
def toAlgebraic(a: Int): Algebraic = Algebraic(a)
def toReal(a: Int): Real = Real(a)
def toNumber(a: Int): Number = Number(a)
def toType[B: ConvertableTo](a: Int): B = ConvertableTo[B].fromInt(a)
def toString(a: Int): String = a.toString
}
private[math] trait ConvertableFromLong extends ConvertableFrom[Long] {
def toByte(a: Long): Byte = a.toByte
def toShort(a: Long): Short = a.toShort
def toInt(a: Long): Int = a.toInt
def toLong(a: Long): Long = a
def toFloat(a: Long): Float = a.toFloat
def toDouble(a: Long): Double = a.toDouble
def toBigInt(a: Long): BigInt = BigInt(a)
def toBigDecimal(a: Long): BigDecimal = BigDecimal(a)
def toRational(a: Long): Rational = Rational(a)
def toAlgebraic(a: Long): Algebraic = Algebraic(a)
def toReal(a: Long): Real = Real(a)
def toNumber(a: Long): Number = Number(a)
def toType[B: ConvertableTo](a: Long): B = ConvertableTo[B].fromLong(a)
def toString(a: Long): String = a.toString
}
private[math] trait ConvertableFromFloat extends ConvertableFrom[Float] {
def toByte(a: Float): Byte = a.toByte
def toShort(a: Float): Short = a.toShort
def toInt(a: Float): Int = a.toInt
def toLong(a: Float): Long = a.toLong
def toFloat(a: Float): Float = a
def toDouble(a: Float): Double = a.toDouble
def toBigInt(a: Float): BigInt = BigInt(a.toLong)
def toBigDecimal(a: Float): BigDecimal = BigDecimal(a)
def toRational(a: Float): Rational = Rational(a)
def toAlgebraic(a: Float): Algebraic = Algebraic(a)
def toReal(a: Float): Real = Real(a)
def toNumber(a: Float): Number = Number(a)
def toType[B: ConvertableTo](a: Float): B = ConvertableTo[B].fromFloat(a)
def toString(a: Float): String = a.toString
}
private[math] trait ConvertableFromDouble extends ConvertableFrom[Double] {
def toByte(a: Double): Byte = a.toByte
def toShort(a: Double): Short = a.toShort
def toInt(a: Double): Int = a.toInt
def toLong(a: Double): Long = a.toLong
def toFloat(a: Double): Float = a.toFloat
def toDouble(a: Double): Double = a
def toBigInt(a: Double): BigInt = BigInt(a.toLong)
def toBigDecimal(a: Double): BigDecimal = BigDecimal(a)
def toRational(a: Double): Rational = Rational(a)
def toAlgebraic(a: Double): Algebraic = Algebraic(a)
def toReal(a: Double): Real = Real(a)
def toNumber(a: Double): Number = Number(a)
def toType[B: ConvertableTo](a: Double): B = ConvertableTo[B].fromDouble(a)
def toString(a: Double): String = a.toString
}
private[math] trait ConvertableFromBigInt extends ConvertableFrom[BigInt] {
def toByte(a: BigInt): Byte = a.toByte
def toShort(a: BigInt): Short = a.toShort
def toInt(a: BigInt): Int = a.toInt
def toLong(a: BigInt): Long = a.toLong
def toFloat(a: BigInt): Float = a.toFloat
def toDouble(a: BigInt): Double = a.toDouble
def toBigInt(a: BigInt): BigInt = a
def toBigDecimal(a: BigInt): BigDecimal = BigDecimal(a)
def toRational(a: BigInt): Rational = Rational(a)
def toAlgebraic(a: BigInt): Algebraic = Algebraic(a)
def toReal(a: BigInt): Real = Real(a)
def toNumber(a: BigInt): Number = Number(a)
def toType[B: ConvertableTo](a: BigInt): B = ConvertableTo[B].fromBigInt(a)
def toString(a: BigInt): String = a.toString
}
private[math] trait ConvertableFromBigDecimal extends ConvertableFrom[BigDecimal] {
def toByte(a: BigDecimal): Byte = a.toByte
def toShort(a: BigDecimal): Short = a.toShort
def toInt(a: BigDecimal): Int = a.toInt
def toLong(a: BigDecimal): Long = a.toLong
def toFloat(a: BigDecimal): Float = a.toFloat
def toDouble(a: BigDecimal): Double = a.toDouble
def toBigInt(a: BigDecimal): BigInt = a.toBigInt
def toBigDecimal(a: BigDecimal): BigDecimal = a
def toRational(a: BigDecimal): Rational = Rational(a)
def toAlgebraic(a: BigDecimal): Algebraic = Algebraic(a)
def toReal(a: BigDecimal): Real = Real(a)
def toNumber(a: BigDecimal): Number = Number(a)
def toType[B: ConvertableTo](a: BigDecimal): B = ConvertableTo[B].fromBigDecimal(a)
def toString(a: BigDecimal): String = a.toString
}
private[math] trait ConvertableFromRational extends ConvertableFrom[Rational] {
def toByte(a: Rational): Byte = a.toBigInt.toByte
def toShort(a: Rational): Short = a.toBigInt.toShort
def toInt(a: Rational): Int = a.toBigInt.toInt
def toLong(a: Rational): Long = a.toBigInt.toLong
def toFloat(a: Rational): Float = a.toBigDecimal.toFloat
def toDouble(a: Rational): Double = a.toBigDecimal.toDouble
def toBigInt(a: Rational): BigInt = a.toBigInt
def toBigDecimal(a: Rational): BigDecimal = a.toBigDecimal
def toRational(a: Rational): Rational = a
def toAlgebraic(a: Rational): Algebraic = Algebraic(a)
def toReal(a: Rational): Real = Real(a)
def toNumber(a: Rational): Number = Number(a)
def toType[B: ConvertableTo](a: Rational): B = ConvertableTo[B].fromRational(a)
def toString(a: Rational): String = a.toString
}
private[math] trait ConvertableFromAlgebraic extends ConvertableFrom[Algebraic] {
def toByte(a: Algebraic): Byte = a.toInt.toByte
def toShort(a: Algebraic): Short = a.toInt.toShort
def toInt(a: Algebraic): Int = a.toInt
def toLong(a: Algebraic): Long = a.toLong
def toFloat(a: Algebraic): Float = a.toDouble.toFloat
def toDouble(a: Algebraic): Double = a.toDouble
def toBigInt(a: Algebraic): BigInt = a.toBigInt
// TODO: Figure out how to deal with variable approximability.
def toBigDecimal(a: Algebraic): BigDecimal = a.toBigDecimal(java.math.MathContext.DECIMAL128)
def toRational(a: Algebraic): Rational = a.toRational(ApproximationContext(Rational(1L, 100000000000000000L)))
def toAlgebraic(a: Algebraic): Algebraic = a
def toReal(a: Algebraic): Real = Real(a.toRational) //FIXME
def toNumber(a: Algebraic): Number = Number(a.toRational)
def toType[B: ConvertableTo](a: Algebraic): B = ConvertableTo[B].fromAlgebraic(a)
def toString(a: Algebraic): String = a.toString
}
private[math] trait ConvertableFromComplex[A] extends ConvertableFrom[Complex[A]] {
def algebra: Integral[A]
def toByte(a: Complex[A]): Byte = algebra.toByte(a.real)
def toShort(a: Complex[A]): Short = algebra.toShort(a.real)
def toInt(a: Complex[A]): Int = algebra.toInt(a.real)
def toLong(a: Complex[A]): Long = algebra.toLong(a.real)
def toFloat(a: Complex[A]): Float = algebra.toFloat(a.real)
def toDouble(a: Complex[A]): Double = algebra.toDouble(a.real)
def toBigInt(a: Complex[A]): BigInt = algebra.toBigInt(a.real)
def toBigDecimal(a: Complex[A]): BigDecimal = algebra.toBigDecimal(a.real)
def toRational(a: Complex[A]): Rational = algebra.toRational(a.real)
def toAlgebraic(a: Complex[A]): Algebraic = algebra.toAlgebraic(a.real)
def toReal(a: Complex[A]): Real = algebra.toReal(a.real)
def toNumber(a: Complex[A]): Number = algebra.toNumber(a.real)
def toType[B: ConvertableTo](a: Complex[A]): B = sys.error("fixme")
def toString(a: Complex[A]): String = a.toString
}
private[math] trait ConvertableFromSafeLong extends ConvertableFrom[SafeLong] {
def toByte(a: SafeLong): Byte = a.toBigInt.toByte
def toShort(a: SafeLong): Short = a.toBigInt.toShort
def toInt(a: SafeLong): Int = a.toBigInt.toInt
def toLong(a: SafeLong): Long = a.toBigInt.toLong
def toFloat(a: SafeLong): Float = a.toBigInt.toFloat
def toDouble(a: SafeLong): Double = a.toBigInt.toDouble
def toBigInt(a: SafeLong): BigInt = a.toBigInt
def toBigDecimal(a: SafeLong): BigDecimal = BigDecimal(a.toBigInt)
def toRational(a: SafeLong): Rational = Rational(a.toBigInt)
def toAlgebraic(a: SafeLong): Algebraic = Algebraic(a)
def toReal(a: SafeLong): Real = Real(a)
def toNumber(a: SafeLong): Number = Number(a)
def toType[B: ConvertableTo](a: SafeLong): B = ConvertableTo[B].fromBigInt(a.toBigInt)
def toString(a: SafeLong): String = a.toString
}
private[math] trait ConvertableFromNumber extends ConvertableFrom[Number] {
def toByte(a: Number): Byte = a.toBigInt.toByte
def toShort(a: Number): Short = a.toBigInt.toShort
def toInt(a: Number): Int = a.toBigInt.toInt
def toLong(a: Number): Long = a.toBigInt.toLong
def toFloat(a: Number): Float = a.toBigInt.toFloat
def toDouble(a: Number): Double = a.toBigInt.toDouble
def toBigInt(a: Number): BigInt = a.toBigInt
def toBigDecimal(a: Number): BigDecimal = BigDecimal(a.toBigInt)
def toRational(a: Number): Rational = Rational(a.toBigInt)
def toAlgebraic(a: Number): Algebraic = Algebraic(a.toRational)
def toReal(a: Number): Real = Real(a.toRational)
def toNumber(a: Number): Number = a
def toType[B: ConvertableTo](a: Number): B = ConvertableTo[B].fromBigInt(a.toBigInt)
def toString(a: Number): String = a.toString
}
private[math] trait ConvertableFromNatural extends ConvertableFrom[Natural] {
def toByte(a: Natural): Byte = a.toBigInt.toByte
def toShort(a: Natural): Short = a.toBigInt.toShort
def toInt(a: Natural): Int = a.toBigInt.toInt
def toLong(a: Natural): Long = a.toBigInt.toLong
def toFloat(a: Natural): Float = a.toBigInt.toFloat
def toDouble(a: Natural): Double = a.toBigInt.toDouble
def toBigInt(a: Natural): BigInt = a.toBigInt
def toBigDecimal(a: Natural): BigDecimal = BigDecimal(a.toBigInt)
def toRational(a: Natural): Rational = Rational(a.toBigInt)
def toAlgebraic(a: Natural): Algebraic = Algebraic(a.toBigInt)
def toReal(a: Natural): Real = Real(a.toBigInt)
def toNumber(a: Natural): Number = Number(a.toBigInt)
def toType[B: ConvertableTo](a: Natural): B = ConvertableTo[B].fromBigInt(a.toBigInt)
def toString(a: Natural): String = a.toString
}
object ConvertableFrom {
@inline final def apply[A](implicit ev: ConvertableFrom[A]) = ev
implicit final val ConvertableFromByte = new ConvertableFromByte {}
implicit final val ConvertableFromShort = new ConvertableFromShort {}
implicit final val ConvertableFromInt = new ConvertableFromInt {}
implicit final val ConvertableFromLong = new ConvertableFromLong {}
implicit final val ConvertableFromFloat = new ConvertableFromFloat {}
implicit final val ConvertableFromDouble = new ConvertableFromDouble {}
implicit final val ConvertableFromBigInt = new ConvertableFromBigInt {}
implicit final val ConvertableFromBigDecimal = new ConvertableFromBigDecimal {}
implicit final val ConvertableFromRational = new ConvertableFromRational {}
implicit final val ConvertableFromAlgebraic = new ConvertableFromAlgebraic {}
implicit final val ConvertableFromSafeLong = new ConvertableFromSafeLong {}
implicit final val ConvertableFromNumber = new ConvertableFromNumber {}
implicit final val ConvertableFromNatural = new ConvertableFromNatural {}
implicit def convertableFromComplex[A: Integral] =
new ConvertableFromComplex[A] { val algebra = Integral[A] }
}
| lrytz/spire | core/src/main/scala/spire/math/Convertable.scala | Scala | mit | 25,313 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package expr
import _root_.org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScPathElement, ScStableCodeReference}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
/**
* @author Alexander Podkhalyuzin
* Date: 14.03.2008
*/
trait ScSuperReference extends ScExpression with ScPathElement {
/**
* @return is reference in decompiled file from Self type class
*/
def isHardCoded: Boolean
//type of M for super[M]
def staticSuper: Option[ScType]
//name of super type as written in code
def staticSuperName: String
//for A.super or simply super
def drvTemplate: Option[ScTemplateDefinition]
def reference: Option[ScStableCodeReference] = findChild[ScStableCodeReference]
override protected def acceptScala(visitor: ScalaElementVisitor): Unit = {
visitor.visitSuperReference(this)
}
} | JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScSuperReference.scala | Scala | apache-2.0 | 995 |
/*
* Copyright (C) 2017 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.tikitakka.common.model.marathon
import play.api.libs.json.Json
import play.api.libs.json.Reads
import play.api.libs.json.Writes
case class MarathonDeleteInfo(version: String, deploymentId: String)
object MarathonDeleteInfo {
implicit val reads:Reads[MarathonDeleteInfo] = Json.reads[MarathonDeleteInfo]
implicit val writes:Writes[MarathonDeleteInfo] = Json.writes[MarathonDeleteInfo]
}
| compae/tiki-takka | common/src/main/scala/com/stratio/tikitakka/common/model/marathon/MarathonDeleteInfo.scala | Scala | apache-2.0 | 1,033 |
package org.apache.spark.examples.h2o
import java.io.File
import hex.FrameSplitter
import hex.deeplearning.DeepLearning
import hex.deeplearning.DeepLearningParameters
import hex.deeplearning.DeepLearningParameters.Activation
import hex.tree.gbm.GBM
import hex.tree.gbm.GBMModel.GBMParameters
import org.apache.spark.examples.h2o.DemoUtils.residualPlotRCode
import org.apache.spark.h2o.{Frame, H2OContext, H2OFrame}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext, SparkFiles}
import water.Key
import water.app.SparkContextSupport
/** Demo for meetup presented at 12/17/2014 */
object AirlinesWithWeatherDemo2 extends SparkContextSupport {
def main(args: Array[String]): Unit = {
// Configure this application
val conf: SparkConf = configure("Sparkling Water Meetup: Use Airlines and Weather Data for delay prediction")
// Create SparkContext to execute application on Spark cluster
val sc = new SparkContext(conf)
val h2oContext = new H2OContext(sc).start()
import h2oContext._
// Setup environment
addFiles(sc,
absPath("examples/smalldata/Chicago_Ohare_International_Airport.csv"),
absPath("examples/smalldata/year2005.csv.gz"))
//val weatherDataFile = "examples/smalldata/Chicago_Ohare_International_Airport.csv"
val wrawdata = sc.textFile(SparkFiles.get("Chicago_Ohare_International_Airport.csv"),3).cache()
val weatherTable = wrawdata.map(_.split(",")).map(row => WeatherParse(row)).filter(!_.isWrongRow())
//
// Load H2O from CSV file (i.e., access directly H2O cloud)
// Use super-fast advanced H2O CSV parser !!!
val airlinesData = new H2OFrame(new File(SparkFiles.get("year2005.csv.gz")))
val airlinesTable : RDD[Airlines] = asRDD[Airlines](airlinesData)
// Select flights only to ORD
val flightsToORD = airlinesTable.filter(f => f.Dest==Some("ORD"))
flightsToORD.count
println(s"\\nFlights to ORD: ${flightsToORD.count}\\n")
implicit val sqlContext = new SQLContext(sc)
import sqlContext.implicits._ // import implicit conversions
flightsToORD.toDF.registerTempTable("FlightsToORD")
weatherTable.toDF.registerTempTable("WeatherORD")
//
// -- Join both tables and select interesting columns
//
val joinedTable = sqlContext.sql(
"""SELECT
|f.Year,f.Month,f.DayofMonth,
|f.CRSDepTime,f.CRSArrTime,f.CRSElapsedTime,
|f.UniqueCarrier,f.FlightNum,f.TailNum,
|f.Origin,f.Distance,
|w.TmaxF,w.TminF,w.TmeanF,w.PrcpIn,w.SnowIn,w.CDD,w.HDD,w.GDD,
|f.ArrDelay
|FROM FlightsToORD f
|JOIN WeatherORD w
|ON f.Year=w.Year AND f.Month=w.Month AND f.DayofMonth=w.Day""".stripMargin)
println(s"\\nResult of query: ${joinedTable.count}\\n")
//
// Split data into 3 tables - train/validation/test
//
// Instead of using RDD API we will directly split H2O Frame
val joinedH2OFrame:H2OFrame = joinedTable // Invoke implicit transformation
// Transform date related columns to enums
for( i <- 0 to 2) joinedH2OFrame.replace(i, joinedH2OFrame.vec(i).toEnum)
//
// Use low-level task to split the frame
val sf = new FrameSplitter(joinedH2OFrame, Array(.7, .2), Array("train", "valid","test").map(Key.make[Frame](_)), null)
water.H2O.submitTask(sf)
val splits = sf.getResult
val trainTable = splits(0)
val validTable = splits(1)
val testTable = splits(2)
//
// -- Run DeepLearning
//
val dlParams = new DeepLearningParameters()
dlParams._train = trainTable
dlParams._response_column = 'ArrDelay
dlParams._valid = validTable
dlParams._epochs = 5
dlParams._activation = Activation.RectifierWithDropout
dlParams._hidden = Array[Int](100, 100)
dlParams._reproducible = true
dlParams._force_load_balance = false
val dl = new DeepLearning(dlParams)
val dlModel = dl.trainModel.get
val dlPredictTable = dlModel.score(testTable)('predict)
val predictionsFromDlModel = asDataFrame(dlPredictTable).collect
.map(row => if (row.isNullAt(0)) Double.NaN else row(0))
println(predictionsFromDlModel.length)
println(predictionsFromDlModel.mkString("\\n===> Model predictions: ", ", ", ", ...\\n"))
printf( residualPlotRCode(dlPredictTable, 'predict, testTable, 'ArrDelay) )
// GBM Model
val gbmParams = new GBMParameters()
gbmParams._train = trainTable
gbmParams._response_column = 'ArrDelay
gbmParams._valid = validTable
gbmParams._ntrees = 100
//gbmParams._learn_rate = 0.01f
val gbm = new GBM(gbmParams)
val gbmModel = gbm.trainModel.get
// Print R code for residual plot
val gbmPredictTable = gbmModel.score(testTable)('predict)
printf( residualPlotRCode(gbmPredictTable, 'predict, testTable, 'ArrDelay) )
sc.stop()
}
}
| tromika/sparkling-water | examples/src/main/scala/org/apache/spark/examples/h2o/AirlinesWithWeatherDemo2.scala | Scala | apache-2.0 | 4,909 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.