| code (string, 5-1M chars) | repo_name (string, 5-109 chars) | path (string, 6-208 chars) | language (1 class) | license (15 classes) | size (int64, 5-1M) |
|---|---|---|---|---|---|
package commons.repositories.mappings.columnOptions
import slick.ast.ColumnOption
case object Unique extends ColumnOption[Nothing]
|
Dasiu/play-framework-test-project
|
app/commons/repositories/mappings/columnOptions/Unique.scala
|
Scala
|
mit
| 133
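The `Unique` marker above follows the same pattern as Slick's built-in `ColumnOption[Nothing]` objects (such as `O.PrimaryKey`), so it can be passed straight to a `column` definition. A minimal sketch, assuming a hypothetical `Users` table and the H2 profile (neither is part of this repository); a bare marker like this carries no DDL behaviour on its own and would have to be interpreted by a custom profile:

import slick.jdbc.H2Profile.api._
import commons.repositories.mappings.columnOptions.Unique

// Hypothetical table definition illustrating where the custom marker would sit.
class Users(tag: Tag) extends Table[(Long, String)](tag, "users") {
  def id    = column[Long]("id", O.PrimaryKey, O.AutoInc)
  def email = column[String]("email", Unique) // custom ColumnOption[Nothing] marker
  def * = (id, email)
}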
|
package com.github.sorhus.webalytics.akka.document
import akka.actor.{ActorRef, Props}
import akka.persistence._
import com.github.sorhus.webalytics.akka.event._
import org.slf4j.LoggerFactory
class DocumentIdActor(audienceActor: ActorRef, domainActor: ActorRef) extends PersistentActor {
val log = LoggerFactory.getLogger(getClass)
var state = DocumentIdState()
override def persistenceId: String = "document-id-actor"
def post(event: PostEvent): Unit = {
audienceActor ! event
domainActor ! PostMetaEvent(event.bucket, event.element)
}
def notifyAndPost(event: PostEvent): Unit = {
sender() ! Ack
post(event)
}
override def receiveCommand: Receive = {
case e: PostCommand =>
log.debug("received postevent")
state = state.update(e.elementId)
val documentId = state.get(e.elementId)
val postEvent = PostEvent(e.bucket, e.elementId, documentId, e.element)
if(e.persist) {
persistAsync(postEvent)(notifyAndPost)
} else {
notifyAndPost(postEvent)
}
case SaveSnapshot =>
log.info("saving snapshot")
saveSnapshot(state)
case Shutdown =>
context.stop(self)
sender() ! Ack
case SaveSnapshotSuccess(metadata) =>
log.info(s"snapshot saved. seqNum:${metadata.sequenceNr}, timeStamp:${metadata.timestamp}")
audienceActor ! SaveSnapshot
domainActor ! SaveSnapshot
// TODO get confirmation first!?
deleteMessages(metadata.sequenceNr)
case SaveSnapshotFailure(_, reason) =>
log.info("failed to save snapshot: {}", reason)
case DeleteMessagesSuccess(toSequenceNr) =>
log.info(s"message deleted. sequNum {}", toSequenceNr)
case DeleteMessagesFailure(reason, toSequenceNr) =>
log.info(s"failed to delete message to sequenceNr: {} {}", toSequenceNr, reason)
case x =>
log.info(s"doc recieved {}", x)
}
override def receiveRecover: Receive = {
case e: PostEvent =>
log.info("received recover postevent")
state = state.update(e.elementId, e.documentId)
post(e)
case SnapshotOffer(_, snapshot: DocumentIdState) =>
log.info("restoring state from snapshot")
state = snapshot
case x =>
log.info("received recover {}", x)
}
}
object DocumentIdActor {
def props(audienceActor: ActorRef, queryActor: ActorRef): Props =
Props(new DocumentIdActor(audienceActor, queryActor))
}
|
sorhus/webalytics
|
service/src/main/scala/com/github/sorhus/webalytics/akka/document/DocumentIdActor.scala
|
Scala
|
gpl-3.0
| 2,434
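A minimal wiring sketch for the actor above. It assumes an Akka persistence journal is configured in `application.conf`, uses `system.deadLetters` as stand-ins for the audience and domain actors (the real application creates these elsewhere), and assumes the `SaveSnapshot` and `Shutdown` case objects live in the wildcard-imported event package, as the code above suggests:

import akka.actor.ActorSystem
import com.github.sorhus.webalytics.akka.document.DocumentIdActor
import com.github.sorhus.webalytics.akka.event.{SaveSnapshot, Shutdown}

object DocumentIdActorExample extends App {
  val system = ActorSystem("webalytics")
  // Placeholder ActorRefs; the real audience and domain actors are created elsewhere.
  val audienceActor = system.deadLetters
  val domainActor = system.deadLetters

  val docIdActor = system.actorOf(DocumentIdActor.props(audienceActor, domainActor), "document-id-actor")
  docIdActor ! SaveSnapshot // triggers saveSnapshot(state) in receiveCommand
  docIdActor ! Shutdown     // stops the actor and replies with Ack
}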
|
package scsvlog
import java.awt
import scala.language.{reflectiveCalls,existentials}
import javax.swing.border.{TitledBorder,LineBorder,EtchedBorder}
import java.util.concurrent.atomic
import scl.SwUtil
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}, org.eclipse.{jetty}
object ScsvLog extends swing.SimpleSwingApplication with scl.GetText {
val app = this
def top = new swing.MainFrame { title = Config.title
val top = this
val ini = Config.ini
var channel:scl.Channel = null
val channelOpened = new atomic.AtomicBoolean(false)
var portOn:swing.CheckBox = null
var portPanel:swing.BoxPanel = null
var serialPanel:swing.BoxPanel = null
var configPanel:swing.BoxPanel = null
var valuesPanel:swing.BoxPanel = null
var ipText:swing.TextField = null
var connectButton:swing.Button = null
var disconnectButton:swing.Button = null
var statusText:swing.Label = null
var chart:scl.Chart = null
var xTypeCombo:swing.ComboBox[String] = null
var xDateFormatText:swing.TextField = null
var csvLoadButton:swing.Button = null
var csvSaveButton:swing.Button = null
val channelsCount = 20
val channelsInRow = 10
var serverPortCombo:swing.ComboBox[Int] = null
var server:scl.ServerJetty = null
val serverData = new java.util.concurrent.atomic.AtomicReferenceArray[Double](channelsCount){
for (i <- 0 until channelsCount) set(i, Double.NaN)
}
object values {
val labels = new collection.mutable.ArrayBuffer[swing.Label]
val width = 10
def setText(i:Int, v:Double) = if (i < channelsCount){ labels(i).text = ("%" + width + "s").format( if (v.isNaN) "-" else v.toString ) }
def setAllText(v:Seq[Double]) = swing.Swing.onEDT( { for (i <- 0 until v.length) setText(i,v(i)) } )
}
object colors {
val available = Seq("red","magenta","orange","pink","green","blue","cyan","yellow","gray","lightgray","darkgray","black","saddlebrown","tan","brown","darkkhaki","beige","violet","darksalmon","indianred")
val current = new collection.mutable.ArrayBuffer[String]{
this.++=(available)
val cl = ini("colors").split(",")
for (i <- 0 until cl.length if (cl(i).nonEmpty)) this.update(i,cl(i))
}
def set(i:Int, cs:String):awt.Color = {
current(i) = cs
val c = SwUtil.svgColor(cs)
values.labels(i).foreground = c
c
}
def get(i:Int) = if (i < current.length) current(i) else if (i < available.length) available(i) else "black";
val chooser = new swing.Dialog(top){
val chooser = this
modal = true
visible = false
setLocationRelativeTo(top)
val picker = new swing.ColorChooser
var selectedColor = "red"
var callback: ( String => Unit ) = null
contents = new swing.BoxPanel(swing.Orientation.Vertical){
contents ++= List(
picker
,new swing.BoxPanel(swing.Orientation.Horizontal){
contents ++= List(
new swing.ComboBox(List("") ::: SwUtil.svgColors.keys.toList.sorted){ maximumSize = preferredSize
listenTo(selection)
reactions += { case swing.event.SelectionChanged(i) => if (selection.item != "") picker.color = SwUtil.svgColor(selection.item) }
tooltip = tr("Standard color")
}
,new swing.Label(" ")
,new swing.Button(new swing.Action("Ok"){
def apply = { selectedColor = SwUtil.svgName(picker.color); chooser.close; callback(selectedColor) }
})
,new swing.Button(new swing.Action("Cancel"){ def apply = chooser.close })
)
}
)
}
def openColor(c:String, cb:String => Unit ){
selectedColor = c
picker.color = SwUtil.svgColor(c)
callback = cb
open
}
}
}
// CSV operations
object csv {
val lines = new collection.mutable.ArrayBuffer[String]
val dateFormatter = new java.text.SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss")//"yyyy-MM-dd'T'HH:mm:ss.SSSZ")
val saveDateFormatter = new java.text.SimpleDateFormat("yyMMdd_HHmm")
val separator = ";"
// save CSV dialog
val dialog = new swing.FileChooser{
fileFilter = new javax.swing.filechooser.FileNameExtensionFilter("CSV file","csv")
selectedFile = new java.io.File(ini("csvFile", "data.csv"))
}
def save = {
dialog.title = tr("Save CSV...")
dialog.selectedFile = new java.io.File(dialog.selectedFile.getParent + java.io.File.separator + saveDateFormatter.format(new java.util.Date) + ".csv")
if (lines.nonEmpty && (dialog.showSaveDialog(null) == swing.FileChooser.Result.Approve)){
if ( !dialog.selectedFile.exists || (dialog.selectedFile.exists &&
(swing.Dialog.showConfirmation(null, tr("Replace ?"), tr("Confirm replace"), swing.Dialog.Options.YesNo, swing.Dialog.Message.Question) == swing.Dialog.Result.Yes))){
try {
java.nio.file.Files.write(dialog.selectedFile.toPath, lines.mkString("\r\n").getBytes("UTF-8"))
} catch { case _:Exception => }
ini("csvFile") = csv.dialog.selectedFile.getCanonicalPath
}
}
}
def load = {
dialog.title = tr("Load CSV...")
if (dialog.showOpenDialog(null) == swing.FileChooser.Result.Approve){
ini("csvFile") = dialog.selectedFile.getCanonicalPath
resetAll
try {
io.Source.fromFile(dialog.selectedFile, "UTF-8").getLines.foreach { l =>
val ls = l.split(separator)
if (ls.length > 2){
chart.addPoints(
if (ls(0).contains(":")) dateFormatter.parse(ls(0)).getTime else ls(0).toDouble,
ls.tail.map { _.toDouble }
)
}
}
top.title = Config.title + " : " + dialog.selectedFile.getCanonicalPath
} catch { case _:Exception => }
}
}
}
// channel poll timer - receive/parse CSV lines
object poll {
val firstLine = new atomic.AtomicBoolean(true)
val readBuf = new collection.mutable.Queue[Byte]
val lineBuf = new collection.mutable.ArrayBuffer[Byte]
val lineNum = new atomic.AtomicInteger(0)
val lineSkip = new atomic.AtomicInteger(0)
val linesIn = new java.util.concurrent.ConcurrentLinkedQueue[String]
// process new line
val lineTimer = new java.util.Timer { scheduleAtFixedRate( new java.util.TimerTask { def run = {
if (!linesIn.isEmpty){
val l = linesIn.poll
if (firstLine.get) firstLine.set(false)
else try {
if (l.startsWith("#")){
statusText.text = "<html>" + l.substring(1).replace("\\r","").replace("\\n","") + "</html>"
if (!statusText.visible) statusText.visible = true
} else {
val ys = l.replace(";","").replace(",","").replace("\r","").replace("\n","").split("\\s+").toBuffer[String]
while ((ys.length > 0)&&(ys(0).length == 0)) ys.trimStart(1)
val y = (ys.map { ns =>
if (ns.startsWith("0x")) java.lang.Integer.parseInt(ns.substring(2), 16)
else if (ns.startsWith("0b")) java.lang.Integer.parseInt(ns.substring(2), 2)
else if (ns.startsWith("0o")) java.lang.Integer.parseInt(ns.substring(2), 8)
else ns.toDouble
})
// correction
for (i <- 0 until y.length if (!y(i).isNaN && !y(i).isInfinity)){
y(i) += ini("yAdd"+(i+1),0.0)
serverData.lazySet(i, y(i))
}
// add to graphs
if (y.length > 0){
if(Config.debug.get) println(lineNum.get + " " + lineSkip.get + " " + l)
while (y.length < channelsCount) y.append(Double.NaN)
values.setAllText(y)
if (lineSkip.get == 0){
lineSkip.set(ini("xSkip",0))
val x:Double = ini("xType",0) match {
case 0 => lineNum.getAndIncrement()
case 1 => y.remove(0)
case 2 => System.currentTimeMillis
}
if (ini("chartOn",false)){
swing.Swing.onEDT( { chart.addPoints(x,y) } )
csv.lines += (if (ini("xType",0) == 2) csv.dateFormatter.format(new java.util.Date(x.toLong)) else x.toString) +
csv.separator + y.mkString(csv.separator)
}
} else lineSkip.decrementAndGet
}
}
} catch {
case e:Exception =>
if(Config.debug.get) println("#line Exception: " + e.getMessage)
}
}
}}, 10, 200)}
val portTimer = new java.util.Timer { scheduleAtFixedRate( new java.util.TimerTask { def run = {
if (channelOpened.get()){
try {
readBuf ++= channel.read
var b = -1
while ((readBuf.length > 0)&&(b != '\n')){
b = readBuf.dequeue
if (b == '\n'){
linesIn.add(new String(lineBuf.toArray, "UTF-8"))
lineBuf.clear
} else lineBuf += (b & 0xFF).toByte
}
} catch {
case e:java.io.IOException =>
if(Config.debug.get) println("#poll IOException: " + e.getMessage)
if (!channel.name.startsWith("socket") && !channel.channels.contains(channel.name)){
readBuf.clear
linesIn.clear
disconnectButton.action.apply()
}
case e:Exception =>
if(Config.debug.get) println("#poll Exception: " + e.getMessage)
lineBuf.clear
}
} else { if (!readBuf.isEmpty){
readBuf.clear
lineBuf.clear
lineNum.set(0)
}}
}}, 10, 20)}
}
// clear all data
def resetAll = {
chart.clearPoints
chart.xAxisFormat(ini("xType",0) == 2, ini("xLabelDate","yyyy.MM.dd HH.mm.ss"))
poll.lineNum.set(0)
csv.lines.clear
}
contents = new swing.BoxPanel(swing.Orientation.Vertical){
contents ++= List(
new swing.BoxPanel(swing.Orientation.Horizontal){
border = new EtchedBorder
contents ++= List(
new swing.CheckBox { portOn = this
action = new swing.Action(tr("port")){ def apply() = portPanel.visible = selected }
selected = true
tooltip = tr("Show port configuration")
}
,new swing.CheckBox {
action = new swing.Action(tr("config")){ def apply() = configPanel.visible = ini.put("configOn", selected).asInstanceOf[Boolean] }
selected = ini("configOn", false)
tooltip = tr("Show chart/channels configuration")
}
,new swing.CheckBox {
action = new swing.Action(tr("values")){ def apply() = valuesPanel.visible = ini.put("valuesOn", selected).asInstanceOf[Boolean] }
selected = ini("valuesOn", false)
tooltip = tr("Show values")
}
,new swing.Label(" | ")
,new swing.CheckBox {
action = new swing.Action(tr("chart")){ def apply() = ini.put("chartOn", selected) }
selected = ini("chartOn",false)
tooltip = tr("Enable chart")
}
,new swing.Button(new swing.Action(tr("reset")){ def apply = resetAll }){ tooltip = tr("Clear all data") }
,new swing.Label(" | ")
,new swing.Button(new swing.Action(">"){ def apply = {
if (chart.snapshotSave(true)) ini("pngFile") = chart.snapshotDialog.selectedFile.getCanonicalPath
}}){ tooltip = tr("Save chart to PNG") }
,new swing.Label(" PNG")
,new swing.Label(" | ")
,new swing.Button(new swing.Action(">"){ def apply = { csv.save
}}){ tooltip = tr("Save data to CSV"); csvSaveButton = this }
,new swing.Label(" CSV ")
,new swing.Button(new swing.Action(">"){ def apply = { csv.load
}}){ tooltip = tr("Load data from CSV"); csvLoadButton = this }
// ,new swing.CheckBox {
// action = new swing.Action(tr("auto")){ def apply() = ini.put("csvAuto", selected) }
// selected = false // ini("csvAuto",false)
// tooltip = tr("Automatic periodic CSV save")
// }
,new swing.Label(" | ")
,new swing.Label(" L&F: ")
,new swing.ComboBox(Config.lafsNames){ maximumSize = preferredSize
selection.index = ini("laf",0)
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("laf") = selection.index
javax.swing.UIManager.setLookAndFeel(Config.lafs(selection.index))
javax.swing.SwingUtilities.updateComponentTreeUI(top.peer)
}
tooltip = tr("Select current Look&Feel")
}
,new swing.Label(" | ")
,new swing.Label(" lang: ")
,new swing.ComboBox( scl.GetText.displayLangs ){ maximumSize = preferredSize
selection.index = ini("lang",0)
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("lang") = selection.index
}
tooltip = tr("Select current language")
}
,new swing.Label(" | ")
,new swing.CheckBox {
action = new swing.Action(tr("server")){ def apply() = {
ini("server.start") = selected
serverPortCombo.visible = selected
}}
selected = ini("server.start",false)
tooltip = tr("Enable server")
}
,new swing.ComboBox(List(80,8080,8090,9000)){ maximumSize = preferredSize
visible = ini("server.start",false)
makeEditable()(swing.ComboBox.intEditor)
selection.item = ini("server.port",8090)
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("server.port") = selection.item
}
tooltip = tr("Server port")
serverPortCombo = this
}
,new swing.Label(" | ")
,swing.Swing.HGlue
,new swing.Button(new swing.Action(tr("connect")){ def apply = {
try {
if (channel != null){ channelOpened.set(false); channel.close; channel = null }
channel = ini("port","socketTCP") match {
case "socketTCP" => new scl.ChannelSocketTCP{ open( ini("ip","127.0.0.1:9000") ) }
case "socketUDP" => new scl.ChannelSocketUDP{ open( ini("ip","127.0.0.1:9000") ) }
case p:String => new scl.ChannelSerial {
open(p)
propSet("baud", ini("baud",9600))
propSet("bits", ini("bits",8))
propSet("parity", ini("parity","none"))
propSet("stops", ini("stops",1.0))
}
}
if (channel.opened){
top.title = Config.title + " : " + channel.name
resetAll
connectButton.visible = false
portPanel.visible = false
portOn.selected = false
xTypeCombo.enabled = false
xDateFormatText.enabled = false
disconnectButton.visible = true
csvLoadButton.enabled = false
channelOpened.set(true)
}
} catch { case _:Exception => }
}}){ connectButton = this; tooltip = tr("Connect to port") }
,new swing.Button(new swing.Action(tr("disconnect")){ def apply = { println("disconnect...")
channelOpened.set(false)
if (channel != null){ channel.close; channel = null }
top.title = Config.title
connectButton.visible = true
disconnectButton.visible = false
portOn.selected = true
portPanel.visible = true
xTypeCombo.enabled = true
xDateFormatText.enabled = true
csvLoadButton.enabled = true
}}){ disconnectButton = this; visible = false; tooltip = tr("Disconnect from port") }
)
}
,new swing.BoxPanel(swing.Orientation.Horizontal){ portPanel = this; visible = true
border = new TitledBorder( new EtchedBorder, tr("Port"), TitledBorder.LEFT, TitledBorder.TOP )
contents ++= List(
new swing.ComboBox(List("socketTCP","socketUDP") ::: scl.ChannelSerial.channels.sorted.toList){ maximumSize = preferredSize
selection.item = ini("port", "socketTCP")
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("port") = selection.item
ipText.visible = selection.item.startsWith("socket")
serialPanel.visible = !ipText.visible
portPanel.revalidate
}
tooltip = tr("Select channel")
}
,new swing.Label(" ")
,new swing.TextField(18){ maximumSize = preferredSize; ipText = this
visible = ini("port","socketTCP").startsWith("socket")
font = new awt.Font( "Monospaced", awt.Font.BOLD, font.getSize )
text = ini("ip","127.0.0.1:9000")
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("ip") = text }
verifier = v => {
// println( v.matches("^(?:[0-9]{1,3}\\.){3}[0-9]{1,3}$") )
v.matches("^(?:[0-9]{1,3}\\.){3}[0-9]{1,3}$")
}
tooltip = tr("Socket IP address and port")
}
,new swing.BoxPanel(swing.Orientation.Horizontal){ contents ++= List(
new swing.Label(tr(" baud:"))
,new swing.ComboBox(Config.bauds){ maximumSize = preferredSize
makeEditable()(swing.ComboBox.intEditor)
selection.item = ini("baud",9600)
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("baud") = selection.item
}
tooltip = tr("Serial port baud rate")
}
,new swing.Label(tr(" bits:"))
,new swing.ComboBox(List(5,6,7,8)){ maximumSize = preferredSize
selection.item = ini("bits",8)
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("bits") = selection.item
}
tooltip = tr("Serial port data bits")
}
,new swing.Label(tr(" parity:"))
,new swing.ComboBox(List("none","even","odd","mark","space")){ maximumSize = preferredSize
selection.item = ini("parity", "none")
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("parity") = selection.item
}
tooltip = tr("Serial port parity")
}
,new swing.Label(tr(" stops:"))
,new swing.ComboBox(List(1.0,1.5,2.0)){ maximumSize = preferredSize
selection.item = ini("stops",1.0)
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("stops") = selection.item
}
tooltip = tr("Serial port stop bits")
}
); serialPanel = this; visible = !ini("port","socketTCP").startsWith("socket") }
,swing.Swing.HGlue
)
}
,new swing.BoxPanel(swing.Orientation.Vertical){ configPanel = this; visible = ini("configOn", false); contents ++= List(
new swing.BoxPanel(swing.Orientation.Horizontal){ val generalConfigPanel = this; border = new EtchedBorder
contents ++= List(
new swing.Label(" x:")
,new swing.TextField(5){ maximumSize = preferredSize
text = ini("xLabel","x")
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("xLabel") = text; chart.xName(text) }
tooltip = tr("X axis label")
}
,new swing.Label(" ")
,new swing.ComboBox(List(tr("line β"),tr("1st col"),tr("date"))){ maximumSize = preferredSize; xTypeCombo = this
selection.index = ini("xType",0)
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("xType") = selection.index
xDateFormatText.visible = (selection.index == 2)
generalConfigPanel.revalidate
}
tooltip = tr("X axis type")
}
,new swing.Label(" ")
,new swing.TextField(20){ maximumSize = preferredSize; visible = (ini("xType",0) == 2); xDateFormatText = this
font = new awt.Font( "Monospaced", awt.Font.BOLD, font.getSize )
text = ini("xLabelDate","yyyy.MM.dd HH.mm.ss")
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("xLabelDate") = text }
tooltip = tr("<html>X axis date/time format:<br>" +
"<b>y</b> - year, <b>M</b> - month, <b>d</b> - date;<br>" +
"<b>H</b> - hour, <b>m</b> - minute, <b>s</b> - second;<br>" +
"<b>'single quotes'</b> - plain text.</html>")
}
,new swing.Label(tr(" limit: "))
,new swing.ComboBox(List(0,100,500,1000,5000,10000,20000,50000,100000)){ maximumSize = preferredSize
makeEditable()(swing.ComboBox.intEditor)
selection.item = ini("xLimit",0)
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("xLimit") = selection.item
chart.xLimit.set( selection.item )
}
tooltip = tr("Limit number of points")
}
,new swing.Label(tr(" skip: "))
,new swing.ComboBox(List(0,1,4,9,19,49,59,99,199,499,999)){ maximumSize = preferredSize
makeEditable()(swing.ComboBox.intEditor)
selection.item = ini("xSkip",0)
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("xSkip") = selection.item
}
tooltip = tr("Skip CSV lines")
}
,new swing.Label(" | ")
,new swing.Label(" y1")
,new swing.TextField(5){ maximumSize = preferredSize
text = ini("yLabel1","y")
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("yLabel1") = text; chart.yName1(text) }
tooltip = tr("Y1 axis label")
}
,new swing.Label(" y2")
,new swing.TextField(5){ maximumSize = preferredSize
text = ini("yLabel2","y")
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("yLabel2") = text; chart.yName2(text) }
tooltip = tr("Y2 axis label")
}
,new swing.Label(" | ")
,new swing.Label(tr("window"))
,new swing.Label(" x: ")
,new swing.TextField(4){ maximumSize = preferredSize
text = ini("winXmin",0).toString()
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("winXmin") = text;
chart.rangeX(ini("winXmin",0),ini("winXmax",0))
}
tooltip = tr("X window minimum")
}
,new swing.Label(" .. ")
,new swing.TextField(4){ maximumSize = preferredSize
text = ini("winXmax",0).toString()
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("winXmax") = text;
chart.rangeX(ini("winXmin",0),ini("winXmax",0))
}
tooltip = tr("X window maximum")
}
,new swing.Label(" y1")
,new swing.TextField(4){ maximumSize = preferredSize
text = ini("winY1min",0).toString()
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("winY1min") = text;
chart.rangeY1(ini("winY1min",0),ini("winY1max",0))
}
tooltip = tr("Y1 window minimum")
}
,new swing.Label("..")
,new swing.TextField(4){ maximumSize = preferredSize
text = ini("winY1max",0).toString()
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("winY1max") = text;
chart.rangeY1(ini("winY1min",0),ini("winY1max",0))
}
tooltip = tr("Y1 window maximum")
}
,new swing.Label(" y2")
,new swing.TextField(4){ maximumSize = preferredSize
text = ini("winY2min",0).toString()
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("winY2min") = text;
chart.rangeY2(ini("winY2min",0),ini("winY2max",0))
}
tooltip = tr("Y2 window minimum")
}
,new swing.Label("..")
,new swing.TextField(4){ maximumSize = preferredSize
text = ini("winY2max",0).toString()
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("winY2max") = text;
chart.rangeY2(ini("winY2min",0),ini("winY2max",0))
}
tooltip = tr("Y2 window maximum")
}
,new swing.Label(" | ")
,new swing.Label(tr("grid: "))
,new swing.CheckBox {
action = new swing.Action("x"){ def apply() = { ini("gridX") = selected; chart.showGridX(selected) }}
selected = ini("gridX",false)
tooltip = tr("Enable X grid")
}
,new swing.CheckBox {
action = new swing.Action("y1"){ def apply() = {
ini("gridY1") = selected
chart.showGridY1(selected)
}}
selected = ini("gridY1",false)
tooltip = tr("Enable Y1 grid")
}
,new swing.CheckBox {
action = new swing.Action("y2"){ def apply() = {
ini("gridY2") = selected
chart.showGridY2(selected)
}}
selected = ini("gridY2",false)
tooltip = tr("Enable Y2 grid")
}
,swing.Swing.HGlue
)
}
,new swing.ScrollPane{ maximumSize = new swing.Dimension(Integer.MAX_VALUE,180); contents = new swing.GridPanel(7,channelsCount+1){
border = new EtchedBorder
contents += new swing.Label(tr("show")){ tooltip = tr("Trace visibility") }
contents ++= (for (i <- 1 to channelsCount)
yield new swing.CheckBox { maximumSize = preferredSize
action = new swing.Action(i.toString){
def apply() = { ini("show"+i) = selected; chart.traceShow(i-1,selected) }
}
selected = ini("show"+i, false)
tooltip = tr("visibility of trace β%d").format(i)
})
contents += new swing.Label(tr("name")){ tooltip = tr("Trace name") }
contents ++= (for (i <- 1 to channelsCount)
yield new swing.TextField(8){
tooltip = tr("name of trace β%d").format(i)
text = ini("name"+i,"Y"+i)
listenTo(this)
reactions += { case swing.event.EditDone(_) => ini("name"+i) = text; chart.traceName(i-1,text) }
})
contents += new swing.Label(tr("Y axis")){ tooltip = tr("Number of Y axis") }
contents ++= (for (i <- 1 to channelsCount)
yield (new swing.ComboBox(List(1,2)){
tooltip = tr("axis of trace β%d").format(i)
selection.index = ini("yAxis"+i,0)
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("yAxis"+i) = selection.index
}
})
)
contents += new swing.Label(tr("color")){ tooltip = tr("Trace color") }
contents ++= (for (i <- 1 to channelsCount)
yield new swing.Button(""){ val _but = this; background = SwUtil.svgColor(colors.get(i-1));
action = new swing.Action(""){ def apply = {
colors.chooser.openColor( colors.current(i-1), { c:String => _but.background = colors.set(i-1,c); chart.traceColor(i-1,colors.current(i-1)) })
} }; tooltip = tr("color of trace β%d").format(i) }
)
contents += new swing.Label(tr("width")){ tooltip = tr("Trace width") }
contents ++= (for (i <- 1 to channelsCount)
yield new swing.FormattedTextField(java.text.NumberFormat.getNumberInstance){
tooltip = tr("width of trace β%d").format(i)
text = ini("width"+i,1.0).toString()
listenTo(this)
reactions += { case swing.event.ValueChanged(_) if (!this.hasFocus && text.length > 0 && editValid) =>
ini("width"+i) = text; chart.traceStyle(i-1,ini("width"+i,1.0),ini("style"+i,0))
}
})
contents += new swing.Label(tr("style")){ tooltip = tr("Trace style") }
contents ++= (for (i <- 1 to channelsCount)
yield (new swing.ComboBox(List("βββββββ","Β· Β· Β· Β·","β β β β")){
tooltip = tr("style of trace β%d").format(i)
selection.index = ini("style"+i,0)
listenTo(selection)
reactions += { case swing.event.SelectionChanged(_) =>
ini("style"+i) = selection.index
chart.traceStyle(i-1,ini("width"+i,1.0),ini("style"+i,0))
}
})
)
contents += new swing.Label("Ξ"){ tooltip = tr("Value correction") }
contents ++= (for (i <- 1 to channelsCount)
yield new swing.FormattedTextField(java.text.NumberFormat.getNumberInstance){
tooltip = tr("delta of value β%d").format(i)
text = ini("yAdd"+i,0.0).toString()
listenTo(this)
reactions += { case swing.event.ValueChanged(_) if (!this.hasFocus && text.length > 0 && editValid) =>
ini("yAdd"+i) = text
}
})
}}
); border = new TitledBorder( new EtchedBorder, tr("Config"), TitledBorder.LEFT, TitledBorder.TOP )}
,new swing.BoxPanel(swing.Orientation.Vertical){ valuesPanel = this; visible = ini("valuesOn", false)
border = new EtchedBorder
var ch = 0
for (row <- 0 until Math.ceil(channelsCount / channelsInRow.toDouble).toInt){
contents += new swing.BoxPanel(swing.Orientation.Horizontal){
contents += swing.Swing.HGlue
for (col <- 0 until channelsInRow if ((row*channelsInRow + col) < channelsCount)){
val v = new swing.Label {
font = new awt.Font("Monospace", awt.Font.BOLD, 30)
tooltip = tr("value β%d").format(row*channelsInRow + col + 1)
}
contents += v
values.labels += v
values.setText(row*channelsInRow + col,Double.NaN)
colors.set(row*channelsInRow + col,colors.get(row*channelsInRow + col))
}
contents += swing.Swing.HGlue
}
}
}
,new scl.Chart { top.chart = this
addAxisRight(false)
for (i <- 1 to channelsCount){
addTrace(ini("name"+i,"Y"+i), colors.current(i-1), ini("show"+i,false),
ini("width"+i,1.0), ini("style"+i,0), ini("yAxis"+i,0) == 1
)
if (ini("yAxis"+i,0) == 1) showAxisRight(true)
}
xName(ini("xLabel","x")); yName1(ini("yLabel1","y")); yName2(ini("yLabel2","y"));
rangeX(ini("winXmin",0),ini("winXmax",0))
rangeY1(ini("winY1min",0),ini("winY1max",0))
rangeY2(ini("winY2min",0),ini("winY2max",0))
xLimit.set( ini("xLimit",0) )
showGridX(ini("gridX",false))
showGridY1(ini("gridY1",false))
showGridY2(ini("gridY2",false))
snapshotDialog.selectedFile = new java.io.File(ini("pngFile", "snapshot.png"))
}
,new swing.BoxPanel(swing.Orientation.Horizontal){
contents ++= List(
new swing.Label(""){
statusText = this
visible = false
font = new awt.Font("Monospace", awt.Font.BOLD, 18)
horizontalAlignment = swing.Alignment.Left
}
,swing.Swing.HGlue
)}
)
}
// restore window geometry
minimumSize = new swing.Dimension( 800, 600 )
bounds = new swing.Rectangle( ini("win.x",0), ini("win.y",0), ini("win.w",800), ini("win.h",600) )
preferredSize = new swing.Dimension( bounds.width, bounds.height )
if (ini("win.max", false)) maximize
// start server
if (ini("server.start",false)){
new java.lang.Thread {
override def run = {
server = new scl.ServerJetty( ini("server.port",8090), "./static", 0 ){
override def newHandler(target:String, baseRequest:jetty.server.Request, request:HttpServletRequest, response:HttpServletResponse) =
new scl.HandlerJetty(target, baseRequest, request, response){
val _handler = this
override def handle:Boolean = {
try { target match {
case "/values.json" =>
response.setContentType("application/json")
responseStr = "[" + (0 until channelsCount map(serverData.get(_))).mkString(",") + "]"
handled = true
}} catch { case _:Exception => }
super.handle
}
}
}
server.start
}
start
}
}
// save configuration on close
override def closeOperation() {
ini("win.x") = bounds.x; ini("win.y") = bounds.y
ini("win.w") = bounds.width; ini("win.h") = bounds.height
ini("win.max") = maximized
ini("colors") = colors.current.mkString(",")
ini.save
if (server != null) server.stop(0)
super.closeOperation()
}
}
override def main(args: Array[String]) = {
super.main(args)
Config.debug.set(args.contains("--debug"))
}
}
|
tardigrade888/scsvlog
|
repo/src/scsvlog/ScsvLog.scala
|
Scala
|
mit
| 45,619
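The embedded Jetty handler above answers `/values.json` with the current channel values as a bracketed, comma-separated list (default port 8090). A minimal polling-client sketch; the host and port are assumptions, and a plain string split is used because `NaN` entries make the payload non-standard JSON:

object ValuesClient extends App {
  val url = "http://127.0.0.1:8090/values.json"
  // Response looks like "[1.0,2.5,NaN,...]" with one entry per channel.
  val body = scala.io.Source.fromURL(url, "UTF-8").mkString
  val values = body.stripPrefix("[").stripSuffix("]").split(",").map(_.trim.toDouble)
  println(values.mkString(" "))
}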
|
package leo.datastructures.blackboard.impl
import leo.agents.{Agent, Task}
import leo.datastructures.blackboard.{LockSet, TaskSet}
/**
* Implements a fifo order on the tasks.
*
* TODO Locks currently do not delete tasks
*/
class FifoTaskSet extends TaskSet {
private val agents : scala.collection.concurrent.TrieMap[Agent, Int] = scala.collection.concurrent.TrieMap[Agent, Int]()
private var firstNode : Node = _
private var lastNode : Node = _
override def addAgent(a: Agent): Unit = agents += ((a, 0))
override def removeAgent(a: Agent): Unit = agents -= a
// override def executingTasks(a: Agent): Int = agents.getOrElse(a, 0)
override def containsAgent(a: Agent): Boolean = agents.contains(a)
override def clear(): Unit = {
agents.clear()
}
override def passive(a: Agent): Unit = {}
override def active(a: Agent): Unit = {}
override def submit(t: Task): Unit = synchronized{
if(!LockSet.isOutdated(t)) {
val h = new LinkedNode(t)
// guard against an empty list: the first submitted task becomes both first and last node
if(lastNode == null) firstNode = h else lastNode.setNext(h)
lastNode = h
}
}
override def finish(t: Task): Unit = {
LockSet.lockTask(t)
if(firstNode != null) {
val h = new LazyNode(Set(), Set(t))
h.setNext(firstNode)
firstNode = h
}
}
override def commit(ts: Set[Task]): Unit = {
ts.foreach(t => LockSet.releaseTask(t))
if(firstNode != null){
val h = new LazyNode(ts, Set())
h.setNext(firstNode)
firstNode = h
}
}
// TODO cache existExecutable until a new insertion or finish is called
override def existExecutable: Boolean = firstNode != null && firstNode.lazyCompressAndSearch != null
override def executableTasks: Iterator[Task] = new ExecIterator
override def registeredTasks: Iterable[Task] = ???
private class ExecIterator extends Iterator[Task] {
private var cur : Node = firstNode
override def hasNext: Boolean = { // TODO move First in compress
cur = if(cur == null) null else cur.lazyCompressAndSearch
cur != null
}
override def next(): Task = {
if(hasNext) {
assert(!cur.disabled)
val e = cur.elem
cur = cur.next
e
} else {
throw new IllegalStateException("Access empty iterator")
}
}
}
private trait Node {
def next : Node
def setNext(n : Node) : Unit
def elem : Task
def disabled : Boolean
def setDisabled(d : Boolean): Unit
def lazyCompressAndSearch : Node
}
private class LinkedNode(val elem : Task) extends Node {
var disabled : Boolean = _
var next : Node = _
override def setNext(n: Node): Unit = {next = n}
override def setDisabled(d: Boolean): Unit = {disabled = d}
override def lazyCompressAndSearch: Node = {
if(!disabled) this
else {
next.lazyCompressAndSearch
}
}
}
private class LazyNode(val commit : Set[Task], val finish : Set[Task]) extends Node {
val disabled : Boolean = false
var next : Node = _
var prev : Node = _
override def setNext(n: Node): Unit = next = n
override def elem: Task = ???
override def setDisabled(d: Boolean): Unit = ???
private def switchWithNext(): Unit = {
assert(next != null)
val l = next
if(prev == null)
firstNode = next
else
prev.setNext(next)
next = l.next
l.setNext(this)
}
override def lazyCompressAndSearch: Node = {
if(next == null) { // Deletes the lazy node
if(prev == null) {
firstNode = null
} else {
prev.setNext(null)
}
lastNode = prev
return null
}
next match {
case l : LinkedNode =>
if (commit.contains(l.elem)) {
// If it was taken, it can be deleted (Lazy Delete)
next = l.next
lazyCompressAndSearch
} else if (l.disabled && finish.exists(t => t.blockes(l.elem))){
// If it was blocked by a task that has now finished, free it and return this element
l.setDisabled(false)
switchWithNext()
l
} else if (l.disabled) {
// If it is disabled (not freed) switch
switchWithNext()
lazyCompressAndSearch
} else if (LockSet.isOutdated(l.elem)){
// If it is newly blocked, mark it as disabled and search further
l.setDisabled(true)
switchWithNext()
lazyCompressAndSearch
} else {
switchWithNext()
l
}
case l : LazyNode => {
// merge consecutive lazy nodes, splice the merged node into the list and continue the search from it
val merge = new LazyNode(this.commit union l.commit, this.finish union l.finish)
merge.prev = this.prev
merge.setNext(l.next)
if(merge.prev == null) firstNode = merge else merge.prev.setNext(merge)
merge.lazyCompressAndSearch
}
}
}
}
}
|
lex-lex/Leo-III
|
oldsrc/main/scala/leo/datastructures/blackboard/impl/FifoTaskSet.scala
|
Scala
|
bsd-3-clause
| 4,745
|
package org.reactivecouchbase.client
import play.api.libs.json._
import scala.concurrent.{ExecutionContext, Future}
import play.api.libs.iteratee.{Enumeratee, Iteratee, Enumerator}
import com.couchbase.client.protocol.views._
import org.reactivecouchbase.{Timeout, Couchbase, CouchbaseBucket}
import java.util.concurrent.{ConcurrentLinkedQueue, TimeUnit}
import org.reactivecouchbase.client.CouchbaseFutures._
import collection.JavaConversions._
import org.reactivecouchbase.experimental.Views
import play.api.libs.json.JsSuccess
import scala.Some
import play.api.libs.json.JsObject
/**
*
* Raw row query result
*
* @param document the document
* @param id the document's key
* @param key the document's indexed key
* @param value the document's indexed value
*/
case class RawRow(document: Option[String], id: Option[String], key: String, value: String) {
def toTuple = (document, id, key, value)
}
/**
*
* Js row query result
*
* @param document the document
* @param id the document's key
* @param key the document's indexed key
* @param value the document's indexed value
* @tparam T the type of the doc
*/
case class JsRow[T](document: JsResult[T], id: Option[String], key: String, value: String) {
def toTuple = (document, id, key, value)
}
/**
*
* Typed row query result
*
* @param document the document
* @param id the document's key
* @param key the document's indexed key
* @param value the document's indexed value
* @tparam T the type of the doc
*/
case class TypedRow[T](document: T, id: Option[String], key: String, value: String) {
def toTuple = (document, id, key, value)
}
/**
*
* ReactiveCouchbase representation of a query result
*
* @param futureEnumerator doc stream
* @tparam T the type of the doc
*/
class QueryEnumerator[T](futureEnumerator: () => Future[Enumerator[T]]) {
/**
* @return the enumerator for query results
*/
def toEnumerator: Future[Enumerator[T]] = futureEnumerator()
/**
*
* @param ec ExecutionContext for async processing
* @return the query result as enumerator
*/
def enumerate(implicit ec: ExecutionContext): Enumerator[T] =
Enumerator.flatten(futureEnumerator())
//Concurrent.unicast[T](onStart = c => futureEnumerator.map(_(Iteratee.foreach[T](c.push).map(_ => c.eofAndEnd()))))
/**
*
* @param ec ExecutionContext for async processing
* @return the query result as list
*/
def toList(implicit ec: ExecutionContext): Future[List[T]] =
futureEnumerator().flatMap(_(Iteratee.getChunks[T]).flatMap(_.run))
/**
*
* @param ec ExecutionContext for async processing
* @return the optional head
*/
def headOption(implicit ec: ExecutionContext): Future[Option[T]] =
futureEnumerator().flatMap(_(Iteratee.head[T]).flatMap(_.run))
}
/**
* Companion object to build QueryEnumerators
*/
object QueryEnumerator {
def apply[T](enumerate: () => Future[Enumerator[T]]): QueryEnumerator[T] = new QueryEnumerator[T](enumerate)
}
/**
* Trait to query Couchbase
*/
trait Queries {
def docName(name: String)(implicit bucket: CouchbaseBucket) = {
s"${bucket.cbDriver.mode}${name}"
}
/**
*
* Perform a Couchbase query on a view
*
* @param docName the name of the design doc
* @param viewName the name of the view
* @param query the actual query
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the list of docs
*/
def find[T](docName:String, viewName: String)(query: Query)(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext) = searchValues(docName, viewName)(query)(bucket, r, ec).toList(ec)
/**
*
* Perform a Couchbase query on a view
*
* @param view the view to query
* @param query the actual query
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the list of docs
*/
def find[T](view: View)(query: Query)(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext) = searchValues(view)(query)(bucket, r, ec).toList(ec)
///////////////////////////////////////////////////////////////////////////////////////////////////
/**
*
* Perform a Couchbase query on a view
*
* @param docName the name of the design doc
* @param viewName the name of the view
* @param query the actual query
* @param bucket the bucket to use
* @param ec ExecutionContext for async processing
* @return the query enumerator
*/
def rawSearch(docName:String, viewName: String)(query: Query)(implicit bucket: CouchbaseBucket, ec: ExecutionContext): QueryEnumerator[RawRow] = {
QueryEnumerator(() => view(docName, viewName).flatMap {
case view: View => rawSearch(view)(query)(bucket, ec).toEnumerator
case _ => Future.failed(new ReactiveCouchbaseException("Couchbase view error", s"Can't find view $viewName from $docName. Please create it."))
})
}
/**
*
* Perform a Couchbase query on a view
*
* @param view the view to query
* @param query the actual query
* @param bucket the bucket to use
* @param ec ExecutionContext for async processing
* @return the query enumerator
*/
def rawSearch(view: View)(query: Query)(implicit bucket: CouchbaseBucket, ec: ExecutionContext): QueryEnumerator[RawRow] = {
if (bucket.useExperimentalQueries) {
Views.internalCompatRawSearch(view, query, bucket, ec)
} else {
QueryEnumerator(() => waitForHttp[ViewResponse]( bucket.couchbaseClient.asyncQuery(view, query), bucket, ec ).map { results =>
Enumerator.enumerate(results.iterator()) &> Enumeratee.map[ViewRow] {
case r: ViewRowWithDocs if query.willIncludeDocs() => RawRow(Some(r.getDocument.asInstanceOf[String]), Some(r.getId), r.getKey, r.getValue)
case r: ViewRowWithDocs if !query.willIncludeDocs() => RawRow(None, Some(r.getId), r.getKey, r.getValue)
case r: ViewRowNoDocs => RawRow(None, Some(r.getId), r.getKey, r.getValue)
case r: ViewRowReduced => RawRow(None, None, r.getKey, r.getValue)
case r: SpatialViewRowNoDocs => RawRow(None, Some(r.getId), r.getKey, r.getValue)
case r: SpatialViewRowWithDocs if query.willIncludeDocs() => RawRow(Some(r.getDocument.asInstanceOf[String]), Some(r.getId), r.getKey, r.getValue)
case r: SpatialViewRowWithDocs if !query.willIncludeDocs() => RawRow(None, Some(r.getId), r.getKey, r.getValue)
}
})
}
}
/**
*
* Perform a Couchbase query on a view
*
* @param docName the name of the design doc
* @param viewName the name of the view
* @param query the actual query
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the query enumerator
*/
def search[T](docName:String, viewName: String)(query: Query)(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext): QueryEnumerator[TypedRow[T]] = {
QueryEnumerator(() => view(docName, viewName).flatMap {
case view: View => search(view)(query)(bucket, r, ec).toEnumerator
case _ => Future.failed(new ReactiveCouchbaseException("Couchbase view error", s"Can't find view $viewName from $docName. Please create it."))
})
}
/**
*
* Perform a Couchbase query on a view
*
* @param view the view to query
* @param query the actual query
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the query enumerator
*/
def search[T](view: View)(query: Query)(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext): QueryEnumerator[TypedRow[T]] = {
QueryEnumerator(() => rawSearch(view)(query)(bucket, ec).toEnumerator.map { enumerator =>
enumerator &>
Enumeratee.map[RawRow] { row =>
row.document.map { doc =>
JsRow[T](r.reads(Json.parse(doc)), row.id, row.key, row.value)
}.getOrElse(
JsRow[T](JsError(), row.id, row.key, row.value)
)
} &>
Enumeratee.collect[JsRow[T]] {
case JsRow(JsSuccess(doc, _), id, key, value) => TypedRow[T](doc, id, key, value)
case JsRow(JsError(errors), _, _, _) if bucket.jsonStrictValidation => throw new JsonValidationException("Invalid JSON content", JsError.toFlatJson(errors))
}
})
}
/**
*
* Perform a Couchbase query on a view
*
* @param docName the name of the design doc
* @param viewName the name of the view
* @param query the actual query
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the query enumerator
*/
def searchValues[T](docName:String, viewName: String)(query: Query)(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext): QueryEnumerator[T] = {
QueryEnumerator(() => view(docName, viewName).flatMap {
case view: View => searchValues(view)(query)(bucket, r, ec).toEnumerator
case _ => Future.failed(new ReactiveCouchbaseException("Couchbase view error", s"Can't find view $viewName from $docName. Please create it."))
})
}
/**
*
* Perform a Couchbase query on a view
*
* @param view the view to query
* @param query the actual query
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the query enumerator
*/
def searchValues[T](view: View)(query: Query)(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext): QueryEnumerator[T] = {
QueryEnumerator(() => search[T](view)(query)(bucket, r, ec).toEnumerator.map { enumerator =>
enumerator &> Enumeratee.map[TypedRow[T]](_.document)
})
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
*
* 'Tail -f' on a query
*
* @param doc the name of the design doc
* @param view the view to query
* @param extractor id extractor defining the natural insertion order
* @param from start from this id
* @param every polling interval
* @param unit time unit of the polling interval
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the query enumerator
*/
def tailableQuery[T](doc: String, view: String, extractor: T => Long, from: Long = 0L, every: Long = 1000L, unit: TimeUnit = TimeUnit.MILLISECONDS)(implicit bucket: () => CouchbaseBucket, r: Reads[T], ec: ExecutionContext): Enumerator[T] = {
var last = System.currentTimeMillis()
def query() = new Query()
.setIncludeDocs(true)
.setStale(Stale.FALSE)
.setDescending(false)
.setRangeStart(ComplexKey.of(last.asInstanceOf[AnyRef]))
.setRangeEnd(ComplexKey.of(Long.MaxValue.asInstanceOf[AnyRef]))
def step(list: ConcurrentLinkedQueue[T]): Future[Option[(ConcurrentLinkedQueue[T], T)]] = {
Couchbase.find[T](doc, view)(query())(bucket(), r, ec).map { res =>
res.foreach { doc =>
last = extractor(doc) + 1L
list.offer(doc)
}
}.flatMap { _ =>
list.poll() match {
case null => Timeout.timeout("", every, unit, bucket().driver.scheduler()).flatMap(_ => step(list))
case e => Future.successful(Some((list, e)))
}
}
}
Enumerator.unfoldM(new ConcurrentLinkedQueue[T]()) { list =>
if (list.isEmpty) {
step(list)
} else {
val el = list.poll()
Future.successful(Some((list, el)))
}
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
*
* Poll query every n millisec
*
* @param doc the name of the design doc
* @param view the view to query
* @param query the actual query
* @param everyMillis repeat interval in milliseconds
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the query enumerator
*/
def pollQuery[T](doc: String, view: String, query: Query, everyMillis: Long)(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext): Enumerator[T] = {
pollQuery[T](doc, view, query, everyMillis, { chunk: T => true })(bucket, r, ec)
}
/**
*
* Poll query every n millisec
*
* @param doc the name of the design doc
* @param view the view to query
* @param query the actual query
* @param everyMillis repeat interval in milliseconds
* @param filter the filter for documents selection
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the query enumerator
*/
def pollQuery[T](doc: String, view: String, query: Query, everyMillis: Long, filter: T => Boolean)(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext): Enumerator[T] = {
Enumerator.repeatM(
Timeout.timeout(Some, everyMillis, TimeUnit.MILLISECONDS, bucket.driver.scheduler()).flatMap(_ => find[T](doc, view)(query)(bucket, r, ec))
).through( Enumeratee.mapConcat[List[T]](identity) ).through( Enumeratee.filter[T]( filter ) )
}
/**
*
* Repeat a query each time trigger is done
*
* @param doc the name of the design doc
* @param view the view to query
* @param query the actual query
* @param trigger trigger the repeat when future is done
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the query enumerator
*/
def repeatQuery[T](doc: String, view: String, query: Query, trigger: Future[AnyRef])(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext): Enumerator[T] = {
repeatQuery[T](doc, view, query, trigger, { chunk: T => true })(bucket, r, ec)
}
/**
*
* Repeat a query each time trigger is done
*
* @param doc the name of the design doc
* @param view the view to query
* @param query the actual query
* @param trigger trigger the repeat when future is done
* @param filter the filter for documents selection
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the query enumerator
*/
def repeatQuery[T](doc: String, view: String, query: Query, trigger: Future[AnyRef], filter: T => Boolean)(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext): Enumerator[T] = {
Enumerator.repeatM(
trigger.flatMap { _ => find[T](doc, view)(query)(bucket, r, ec) }
).through( Enumeratee.mapConcat[List[T]](identity) ).through( Enumeratee.filter[T]( filter ) )
}
/**
*
* Repeat a query indefinitely
*
* @param doc the name of the design doc
* @param view the view to query
* @param query the actual query
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the query enumerator
*/
def repeatQuery[T](doc: String, view: String, query: Query)(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext): Enumerator[T] = {
repeatQuery[T](doc, view, query, { chunk: T => true })(bucket, r, ec)
}
/**
*
* Repeat a query indefinitely
*
* @param doc the name of the design doc
* @param view the view to query
* @param query the actual query
* @param filter the filter for documents selection
* @param bucket the bucket to use
* @param r Json reader for type T
* @param ec ExecutionContext for async processing
* @tparam T type of the doc
* @return the query enumerator
*/
def repeatQuery[T](doc: String, view: String, query: Query, filter: T => Boolean)(implicit bucket: CouchbaseBucket, r: Reads[T], ec: ExecutionContext): Enumerator[T] = {
repeatQuery[T](doc, view, query, Future.successful(Some),filter)(bucket, r, ec)
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
*
* Fetch a view
*
* @param docName the name of the design doc
* @param viewName the name of the view
* @param bucket the bucket to use
* @param ec ExecutionContext for async processing
* @return the view
*/
def view(docName: String, viewName: String)(implicit bucket: CouchbaseBucket, ec: ExecutionContext): Future[View] = {
waitForHttp[View]( bucket.couchbaseClient.asyncGetView(docName, viewName), bucket, ec )
}
/**
*
* Fetch a spatial view
*
* @param docName the name of the design doc
* @param viewName the name of the view
* @param bucket the bucket to use
* @param ec ExecutionContext for async processing
* @return the spatial view
*/
def spatialView(docName: String, viewName: String)(implicit bucket: CouchbaseBucket, ec: ExecutionContext): Future[SpatialView] = {
waitForHttp[SpatialView]( bucket.couchbaseClient.asyncGetSpatialView(docName, viewName), bucket, ec )
}
/**
*
* Fetch a design document
*
* @param docName the name of the design doc
* @param bucket the bucket to use
* @param ec ExecutionContext for async processing
* @return fetch design doc
*/
def designDocument(docName: String)(implicit bucket: CouchbaseBucket, ec: ExecutionContext): Future[DesignDocument] = {
waitForHttp[DesignDocument]( bucket.couchbaseClient.asyncGetDesignDocument(docName), bucket, ec )
}
/**
*
* Create a design doc
*
* @param name name of the design doc
* @param value the content of the design doc
* @param bucket the bucket to use
* @param ec ExecutionContext for async processing
* @return the operation status
*/
def createDesignDoc(name: String, value: JsObject)(implicit bucket: CouchbaseBucket, ec: ExecutionContext): Future[OpResult] = {
waitForHttpStatus( bucket.couchbaseClient.asyncCreateDesignDoc(name, Json.stringify(value)), bucket, ec ).map(OpResult(_, 1, Some(value)))
}
/**
*
* Create a design doc
*
* @param name name of the design doc
* @param value the content of the design doc
* @param bucket the bucket to use
* @param ec ExecutionContext for async processing
* @return the operation status
*/
def createDesignDoc(name: String, value: String)(implicit bucket: CouchbaseBucket, ec: ExecutionContext): Future[OpResult] = {
waitForHttpStatus( bucket.couchbaseClient.asyncCreateDesignDoc(name, value), bucket, ec ).map(OpResult(_, 1))
}
/**
*
* Create a design doc
*
* @param value the content of the design doc
* @param bucket the bucket to use
* @param ec ExecutionContext for async processing
* @return the operation status
*/
def createDesignDoc(value: DesignDocument)(implicit bucket: CouchbaseBucket, ec: ExecutionContext): Future[OpResult] = {
waitForHttpStatus( bucket.couchbaseClient.asyncCreateDesignDoc(value), bucket, ec ).map(OpResult(_, 1))
}
/**
*
* Delete a design doc
*
* @param name name of the design doc
* @param bucket the bucket to use
* @param ec ExecutionContext for async processing
* @return the operation status
*/
def deleteDesignDoc(name: String)(implicit bucket: CouchbaseBucket, ec: ExecutionContext): Future[OpResult] = {
waitForHttpStatus( bucket.couchbaseClient.asyncDeleteDesignDoc(name), bucket, ec ).map(OpResult(_, 1))
}
}
|
en-japan/ReactiveCouchbase-core
|
driver/src/main/scala/org/reactivecouchbase/client/Queries.scala
|
Scala
|
apache-2.0
| 19,975
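A minimal usage sketch for the `Queries` trait above, via the `Couchbase` object that the file itself calls in `tailableQuery`. The design document and view names (`docs`, `by_name`) and the `Beer` case class are illustrative only, and an implicit `CouchbaseBucket` plus `ExecutionContext` are assumed to be in scope:

import com.couchbase.client.protocol.views.{Query, Stale}
import org.reactivecouchbase.{Couchbase, CouchbaseBucket}
import play.api.libs.json.{Json, Reads}
import scala.concurrent.{ExecutionContext, Future}

case class Beer(name: String, brewery: String)

object BeerQueries {
  implicit val beerReads: Reads[Beer] = Json.reads[Beer]

  // find[T] resolves the view, runs the query and decodes each row's document with the Reads.
  def allBeers()(implicit bucket: CouchbaseBucket, ec: ExecutionContext): Future[List[Beer]] =
    Couchbase.find[Beer]("docs", "by_name")(new Query().setIncludeDocs(true).setStale(Stale.FALSE))
}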
|
package builder.api_json
import org.scalatest.{FunSpec, Matchers}
class InternalDatatypeSpec extends FunSpec with Matchers {
it("label") {
Seq("string", "uuid", "[string]", "[uuid]", "map[string]", "map[uuid]").foreach { name =>
val dt = InternalDatatype(name)
dt.label should be(name)
dt.required should be(true)
}
}
it("map defaults to string type") {
InternalDatatype("map").label should be("map[string]")
}
it("handles malformed input") {
InternalDatatype("[").label should be("[")
InternalDatatype("]").label should be("]")
// Questionable how best to handle this. For now we allow empty
// string - will get caught downstream when validating that the
// name of the datatype is a valid name
InternalDatatype("[]").label should be("[]")
}
}
|
Seanstoppable/apidoc
|
core/src/test/scala/core/builder/api_json/InternalDatatypeSpec.scala
|
Scala
|
mit
| 820
|
/*
Copyright 2016-17, Hasso-Plattner-Institut fuer Softwaresystemtechnik GmbH
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package de.hpi.ingestion.textmining.models
import scala.collection.mutable.ListBuffer
/**
* Case class representing a Parsed Wikipedia article.
*
* @param title title of the page
* @param text plain text of the page
* @param textlinks all links appearing in the text
* @param templatelinks all links appearing in wikitext templates (e.g. infoboxes)
* @param foundaliases all aliases (of all links) found in the plain text
* @param categorylinks all links of category pages on the page
* @param listlinks all links appearing in lists
* @param disambiguationlinks all links on this page if this page is a disambiguation page
* @param linkswithcontext all textlinks containing the term frequencies of their context
* @param context term frequencies of this article's plain text
* @param triealiases the longest aliases found by the trie with their offset and context
* @param rawextendedlinks all occurrences of aliases of other links in this article
* @param textlinksreduced all links appearing in the text with aliases of companies
* @param templatelinksreduced all links appearing in wikitext templates (e.g. infoboxes) with aliases of companies
* @param categorylinksreduced all links of category pages on the page with aliases of companies
* @param listlinksreduced all links appearing in lists with aliases of companies
* @param disambiguationlinksreduced all links on this page if this page is a disambiguation page with aliases of
* companies
*/
case class ParsedWikipediaEntry(
title: String,
var text: Option[String] = None,
var textlinks: List[Link] = Nil,
var templatelinks: List[Link] = Nil,
var foundaliases: List[String] = Nil,
var categorylinks: List[Link] = Nil,
var listlinks: List[Link] = Nil,
var disambiguationlinks: List[Link] = Nil,
var linkswithcontext: List[Link] = Nil,
var context: Map[String, Int] = Map[String, Int](),
var triealiases: List[TrieAlias] = Nil,
var rawextendedlinks: List[ExtendedLink] = Nil,
var textlinksreduced: List[Link] = Nil,
var templatelinksreduced: List[Link] = Nil,
var categorylinksreduced: List[Link] = Nil,
var listlinksreduced: List[Link] = Nil,
var disambiguationlinksreduced: List[Link] = Nil
) {
def setText(t: String): Unit = text = Option(t)
def getText(): String = text.getOrElse("")
/**
* Concatenates all link lists extracted from the Wikipedia article.
*
* @return all links
*/
def allLinks(): List[Link] = {
List(
textlinks,
templatelinks,
categorylinks,
listlinks,
disambiguationlinks,
extendedlinks()).flatten
}
/**
* Concatenates all reduced link lists extracted from the Wikipedia article.
* @return all reduced links
*/
def reducedLinks(): List[Link] = {
List(
textlinksreduced,
templatelinksreduced,
categorylinksreduced,
listlinksreduced,
disambiguationlinksreduced,
extendedlinks()).flatten
}
/**
* Executes a filter function on every not reduced link list.
*
* @param filterFunction filter function
*/
def filterLinks(filterFunction: Link => Boolean): Unit = {
textlinks = textlinks.filter(filterFunction)
templatelinks = templatelinks.filter(filterFunction)
categorylinks = categorylinks.filter(filterFunction)
disambiguationlinks = disambiguationlinks.filter(filterFunction)
listlinks = listlinks.filter(filterFunction)
}
/**
* Executes a filter function on every not reduced link list and saves the result to the reduced columns.
* @param filterFunction filter function
*/
def reduceLinks(filterFunction: Link => Boolean): Unit = {
textlinksreduced = textlinks.filter(filterFunction)
templatelinksreduced = templatelinks.filter(filterFunction)
categorylinksreduced = categorylinks.filter(filterFunction)
disambiguationlinksreduced = disambiguationlinks.filter(filterFunction)
listlinksreduced = listlinks.filter(filterFunction)
}
/**
* Filters the linkswithcontext links and returns only the textlinks with their contexts.
*
* @return textlinks and their contexts
*/
def textlinkContexts(): List[Link] = {
linkswithcontext.filter(contextLink =>
textlinks.exists(link => link.alias == contextLink.alias && link.offset == contextLink.offset))
}
/**
* Filters the linkswithcontext links and returns only the extendedlinks with their contexts.
*
* @return extendedlinks and their contexts
*/
def extendedlinkContexts(): List[Link] = {
linkswithcontext.filter(contextLink =>
extendedlinks().exists(link => link.alias == contextLink.alias && link.offset == contextLink.offset))
}
/**
 * Filters the raw extended links and parses them into Links.
 *
 * @param noTextLinks whether extended links that collide with text links should be dropped; defaults to true
 * @param countThresh threshold on the number of occurrences of an alias that has to be passed
 *                    if there are colliding extended links
 * @param normalizedThresh threshold on the normalized number of occurrences of an alias that has to be passed
 *                         if there are colliding extended links
* @return List of (filtered) Links
*/
def extendedlinks(noTextLinks: Boolean = true, countThresh: Int = 1, normalizedThresh: Double = 0.1): List[Link] = {
var filteredLinks = rawextendedlinks
.map(t => (t, t.filterExtendedLink(countThresh, normalizedThresh)))
.collect {
case (t, Some(page)) => (t, page)
}.map { case (exLink, page) =>
Link(exLink.alias, page, exLink.offset)
}
if(noTextLinks) {
filteredLinks = filterCollidingLinks(filteredLinks, textlinks)
}
filteredLinks
}
/**
* Filters extended links by overlapping offsets of text links in O(nlogn).
*
* @param extendedLinks List of extended links that will be filtered
* @param textLinks List of text links that is used to filter
* @return filtered List of extended links without colliding text links
*/
def filterCollidingLinks(extendedLinks: List[Link], textLinks: List[Link]): List[Link] = {
val orderedExtendedLinks = extendedLinks.filter(_.offset.exists(_ >= 0)).sortBy(_.offset)
val orderedTextLinks = textLinks.filter(_.offset.exists(_ >= 0)).sortBy(_.offset)
var resultList = new ListBuffer[Link]()
var (i, j) = (0, 0)
while(j < orderedTextLinks.length && i < orderedExtendedLinks.length) {
val startEL = orderedExtendedLinks(i).offset.get
val endEL = startEL + orderedExtendedLinks(i).alias.length - 1
var startTL = orderedTextLinks(j).offset.get
var endTL = startTL + orderedTextLinks(j).alias.length - 1
// find the next text link that might be colliding
while(endTL < startEL && j < orderedTextLinks.length - 1) {
j += 1
startTL = orderedTextLinks(j).offset.get
endTL = startTL + orderedTextLinks(j).alias.length - 1
}
if(!checkIfColliding(startEL, endEL, startTL, endTL)) {
resultList += orderedExtendedLinks(i)
}
i += 1
}
// if there are no text links but extended links left
// add the remaining extended links because they cannot collide with anything
while(i < orderedExtendedLinks.length) {
resultList += orderedExtendedLinks(i)
i += 1
}
resultList.toList
}
/**
* Checks if two links are colliding by checking their start and end offsets.
*
* @param startEL start offset of extended link
* @param endEL end offset of extended link
* @param startTL start offset of text link
* @param endTL end offset of text link
* @return true if they are colliding
*/
def checkIfColliding(startEL: Int, endEL: Int, startTL: Int, endTL: Int): Boolean = {
// startTL---startEL----endTL or
(startTL <= startEL && startEL <= endTL) ||
// startTL---endEL----endTL or
(startTL <= endEL && endEL <= endTL) ||
// startEL---startTL---endTL---EndEL
(startEL <= startTL && endTL <= endEL)
}
}
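// Illustrative sketch (not part of the original file): `title` is the only required field, so a
// minimal entry suffices to exercise the offset-based collision check used when merging extended
// links with text links. The printed values follow directly from `checkIfColliding` above.
object CollisionCheckSketch {
    def main(args: Array[String]): Unit = {
        val entry = ParsedWikipediaEntry("Example article")
        // An extended link spanning offsets 0..5 collides with a text link spanning 3..8 ...
        println(entry.checkIfColliding(startEL = 0, endEL = 5, startTL = 3, endTL = 8)) // true
        // ... but not with a text link that starts only after it ends.
        println(entry.checkIfColliding(startEL = 0, endEL = 5, startTL = 6, endTL = 9)) // false
    }
}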
|
bpn1/ingestion
|
src/main/scala/de/hpi/ingestion/textmining/models/ParsedWikipediaEntry.scala
|
Scala
|
apache-2.0
| 9,371
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.effect
import io.truthencode.ddo.enhancement.BonusType
import io.truthencode.ddo.model.effect.EffectPart.Feat
import io.truthencode.ddo.model.feats.{Feat => Feats}
import io.truthencode.ddo.model.stats.BasicStat
import io.truthencode.ddo.support.dice.{DamageDice, DamageInfo}
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
// import io.truthencode.ddo.model.effect.EffectParameter.{DifficultyCheck, Magnitude}
class EffTest extends AnyFunSpec with Matchers {
describe("An Effect (EFF)") {
it("should not need a description") {
val preStar = BasicStat.values
// typical random loot such as Warrior's boots of X
val featList = Feats.values.collect(Feats.fnTacticalFeats)
val randomLootWarriorPrefix =
for { s <- featList } yield Eff(
TriggerEvent.Passive,
bonusType = BonusType.Enhancement,
magnitude = new Magnitude {
/**
* Percent chance to occur
*/
override val chance: Int = 4
/**
* Damage Dice
*/
override val damage: DamageDice = DamageInfo("2d1")
/**
* Base Price Modifier
*/
override val bpm: Int = 3
},
difficultyCheck = None,
target = Feat(s)
)
}
ignore("should support things such as Combat Mastery") {
/* https://ddowiki.com/page/Combat_Mastery
Combat Mastery is an item enchantment that increases the DC to resist the character's tactical feats, such as Trip or Stunning Fist.
Random loot
Enhancement bonus is available on randomly generated armor / boots / shields with the prefix Warrior's.
Insightful bonus is available on randomly generated armor / gloves with the suffix of Combat Mastery.
Named loot
Quality bonus: C:Quality Combat Mastery items
*/
// typical random loot such as Warrior's boots of X
// val featList = Feats.values collect Feats.fnTacticalFeats
// val randomLootWarriorPrefix = for {s <- featList} yield Eff(
// TriggerEvent.Passive,
// bonusType = BonusType.Enhancement,
// magnitude = 5,
// difficultyCheck = None,
// target = Feat(s)
// )
}
}
}
|
adarro/ddo-calc
|
subprojects/common/ddo-core/src/test/scala/io/truthencode/ddo/model/effect/EffTest.scala
|
Scala
|
apache-2.0
| 3,065
|
/*
* Copyright 2012 Jonathan Anderson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.logging
import scala.collection.JavaConversions._
import me.footlights.api
import me.footlights.api.support.Either._
import me.footlights.core
import me.footlights.core.data
package me.footlights.core.users {
/** Manages user identities. */
trait IdentityManagement extends core.Footlights {
override def identities =
root.subdirs map UserIdentity.apply map {
_ fold (
ex => {
log.log(logging.Level.WARNING, "Error in saved identity", ex)
None
},
id => Some(id)
)
} flatten
override def identity(uri:java.net.URI) =
root openDirectory uri.toString map { (uri.toString, _) } flatMap UserIdentity.apply
// TODO: something more sophisticated (choose identity to sign with?)
override def identity =
identities find { _.canSign } map Right.apply getOrElse { UserIdentity generate root }
override def share(d:api.Directory): Either[Exception,java.net.URI] = {
val ids = identities
val userMap = identities map { id => (id.toString -> id) } toMap
promptUser("Who would you like to share with?", "Choose user", userMap, None) map
share(d match { case mutable:data.MutableDirectory => mutable })
}
private def share(dir:data.MutableDirectory)(user:UserIdentity) = {
log info { "Sharing %s with %s" format (dir, user) }
val link = dir.dir.link
user.root subdir OutboundShareDir map { case root:data.MutableDirectory =>
root.save(link.fingerprint.encode, dir)
identity flatMap {
_ sign root.dir.link.fingerprint } map { signature =>
Map(
"fingerprint" -> link.fingerprint.encode,
"key" -> link.key.toUri.toString,
"signature" -> signature.uri.toString
)
} fold (
ex => log log (logging.Level.WARNING, "Error sharing with %s" format user.name, ex),
rootInfo =>
log info { "Updated root for %s: %s" format (user.name, rootInfo) }
// TODO: actually signal the recipient somehow
)
}
user.fingerprint.toURI
}
/** The root directory where application data is stored. */
private lazy val root = subsystemRoot("identities")
private val log = logging.Logger getLogger classOf[IdentityManagement].getCanonicalName
/** A directory containing everything that this user has shared with me. */
private val InboundShareDir = "shared-with-me"
/** A directory containing everything that I am sharing with this user. */
private val OutboundShareDir = "shared"
}
}
|
nasrallahmounir/Footlights
|
Client/Core/src/main/scala/me/footlights/core/users/users.scala
|
Scala
|
apache-2.0
| 3,000
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.controller
import org.apache.openwhisk.core.WhiskConfig
import org.apache.openwhisk.core.entitlement._
import org.apache.openwhisk.core.entity.ActivationId.ActivationIdGenerator
import org.apache.openwhisk.core.loadBalancer.LoadBalancer
/**
* A trait which defines a few services which a whisk microservice may rely on.
*/
trait WhiskServices {
/** Whisk configuration object. */
protected val whiskConfig: WhiskConfig
/** An entitlement service to check access rights. */
protected val entitlementProvider: EntitlementProvider
/** A generator for new activation ids. */
protected val activationIdFactory: ActivationIdGenerator
/** A load balancing service that launches invocations. */
protected val loadBalancer: LoadBalancer
}
|
starpit/openwhisk
|
core/controller/src/main/scala/org/apache/openwhisk/core/controller/Backend.scala
|
Scala
|
apache-2.0
| 1,591
|
class A
class B
trait Foo {
def foo: A ?=> B ?=> Int
}
class Foo1 extends Foo {
def foo: A ?=> B ?=> Int = 1
}
class Foo2 extends Foo1 {
override def foo: A ?=> B ?=> Int = 2
}
trait Foo3 extends Foo {
override def foo: A ?=> B ?=> Int = 3
}
class Bar[T] {
def bar: A ?=> T = null.asInstanceOf[T]
}
class Bar1 extends Bar[B ?=> Int] {
override def bar: A ?=> B ?=> Int = 1
}
object Test {
def testFoo() = {
implicit val a = new A
implicit val b = new B
assert((new Foo1).foo == 1)
assert((new Foo2).foo == 2)
assert(new Foo3{}.foo == 3)
}
def testBar() = {
implicit val a = new A
implicit val b = new B
assert((new Bar).bar == null)
assert((new Bar1).bar == 1)
}
def main(args: Array[String]): Unit = {
testFoo()
testBar()
}
}
|
som-snytt/dotty
|
tests/run/implicitFuns2.scala
|
Scala
|
apache-2.0
| 804
|
/**
* Magmanics Licensing. This web application allows for centralized control
* of client application activation, with optional configuration parameters
* to control licensable features, and storage of supplementary information
* about the client machine. Client applications may interface with this
* central server (for activation) using libraries licenced under an
* alternative licence.
*
* Copyright (C) 2010 James Baxter <j.w.baxter(at)gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.magmanics.vaadin
import org.springframework.jmx.export.annotation.{ManagedAttribute, ManagedOperation, ManagedResource}
/**
* @author James Baxter <j.w.baxter@gmail.com>
* @since 15-Nov-2010
*/
@ManagedResource(objectName = "magmanics-licensing:name=maintenance-mode")
class MaintenanceModeFilter { //extends Filter
var inMaintenanceMode = false
@ManagedAttribute
def isInMaintenanceMode = inMaintenanceMode
@ManagedOperation
def enterMaintenanceMode {
inMaintenanceMode = true
}
@ManagedOperation
def exitMaintenanceMode {
inMaintenanceMode = false
}
}
|
manicmonkey/licensing
|
Licensing-UI-Vaadin/src/main/scala/com/magmanics/vaadin/MaintenanceModeFilter.scala
|
Scala
|
gpl-3.0
| 1,716
|
/* scala-stm - (c) 2009-2012, Stanford University, PPL */
package scala.concurrent.stm.ccstm
import collection.mutable.ArrayBuffer
private[ccstm] object Stats {
val Enabled: Boolean = "yYtT1".indexOf((System.getProperty("ccstm.stats", "") + "0").charAt(0)) >= 0
class LazyCounterMap[A] {
import scala.collection.JavaConverters._
private val _counters = new java.util.concurrent.ConcurrentHashMap[A, Counter]
def += (k: A): Unit = {
var v = _counters.get(k)
if (v == null) {
_counters.putIfAbsent(k, new Counter)
v = _counters.get(k)
}
v += 1
}
def toStr(k: A): String = k.toString
def contents: Seq[(String, Long)] = {
val aa = _counters.entrySet.asScala.toSeq map { e => toStr(e.getKey) -> e.getValue.apply() }
aa sortBy { -_._2 }
}
}
class Histo(numBuckets: Int) {
private val _sum = new Counter
private val _buckets = Array.tabulate(numBuckets) { _ => new Counter }
def += (value: Int): Unit =
if (value != 0) {
_sum += value
_buckets(bucketFor(value)) += 1
}
protected def bucketFor(value: Int): Int = {
if (value < 0 || value >= _buckets.length)
_buckets.length - 1
else
value
}
def contents: Seq[Long] = {
val snap = _buckets map { _.apply() }
snap.take(1 + snap.lastIndexWhere { _ != 0L })
}
override def toString: String = {
val s = _sum()
val c = contents
val count = c.foldLeft(0L)( _ + _ )
val avg = if (count == 0) 0.0 else s * 1.0 / count
"sum= %-10d count= %-8d avg= %-5.1f [%s]".format(s, count, avg, c.mkString(" "))
}
}
class ExponentialHisto extends Histo(32) {
override protected def bucketFor(value: Int): Int = {
var v = value >>> 1
var i = 0
while (v != 0) {
v >>>= 1
i += 1
}
i
}
}
class Level(isTop: Boolean) {
val commits = new Counter
val alternatives = new Histo(10)
val retrySet = if (isTop) new ExponentialHisto else null
val retryWaitElapsed = if (isTop) new ExponentialHisto else null
val explicitRetries = new Counter
val unrecordedTxns = new Counter
val optimisticRetries = new LazyCounterMap[Symbol]
val failures = new LazyCounterMap[Class[_]] { override def toStr(k: Class[_]): String = k.getSimpleName }
val blockingAcquires = new Counter
val commitReadSet = if (isTop) new ExponentialHisto else null
val commitBargeSet = if (isTop) new ExponentialHisto else null
val commitWriteSet = if (isTop) new ExponentialHisto else null
val rollbackReadSet = new ExponentialHisto
val rollbackBargeSet = new ExponentialHisto
val rollbackWriteSet = new ExponentialHisto
def contents: Seq[String] = {
val buf = new ArrayBuffer[String]
for (f <- getClass.getDeclaredFields) {
val name = f.getName
val value = getClass.getDeclaredMethod(name).invoke(this)
value match {
case null =>
case c: Counter => buf += "%17s= %d".format(name, c())
case m: LazyCounterMap[_] =>
for ((k, v) <- m.contents)
buf += "%17s: %7d %s".format(name, v, k)
case h: Histo => buf += "%17s: %s".format(name, h)
}
}
buf.result
}
def mkString(prefix: String): String = {
prefix + ("-" * 64) + "\n" + contents.map( prefix + _ ).mkString("\n")
}
}
val top : Level = if (Enabled) new Level(true) else null
val nested: Level = if (Enabled) new Level(false) else null
registerShutdownHook()
private def registerShutdownHook(): Unit =
if (top != null)
Runtime.getRuntime.addShutdownHook(new Thread("shutdown stats printer") {
override def run(): Unit = println(Stats)
})
override def toString: String =
top.mkString("CCSTM: top: ") + "\n" + nested.mkString("CCSTM: nested: ")
}
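// Illustrative sketch (not part of the original file): ExponentialHisto buckets each recorded
// size by floor(log2(value)) for positive values, which is how the read/write/retry-set size
// statistics above stay compact. toString reports the sum, count, average and per-bucket counts.
private[ccstm] object StatsHistoSketch {
  def demo(): Unit = {
    val h = new Stats.ExponentialHisto
    Seq(1, 2, 4, 8, 64, 1024) foreach { v => h += v }
    println(h)
  }
}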
|
nbronson/scala-stm
|
src/main/scala/scala/concurrent/stm/ccstm/Stats.scala
|
Scala
|
bsd-3-clause
| 3,972
|
package grammar.adminmode
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.all.dangerouslySetInnerHtml
import japgolly.scalajs.react.vdom.prefix_<^._
import org.scalajs.jquery.{jQuery => $}
import scala.scalajs.js
/**
* Created by Mikael on 10.08.2015.
*/
object AdminMode {
val fieldInputKey = "fieldInput"
}
|
epfl-lara/grammar-web
|
js/src/main/scala/grammar/adminmode/AdminMode.scala
|
Scala
|
mit
| 337
|
package controllers.backend
import com.google.re2j.Pattern
import org.specs2.mutable.Specification
import play.api.libs.json.Json
import com.overviewdocs.models.{PdfNote,PdfNoteCollection}
import com.overviewdocs.query.{Field,Query,AndQuery,OrQuery,NotQuery,RegexQuery,PhraseQuery}
import com.overviewdocs.test.factories.{PodoFactory=>factory}
import models.SelectionWarning
// See also: DbDocumentSelectionBackendSpec for stuff that interacts with the DB
class DocumentSelectionBackendSpec extends Specification {
private def AND(children: Query*) = AndQuery(children.toVector)
private def OR(children: Query*) = OrQuery(children.toVector)
private def NOT(child: Query) = NotQuery(child)
private def PHRASE(field: Field, s: String): Query = PhraseQuery(field, s)
private def REGEX(field: Field, s: String): Query = RegexQuery(field, s)
private def rule(field: Field, patternString: String, negated: Boolean = false) = {
DocumentSelectionBackend.RegexSearchRule(field, Pattern.compile(patternString), negated)
}
private def grokRules(query: Query) = DocumentSelectionBackend.queryToRegexSearchRules(query)
"DocumentBackend" should {
"queryToRegexSearchRules" should {
"grab a top-level regex" in {
grokRules(REGEX(Field.All, "regex")) must beEqualTo((
Vector(rule(Field.All, "regex", false)),
Nil
))
}
"report Pattern.compile exception as a warning" in {
grokRules(REGEX(Field.All, "re(gex")) must beEqualTo((
Vector(),
List(SelectionWarning.RegexSyntaxError("re(gex", "missing closing )", -1))
))
}
"grab a top-level NOT regex" in {
grokRules(NOT(REGEX(Field.All, "regex"))) must beEqualTo((
Vector(rule(Field.All, "regex", true)),
Nil
))
}
"grab AND-ed regexes" in {
grokRules(AND(REGEX(Field.All, "regex"), REGEX(Field.Title, "title"))) must beEqualTo((
Vector(rule(Field.All, "regex", false), rule(Field.Title, "title", false)),
Nil
))
}
"ignore non-regex search" in {
grokRules(PHRASE(Field.All, "s")) must beEqualTo((Vector(), Nil))
}
"ignore non-regex search in AND-ed regexes" in {
grokRules(AND(REGEX(Field.All, "regex"), PHRASE(Field.Title, "title"))) must beEqualTo((
Vector(rule(Field.All, "regex", false)),
Nil
))
}
"turn OR-ed regexes into warnings" in {
grokRules(OR(REGEX(Field.All, "regex"), PHRASE(Field.Title, "title"))) must beEqualTo((
Vector(),
List(SelectionWarning.NestedRegexIgnored("regex"))
))
}
"allow some regexes and some warnings in the same query" in {
grokRules(AND(REGEX(Field.Title, "title"), OR(NOT(REGEX(Field.All, "err1")), REGEX(Field.All, "err2")))) must beEqualTo((
Vector(rule(Field.Title, "title", false)),
List(SelectionWarning.NestedRegexIgnored("err1"), SelectionWarning.NestedRegexIgnored("err2"))
))
}
}
"RegexSearchRule" should {
"matches" should {
"hit on title" in {
rule(Field.Title, "match me").matches(factory.document(title="foo match me mar")) must beEqualTo(true)
}
"miss on title" in {
rule(Field.Title, "match me").matches(factory.document(title="foo matchXme mar")) must beEqualTo(false)
}
"negate match" in {
rule(Field.Title, "match me", true).matches(factory.document(title="foo match me mar")) must beEqualTo(false)
}
"hit on text" in {
rule(Field.Text, "match me").matches(factory.document(text="foo match me mar")) must beEqualTo(true)
}
"miss on text" in {
rule(Field.Text, "match me").matches(factory.document(text="foo matchXme mar")) must beEqualTo(false)
}
"hit Field.All on title" in {
rule(Field.All, "match me").matches(factory.document(title="foo match me mar")) must beEqualTo(true)
}
"miss Field.All on title" in {
rule(Field.All, "match me").matches(factory.document(title="foo matchXme mar")) must beEqualTo(false)
}
"hit Field.All on text" in {
rule(Field.All, "match me").matches(factory.document(text="foo match me mar")) must beEqualTo(true)
}
"miss Field.All on text" in {
rule(Field.All, "match me").matches(factory.document(text="foo matchXme mar")) must beEqualTo(false)
}
"hit a metadata field" in {
rule(Field.Metadata("foo"), "match me").matches(factory.document(metadataJson=Json.obj("foo" -> "match me"))) must beEqualTo(true)
}
"miss a metadata field" in {
rule(Field.Metadata("foo"), "match me").matches(factory.document(metadataJson=Json.obj("foo" -> "matchXme"))) must beEqualTo(false)
}
"not hit when a different metadata field matches" in {
rule(Field.Metadata("foo"), "match me").matches(factory.document(metadataJson=Json.obj("foo1" -> "match me"))) must beEqualTo(false)
}
"not hit Field.All on metadata" in {
rule(Field.All, "match me") matches(factory.document(metadataJson=Json.obj("foo" -> "match me"))) must beEqualTo(false)
}
"hit a note" in {
val note = PdfNote(2, 3, 4, 5, 6, "match me")
val pdfNotes = PdfNoteCollection(Array(note))
rule(Field.Notes, "match me") matches(factory.document(pdfNotes=pdfNotes)) must beEqualTo(true)
}
"miss a note" in {
val note = PdfNote(2, 3, 4, 5, 6, "matchXme")
val pdfNotes = PdfNoteCollection(Array(note))
rule(Field.Notes, "match me") matches(factory.document(pdfNotes=pdfNotes)) must beEqualTo(false)
}
}
}
}
}
|
overview/overview-server
|
web/test/controllers/backend/DocumentSelectionBackendSpec.scala
|
Scala
|
agpl-3.0
| 5,825
|
import scala.quoted.*
inline def rewrite[T](inline x: Any): Any = ${ stringRewriter('x) }
private def stringRewriter(e: Expr[Any])(using Quotes): Expr[Any] =
StringRewriter.transform(e)
private object StringRewriter extends ExprMap {
def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = e match
case '{ ${Expr(s)}: String } =>
// checkIfValid(s)
val s2: String & T = s
Expr(s2)
case _ => transformChildren(e)
}
|
dotty-staging/dotty
|
tests/run-macros/expr-map-3/Macro_1.scala
|
Scala
|
apache-2.0
| 463
|
package org.eichelberger.sfc.study.locality
import org.eichelberger.sfc.examples.composition.contrast.FactoryXYZT
import org.eichelberger.sfc.study.{ColumnSpec, OutputMetadata, MirroredTSV}
import org.eichelberger.sfc.{ComposedCurve, CompactHilbertCurve, ZCurve, RowMajorCurve}
import org.eichelberger.sfc.SpaceFillingCurve._
import org.eichelberger.sfc.utils.LocalityEstimator
object LocalityEstimatorStudy
extends MirroredTSV(
"/tmp/locality.tsv",
OutputMetadata(Seq(
ColumnSpec("top.curve", isQuoted = true),
ColumnSpec("curve", isQuoted = true),
ColumnSpec("dimensions", isQuoted = false),
ColumnSpec("total.precision", isQuoted = false),
ColumnSpec("plys", isQuoted = false),
ColumnSpec("locality", isQuoted = false),
ColumnSpec("normalized.locality", isQuoted = false),
ColumnSpec("locality.inv", isQuoted = false),
ColumnSpec("normalized.locality.inv", isQuoted = false),
ColumnSpec("sample.size", isQuoted = false),
ColumnSpec("sample.coverage", isQuoted = false)
)),
writeHeader = true
) with App {
def test(curve: ComposedCurve): Unit = {
val loc = LocalityEstimator(curve).locality
val data = Seq(
curve.name.take(1),
curve.name,
curve.numLeafNodes,
curve.M,
curve.plys,
loc.locality,
loc.normalizedLocality,
loc.localityInverse,
loc.normalizedLocalityInverse,
loc.sampleSize,
loc.coverage
)
println(data)
}
for (totalPrecision <- 4 to 40 by 4) {
// 4D, horizontal
FactoryXYZT(totalPrecision, 1).getCurves.foreach(curve => test(curve))
// 4D, mixed (2, 2)
FactoryXYZT(totalPrecision, 2).getCurves.foreach(curve => test(curve))
// 4D, mixed (3, 1)
FactoryXYZT(totalPrecision, -2).getCurves.foreach(curve => test(curve))
// 4D, vertical
FactoryXYZT(totalPrecision, 3).getCurves.foreach(curve => test(curve))
}
close()
}
|
cne1x/sfseize
|
src/main/scala/org/eichelberger/sfc/study/locality/LocalityEstimatorStudy.scala
|
Scala
|
apache-2.0
| 1,925
|
package mesosphere.marathon
package core.check
import com.wix.accord._
import mesosphere.marathon.Protos.CheckDefinition.Protocol
import mesosphere.marathon.state._
import org.apache.mesos.{Protos => MesosProtos}
import scala.concurrent.duration._
/**
 * Check is a type that helps with conversions to and from the different protos.
 * It closely mirrors HealthCheck, which does the same thing.
 * Check is the most abstract check; the app definition works with MesosChecks.
 *
 * toProto converts this data structure into the CheckDefinition proto defined in Marathon and used for storage.
 * toMesos converts this data structure into the Mesos CheckInfo used when working with TaskBuilder.
 * Conversions to and from RAML live in the CheckConversion class.
*/
sealed trait Check {
def delay: FiniteDuration
def interval: FiniteDuration
def timeout: FiniteDuration
def toProto: Protos.CheckDefinition
protected def protoBuilder: Protos.CheckDefinition.Builder =
Protos.CheckDefinition.newBuilder
.setIntervalSeconds(this.interval.toSeconds.toInt)
.setTimeoutSeconds(this.timeout.toSeconds.toInt)
.setDelaySeconds(this.delay.toSeconds.toInt)
}
sealed trait PortReference extends Product with Serializable {
def apply(assignments: Seq[PortAssignment]): PortAssignment
def buildProto(builder: Protos.CheckDefinition.Builder): Unit
}
object PortReference {
case class ByIndex(value: Int) extends PortReference {
override def apply(assignments: Seq[PortAssignment]): PortAssignment =
assignments(value)
override def buildProto(builder: Protos.CheckDefinition.Builder): Unit =
builder.setPortIndex(value)
}
def apply(value: Int): PortReference = ByIndex(value)
def fromProto(pb: Protos.CheckDefinition): Option[PortReference] =
if (pb.hasPortIndex) Some(ByIndex(pb.getPortIndex))
else if (!pb.hasPort) Some(ByIndex(0))
else None
}
sealed trait CheckWithPort extends Check {
def portIndex: Option[PortReference]
def port: Option[Int]
}
object CheckWithPort {
val DefaultPortIndex = None
val DefaultPort = None
import mesosphere.marathon.api.v2.Validation.isTrue
implicit val Validator: Validator[CheckWithPort] =
isTrue("Check must specify either a port or a portIndex") { hc =>
hc.portIndex.isDefined ^ hc.port.isDefined
}
}
sealed trait MesosCheck extends Check {
def interval: FiniteDuration
def timeout: FiniteDuration
def delay: FiniteDuration
override protected def protoBuilder: Protos.CheckDefinition.Builder =
super.protoBuilder.setDelaySeconds(delay.toSeconds.toInt)
def toMesos(portAssignments: Seq[PortAssignment]): Option[MesosProtos.CheckInfo]
}
sealed trait MesosCheckWithPorts extends CheckWithPort { this: Check =>
def effectivePort(portAssignments: Seq[PortAssignment]): Option[Int] = {
port.orElse {
val portAssignment: Option[PortAssignment] = portIndex.map(index => index(portAssignments))
// Mesos enters the container's network to probe the port, hence we prefer `containerPort`
// to `hostPort` here (as opposed to MarathonCheck which is the opposite)
portAssignment.flatMap(_.containerPort).orElse(portAssignment.flatMap(_.hostPort))
}
}
}
case class MesosCommandCheck(
interval: FiniteDuration = Check.DefaultInterval,
timeout: FiniteDuration = Check.DefaultTimeout,
delay: FiniteDuration = Check.DefaultDelay,
command: Executable
) extends Check
with MesosCheck {
override def toProto: Protos.CheckDefinition = {
protoBuilder
.setProtocol(Protos.CheckDefinition.Protocol.COMMAND)
.setCommand(Executable.toProto(command))
.build
}
override def toMesos(portAssignments: Seq[PortAssignment]): Option[MesosProtos.CheckInfo] = {
Option(
MesosProtos.CheckInfo.newBuilder
.setType(MesosProtos.CheckInfo.Type.COMMAND)
.setIntervalSeconds(this.interval.toSeconds.toDouble)
.setTimeoutSeconds(this.timeout.toSeconds.toDouble)
.setDelaySeconds(this.delay.toUnit(SECONDS))
.setCommand(MesosProtos.CheckInfo.Command.newBuilder().setCommand(Executable.toProto(this.command)))
.build()
)
}
}
object MesosCommandCheck {
def mergeFromProto(proto: Protos.CheckDefinition): MesosCommandCheck =
MesosCommandCheck(
timeout = proto.getTimeoutSeconds.seconds,
interval = proto.getIntervalSeconds.seconds,
delay = proto.getDelaySeconds.seconds,
command = Executable.mergeFromProto(proto.getCommand)
)
}
case class MesosHttpCheck(
interval: FiniteDuration = Check.DefaultInterval,
timeout: FiniteDuration = Check.DefaultTimeout,
portIndex: Option[PortReference] = CheckWithPort.DefaultPortIndex,
port: Option[Int] = CheckWithPort.DefaultPort,
path: Option[String] = MesosHttpCheck.DefaultPath,
protocol: Protocol = MesosHttpCheck.DefaultProtocol,
delay: FiniteDuration = Check.DefaultDelay
) extends Check
with MesosCheck
with MesosCheckWithPorts {
require(protocol == Protocol.HTTP)
override def toProto: Protos.CheckDefinition = {
val builder = protoBuilder
.setProtocol(protocol)
path.foreach(builder.setPath)
portIndex.foreach(_.buildProto(builder))
port.foreach(builder.setPort)
builder.build
}
override def toMesos(portAssignments: Seq[PortAssignment]): Option[MesosProtos.CheckInfo] = {
val port = effectivePort(portAssignments)
port.map { checkPort =>
val httpInfoBuilder = MesosProtos.CheckInfo.Http
.newBuilder()
.setPort(checkPort)
path.foreach(httpInfoBuilder.setPath)
MesosProtos.CheckInfo.newBuilder
.setType(MesosProtos.CheckInfo.Type.HTTP)
.setIntervalSeconds(this.interval.toSeconds.toDouble)
.setTimeoutSeconds(this.timeout.toSeconds.toDouble)
.setDelaySeconds(this.delay.toUnit(SECONDS))
.setHttp(httpInfoBuilder)
.build()
}
}
}
object MesosHttpCheck {
val DefaultPath = None
val DefaultProtocol = Protocol.HTTP
def mergeFromProto(proto: Protos.CheckDefinition): MesosHttpCheck =
MesosHttpCheck(
timeout = proto.getTimeoutSeconds.seconds,
interval = proto.getIntervalSeconds.seconds,
delay = proto.getDelaySeconds.seconds,
path = if (proto.hasPath) Some(proto.getPath) else DefaultPath,
portIndex = PortReference.fromProto(proto),
port = if (proto.hasPort) Some(proto.getPort) else CheckWithPort.DefaultPort,
protocol = if (proto.hasProtocol) proto.getProtocol else DefaultProtocol
)
}
case class MesosTcpCheck(
interval: FiniteDuration = Check.DefaultInterval,
timeout: FiniteDuration = Check.DefaultTimeout,
portIndex: Option[PortReference] = CheckWithPort.DefaultPortIndex,
port: Option[Int] = CheckWithPort.DefaultPort,
delay: FiniteDuration = Check.DefaultDelay
) extends Check
with MesosCheck
with MesosCheckWithPorts {
override def toProto: Protos.CheckDefinition = {
val builder = protoBuilder
.setProtocol(Protos.CheckDefinition.Protocol.TCP)
portIndex.foreach(_.buildProto(builder))
port.foreach(builder.setPort)
builder.build
}
override def toMesos(portAssignments: Seq[PortAssignment]): Option[MesosProtos.CheckInfo] = {
val port = effectivePort(portAssignments)
port.map { checkPort =>
val tcpInfoBuilder = MesosProtos.CheckInfo.Tcp.newBuilder().setPort(checkPort)
MesosProtos.CheckInfo.newBuilder
.setType(MesosProtos.CheckInfo.Type.TCP)
.setIntervalSeconds(this.interval.toSeconds.toDouble)
.setTimeoutSeconds(this.timeout.toSeconds.toDouble)
.setDelaySeconds(this.delay.toUnit(SECONDS))
.setTcp(tcpInfoBuilder)
.build()
}
}
}
object MesosTcpCheck {
def mergeFromProto(proto: Protos.CheckDefinition): MesosTcpCheck =
MesosTcpCheck(
timeout = proto.getTimeoutSeconds.seconds,
interval = proto.getIntervalSeconds.seconds,
delay = proto.getDelaySeconds.seconds,
portIndex = PortReference.fromProto(proto),
port = if (proto.hasPort) Some(proto.getPort) else None
)
}
object Check {
val DefaultProtocol = Protocol.HTTP
val DefaultInterval = 1.minute
val DefaultTimeout = 20.seconds
val DefaultDelay = 15.seconds
implicit val Validator: Validator[Check] = new Validator[Check] {
override def apply(hc: Check): Result = {
hc match {
case h: CheckWithPort => CheckWithPort.Validator(h)
case _ => Success
}
}
}
def fromProto(proto: Protos.CheckDefinition): Check = {
proto.getProtocol match {
case Protocol.COMMAND => MesosCommandCheck.mergeFromProto(proto)
case Protocol.TCP => MesosTcpCheck.mergeFromProto(proto)
case Protocol.HTTP => MesosHttpCheck.mergeFromProto(proto)
}
}
}
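// Illustrative sketch (not part of the original file): checks are persisted via their
// CheckDefinition protos, so a configured check should survive a toProto/fromProto round
// trip; the values left at their defaults are simply re-read from the proto.
object CheckProtoRoundTripSketch {
  def demo(): Unit = {
    val check = MesosHttpCheck(portIndex = Some(PortReference(0)), path = Some("/health"))
    val restored = Check.fromProto(check.toProto)
    assert(restored == check)
  }
}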
|
mesosphere/marathon
|
src/main/scala/mesosphere/marathon/core/check/Check.scala
|
Scala
|
apache-2.0
| 8,788
|
package mesosphere.elasticsearch
import java.util
import org.apache.mesos.Protos._
import org.apache.mesos.{Protos, MesosSchedulerDriver, SchedulerDriver, Scheduler}
import scala.collection.JavaConverters._
import scala.collection.mutable
import java.util.concurrent.CountDownLatch
import java.text.SimpleDateFormat
import java.util.{TimeZone, Date}
/**
* Mesos scheduler for ElasticSearch
* Takes care of most of the "annoying things" like distributing binaries and configuration out to the nodes.
*
* @author erich<IDonLikeSpam>nachbar.biz
*/
class ElasticSearchScheduler(masterUrl: String,
execUri: String,
confServerHostName: String,
confServerPort: Int,
resources: mutable.Map[String, Float],
numberOfHwNodes: Int)
extends Scheduler with Runnable with Logger {
val initialized = new CountDownLatch(1)
val taskSet = mutable.Set[Task]()
// Using a format without colons because task IDs become paths, and colons in paths break the JVM's CLASSPATH
val isoDateFormat = new SimpleDateFormat("yyyyMMdd'T'HHmmss.SSS'Z'")
isoDateFormat.setTimeZone(TimeZone.getTimeZone("UTC"))
def error(driver: SchedulerDriver, message: String) {
error(message)
//TODO erich implement
}
def executorLost(driver: SchedulerDriver, executorId: ExecutorID, slaveId: SlaveID, status: Int) {
warn(s"Executor lost: '${executorId.getValue}' " +
s"on slave '${slaveId.getValue}' " +
s"with status '${status}'")
//TODO erich implement
}
def slaveLost(driver: SchedulerDriver, slaveId: SlaveID) {
warn(s"Slave lost: '${slaveId.getValue}'")
}
def disconnected(driver: SchedulerDriver) {
warn("Disconnected")
}
def frameworkMessage(driver: SchedulerDriver, executorId: ExecutorID, slaveId: SlaveID, data: Array[Byte]) {
warn(s"FrameworkMessage from executor: '${executorId.getValue}' " +
s"on slave '${slaveId.getValue}'")
}
def statusUpdate(driver: SchedulerDriver, status: TaskStatus) {
info(s"received status update $status")
status.getState match {
case TaskState.TASK_FAILED | TaskState.TASK_FINISHED |
TaskState.TASK_KILLED | TaskState.TASK_LOST =>
taskSet.find(_.taskId == status.getTaskId.getValue).foreach(taskSet.remove)
case _ =>
}
}
def offerRescinded(driver: SchedulerDriver, offerId: OfferID) {
warn(s"Offer ${offerId.getValue} rescinded")
}
// Blocks with CountDown latch until we have enough seed nodes.
def waitUnitInit {
initialized.await()
}
def resourceOffers(driver: SchedulerDriver, offers: util.List[Offer]) {
// Construct command to run
val cmd = CommandInfo.newBuilder
.addUris(CommandInfo.URI.newBuilder.setValue(execUri))
.setValue(s"cd elasticsearch-mesos* && " +
s"cd config && rm elasticsearch.yml " +
s"&& curl -sSfLO http://${confServerHostName}:${confServerPort}/elasticsearch.yml " +
s"&& rm logging.yml " +
s"&& curl -sSfLO http://${confServerHostName}:${confServerPort}/logging.yml " +
s"&& cd .. " +
s"&& bin/elasticsearch -f")
// Create all my resources
val res = resources.map {
case (k, v) => Resource.newBuilder()
.setName(k)
.setType(Value.Type.SCALAR)
.setScalar(Value.Scalar.newBuilder().setValue(v).build())
.build()
}
// Let's make sure we don't start multiple ElasticSearches from the same cluster on the same box.
// We can't hand out the same port multiple times.
for (offer <- offers.asScala) {
if (isOfferGood(offer) && !haveEnoughNodes) {
debug(s"offer $offer")
info("Accepted offer: " + offer.getHostname)
val id = s"elasticsearch_${offer.getHostname}_${isoDateFormat.format(new Date())}"
val task = TaskInfo.newBuilder
.setCommand(cmd)
.setName(id)
.setTaskId(TaskID.newBuilder.setValue(id))
.addAllResources(res.asJava)
.setSlaveId(offer.getSlaveId)
.build
driver.launchTasks(offer.getId, List(task).asJava)
taskSet += Task(id, offer.getHostname)
} else {
debug("Rejecting offer " + offer.getHostname)
driver.declineOffer(offer.getId)
}
}
// Once we have the requested number of nodes, the assumption is that we are good to go.
if (haveEnoughNodes)
initialized.countDown()
}
def haveEnoughNodes = {
taskSet.size == numberOfHwNodes
}
// Check if offer is reasonable
def isOfferGood(offer: Offer) = {
// Make a list of offered resources
val offeredRes = offer.getResourcesList.asScala.toList.map {
k => (k.getName, k.getScalar.getValue)
}
// Make a list of resources we need
val requiredRes = resources.toList
debug("resources offered: " + offeredRes)
debug("resources required: " + requiredRes)
// creates a map structure: resourceName -> List(offered, required)
val resCompList = (offeredRes ++ requiredRes)
.groupBy(_._1)
.mapValues(_.map(_._2)
.toList)
// throws out resources that have no resource offer or resource requirement
// counts how many are too small
val offersTooSmall = resCompList.filter {
_._2.size > 1
}.map {
case (name, values: List[AnyVal]) =>
values(0).toString.toFloat >= values(1).toString.toFloat
}.filter {
!_
}.size
// don't start the same framework multiple times on the same host and
// make sure we got all resources we asked for
taskSet.forall(_.hostname != offer.getHostname) && offersTooSmall == 0
}
def reregistered(driver: SchedulerDriver, masterInfo: MasterInfo) {
//TODO erich implement
}
def registered(driver: SchedulerDriver, frameworkId: FrameworkID, masterInfo: MasterInfo) {
info(s"Framework registered as ${frameworkId.getValue}")
val cpuResource = Resource.newBuilder()
.setName("cpus")
.setType(Value.Type.SCALAR)
.setScalar(Value.Scalar.newBuilder().setValue(1.0).build())
.build()
val request = Request.newBuilder()
.addResources(cpuResource)
.build()
val r = new util.ArrayList[Protos.Request]
r.add(request)
driver.requestResources(r)
}
def run() {
info("Starting up...")
val driver = new MesosSchedulerDriver(this, FrameworkInfo.newBuilder().setUser("").setName("ElasticSearch").build(), masterUrl)
driver.run().getValueDescriptor.getFullName
}
//TODO not used yet - we only do Scalar resources as of yet
def makeRangeResource(name: String, start: Long, end: Long) = {
Resource.newBuilder()
.setName(name)
.setType(Value.Type.RANGES)
.setRanges(Value.Ranges.newBuilder()
.addRange(Value.Range.newBuilder().setBegin(start).setEnd(end)))
.build
}
case class Task(taskId: String, hostname: String)
}
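// Illustrative sketch (not part of the original file): a simplified version of the resource
// comparison done in isOfferGood above. Each required scalar is matched against the offered
// amount by name; an offer is acceptable only if nothing falls short.
object OfferMatchSketch {
  def enough(offered: Map[String, Float], required: Map[String, Float]): Boolean =
    required.forall { case (name, needed) => offered.getOrElse(name, 0f) >= needed }
  def main(args: Array[String]): Unit = {
    val offered = Map("cpus" -> 4f, "mem" -> 2048f)
    println(enough(offered, Map("cpus" -> 1f, "mem" -> 1024f))) // true
    println(enough(offered, Map("cpus" -> 8f))) // false
  }
}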
|
mesosphere/elasticsearch-mesos
|
src/main/scala/mesosphere/elasticsearch/ElasticSearchScheduler.scala
|
Scala
|
apache-2.0
| 6,919
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar
import java.nio.{ByteBuffer, ByteOrder}
import scala.annotation.tailrec
import org.apache.spark.sql.catalyst.expressions.{MutableRow, UnsafeArrayData, UnsafeMapData, UnsafeRow}
import org.apache.spark.sql.execution.columnar.compression.CompressibleColumnAccessor
import org.apache.spark.sql.types._
/**
 * An `Iterator`-like trait used to extract values from a columnar byte buffer. When a value is
* extracted from the buffer, instead of directly returning it, the value is set into some field of
* a [[MutableRow]]. In this way, boxing cost can be avoided by leveraging the setter methods
* for primitive values provided by [[MutableRow]].
*/
private[columnar] trait ColumnAccessor {
initialize()
protected def initialize()
def hasNext: Boolean
def extractTo(row: MutableRow, ordinal: Int): Unit
protected def underlyingBuffer: ByteBuffer
}
private[columnar] abstract class BasicColumnAccessor[JvmType](
protected val buffer: ByteBuffer,
protected val columnType: ColumnType[JvmType])
extends ColumnAccessor {
protected def initialize() {}
override def hasNext: Boolean = buffer.hasRemaining
override def extractTo(row: MutableRow, ordinal: Int): Unit = {
extractSingle(row, ordinal)
}
def extractSingle(row: MutableRow, ordinal: Int): Unit = {
columnType.extract(buffer, row, ordinal)
}
protected def underlyingBuffer = buffer
}
private[columnar] class NullColumnAccessor(buffer: ByteBuffer)
extends BasicColumnAccessor[Any](buffer, NULL)
with NullableColumnAccessor
private[columnar] abstract class NativeColumnAccessor[T <: AtomicType](
override protected val buffer: ByteBuffer,
override protected val columnType: NativeColumnType[T])
extends BasicColumnAccessor(buffer, columnType)
with NullableColumnAccessor
with CompressibleColumnAccessor[T]
private[columnar] class BooleanColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, BOOLEAN)
private[columnar] class ByteColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, BYTE)
private[columnar] class ShortColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, SHORT)
private[columnar] class IntColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, INT)
private[columnar] class LongColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, LONG)
private[columnar] class FloatColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, FLOAT)
private[columnar] class DoubleColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, DOUBLE)
private[columnar] class StringColumnAccessor(buffer: ByteBuffer)
extends NativeColumnAccessor(buffer, STRING)
private[columnar] class BinaryColumnAccessor(buffer: ByteBuffer)
extends BasicColumnAccessor[Array[Byte]](buffer, BINARY)
with NullableColumnAccessor
private[columnar] class CompactDecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
extends NativeColumnAccessor(buffer, COMPACT_DECIMAL(dataType))
private[columnar] class DecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
extends BasicColumnAccessor[Decimal](buffer, LARGE_DECIMAL(dataType))
with NullableColumnAccessor
private[columnar] class StructColumnAccessor(buffer: ByteBuffer, dataType: StructType)
extends BasicColumnAccessor[UnsafeRow](buffer, STRUCT(dataType))
with NullableColumnAccessor
private[columnar] class ArrayColumnAccessor(buffer: ByteBuffer, dataType: ArrayType)
extends BasicColumnAccessor[UnsafeArrayData](buffer, ARRAY(dataType))
with NullableColumnAccessor
private[columnar] class MapColumnAccessor(buffer: ByteBuffer, dataType: MapType)
extends BasicColumnAccessor[UnsafeMapData](buffer, MAP(dataType))
with NullableColumnAccessor
private[columnar] object ColumnAccessor {
@tailrec
def apply(dataType: DataType, buffer: ByteBuffer): ColumnAccessor = {
val buf = buffer.order(ByteOrder.nativeOrder)
dataType match {
case NullType => new NullColumnAccessor(buf)
case BooleanType => new BooleanColumnAccessor(buf)
case ByteType => new ByteColumnAccessor(buf)
case ShortType => new ShortColumnAccessor(buf)
case IntegerType | DateType => new IntColumnAccessor(buf)
case LongType | TimestampType => new LongColumnAccessor(buf)
case FloatType => new FloatColumnAccessor(buf)
case DoubleType => new DoubleColumnAccessor(buf)
case StringType => new StringColumnAccessor(buf)
case BinaryType => new BinaryColumnAccessor(buf)
case dt: DecimalType if dt.precision <= Decimal.MAX_LONG_DIGITS =>
new CompactDecimalColumnAccessor(buf, dt)
case dt: DecimalType => new DecimalColumnAccessor(buf, dt)
case struct: StructType => new StructColumnAccessor(buf, struct)
case array: ArrayType => new ArrayColumnAccessor(buf, array)
case map: MapType => new MapColumnAccessor(buf, map)
case udt: UserDefinedType[_] => ColumnAccessor(udt.sqlType, buffer)
case other =>
        throw new Exception(s"unsupported type: $other")
}
}
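  // For example, ColumnAccessor(IntegerType, buf) yields an IntColumnAccessor, while a
  // UserDefinedType is first unwrapped to its sqlType and dispatched again (hence the
  // @tailrec annotation on apply).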
}
|
gioenn/xSpark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnAccessor.scala
|
Scala
|
apache-2.0
| 5,938
|
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.atomic
/** Atomic references wrapping `Long` values.
*
* Note that the equality test in `compareAndSet` is value based,
* since `Long` is a primitive.
*/
final class AtomicLong private[atomic]
(initialValue: Long) extends AtomicNumber[Long] {
private[this] var ref = initialValue
def getAndSet(update: Long): Long = {
val current = ref
ref = update
current
}
def compareAndSet(expect: Long, update: Long): Boolean = {
if (ref == expect) {
ref = update
true
}
else
false
}
def set(update: Long): Unit = {
ref = update
}
def get(): Long = ref
def getAndSubtract(v: Long): Long = {
val c = ref
ref = ref - v
c
}
def subtractAndGet(v: Long): Long = {
ref = ref - v
ref
}
def subtract(v: Long): Unit = {
ref = ref - v
}
def getAndAdd(v: Long): Long = {
val c = ref
ref = ref + v
c
}
def getAndIncrement(v: Int = 1): Long = {
val c = ref
ref = ref + v
c
}
def addAndGet(v: Long): Long = {
ref = ref + v
ref
}
def incrementAndGet(v: Int = 1): Long = {
ref = ref + v
ref
}
def add(v: Long): Unit = {
ref = ref + v
}
def increment(v: Int = 1): Unit = {
ref = ref + v
}
def decrement(v: Int = 1): Unit = increment(-v)
def decrementAndGet(v: Int = 1): Long = incrementAndGet(-v)
def getAndDecrement(v: Int = 1): Long = getAndIncrement(-v)
}
/** @define createDesc Constructs an [[AtomicLong]] reference, allowing
* for fine-tuning of the created instance.
*
* A [[PaddingStrategy]] can be provided in order to counter
* the "false sharing" problem.
*
 * Note that for ''Scala.js'' we aren't applying any padding, as it
 * doesn't make much sense since JavaScript execution is single threaded.
 * This builder is provided anyway for syntax compatibility across the
 * JVM and JavaScript, and we never know how JavaScript engines will
 * evolve.
*/
object AtomicLong {
/** Builds an [[AtomicLong]] reference.
*
* @param initialValue is the initial value with which to initialize the atomic
*/
def apply(initialValue: Long): AtomicLong =
new AtomicLong(initialValue)
/** $createDesc
*
* @param initialValue is the initial value with which to initialize the atomic
* @param padding is the [[PaddingStrategy]] to apply
*/
def withPadding(initialValue: Long, padding: PaddingStrategy): AtomicLong =
new AtomicLong(initialValue)
/** $createDesc
*
 * On top of Java 8 this builder also allows turning off the Java 8
 * intrinsics, thus forcing the usage of CAS-loops for `getAndSet`
 * and for `getAndAdd`.
*
* @param initialValue is the initial value with which to initialize the atomic
* @param padding is the [[PaddingStrategy]] to apply
* @param allowPlatformIntrinsics is a boolean parameter that specifies whether
* the instance is allowed to use the Java 8 optimized operations
* for `getAndSet` and for `getAndAdd`
*/
def create(initialValue: Long, padding: PaddingStrategy, allowPlatformIntrinsics: Boolean): AtomicLong =
new AtomicLong(initialValue)
/** $createDesc
*
* This builder guarantees to construct a safe atomic reference that
 * does not make use of `sun.misc.Unsafe`. On platforms that don't
 * support it, notably some versions of Android or the upcoming
 * Java 9, this might be desirable.
*
* NOTE that explicit usage of this builder is not usually necessary
* because [[create]] can auto-detect whether the underlying platform
* supports `sun.misc.Unsafe` and if it does, then its usage is
* recommended, because the "safe" atomic instances have overhead.
*
* @param initialValue is the initial value with which to initialize the atomic
* @param padding is the [[PaddingStrategy]] to apply
*/
def safe(initialValue: Long, padding: PaddingStrategy): AtomicLong =
new AtomicLong(initialValue)
}
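// Illustrative sketch (not part of the original file): basic usage of the builders above.
// On Scala.js there is only one thread, so `compareAndSet` reduces to a plain value
// comparison followed by an assignment, as seen in the class implementation.
object AtomicLongSketch {
  def demo(): Unit = {
    val counter = AtomicLong(10L)
    counter.increment() // 11
    assert(counter.incrementAndGet() == 12L)
    assert(counter.compareAndSet(expect = 12L, update = 20L))
    assert(counter.get() == 20L)
  }
}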
|
Wogan/monix
|
monix-execution/js/src/main/scala/monix/execution/atomic/AtomicLong.scala
|
Scala
|
apache-2.0
| 4,776
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils
import java.util.concurrent.atomic.AtomicInteger
import com.intel.analytics.bigdl.dllib.nn.Sequential
import com.intel.analytics.bigdl.dllib.nn.internal.{InputLayer, KerasLayer, Sequential => KSequential}
import com.intel.analytics.bigdl.dllib.nn.abstractnn.{AbstractModule, Activity, TensorModule}
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import org.scalatest.exceptions.TestCanceledException
import scala.reflect.ClassTag
object TestUtils {
/**
* Compare the output of `computeOutputShape` with the `forward` result
*/
def compareOutputShape(layer: AbstractModule[Activity, Activity, Float],
inputShapeWithoutBatch: Shape): Boolean = {
val inputData = Tensor[Float](Array(2) ++ inputShapeWithoutBatch.toSingle()).randn()
val runnableLayer = layer match {
case k: KerasLayer[_, _, _] =>
if (!k.isBuilt()) {
k.build(KerasLayer.addBatch(inputShapeWithoutBatch))
}
k
case a: AbstractModule[_, _, _] => a
}
val calcOutputShape = runnableLayer.computeOutputShape(
KerasLayer.addBatch(inputShapeWithoutBatch)).toSingle()
val forwardOutputShape = runnableLayer.forward(inputData).toTensor[Float].size()
calcOutputShape.slice(1, calcOutputShape.length).sameElements(
forwardOutputShape.slice(1, forwardOutputShape.length))
}
/**
 * Processes the different path formats used under Windows and Linux.
 *
 * @param path the raw path
 * @return the processed path
*/
def processPath(path: String): String = {
if (path.contains(":")) {
path.substring(1)
} else {
path
}
}
/**
 * Some test cases cannot run on Windows; this cancels such test cases
*/
def cancelOnWindows(): Unit = {
if (System.getProperty("os.name").toLowerCase().contains("win")) {
throw new TestCanceledException("This case should not be run on windows", 3)
}
}
/**
 * This function returns the function value and partial derivatives
 * of the (general dimension) Rosenbrock function, given by:
 * f(x) = sum_{i=1:D-1} 100*(x(i+1) - x(i)^2)^2 + (1-x(i))^2
 * where D is the dimension of x. The true minimum is 0 at x = (1 1 ... 1).
 *
 * See more about the Rosenbrock function at
* https://en.wikipedia.org/wiki/Rosenbrock_function
*
* @param x
*/
def rosenBrock(x: Tensor[Double]): (Double, Tensor[Double]) = {
// (1) compute f(x)
val d = x.size(1)
// x1 = x(i)
val x1 = Tensor[Double](d - 1).copy(x.narrow(1, 1, d - 1))
// x(i + 1) - x(i)^2
x1.cmul(x1).mul(-1).add(x.narrow(1, 2, d - 1))
// 100 * (x(i + 1) - x(i)^2)^2
x1.cmul(x1).mul(100)
// x0 = x(i)
val x0 = Tensor[Double](d - 1).copy(x.narrow(1, 1, d - 1))
// 1-x(i)
x0.mul(-1).add(1)
x0.cmul(x0)
// 100*(x(i+1) - x(i)^2)^2 + (1-x(i))^2
x1.add(x0)
val fout = x1.sum()
// (2) compute f(x)/dx
val dxout = Tensor[Double]().resizeAs(x).zero()
// df(1:D-1) = - 400*x(1:D-1).*(x(2:D)-x(1:D-1).^2) - 2*(1-x(1:D-1));
x1.copy(x.narrow(1, 1, d - 1))
x1.cmul(x1).mul(-1).add(x.narrow(1, 2, d - 1)).cmul(x.narrow(1, 1, d - 1)).mul(-400)
x0.copy(x.narrow(1, 1, d - 1)).mul(-1).add(1).mul(-2)
x1.add(x0)
dxout.narrow(1, 1, d - 1).copy(x1)
// df(2:D) = df(2:D) + 200*(x(2:D)-x(1:D-1).^2);
x0.copy(x.narrow(1, 1, d - 1))
x0.cmul(x0).mul(-1).add(x.narrow(1, 2, d - 1)).mul(200)
dxout.narrow(1, 2, d - 1).add(x0)
(fout, dxout)
}
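  // Worked example (D = 2): at x = (1, 1) both summands vanish, so the returned value is 0
  // and the gradient is (0, 0), matching the minimum stated in the comment above.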
}
class ExceptionTest[T: ClassTag](failCountNumberLists: Array[Int], sleep: Boolean)
(implicit ev: TensorNumeric[T])
extends TensorModule[T] {
override def updateOutput(input: Tensor[T]): Tensor[T] = {
this.output = input
if (failCountNumberLists.contains(ExceptionTest.count.incrementAndGet())) {
if (sleep) {
Thread.sleep(10000)
}
throw new Exception("Fail task")
}
this.output
}
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
this.gradInput = gradOutput
this.gradInput
}
override def toString(): String = {
s"nn.ExceptionTest"
}
}
object ExceptionTest {
var count = new AtomicInteger(0)
def resetCount(): Unit = {
count.set(0)
}
}
|
intel-analytics/BigDL
|
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/utils/TestUtils.scala
|
Scala
|
apache-2.0
| 4,923
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.nio.ByteBuffer
import kafka.utils.nonthreadsafe
import kafka.api.ApiUtils._
import scala.collection.immutable.Map
import kafka.common.{ErrorMapping, TopicAndPartition}
import kafka.consumer.ConsumerConfig
import java.util.concurrent.atomic.AtomicInteger
import kafka.network.RequestChannel
import kafka.message.MessageSet
case class PartitionFetchInfo(offset: Long, fetchSize: Int)
object FetchRequest {
val CurrentVersion = 0.shortValue
val DefaultMaxWait = 0
val DefaultMinBytes = 0
val DefaultCorrelationId = 0
def readFrom(buffer: ByteBuffer): FetchRequest = {
val versionId = buffer.getShort
val correlationId = buffer.getInt
val clientId = readShortString(buffer)
val replicaId = buffer.getInt
val maxWait = buffer.getInt
val minBytes = buffer.getInt
val topicCount = buffer.getInt
val pairs = (1 to topicCount).flatMap(_ => {
val topic = readShortString(buffer)
val partitionCount = buffer.getInt
(1 to partitionCount).map(_ => {
val partitionId = buffer.getInt
val offset = buffer.getLong
val fetchSize = buffer.getInt
(TopicAndPartition(topic, partitionId), PartitionFetchInfo(offset, fetchSize))
})
})
FetchRequest(versionId, correlationId, clientId, replicaId, maxWait, minBytes, Map(pairs:_*))
}
}
case class FetchRequest private[kafka] (versionId: Short = FetchRequest.CurrentVersion,
override val correlationId: Int = FetchRequest.DefaultCorrelationId,
clientId: String = ConsumerConfig.DefaultClientId,
replicaId: Int = Request.OrdinaryConsumerId,
maxWait: Int = FetchRequest.DefaultMaxWait,
minBytes: Int = FetchRequest.DefaultMinBytes,
requestInfo: Map[TopicAndPartition, PartitionFetchInfo])
extends RequestOrResponse(Some(RequestKeys.FetchKey), correlationId) {
/**
* Partitions the request info into a map of maps (one for each topic).
*/
lazy val requestInfoGroupedByTopic = requestInfo.groupBy(_._1.topic)
/**
* Public constructor for the clients
*/
def this(correlationId: Int,
clientId: String,
maxWait: Int,
minBytes: Int,
requestInfo: Map[TopicAndPartition, PartitionFetchInfo]) {
this(versionId = FetchRequest.CurrentVersion,
correlationId = correlationId,
clientId = clientId,
replicaId = Request.OrdinaryConsumerId,
maxWait = maxWait,
         minBytes = minBytes,
requestInfo = requestInfo)
}
def writeTo(buffer: ByteBuffer) {
buffer.putShort(versionId)
buffer.putInt(correlationId)
writeShortString(buffer, clientId)
buffer.putInt(replicaId)
buffer.putInt(maxWait)
buffer.putInt(minBytes)
buffer.putInt(requestInfoGroupedByTopic.size) // topic count
requestInfoGroupedByTopic.foreach {
case (topic, partitionFetchInfos) =>
writeShortString(buffer, topic)
buffer.putInt(partitionFetchInfos.size) // partition count
partitionFetchInfos.foreach {
case (TopicAndPartition(_, partition), PartitionFetchInfo(offset, fetchSize)) =>
buffer.putInt(partition)
buffer.putLong(offset)
buffer.putInt(fetchSize)
}
}
}
def sizeInBytes: Int = {
2 + /* versionId */
4 + /* correlationId */
shortStringLength(clientId) +
4 + /* replicaId */
4 + /* maxWait */
4 + /* minBytes */
4 + /* topic count */
requestInfoGroupedByTopic.foldLeft(0)((foldedTopics, currTopic) => {
val (topic, partitionFetchInfos) = currTopic
foldedTopics +
shortStringLength(topic) +
4 + /* partition count */
partitionFetchInfos.size * (
4 + /* partition id */
8 + /* offset */
4 /* fetch size */
)
})
}
def isFromFollower = Request.isReplicaIdFromFollower(replicaId)
def isFromOrdinaryConsumer = replicaId == Request.OrdinaryConsumerId
def isFromLowLevelConsumer = replicaId == Request.DebuggingConsumerId
def numPartitions = requestInfo.size
override def toString(): String = {
describe(true)
}
override def handleError(e: Throwable, requestChannel: RequestChannel, request: RequestChannel.Request): Unit = {
val fetchResponsePartitionData = requestInfo.map {
case (topicAndPartition, data) =>
(topicAndPartition, FetchResponsePartitionData(ErrorMapping.codeFor(e.getClass.asInstanceOf[Class[Throwable]]), -1, MessageSet.Empty))
}
val errorResponse = FetchResponse(correlationId, fetchResponsePartitionData)
requestChannel.sendResponse(new RequestChannel.Response(request, new FetchResponseSend(errorResponse)))
}
override def describe(details: Boolean): String = {
val fetchRequest = new StringBuilder
fetchRequest.append("Name: " + this.getClass.getSimpleName)
fetchRequest.append("; Version: " + versionId)
fetchRequest.append("; CorrelationId: " + correlationId)
fetchRequest.append("; ClientId: " + clientId)
fetchRequest.append("; ReplicaId: " + replicaId)
fetchRequest.append("; MaxWait: " + maxWait + " ms")
fetchRequest.append("; MinBytes: " + minBytes + " bytes")
if(details)
fetchRequest.append("; RequestInfo: " + requestInfo.mkString(","))
fetchRequest.toString()
}
}
@nonthreadsafe
class FetchRequestBuilder() {
private val correlationId = new AtomicInteger(0)
private val versionId = FetchRequest.CurrentVersion
private var clientId = ConsumerConfig.DefaultClientId
private var replicaId = Request.OrdinaryConsumerId
private var maxWait = FetchRequest.DefaultMaxWait
private var minBytes = FetchRequest.DefaultMinBytes
private val requestMap = new collection.mutable.HashMap[TopicAndPartition, PartitionFetchInfo]
def addFetch(topic: String, partition: Int, offset: Long, fetchSize: Int) = {
requestMap.put(TopicAndPartition(topic, partition), PartitionFetchInfo(offset, fetchSize))
this
}
def clientId(clientId: String): FetchRequestBuilder = {
this.clientId = clientId
this
}
/**
* Only for internal use. Clients shouldn't set replicaId.
*/
private[kafka] def replicaId(replicaId: Int): FetchRequestBuilder = {
this.replicaId = replicaId
this
}
def maxWait(maxWait: Int): FetchRequestBuilder = {
this.maxWait = maxWait
this
}
def minBytes(minBytes: Int): FetchRequestBuilder = {
this.minBytes = minBytes
this
}
def build() = {
val fetchRequest = FetchRequest(versionId, correlationId.getAndIncrement, clientId, replicaId, maxWait, minBytes, requestMap.toMap)
requestMap.clear()
fetchRequest
}
}
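// --- Hedged usage sketch (editor addition, not part of the original Kafka source) ---
// Shows how a consumer-side caller might assemble a FetchRequest with the builder above;
// the topic name, offsets and fetch sizes are illustrative placeholders, not defaults.
object FetchRequestBuilderUsageSketch {
  def main(args: Array[String]): Unit = {
    val request: FetchRequest = new FetchRequestBuilder()
      .clientId("example-consumer")                  // hypothetical client id
      .maxWait(1000)                                 // wait at most 1s for minBytes to accumulate
      .minBytes(1)                                   // return as soon as any data is available
      .addFetch("example-topic", 0, 0L, 64 * 1024)   // partition 0, from offset 0, up to 64 KiB
      .addFetch("example-topic", 1, 42L, 64 * 1024)  // partition 1, from offset 42
      .build()
    println(request.describe(details = true))
  }
}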
|
unix1986/universe
|
tool/kafka-0.8.1.1-src/core/src/main/scala/kafka/api/FetchRequest.scala
|
Scala
|
bsd-2-clause
| 7,653
|
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.planner.logical
import org.neo4j.cypher.internal.frontend.v2_3.ast.{Identifier, LabelName}
import org.neo4j.cypher.internal.compiler.v2_3.pipes.LazyLabel
import org.neo4j.cypher.internal.compiler.v2_3.planner.LogicalPlanningTestSupport2
import org.neo4j.cypher.internal.compiler.v2_3.planner.logical.plans._
import org.neo4j.cypher.internal.frontend.v2_3.test_helpers.CypherFunSuite
class UnionPlanningIntegrationTest extends CypherFunSuite with LogicalPlanningTestSupport2 {
test("MATCH (a:A) RETURN a AS a UNION ALL MATCH (a:B) RETURN a AS a") {
val setup = new given {
knownLabels = Set("A", "B")
}
implicit val (logicalPlan, semanticTable) = setup.getLogicalPlanFor("MATCH (a:A) RETURN a AS a UNION ALL MATCH (a:B) RETURN a AS a")
logicalPlan should equal(
ProduceResult(Seq("a"),
Union(
Projection(
NodeByLabelScan(" a@7", LazyLabel(LabelName("A") _), Set.empty)(solved),
Map("a" -> Identifier(" a@7") _)
)(solved),
Projection(
NodeByLabelScan(" a@43", LazyLabel(LabelName("B") _), Set.empty)(solved),
Map("a" -> Identifier(" a@43") _)
)(solved)
)(solved)
)
)
}
test("MATCH (a:A) RETURN a AS a UNION MATCH (a:B) RETURN a AS a") {
val setup = new given {
knownLabels = Set("A", "B")
}
implicit val (logicalPlan, semanticTable) = setup.getLogicalPlanFor("MATCH (a:A) RETURN a AS a UNION MATCH (a:B) RETURN a AS a")
logicalPlan should equal(
ProduceResult(Seq("a"),
Aggregation(
left = Union(
Projection(
NodeByLabelScan(" a@7", LazyLabel(LabelName("A") _), Set.empty)(solved),
Map("a" -> Identifier(" a@7") _)
)(solved),
Projection(
NodeByLabelScan(" a@39", LazyLabel(LabelName("B") _), Set.empty)(solved),
Map("a" -> Identifier(" a@39") _)
)(solved)
)(solved),
groupingExpressions = Map("a" -> ident("a")),
aggregationExpression = Map.empty
)(solved)
)
)
}
}
|
HuangLS/neo4j
|
community/cypher/cypher-compiler-2.3/src/test/scala/org/neo4j/cypher/internal/compiler/v2_3/planner/logical/UnionPlanningIntegrationTest.scala
|
Scala
|
apache-2.0
| 2,971
|
package net.kwas.impatient.ch1
object Exercises extends App {
import math.sqrt
import math.pow
import math.BigInt
// --- Exercise 1
// Lots of stuff.
// See the Int Scaladoc.
// --- Exercise 2
// Rounding error
val notReallyThree = pow(sqrt(3),2)
println(s"Not Quite Three: ${notReallyThree}")
// --- Exercise 3
// val
// --- Exercise 4
  // It repeats the string three times.
// See the StringOpts Scaladoc.
println("crazy" * 3)
// --- Exercise 5
// It checks if 10 is greater than 2.
// Check the Int Scaladoc.
// --- Exercise 6
val normalExponent = pow(2, 1024)
println(s"Normal Exponent: ${normalExponent}")
val bigExponent = BigInt(2) pow 1024
println(s"Big Exponent: ${bigExponent}")
// --- Exercise 7
import math.BigInt.probablePrime
import util.Random
val myPrime = probablePrime(100, Random)
println(s"My Prime: ${myPrime}")
// --- Exercise 8
val randomString = BigInt(50, Random) toString 36
println(s"Random String: ${randomString}")
// --- Exercise 9 & 10
  // The functions are fun wrappers that emulate various use cases for
  // the substring method.
  // You don't have to worry about indices like you do with substring.
// You can just say "gimme the last/first x chars" and go.
val myString = "gahnachies"
val firstChar = myString(0)
val otherFirstChar = myString take 1
val lastChar = myString(myString.length - 1)
val otherLastChar = myString takeRight 1
println(s"My String: ${myString}")
println(s"First Char: ${firstChar} and ${otherFirstChar}")
println(s"Last Char: ${lastChar} and ${otherLastChar}")
}
|
dkwasny/ScalaImpatient
|
src/main/scala/net/kwas/impatient/ch1/Exercises.scala
|
Scala
|
mit
| 1,637
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.history
import java.util.concurrent.atomic.AtomicBoolean
import scala.util.matching.Regex
import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.History._
import org.apache.spark.status.AppStatusStore
import org.apache.spark.status.api.v1
import org.apache.spark.util.kvstore.KVStore
private[spark] class HistoryAppStatusStore(
conf: SparkConf,
store: KVStore)
extends AppStatusStore(store, None) with Logging {
import HistoryAppStatusStore._
private val logUrlPattern: Option[String] = {
val appInfo = super.applicationInfo()
val applicationCompleted = appInfo.attempts.nonEmpty && appInfo.attempts.head.completed
if (applicationCompleted || conf.get(APPLY_CUSTOM_EXECUTOR_LOG_URL_TO_INCOMPLETE_APP)) {
conf.get(CUSTOM_EXECUTOR_LOG_URL)
} else {
None
}
}
private val informedForMissingAttributes = new AtomicBoolean(false)
override def executorList(activeOnly: Boolean): Seq[v1.ExecutorSummary] = {
val execList = super.executorList(activeOnly)
logUrlPattern match {
case Some(pattern) => execList.map(replaceLogUrls(_, pattern))
case None => execList
}
}
override def executorSummary(executorId: String): v1.ExecutorSummary = {
val execSummary = super.executorSummary(executorId)
logUrlPattern match {
case Some(pattern) => replaceLogUrls(execSummary, pattern)
case None => execSummary
}
}
private def replaceLogUrls(exec: v1.ExecutorSummary, urlPattern: String): v1.ExecutorSummary = {
val attributes = exec.attributes
// Relation between pattern {{FILE_NAME}} and attribute {{LOG_FILES}}
    // Given that HistoryAppStatusStore doesn't know which types of log files can be provided
    // by the resource manager, we require the resource manager to provide the available types
    // of log files, which are encouraged to be the same as the types in the original log URLs.
// Once we get the list of log files, we need to expose them to end users as a pattern
// so that end users can compose custom log URL(s) including log file name(s).
val allPatterns = CUSTOM_URL_PATTERN_REGEX.findAllMatchIn(urlPattern).map(_.group(1)).toSet
val allPatternsExceptFileName = allPatterns.filter(_ != "FILE_NAME")
val allAttributeKeys = attributes.keySet
val allAttributeKeysExceptLogFiles = allAttributeKeys.filter(_ != "LOG_FILES")
if (allPatternsExceptFileName.diff(allAttributeKeysExceptLogFiles).nonEmpty) {
logFailToRenewLogUrls("some of required attributes are missing in app's event log.",
allPatternsExceptFileName, allAttributeKeys)
return exec
} else if (allPatterns.contains("FILE_NAME") && !allAttributeKeys.contains("LOG_FILES")) {
logFailToRenewLogUrls("'FILE_NAME' parameter is provided, but file information is " +
"missing in app's event log.", allPatternsExceptFileName, allAttributeKeys)
return exec
}
val updatedUrl = allPatternsExceptFileName.foldLeft(urlPattern) { case (orig, patt) =>
// we already checked the existence of attribute when comparing keys
orig.replace(s"{{$patt}}", attributes(patt))
}
val newLogUrlMap = if (allPatterns.contains("FILE_NAME")) {
// allAttributeKeys should contain "LOG_FILES"
attributes("LOG_FILES").split(",").map { file =>
file -> updatedUrl.replace("{{FILE_NAME}}", file)
}.toMap
} else {
Map("log" -> updatedUrl)
}
replaceExecutorLogs(exec, newLogUrlMap)
}
private def logFailToRenewLogUrls(
reason: String,
allPatterns: Set[String],
allAttributes: Set[String]): Unit = {
if (informedForMissingAttributes.compareAndSet(false, true)) {
logInfo(s"Fail to renew executor log urls: $reason. Required: $allPatterns / " +
s"available: $allAttributes. Falling back to show app's original log urls.")
}
}
private def replaceExecutorLogs(
source: v1.ExecutorSummary,
newExecutorLogs: Map[String, String]): v1.ExecutorSummary = {
new v1.ExecutorSummary(source.id, source.hostPort, source.isActive, source.rddBlocks,
source.memoryUsed, source.diskUsed, source.totalCores, source.maxTasks, source.activeTasks,
source.failedTasks, source.completedTasks, source.totalTasks, source.totalDuration,
source.totalGCTime, source.totalInputBytes, source.totalShuffleRead,
source.totalShuffleWrite, source.isBlacklisted, source.maxMemory, source.addTime,
source.removeTime, source.removeReason, newExecutorLogs, source.memoryMetrics,
source.blacklistedInStages, source.peakMemoryMetrics, source.attributes)
}
}
private[spark] object HistoryAppStatusStore {
  val CUSTOM_URL_PATTERN_REGEX: Regex = "\\{\\{([A-Za-z0-9_\\-]+)\\}\\}".r
}
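// --- Hedged illustration (editor addition, not part of the original Spark source) ---
// Reproduces in isolation the substitution that replaceLogUrls performs: every {{ATTR}} in
// the pattern is filled from the executor attributes, and {{FILE_NAME}} is expanded once per
// entry of the comma-separated LOG_FILES attribute. The sample pattern and attribute values
// below are illustrative placeholders, not Spark defaults.
object CustomLogUrlSketch {
  def main(args: Array[String]): Unit = {
    val pattern = "http://history.example.com/logs/{{CONTAINER_ID}}/{{FILE_NAME}}"
    val attributes = Map("CONTAINER_ID" -> "container_0001", "LOG_FILES" -> "stdout,stderr")
    val names = HistoryAppStatusStore.CUSTOM_URL_PATTERN_REGEX
      .findAllMatchIn(pattern).map(_.group(1)).toSet          // Set(CONTAINER_ID, FILE_NAME)
    val filled = (names - "FILE_NAME").foldLeft(pattern) { (url, name) =>
      url.replace(s"{{$name}}", attributes(name))
    }
    val logUrls = attributes("LOG_FILES").split(",").map { file =>
      file -> filled.replace("{{FILE_NAME}}", file)
    }.toMap
    logUrls.foreach(println)                                  // stdout/stderr -> expanded URLs
  }
}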
|
WindCanDie/spark
|
core/src/main/scala/org/apache/spark/deploy/history/HistoryAppStatusStore.scala
|
Scala
|
apache-2.0
| 5,635
|
package lila.report
import akka.actor._
import com.typesafe.config.Config
import lila.common.PimpedConfig._
final class Env(
config: Config,
db: lila.db.Env,
isOnline: lila.user.User.ID => Boolean,
noteApi: lila.user.NoteApi,
system: ActorSystem,
hub: lila.hub.Env) {
private val CollectionReport = config getString "collection.report"
private val ActorName = config getString "actor.name"
lazy val forms = new DataForm(hub.actor.captcher)
lazy val api = new ReportApi(reportColl, noteApi, isOnline)
// api actor
system.actorOf(Props(new Actor {
def receive = {
case lila.hub.actorApi.report.Cheater(userId, text) =>
api.autoCheatReport(userId, text)
case lila.hub.actorApi.report.Clean(userId) =>
api.clean(userId)
case lila.hub.actorApi.report.Check(userId) =>
api.autoProcess(userId)
case lila.hub.actorApi.report.MarkCheater(userId, by) =>
api.processEngine(userId, by)
case lila.hub.actorApi.report.MarkTroll(userId, by) =>
api.processTroll(userId, by)
case lila.hub.actorApi.report.Shutup(userId, text) =>
api.autoInsultReport(userId, text)
case lila.hub.actorApi.report.Booster(userId, accomplice) =>
api.autoBoostReport(userId, accomplice)
}
}), name = ActorName)
lazy val reportColl = db(CollectionReport)
}
object Env {
lazy val current = "report" boot new Env(
config = lila.common.PlayApp loadConfig "report",
db = lila.db.Env.current,
isOnline = lila.user.Env.current.isOnline,
noteApi = lila.user.Env.current.noteApi,
system = lila.common.PlayApp.system,
hub = lila.hub.Env.current)
}
|
clarkerubber/lila
|
modules/report/src/main/Env.scala
|
Scala
|
agpl-3.0
| 1,681
|
package mathParser.algebra.compile
import mathParser.SomeFunctions.someFunctions
import mathParser.algebra.{SpireBinaryOperator, SpireLanguage, SpireUnitaryOperator}
import mathParser.implicits._
import mathParser.{Compiler, LiteralParser, MathParser}
import org.scalatest.{Assertion, FunSuite, Matchers}
import spire.algebra.{Field, NRoot, Trig}
import mathParser.algebra.compile.SpireCompiler.compilerDouble1
import mathParser.algebra.compile.SpireCompiler.compilerComplex1
import scala.util.Try
class CompileSpec extends FunSuite with Matchers {
case object X
type V = X.type
testTemplate(MathParser.doubleLanguage, "double language")
testTemplate(MathParser.complexLanguage, "complex language")
def functionEquality[A: Field](f1: A => A, f2: A => A): Assertion = {
for (x <- Seq(1.0, 2.0, 0.0, Math.PI, Math.E, -1.0, -2.0, 1000.0, -1000.0).map(Field[A].fromDouble)) {
      Try(f1(x)) shouldEqual Try(f2(x))
}
succeed
}
def testTemplate[A: Field : Trig : NRoot : LiteralParser](_lang: SpireLanguage[A, Nothing], langName: String)
(implicit compile: Compiler[SpireUnitaryOperator, SpireBinaryOperator, A, V, A => A]) = {
val lang = _lang.withVariables(List("x" -> X))
for (term <- someFunctions)
test(s"$langName: compile $term") {
val ast = lang.parse(term).get
val compiled: A => A = lang.compile[A => A](ast).get
functionEquality[A](compiled, x => lang.evaluate(ast)({ case X => x }))
}
}
}
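// --- Hedged usage sketch (editor addition, not part of the original spec) ---
// Spells out, for the double language only, the parse -> compile -> evaluate round trip that
// testTemplate drives generically above, relying on the imports at the top of this file. The
// expression string, the probe value 3.0, and the assumption that the imported compilerDouble1
// works for any variable type are illustrative guesses, not guarantees.
object CompileUsageSketch {
  case object X
  def main(args: Array[String]): Unit = {
    val lang = MathParser.doubleLanguage.withVariables(List("x" -> X))
    val ast = lang.parse("x * x + 1").get                 // parse the term into an AST
    val f = lang.compile[Double => Double](ast).get       // compile the AST to a plain function
    println(lang.evaluate(ast)({ case X => 3.0 }))        // tree-walking evaluation: 10.0
    println(f(3.0))                                       // compiled evaluation: 10.0
  }
}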
|
gregor-i/mathParser
|
math-parser-compile-jvm/src/test/scala/mathParser/algebra/compile/CompileSpec.scala
|
Scala
|
mit
| 1,530
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.examples
import java.io.File
import org.apache.commons.io.FileUtils
import org.apache.spark.sql.SparkSession
import org.apache.spark.util.{CleanFiles, ShowSegments}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
object SparkSessionExample {
def main(args: Array[String]): Unit = {
val rootPath = new File(this.getClass.getResource("/").getPath
+ "../../../..").getCanonicalPath
val storeLocation = s"$rootPath/examples/spark2/target/store"
val warehouse = s"$rootPath/examples/spark2/target/warehouse"
val metastoredb = s"$rootPath/examples/spark2/target/metastore_db"
// clean data folder
if (true) {
val clean = (path: String) => FileUtils.deleteDirectory(new File(path))
clean(storeLocation)
clean(warehouse)
clean(metastoredb)
}
val spark = SparkSession
.builder()
.master("local")
.appName("SparkSessionExample")
.enableHiveSupport()
.config("spark.sql.warehouse.dir", warehouse)
.config("javax.jdo.option.ConnectionURL",
s"jdbc:derby:;databaseName=$metastoredb;create=true")
.getOrCreate()
CarbonProperties.getInstance()
.addProperty("carbon.storelocation", storeLocation)
spark.sparkContext.setLogLevel("WARN")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd HH:mm:ss")
.addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
// Create table
spark.sql(
s"""
| CREATE TABLE carbon_table(
| shortField short,
| intField int,
| bigintField long,
| doubleField double,
| stringField string,
| timestampField timestamp,
| decimalField decimal(18,2),
| dateField date,
| charField char(5)
| )
| USING org.apache.spark.sql.CarbonSource
| OPTIONS('DICTIONARY_INCLUDE'='dateField, charField',
| 'dbName'='default', 'tableName'='carbon_table')
""".stripMargin)
// val prop = s"$rootPath/conf/dataload.properties.template"
// val tableName = "carbon_table"
val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
// TableLoader.main(Array[String](prop, tableName, path))
spark.sql(
s"""
| CREATE TABLE csv_table
| ( shortField short,
| intField int,
| bigintField long,
| doubleField double,
| stringField string,
| timestampField string,
| decimalField decimal(18,2),
| dateField string,
| charField char(5))
| ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
""".stripMargin)
spark.sql(
s"""
| LOAD DATA LOCAL INPATH '$path'
| INTO TABLE csv_table
""".stripMargin)
spark.sql("""
SELECT *
FROM csv_table
""").show
spark.sql(
s"""
| INSERT INTO TABLE carbon_table
| SELECT shortField, intField, bigintField, doubleField, stringField,
| from_unixtime(unix_timestamp(timestampField,'yyyy/MM/dd HH:mm:ss')) timestampField,
| decimalField,from_unixtime(unix_timestamp(dateField,'yyyy/MM/dd')), charField
| FROM csv_table
""".stripMargin)
spark.sql("""
SELECT *
FROM carbon_table
where stringfield = 'spark' and decimalField > 40
""").show
// Shows with raw data's timestamp format
spark.sql("""
SELECT
stringField, date_format(timestampField, "yyyy/MM/dd HH:mm:ss") as timestampField
FROM carbon_table where length(stringField) = 5
""").show
spark.sql("""
SELECT *
FROM carbon_table where date_format(dateField, "yyyy-MM-dd") = "2015-07-23"
""").show
spark.sql("""
select count(stringField) from carbon_table
""".stripMargin).show
spark.sql("""
SELECT sum(intField), stringField
FROM carbon_table
GROUP BY stringField
""").show
spark.sql(
"""
|select t1.*, t2.*
|from carbon_table t1, carbon_table t2
|where t1.stringField = t2.stringField
""".stripMargin).show
spark.sql(
"""
|with t1 as (
|select * from carbon_table
|union all
|select * from carbon_table
|)
|select t1.*, t2.*
|from t1, carbon_table t2
|where t1.stringField = t2.stringField
""".stripMargin).show
// Drop table
spark.sql("DROP TABLE IF EXISTS carbon_table")
spark.sql("DROP TABLE IF EXISTS csv_table")
spark.stop()
}
}
|
Sephiroth-Lin/incubator-carbondata
|
examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
|
Scala
|
apache-2.0
| 5,710
|
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2009 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common
* Development and Distribution License("CDDL") (collectively, the
* "License"). You may not use this file except in compliance with the
* License. You can obtain a copy of the License at
* http://www.netbeans.org/cddl-gplv2.html
* or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
* specific language governing permissions and limitations under the
* License. When distributing the software, include this License Header
* Notice in each file and include the License file at
* nbbuild/licenses/CDDL-GPL-2-CP. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the
* License Header, with the fields enclosed by brackets [] replaced by
* your own identifying information:
* "Portions Copyrighted [year] [name of copyright owner]"
*
* If you wish your version of this file to be governed by only the CDDL
* or only the GPL Version 2, indicate your decision by adding
* "[Contributor] elects to include this software in this distribution
* under the [CDDL or GPL Version 2] license." If you do not indicate a
* single choice of license, a recipient has the option to distribute
* your version of this file under either the CDDL, the GPL Version 2 or
* to extend the choice of license to its licensees as provided above.
* However, if you add GPL Version 2 code and therefore, elected the GPL
* Version 2 license, then the option applies only if the new code is
* made subject to such option by the copyright holder.
*
* Contributor(s):
*
* Portions Copyrighted 2009 Sun Microsystems, Inc.
*/
package org.netbeans.api.language.util.lex
import java.io.IOException
import javax.swing.text.{BadLocationException, Document}
import org.netbeans.modules.csl.api.OffsetRange
import org.netbeans.api.lexer.{Language, Token, TokenHierarchy, TokenId, TokenSequence}
import org.netbeans.editor.{BaseDocument, Utilities}
import org.netbeans.modules.parsing.spi.Parser
import org.openide.cookies.EditorCookie
import org.openide.filesystems.{FileObject, FileUtil}
import org.openide.loaders.{DataObject, DataObjectNotFoundException}
import org.openide.util.Exceptions
import scala.collection.mutable.{Stack}
/**
*
* @author Caoyuan Deng
*/
trait LexUtil {
val LANGUAGE: Language[TokenId]
val WS_COMMENTS: Set[TokenId]
val WS: Set[TokenId]
val DOC_COMMENTS: Set[TokenId]
val BLOCK_COMMENTS: Set[TokenId]
val LINE_COMMENTS: Set[TokenId]
/**
* Tokens that should cause indentation of the next line. This is true for all {@link #END_PAIRS},
* but also includes tokens like "else" that are not themselves matched with end but also contribute
* structure for indentation.
*
*/
val INDENT_WORDS: Set[TokenId]
/** Tokens that match a corresponding END statement. Even though while, unless etc.
* can be statement modifiers, those luckily have different token ids so are not a problem
* here.
*/
val END_PAIRS: Set[TokenId] = Set[TokenId]()
val WHITE_SPACE: TokenId
val NEW_LINE: TokenId
val LPAREN: TokenId
val RPAREN: TokenId
def getDocCommentRangeBefore(th: TokenHierarchy[_], lexOffset: Int): OffsetRange
/**
* Return the comment sequence (if any) for the comment prior to the given offset.
*/
// def TokenSequence<? extends FortressCommentTokenId> getCommentFor(doc:BaseDocument, offset:Int) {
// TokenSequence<?extends ScalaTokenId> jts = getTokenSequence(doc, offset);
// if (jts == null) {
// return null;
// }
// jts.move(offset);
//
// while (jts.movePrevious()) {
// id:TokenId = jts.token().id();
// if (id == ScalaTokenId.BLOCK_COMMENT) {
// return jts.embedded(FortressCommentTokenId.language());
// } else if (id != ScalaTokenId.WHITESPACE && id != ScalaTokenId.EOL) {
// return null;
// }
// }
//
// return null;
// }
/** For a possibly generated offset in an AST, return the corresponding lexing/true document offset */
def getLexerOffset(info: Parser.Result, astOffset: Int): Int = {
if (info != null) {
info.getSnapshot.getOriginalOffset(astOffset)
} else {
astOffset
}
}
def getLexerOffsets(info: Parser.Result, astRange: OffsetRange): OffsetRange = {
if (info != null) {
val rangeStart = astRange.getStart
val start = info.getSnapshot.getOriginalOffset(rangeStart)
if (start == rangeStart) {
astRange
} else if (start == -1) {
OffsetRange.NONE
} else {
// Assumes the translated range maintains size
new OffsetRange(start, start + astRange.getLength)
}
} else {
astRange
}
}
def getAstOffset(pResult: Parser.Result, lexOffset: Int): Int = {
if (pResult != null) {
pResult.getSnapshot.getEmbeddedOffset(lexOffset)
} else lexOffset
}
def getAstOffsets(pResult: Parser.Result, lexicalRange: OffsetRange): OffsetRange = {
if (pResult != null) {
val rangeStart = lexicalRange.getStart
pResult.getSnapshot.getEmbeddedOffset(rangeStart) match {
case `rangeStart` => lexicalRange
case -1 => OffsetRange.NONE
case start =>
// Assumes the translated range maintains size
new OffsetRange(start, start + lexicalRange.getLength)
}
} else lexicalRange
}
/** Find the token hierarchy (in case it's embedded in something else at the top level */
final def getTokenHierarchy(doc: BaseDocument, offset: Int): Option[TokenHierarchy[_]] = {
TokenHierarchy.get(doc) match {
case null => None
case x => Some(x)
}
}
/** Find the token sequence (in case it's embedded in something else at the top level */
final def getTokenSequence(doc: BaseDocument, offset: Int): Option[TokenSequence[TokenId]] = {
val th = TokenHierarchy.get(doc)
getTokenSequence(th, offset)
}
final def getTokenSequence(th: TokenHierarchy[_], offset: Int): Option[TokenSequence[TokenId]] = {
var ts = th.tokenSequence(LANGUAGE)
if (ts == null) {
// Possibly an embedding scenario such as an RHTML file
// First try with backward bias true
var list = th.embeddedTokenSequences(offset, true)
var itr = list.iterator
var break = false
while (itr.hasNext && !break) {
val t = itr.next
if (t.language == LANGUAGE) {
ts = t.asInstanceOf[TokenSequence[TokenId]]
break = true
}
}
if (ts == null) {
list = th.embeddedTokenSequences(offset, false)
itr = list.iterator
break = false
while (itr.hasNext && !break) {
val t = itr.next
if (t.language == LANGUAGE) {
ts = t.asInstanceOf[TokenSequence[TokenId]]
break = true
}
}
}
}
if (ts != null) Some(ts) else None
}
def getPositionedSequence(doc: BaseDocument, offset: Int): Option[TokenSequence[TokenId]] = {
getPositionedSequence(doc, offset, true)
}
def getPositionedSequence(doc: BaseDocument, offset: Int, lookBack: Boolean): Option[TokenSequence[TokenId]] = {
getTokenSequence(doc, offset) match {
case Some(ts) =>
try {
ts.move(offset)
} catch {
case ex: AssertionError => doc.getProperty(Document.StreamDescriptionProperty) match {
case dobj: DataObject => Exceptions.attachMessage(ex, FileUtil.getFileDisplayName(dobj.getPrimaryFile))
case _ =>
}
throw ex
}
if (!lookBack && !ts.moveNext || lookBack && !ts.moveNext && !ts.movePrevious) {
None
} else Some(ts)
case None => None
}
}
def getToken(doc: BaseDocument, offset: Int): Option[Token[TokenId]] = {
getPositionedSequence(doc, offset) match {
case Some(x) => x.token match {
case null => None
case token => Some(token)
}
case None => None
}
}
def getTokenId(doc: BaseDocument, offset: Int): Option[TokenId] = {
getToken(doc, offset).map{_.id}
}
def getTokenChar(doc: BaseDocument, offset: Int): Char = {
getToken(doc, offset) match {
case Some(x) =>
val text = x.text.toString
if (text.length > 0) { // Usually true, but I could have gotten EOF right?
text.charAt(0)
} else 0
case None => 0
}
}
def moveTo(ts: TokenSequence[TokenId], th: TokenHierarchy[_], token: Token[TokenId]) {
val offset = token.offset(th)
ts.move(offset)
ts.moveNext
}
final def findNextNoWsNoComment(ts: TokenSequence[TokenId]): Option[Token[TokenId]] = {
findNextNotIn(ts, WS_COMMENTS)
}
final def findPreviousNoWsNoComment(ts: TokenSequence[TokenId]): Option[Token[TokenId]] = {
findPreviousNotIn(ts, WS_COMMENTS)
}
final def findNextNoWs(ts: TokenSequence[TokenId]): Option[Token[TokenId]] = {
findNextNotIn(ts, WS)
}
final def findPreviousNoWs(ts: TokenSequence[TokenId]): Option[Token[TokenId]] = {
findPreviousNotIn(ts, WS)
}
final def findNextNotIn(ts: TokenSequence[TokenId], excludes: Set[TokenId]): Option[Token[TokenId]] = {
if (excludes.contains(ts.token.id)) {
while (ts.moveNext && excludes.contains(ts.token.id)) {}
}
val token = ts.token
if (token == null) None else Some(token)
}
final def findPreviousNotIn(ts:TokenSequence[TokenId], excludes:Set[TokenId]): Option[Token[TokenId]] = {
if (excludes.contains(ts.token.id)) {
while (ts.movePrevious && excludes.contains(ts.token.id)) {}
}
val token = ts.token
if (token == null) None else Some(token)
}
final def findNext(ts: TokenSequence[TokenId], id: TokenId): Option[Token[TokenId]] = {
if (ts.token.id != id) {
while (ts.moveNext && ts.token.id != id) {}
}
val token = ts.token
if (token == null) None else Some(token)
}
final def findNextIn(ts: TokenSequence[TokenId], includes: Set[TokenId]): Option[Token[TokenId]] = {
if (!includes.contains(ts.token.id)) {
while (ts.moveNext && !includes.contains(ts.token.id)) {}
}
val token = ts.token
if (token == null) None else Some(token)
}
final def findPrevious(ts: TokenSequence[TokenId], id: TokenId): Option[Token[TokenId]] = {
if (ts.token.id != id) {
while (ts.movePrevious && ts.token.id != id) {}
}
val token = ts.token
if (token == null) None else Some(token)
}
def findNextIncluding(ts: TokenSequence[TokenId], includes: Set[TokenId]): Option[Token[TokenId]] = {
while (ts.moveNext && !includes.contains(ts.token.id)) {}
val token = ts.token
if (token == null) None else Some(token)
}
final def findPreviousIn(ts: TokenSequence[TokenId], includes: Set[TokenId]): Option[Token[TokenId]] = {
if (!includes.contains(ts.token.id)) {
while (ts.movePrevious && !includes.contains(ts.token.id)) {}
}
val token = ts.token
if (token == null) None else Some(token)
}
def skipParenthesis(ts: TokenSequence[TokenId]): Boolean = {
skipParenthesis(ts, false)
}
/**
* Tries to skip parenthesis
*/
def skipParenthesis(ts: TokenSequence[TokenId], back: Boolean, left: TokenId = LPAREN, right: TokenId = RPAREN): Boolean = {
var balance = 0
var token = ts.token
if (token == null) {
return false
}
var id = token.id
// skip whitespace and comment
if (isWsComment(id)) {
      while ((if (back) ts.movePrevious else ts.moveNext) && isWsComment(ts.token.id)) {}
}
// if current token is not parenthesis
    if (ts.token.id != (if (back) right else left)) {
      return false
    }
    do {
      token = ts.token
      id = token.id
      if (id == (if (back) right else left)) {
        balance += 1
      } else if (id == (if (back) left else right)) {
if (balance == 0) {
return false
} else if (balance == 1) {
if (back) {
ts.movePrevious
} else {
ts.moveNext
}
return true
}
balance -= 1
}
} while (if (back) ts.movePrevious else ts.moveNext)
false
}
/**
* Tries to skip pair, ts will be put at the found `left` token
*/
def skipPair(ts: TokenSequence[TokenId], back: Boolean, left: TokenId, right: TokenId): Boolean = {
var balance = 0
var token = ts.token
if (token == null) {
return false
}
// * skip whitespace and comment
var id = token.id
if (isWsComment(id)) {
      while ((if (back) ts.movePrevious else ts.moveNext) && isWsComment(ts.token.id)) {}
}
// * if current token is not of pair
if (ts.token.id != (if (back) right else left)) {
return false
}
do {
token = ts.token
id = token.id
if (id == (if (back) right else left)) {
balance += 1
} else if (id == (if (back) left else right)) {
if (balance == 0) {
return false
} else if (balance == 1) {
if (back) {
ts.movePrevious
} else {
ts.moveNext
}
return true
}
balance -= 1
}
} while (if (back) ts.movePrevious else ts.moveNext)
false
}
/** Search forwards in the token sequence until a token of type <code>down</code> is found */
def findPairFwd(ts: TokenSequence[TokenId], up: TokenId, down: TokenId): Option[Token[_]] = {
var balance = 0
while (ts.moveNext) {
val token = ts.token
val id = token.id
if (id == up) {
balance += 1
} else if (id == down) {
if (balance == 0) {
return Some(token)
}
balance -= 1
}
}
None
}
/** Search backwards in the token sequence until a token of type <code>up</code> is found */
def findPairBwd(ts: TokenSequence[TokenId], up: TokenId, down: TokenId): Option[Token[_]] = {
var balance = 0
while (ts.movePrevious) {
val token = ts.token
val id = token.id
if (id == up) {
if (balance == 0) {
return Some(token)
}
balance += 1
} else if (id == down) {
balance -= 1
}
}
None
}
/** Search forwards in the token sequence until a token of type <code>down</code> is found */
def findFwd(ts: TokenSequence[TokenId], up: TokenId, down: TokenId): OffsetRange = {
var balance = 0
while (ts.moveNext) {
val token = ts.token
val id = token.id
if (id == up) {
balance += 1
} else if (id == down) {
if (balance == 0) {
return new OffsetRange(ts.offset, ts.offset + token.length)
}
balance -= 1
}
}
OffsetRange.NONE
}
/** Search backwards in the token sequence until a token of type <code>up</code> is found */
def findBwd(ts: TokenSequence[TokenId], up: TokenId, down: TokenId): OffsetRange = {
var balance = 0
while (ts.movePrevious) {
val token = ts.token
val id = token.id
if (id == up) {
if (balance == 0) {
return new OffsetRange(ts.offset, ts.offset + token.length)
}
balance += 1
} else if (id == down) {
balance -= 1
}
}
OffsetRange.NONE
}
/** Search forwards in the token sequence until a token of type <code>down</code> is found */
def findFwd(ts: TokenSequence[TokenId], up: String, down: String): OffsetRange = {
var balance = 0
while (ts.moveNext) {
val token = ts.token
val id = token.id
val text = token.text.toString
if (text.equals(up)) {
balance += 1
} else if (text.equals(down)) {
if (balance == 0) {
return new OffsetRange(ts.offset, ts.offset + token.length)
}
balance -= 1
}
}
OffsetRange.NONE
}
/** Search backwards in the token sequence until a token of type <code>up</code> is found */
def findBwd(ts: TokenSequence[TokenId], up: String, down: String): OffsetRange = {
var balance = 0
while (ts.movePrevious) {
val token = ts.token
val id = token.id
val text = token.text.toString
if (text.equals(up)) {
if (balance == 0) {
return new OffsetRange(ts.offset, ts.offset + token.length)
}
balance += 1
} else if (text.equals(down)) {
balance -= 1
}
}
OffsetRange.NONE
}
/** Find the token that begins a block terminated by "end". This is a token
* in the END_PAIRS array. Walk backwards and find the corresponding token.
* It does not use indentation for clues since this could be wrong and be
* precisely the reason why the user is using pair matching to see what's wrong.
*/
def findBegin(ts: TokenSequence[TokenId]): OffsetRange = {
var balance = 0
while (ts.movePrevious) {
val token = ts.token
val text = token.text.toString
if (isBegin(token.id)) {
        // No matching dot for "do" used in conditionals etc.
if (balance == 0) {
return new OffsetRange(ts.offset, ts.offset + token.length)
}
balance -= 1
} else if (isEnd(token.id)) {
balance += 1
}
}
OffsetRange.NONE
}
def findEnd(ts: TokenSequence[TokenId]): OffsetRange = {
var balance = 0
while (ts.moveNext) {
val token = ts.token
val text = token.text.toString
if (isBegin(token.id)) {
balance -= 1
} else if (isEnd(token.id)) {
if (balance == 0) {
return new OffsetRange(ts.offset, ts.offset + token.length)
}
balance += 1
}
}
OffsetRange.NONE
}
/** Determine whether "do" is an indent-token (e.g. matches an end) or if
* it's simply a separator in while,until,for expressions)
*/
def isEndmatchingDo(doc: BaseDocument, offset: Int): Boolean = {
// In the following case, do is dominant:
// expression.do
// whatever
// end
//
// However, not here:
// while true do
// whatever
// end
//
// In the second case, the end matches the while, but in the first case
// the end matches the do
// Look at the first token of the current line
try {
val first = Utilities.getRowFirstNonWhite(doc, offset)
if (first != -1) {
getToken(doc, first) match {
case Some(x) =>
val text = x.text.toString
if (text.equals("while") || text.equals("for")) {
return false
}
case None => return true
}
}
} catch {case ble: BadLocationException => Exceptions.printStackTrace(ble)}
true
}
/** Compute the balance of begin/end tokens on the line.
* @param doc the document
* @param offset The offset somewhere on the line
* @param upToOffset If true, only compute the line balance up to the given offset (inclusive),
* and if false compute the balance for the whole line
*/
def getBeginEndLineBalance(doc: BaseDocument, offset: Int, upToOffset: Boolean): Int = {
try {
val begin = Utilities.getRowStart(doc, offset);
val end = if (upToOffset) offset else Utilities.getRowEnd(doc, offset)
val ts = getTokenSequence(doc, begin).getOrElse(return 0)
ts.move(begin)
if (!ts.moveNext) {
return 0
}
var balance = 0
do {
val token = ts.token
val text = token.text.toString
if (isBegin(token.id)) {
balance += 1
} else if (isEnd(token.id)) {
balance -= 1
}
} while (ts.moveNext && ts.offset <= end)
balance
} catch {
case ble: BadLocationException => Exceptions.printStackTrace(ble); 0
}
}
/** Compute the balance of up/down tokens on the line */
def getLineBalance(doc: BaseDocument, offset: Int, up: TokenId, down: TokenId): Stack[Token[TokenId]] = {
val balanceStack = new Stack[Token[TokenId]]
try {
val begin = Utilities.getRowStart(doc, offset)
val end = Utilities.getRowEnd(doc, offset)
val ts = getTokenSequence(doc, begin).getOrElse(return balanceStack)
ts.move(begin)
if (!ts.moveNext) {
return balanceStack
}
var balance = 0
do {
val token = ts.offsetToken
val id = token.id
if (id == up) {
balanceStack.push(token)
balance += 1
} else if (id == down) {
if (!balanceStack.isEmpty) {
balanceStack.pop
}
balance -= 1
}
} while (ts.moveNext && ts.offset <= end)
balanceStack
} catch {
case ble: BadLocationException => Exceptions.printStackTrace(ble); balanceStack
}
}
/**
* The same as braceBalance but generalized to any pair of matching
* tokens.
   * @param open the token that increases the count
   * @param close the token that decreases the count
*/
@throws(classOf[BadLocationException])
def getTokenBalance(doc: BaseDocument, open: TokenId, close: TokenId, offset: Int): Int = {
val ts = getTokenSequence(doc, 0).getOrElse(return 0)
// XXX Why 0? Why not offset?
ts.moveIndex(0)
if (!ts.moveNext) {
return 0
}
var balance = 0
do {
val t = ts.token
if (t.id == open) {
balance += 1
} else if (t.id == close) {
balance -= 1
}
} while (ts.moveNext)
balance
}
/**
* The same as braceBalance but generalized to any pair of matching
* tokens.
   * @param open the token that increases the count
   * @param close the token that decreases the count
*/
@throws(classOf[BadLocationException])
def getTokenBalance(doc: BaseDocument, open: String, close: String, offset: Int): Int = {
val ts = getTokenSequence(doc, 0).getOrElse(return 0)
// XXX Why 0? Why not offset?
ts.moveIndex(0)
if (!ts.moveNext) {
return 0
}
var balance = 0
do {
val token = ts.token
val text = token.text.toString
if (text.equals(open)) {
balance += 1
      } else if (text.equals(close)) {
balance -= 1
}
} while (ts.moveNext)
balance
}
/**
   * Return true iff the line for the given offset is a comment-only line.
* This will return false for lines that contain comments (even when the
* offset is within the comment portion) but also contain code.
*/
@throws(classOf[BadLocationException])
def isCommentOnlyLine(doc: BaseDocument, offset: Int): Boolean = {
val begin = Utilities.getRowFirstNonWhite(doc, offset)
if (begin == -1) {
return false // whitespace only
}
getTokenId(doc, begin) match {
case Some(x) if isLineComment(x) => true
case _ => false
}
}
/**
* Return the string at the given position, or null if none
*/
/*_
def getStringAt(caretOffset:Int, th:TokenHierarchy[Document]): String = {
val ts = getTokenSequence(th, caretOffset)
if (ts == null) {
return null
}
ts.move(caretOffset)
if (!ts.moveNext() && !ts.movePrevious) {
return null
}
if (ts.offset == caretOffset) {
// We're looking at the offset to the RIGHT of the caret
// and here I care about what's on the left
ts.movePrevious
}
var token = ts.token
if (token != null) {
var id = token.id
// // We're within a String that has embedded Js. Drop into the
// // embedded language and see if we're within a literal string there.
// if (id == ScalaTokenId.EMBEDDED_RUBY) {
// ts = (TokenSequence)ts.embedded();
// assert ts != null;
// ts.move(caretOffset);
//
// if (!ts.moveNext() && !ts.movePrevious()) {
// return null;
// }
//
// token = ts.token();
// id = token.id();
// }
//
var string:String = null
// Skip over embedded Js segments and literal strings until you find the beginning
var segments = 0
while (id == ScalaTokenId.Error || id == ScalaTokenId.StringLiteral) {
string = token.text.toString
segments += 1
ts.movePrevious
token = ts.token
id = token.id
}
if (id == ScalaTokenId.STRING_BEGIN) {
if (segments == 1) {
return string
} else {
// Build up the String from the sequence
val sb = new StringBuilder
while (ts.moveNext) {
token = ts.token
id = token.id
if (id == ScalaTokenId.Error || id == ScalaTokenId.StringLiteral) {
sb.append(token.text)
} else {
break
}
}
return sb.toString
}
}
}
null
}
*/
// /**
// * Check if the caret is inside a literal string that is associated with
// * a require statement.
// *
// * @return The offset of the beginning of the require string, or -1
// * if the offset is not inside a require string.
// */
// def int getRequireStringOffset(caretOffset:Int, th:TokenHierarchy[Document]) {
// TokenSequence<?extends ScalaTokenId> ts = getTokenSequence(th, caretOffset);
//
// if (ts == null) {
// return -1;
// }
//
// ts.move(caretOffset);
//
// if (!ts.moveNext() && !ts.movePrevious()) {
// return -1;
// }
//
// if (ts.offset() == caretOffset) {
// // We're looking at the offset to the RIGHT of the caret
// // and here I care about what's on the left
// ts.movePrevious();
// }
//
// Token<?extends ScalaTokenId> token = ts.token();
//
// if (token != null) {
// id:TokenId = token.id();
//
// // Skip over embedded Js segments and literal strings until you find the beginning
// while ((id == ScalaTokenId.ERROR) || (id == ScalaTokenId.STRING_LITERAL)) {
// ts.movePrevious();
// token = ts.token();
// id = token.id();
// }
//
// int stringStart = ts.offset() + token.length();
//
// if (id == ScalaTokenId.STRING_BEGIN) {
// // Completion of literal strings within require calls
// while (ts.movePrevious()) {
// token = ts.token();
//
// id = token.id();
//
// if ((id == ScalaTokenId.WHITESPACE) || (id == ScalaTokenId.LPAREN) ||
// (id == ScalaTokenId.STRING_LITERAL)) {
// continue;
// }
//
// if (id == ScalaTokenId.IDENTIFIER) {
// String text = token.text().toString();
//
// if (text.equals("require") || text.equals("load")) {
// return stringStart;
// } else {
// return -1;
// }
// } else {
// return -1;
// }
// }
// }
// }
//
// return -1;
// }
//
/*_
def getSingleQuotedStringOffset(caretOffset:Int, th:TokenHierarchy[Document]): Int = {
getLiteralStringOffset(caretOffset, th, ScalaTokenId.STRING_BEGIN)
}
def getRegexpOffset(caretOffset:Int, th:TokenHierarchy[Document]): Int = {
getLiteralStringOffset(caretOffset, th, ScalaTokenId.REGEXP_BEGIN)
}
*/
/**
* Determine if the caret is inside a literal string, and if so, return its starting
* offset. Return -1 otherwise.
*/
/*
private def getLiteralStringOffset(caretOffset:Int, th:TokenHierarchy[Document], begin:ScalaTokenId): Int = {
val ts = getTokenSequence(th, caretOffset)
if (ts == null) {
return -1
}
ts.move(caretOffset)
if (!ts.moveNext && !ts.movePrevious) {
return -1
}
if (ts.offset == caretOffset) {
// We're looking at the offset to the RIGHT of the caret
// and here I care about what's on the left
ts.movePrevious
}
var token = ts.token
if (token != null) {
var id = token.id
// // We're within a String that has embedded Js. Drop into the
// // embedded language and see if we're within a literal string there.
// if (id == ScalaTokenId.EMBEDDED_RUBY) {
// ts = (TokenSequence)ts.embedded();
// assert ts != null;
// ts.move(caretOffset);
//
// if (!ts.moveNext() && !ts.movePrevious()) {
// return -1;
// }
//
// token = ts.token();
// id = token.id();
// }
// Skip over embedded Js segments and literal strings until you find the beginning
while ((id == ScalaTokenId.Error) || (id == ScalaTokenId.StringLiteral) ||
(id == ScalaTokenId.REGEXP_LITERAL)) {
ts.movePrevious
token = ts.token
id = token.id
}
if (id == begin) {
if (!ts.moveNext) {
return -1
}
return ts.offset
}
}
-1
}
*/
/*_
def isInsideRegexp(doc:BaseDocument, offset:Int): Boolean = {
val ts = getTokenSequence(doc, offset)
if (ts == null) {
return false
}
ts.move(offset)
if (ts.moveNext) {
val token = ts.token
val id = token.id
if (id == ScalaTokenId.REGEXP_LITERAL || id == ScalaTokenId.REGEXP_END) {
return true
}
}
if (ts.movePrevious()) {
val token = ts.token
val id = token.id
if (id == ScalaTokenId.REGEXP_LITERAL || id == ScalaTokenId.REGEXP_BEGIN) {
return true
}
}
false
}
*/
def getDocumentationRange(th: TokenHierarchy[_], nodeOffset: Int): OffsetRange = {
val astOffset = nodeOffset
// XXX This is wrong; I should do a
//int lexOffset = LexUtilities.getLexerOffset(result, astOffset);
// but I don't have the CompilationInfo in the ParseResult handed to the indexer!!
val lexOffset = astOffset
getDocCommentRangeBefore(th, lexOffset)
}
/**
* Get the comment block for the given offset. The offset may be either within the comment
* block, or the comment corresponding to a code node, depending on isAfter.
*
* @param doc The document
* @param caretOffset The offset in the document
* @param isAfter If true, the offset is pointing to some code AFTER the code block
* such as a method node. In this case it needs to back up to find the comment.
* @return
*/
def getCommentBlock(doc: BaseDocument, caretOffset: Int, isAfter: Boolean): OffsetRange = {
// Check if the caret is within a comment, and if so insert a new
// leaf "node" which contains the comment line and then comment block
try {
val ts = getTokenSequence(doc, caretOffset).getOrElse(return OffsetRange.NONE)
ts.move(caretOffset)
if (isAfter) {
while (ts.movePrevious) {
val id = ts.token.id
if (isComment(id)) {
return getCommentBlock(doc, ts.offset, false)
} else if (!isWs(id)) {
return OffsetRange.NONE
}
}
return OffsetRange.NONE
}
if (!ts.moveNext && !ts.movePrevious) {
return OffsetRange.NONE
}
val token = ts.token
if (token != null && isBlockComment(token.id)) {
return new OffsetRange(ts.offset, ts.offset + token.length)
}
if (token != null && isLineComment(token.id)) {
// First add a range for the current line
var begin = Utilities.getRowStart(doc, caretOffset)
var end = Utilities.getRowEnd(doc, caretOffset)
if (isCommentOnlyLine(doc, caretOffset)) {
var break = false
while (begin > 0 && !break) {
val newBegin = Utilities.getRowStart(doc, begin - 1)
if (newBegin < 0 || !isCommentOnlyLine(doc, newBegin)) {
begin = Utilities.getRowFirstNonWhite(doc, begin)
break = true
} else {
begin = newBegin
}
}
val length = doc.getLength
break = false
while (!break) {
val newEnd = Utilities.getRowEnd(doc, end + 1)
if (newEnd >= length || !isCommentOnlyLine(doc, newEnd)) {
end = Utilities.getRowLastNonWhite(doc, end) + 1
break = true
} else {
end = newEnd
}
}
if (begin < end) {
return new OffsetRange(begin, end)
}
} else {
// It's just a line comment next to some code
val th = TokenHierarchy.get(doc)
val offset = token.offset(th)
return new OffsetRange(offset, offset + token.length)
}
}
} catch {
case ble: BadLocationException => Exceptions.printStackTrace(ble)
}
OffsetRange.NONE
}
// def boolean isInsideQuotedString(doc:BaseDocument, offset:Int) {
// TokenSequence<?extends ScalaTokenId> ts = FortressLexUtilities.getTokenSequence(doc, offset);
//
// if (ts == null) {
// return false;
// }
//
// ts.move(offset);
//
// if (ts.moveNext()) {
// Token<?extends ScalaTokenId> token = ts.token();
// id:TokenId = token.id();
// if (id == ScalaTokenId.QUOTED_STRING_LITERAL || id == ScalaTokenId.QUOTED_STRING_END) {
// return true;
// }
// }
// if (ts.movePrevious()) {
// Token<?extends ScalaTokenId> token = ts.token();
// id:TokenId = token.id();
// if (id == ScalaTokenId.QUOTED_STRING_LITERAL || id == ScalaTokenId.QUOTED_STRING_BEGIN) {
// return true;
// }
// }
//
// return false;
// }
//
/**
* Back up to the first space character prior to the given offset - as long as
* it's on the same line! If there's only leading whitespace on the line up
* to the lex offset, return the offset itself
* @todo Rewrite this now that I have a separate newline token, EOL, that I can
* break on - no need to call Utilities.getRowStart.
*/
def findSpaceBegin(doc: BaseDocument, lexOffset: Int): Int = {
val ts = getTokenSequence(doc, lexOffset).getOrElse(return lexOffset)
var allowPrevLine = false
var lineStart: Int = 0
try {
lineStart = Utilities.getRowStart(doc, math.min(lexOffset, doc.getLength))
var prevLast = lineStart - 1
if (lineStart > 0) {
prevLast = Utilities.getRowLastNonWhite(doc, lineStart - 1);
if (prevLast != -1) {
val c = doc.getText(prevLast, 1).charAt(0)
if (c == ',') {
// Arglist continuation? // TODO: check lexing
allowPrevLine = true
}
}
}
if (!allowPrevLine) {
val firstNonWhite = Utilities.getRowFirstNonWhite(doc, lineStart)
if (lexOffset <= firstNonWhite || firstNonWhite == -1) {
return lexOffset
}
} else {
// Make lineStart so small that math.max won't cause any problems
val firstNonWhite = Utilities.getRowFirstNonWhite(doc, lineStart)
if (prevLast >= 0 && (lexOffset <= firstNonWhite || firstNonWhite == -1)) {
return prevLast + 1
}
lineStart = 0
}
} catch {
case ble:BadLocationException =>
Exceptions.printStackTrace(ble)
return lexOffset
}
ts.move(lexOffset)
if (ts.moveNext) {
if (lexOffset > ts.offset()) {
// We're in the middle of a token
return math.max(if (ts.token.id == WHITE_SPACE) ts.offset else lexOffset, lineStart)
}
while (ts.movePrevious) {
val token = ts.token
if (token.id != WHITE_SPACE) {
return math.max(ts.offset + token.length, lineStart)
}
}
}
lexOffset
}
/**
* Get the documentation associated with the given node in the given document.
* TODO: handle proper block comments
*/
def gatherDocumentation(info: Parser.Result, baseDoc: BaseDocument, nodeOffset: Int): List[String] = {
var comments: List[String] = Nil
var elementBegin = nodeOffset
if (info != null && info.getSnapshot.getSource.getDocument(true) == baseDoc) {
elementBegin = getLexerOffset(info, elementBegin)
if (elementBegin == -1) {
return Nil
}
}
try {
if (elementBegin >= baseDoc.getLength) {
return Nil
}
// Search to previous lines, locate comments. Once we have a non-whitespace line that isn't
// a comment, we're done
var offset = Utilities.getRowStart(baseDoc, elementBegin)
offset -= 1
// Skip empty and whitespace lines
var break = false
while (offset >= 0 && !break) {
// Find beginning of line
offset = Utilities.getRowStart(baseDoc, offset)
if (!Utilities.isRowEmpty(baseDoc, offset) &&
!Utilities.isRowWhite(baseDoc, offset)) {
break = true
} else {
offset -= 1
}
}
if (offset < 0) {
return Nil
}
      break = false
while (offset >= 0 && !break) {
// Find beginning of line
offset = Utilities.getRowStart(baseDoc, offset)
if (Utilities.isRowEmpty(baseDoc, offset) || Utilities.isRowWhite(baseDoc, offset)) {
// Empty lines not allowed within an rdoc
break = true
} else {
// This is a comment line we should include
val lineBegin = Utilities.getRowFirstNonWhite(baseDoc, offset)
val lineEnd = Utilities.getRowLastNonWhite(baseDoc, offset) + 1
val line = baseDoc.getText(lineBegin, lineEnd - lineBegin)
// Tolerate "public", "private" and "protected" here --
// Test::Unit::Assertions likes to put these in front of each
// method.
if (line.startsWith("*")) {
// ignore end of block comment: "*/"
if (line.length == 1 || (line.length > 1 && line.charAt(1) != '/')) {
comments = line.substring(1).trim :: comments
}
// Previous line
offset -= 1
} else {
// No longer in a comment
break = true
}
}
}
} catch {
case ble:BadLocationException => Exceptions.printStackTrace(ble)
}
comments.toList
}
/**
* Return true iff the given token is a token that should be matched
* with a corresponding "end" token, such as "begin", "def", "module",
* etc.
*/
def isBegin(id: TokenId): Boolean = {
END_PAIRS.contains(id)
}
  /**
   * Return true iff the given token is an "end"-like token that closes one of
   * the begin tokens matched by isBegin.
   */
def isEnd(id: TokenId): Boolean = {
END_PAIRS.contains(id)
}
final def isWs(id: TokenId): Boolean = {
WS.contains(id)
}
final def isWsComment(id: TokenId): Boolean = {
WS_COMMENTS.contains(id)
}
final def isComment(id: TokenId): Boolean = {
isLineComment(id) || isBlockComment(id) || isDocComment(id)
}
final def isLineComment(id: TokenId): Boolean = {
LINE_COMMENTS.contains(id)
}
final def isDocComment(id: TokenId): Boolean = {
DOC_COMMENTS.contains(id)
}
final def isBlockComment(id: TokenId): Boolean = {
BLOCK_COMMENTS.contains(id)
}
/**
* Return true iff the given token is a token that indents its content,
* such as the various begin tokens as well as "else", "when", etc.
*/
def isIndent(id: TokenId): Boolean = {
INDENT_WORDS.contains(id)
}
def isKeyword(id: TokenId): Boolean = {
id.primaryCategory.equals("keyword")
}
final def getRangeOfToken(th: TokenHierarchy[_], token: Token[_ <: TokenId]): OffsetRange = {
val offset = token.offset(th)
new OffsetRange(offset, offset + token.length)
}
def getDocument(fo: FileObject, openIfNecessary: Boolean): Option[BaseDocument] = {
try {
val dobj = DataObject.find(fo)
val ec = dobj.getCookie(classOf[EditorCookie])
if (ec != null) {
return (if (openIfNecessary) Some(ec.openDocument) else Some(ec.getDocument)).asInstanceOf[Option[BaseDocument]]
}
} catch {
case ex:DataObjectNotFoundException => Exceptions.printStackTrace(ex)
case ex:IOException => Exceptions.printStackTrace(ex)
}
None
}
}
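// --- Hedged illustration (editor addition, not part of the NetBeans sources above) ---
// Models, without the NetBeans lexer API, the balance counting that findPairFwd/findBwd and
// the find*(up, down) helpers use: walk the tokens, count nested "up" tokens, and stop at the
// "down" token that closes the one we started from. Plain characters stand in for TokenIds
// here, so this is a standalone sketch of the technique rather than the real editor API.
object BalanceMatchSketch {
  /** Index of the `down` char matching the `up` char just before `from`, or -1 if unbalanced. */
  def findPairFwd(tokens: IndexedSeq[Char], from: Int, up: Char = '(', down: Char = ')'): Int = {
    var balance = 0
    var i = from
    while (i < tokens.length) {
      tokens(i) match {
        case `up` => balance += 1
        case `down` => if (balance == 0) return i else balance -= 1
        case _ =>
      }
      i += 1
    }
    -1
  }
  def main(args: Array[String]): Unit = {
    val src = "(a(b)c)d".toIndexedSeq
    println(findPairFwd(src, from = 1)) // 6: the ')' that closes the outer '(' at index 0
  }
}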
|
richardfontana/fontana2007-t
|
ScalaEditorLite/src/org/netbeans/api/language/util/lex/LexUtil.scala
|
Scala
|
gpl-3.0
| 41,018
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.coordinator
import java.net.SocketTimeoutException
import org.apache.samza.util.Util
import org.junit.{After, Test}
import org.junit.Assert._
import org.junit.rules.ExpectedException
import scala.collection.JavaConversions._
import org.apache.samza.config.MapConfig
import org.apache.samza.config.TaskConfig
import org.apache.samza.config.SystemConfig
import org.apache.samza.container.{SamzaContainer, TaskName}
import org.apache.samza.metrics.{MetricsRegistryMap, MetricsRegistry}
import org.apache.samza.config.Config
import org.apache.samza.system.SystemFactory
import org.apache.samza.system.SystemAdmin
import org.apache.samza.system.SystemStreamPartition
import org.apache.samza.system.SystemStreamMetadata
import org.apache.samza.system.SystemStreamMetadata.SystemStreamPartitionMetadata
import org.apache.samza.Partition
import org.apache.samza.job.model.JobModel
import org.apache.samza.job.model.ContainerModel
import org.apache.samza.job.model.TaskModel
import org.apache.samza.config.JobConfig
import org.apache.samza.system.IncomingMessageEnvelope
import org.apache.samza.system.SystemConsumer
import org.apache.samza.coordinator.stream.{MockCoordinatorStreamWrappedConsumer, MockCoordinatorStreamSystemFactory}
class TestJobCoordinator {
/**
* Builds a coordinator from config, and then compares it with what was
* expected. We simulate having a checkpoint manager that has 2 task
* changelog entries, and our model adds a third task. Expectation is that
* the JobCoordinator will assign the new task with a new changelog
* partition
*/
@Test
def testJobCoordinator {
val task0Name = new TaskName("Partition 0")
val checkpoint0 = Map(new SystemStreamPartition("test", "stream1", new Partition(0)) -> "4")
val task1Name = new TaskName("Partition 1")
val checkpoint1 = Map(new SystemStreamPartition("test", "stream1", new Partition(1)) -> "3")
val task2Name = new TaskName("Partition 2")
val checkpoint2 = Map(new SystemStreamPartition("test", "stream1", new Partition(2)) -> null)
// Construct the expected JobModel, so we can compare it to
// JobCoordinator's JobModel.
val container0Tasks = Map(
task0Name -> new TaskModel(task0Name, checkpoint0, new Partition(4)),
task2Name -> new TaskModel(task2Name, checkpoint2, new Partition(5)))
val container1Tasks = Map(
task1Name -> new TaskModel(task1Name, checkpoint1, new Partition(3)))
val containers = Map(
Integer.valueOf(0) -> new ContainerModel(0, container0Tasks),
Integer.valueOf(1) -> new ContainerModel(1, container1Tasks))
    // The test does not pass offsets for task2 (Partition 2) to the checkpoint manager; this verifies that we get an offset of 0 for this partition
val checkpointOffset0 = MockCoordinatorStreamWrappedConsumer.CHECKPOINTPREFIX + "mock:" +
task0Name.getTaskName() -> (Util.sspToString(checkpoint0.keySet.iterator.next()) + ":" + checkpoint0.values.iterator.next())
val checkpointOffset1 = MockCoordinatorStreamWrappedConsumer.CHECKPOINTPREFIX + "mock:" +
task1Name.getTaskName() -> (Util.sspToString(checkpoint1.keySet.iterator.next()) + ":" + checkpoint1.values.iterator.next())
val changelogInfo0 = MockCoordinatorStreamWrappedConsumer.CHANGELOGPREFIX + "mock:" + task0Name.getTaskName() -> "4"
val changelogInfo1 = MockCoordinatorStreamWrappedConsumer.CHANGELOGPREFIX + "mock:" + task1Name.getTaskName() -> "3"
val changelogInfo2 = MockCoordinatorStreamWrappedConsumer.CHANGELOGPREFIX + "mock:" + task2Name.getTaskName() -> "5"
// Configs which are processed by the MockCoordinatorStream as special configs which are interpreted as
// SetCheckpoint and SetChangelog
val otherConfigs = Map(
checkpointOffset0,
checkpointOffset1,
changelogInfo0,
changelogInfo1,
changelogInfo2
)
val config = Map(
JobConfig.JOB_NAME -> "test",
JobConfig.JOB_COORDINATOR_SYSTEM -> "coordinator",
JobConfig.JOB_CONTAINER_COUNT -> "2",
TaskConfig.INPUT_STREAMS -> "test.stream1",
SystemConfig.SYSTEM_FACTORY.format("test") -> classOf[MockSystemFactory].getCanonicalName,
SystemConfig.SYSTEM_FACTORY.format("coordinator") -> classOf[MockCoordinatorStreamSystemFactory].getName
)
// We want the mocksystemconsumer to use the same instance across runs
MockCoordinatorStreamSystemFactory.enableMockConsumerCache()
val coordinator = JobCoordinator(new MapConfig(config ++ otherConfigs))
coordinator.start
val jobModel = new JobModel(new MapConfig(config), containers)
assertEquals(new MapConfig(config), coordinator.jobModel.getConfig)
assertEquals(jobModel, coordinator.jobModel)
}
@Test
def testJobCoordinatorCheckpointing = {
val task0Name = new TaskName("Partition 0")
val checkpoint0 = Map(new SystemStreamPartition("test", "stream1", new Partition(0)) -> "4")
val task1Name = new TaskName("Partition 1")
val checkpoint1 = Map(new SystemStreamPartition("test", "stream1", new Partition(1)) ->"3")
val task2Name = new TaskName("Partition 2")
val checkpoint2 = Map(new SystemStreamPartition("test", "stream1", new Partition(2)) -> "8")
// Construct the expected JobModel, so we can compare it to
// JobCoordinator's JobModel.
val container0Tasks = Map(
task0Name -> new TaskModel(task0Name, checkpoint0, new Partition(4)),
task2Name -> new TaskModel(task2Name, checkpoint2, new Partition(5)))
val container1Tasks = Map(
task1Name -> new TaskModel(task1Name, checkpoint1, new Partition(3)))
val containers = Map(
Integer.valueOf(0) -> new ContainerModel(0, container0Tasks),
Integer.valueOf(1) -> new ContainerModel(1, container1Tasks))
    // Unlike the test above, checkpoints are defined for all three tasks here, but only task0's checkpoint is passed to the coordinator up front; the others are fed in later through the wrapped consumer
val checkpointOffset0 = MockCoordinatorStreamWrappedConsumer.CHECKPOINTPREFIX + "mock:" +
task0Name.getTaskName() -> (Util.sspToString(checkpoint0.keySet.iterator.next()) + ":" + checkpoint0.values.iterator.next())
val checkpointOffset1 = MockCoordinatorStreamWrappedConsumer.CHECKPOINTPREFIX + "mock:" +
task1Name.getTaskName() -> (Util.sspToString(checkpoint1.keySet.iterator.next()) + ":" + checkpoint1.values.iterator.next())
val checkpointOffset2 = MockCoordinatorStreamWrappedConsumer.CHECKPOINTPREFIX + "mock:" +
task2Name.getTaskName() -> (Util.sspToString(checkpoint2.keySet.iterator.next()) + ":" + checkpoint2.values.iterator.next())
val changelogInfo0 = MockCoordinatorStreamWrappedConsumer.CHANGELOGPREFIX + "mock:" + task0Name.getTaskName() -> "4"
val changelogInfo1 = MockCoordinatorStreamWrappedConsumer.CHANGELOGPREFIX + "mock:" + task1Name.getTaskName() -> "3"
val changelogInfo2 = MockCoordinatorStreamWrappedConsumer.CHANGELOGPREFIX + "mock:" + task2Name.getTaskName() -> "5"
// Configs which are processed by the MockCoordinatorStream as special configs which are interpreted as
// SetCheckpoint and SetChangelog
// Write a couple of checkpoints that the job coordinator will process
val otherConfigs = Map(
checkpointOffset0,
changelogInfo0
)
val config = Map(
JobConfig.JOB_NAME -> "test",
JobConfig.JOB_COORDINATOR_SYSTEM -> "coordinator",
JobConfig.JOB_CONTAINER_COUNT -> "2",
TaskConfig.INPUT_STREAMS -> "test.stream1",
SystemConfig.SYSTEM_FACTORY.format("test") -> classOf[MockSystemFactory].getCanonicalName,
SystemConfig.SYSTEM_FACTORY.format("coordinator") -> classOf[MockCoordinatorStreamSystemFactory].getName
)
// Enable caching on MockConsumer to add more messages later
MockCoordinatorStreamSystemFactory.enableMockConsumerCache()
// start the job coordinator and verify if it has all the checkpoints through http port
val coordinator = JobCoordinator(new MapConfig(config ++ otherConfigs))
coordinator.start
val url = coordinator.server.getUrl.toString
// Verify if the jobCoordinator has seen the checkpoints
var offsets = extractOffsetsFromJobCoordinator(url)
assertEquals(1, offsets.size)
assertEquals(checkpoint0.head._2, offsets.getOrElse(checkpoint0.head._1, fail()))
// Write more checkpoints
val wrappedConsumer = new MockCoordinatorStreamSystemFactory()
.getConsumer(null, null, null)
.asInstanceOf[MockCoordinatorStreamWrappedConsumer]
var moreMessageConfigs = Map(
checkpointOffset1
)
wrappedConsumer.addMoreMessages(new MapConfig(moreMessageConfigs))
// Verify if the coordinator has seen it
offsets = extractOffsetsFromJobCoordinator(url)
assertEquals(2, offsets.size)
assertEquals(checkpoint0.head._2, offsets.getOrElse(checkpoint0.head._1, fail()))
assertEquals(checkpoint1.head._2, offsets.getOrElse(checkpoint1.head._1, fail()))
// Write more checkpoints but block on read on the mock consumer
moreMessageConfigs = Map(
checkpointOffset2
)
wrappedConsumer.addMoreMessages(new MapConfig(moreMessageConfigs))
// Simulate consumer being blocked (Job coordinator waiting to read new checkpoints from coordinator after container failure)
val latch = wrappedConsumer.blockPool();
// verify if the port times out
var seenException = false
try {
extractOffsetsFromJobCoordinator(url)
}
catch {
case se: SocketTimeoutException => seenException = true
}
assertTrue(seenException)
// verify if it has read the new checkpoints after job coordinator has loaded the new checkpoints
latch.countDown()
offsets = extractOffsetsFromJobCoordinator(url)
    assertEquals(3, offsets.size)
assertEquals(checkpoint0.head._2, offsets.getOrElse(checkpoint0.head._1, fail()))
assertEquals(checkpoint1.head._2, offsets.getOrElse(checkpoint1.head._1, fail()))
assertEquals(checkpoint2.head._2, offsets.getOrElse(checkpoint2.head._1, fail()))
coordinator.stop
}
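  // Reads the JobModel that the job coordinator serves over HTTP, flattens every task's
  // checkpointed offsets into a single map (with the config used in these tests, e.g. the
  // "test.stream1" partition 0 SSP -> "4"), and drops entries whose offset is null.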
  def extractOffsetsFromJobCoordinator(url: String) = {
    val jobModel = SamzaContainer.readJobModel(url)
val taskModels = jobModel.getContainers.values().flatMap(_.getTasks.values())
val offsets = taskModels.flatMap(_.getCheckpointedOffsets).toMap
offsets.filter(_._2 != null)
}
@After
def tearDown() = {
MockCoordinatorStreamSystemFactory.disableMockConsumerCache()
}
}
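// No-op test system used by the coordinator tests above: the consumer never returns any
// messages, there is no producer, and the admin reports partitions 0, 1 and 2 for whatever
// single stream it is asked about, so the resulting job model always contains three tasks.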
class MockSystemFactory extends SystemFactory {
def getConsumer(systemName: String, config: Config, registry: MetricsRegistry) = new SystemConsumer {
def start() {}
def stop() {}
def register(systemStreamPartition: SystemStreamPartition, offset: String) {}
def poll(systemStreamPartitions: java.util.Set[SystemStreamPartition], timeout: Long) = new java.util.HashMap[SystemStreamPartition, java.util.List[IncomingMessageEnvelope]]()
}
def getProducer(systemName: String, config: Config, registry: MetricsRegistry) = null
def getAdmin(systemName: String, config: Config) = new MockSystemAdmin
}
class MockSystemAdmin extends SystemAdmin {
def getOffsetsAfter(offsets: java.util.Map[SystemStreamPartition, String]) = null
def getSystemStreamMetadata(streamNames: java.util.Set[String]): java.util.Map[String, SystemStreamMetadata] = {
assertEquals(1, streamNames.size)
val partitionMetadata = Map(
new Partition(0) -> new SystemStreamPartitionMetadata(null, null, null),
new Partition(1) -> new SystemStreamPartitionMetadata(null, null, null),
// Create a new Partition(2), which wasn't in the prior changelog mapping.
new Partition(2) -> new SystemStreamPartitionMetadata(null, null, null))
Map(streamNames.toList.head -> new SystemStreamMetadata("foo", partitionMetadata))
}
override def createChangelogStream(topicName: String, numOfChangeLogPartitions: Int) {
    throw new UnsupportedOperationException("Method not implemented.")
}
override def createCoordinatorStream(streamName: String) {
    throw new UnsupportedOperationException("Method not implemented.")
}
}
|
Quantiply/samza
|
samza-core/src/test/scala/org/apache/samza/coordinator/TestJobCoordinator.scala
|
Scala
|
apache-2.0
| 12,974
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.fixture
import org.scalatest._
import SharedHelpers._
import events.TestFailed
import org.scalatest.exceptions.DuplicateTestNameException
import org.scalatest.exceptions.TestFailedException
import org.scalatest.exceptions.TestRegistrationClosedException
import org.scalatest.events.InfoProvided
class FreeSpecSpec extends org.scalatest.FunSpec with PrivateMethodTester {
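  // The suites built inside the tests below all follow the fixture.FreeSpec pattern:
  // they fix FixtureParam (a String here) and implement withFixture(test: OneArgTest),
  // which creates the fixture and passes it to each test body.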
describe("A fixture.FreeSpec") {
it("should return the test names in order of registration from testNames") {
val a = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"Something" - {
"should do that" in { fixture => ()
}
"should do this" in { fixture =>
}
}
}
assertResult(List("Something should do that", "Something should do this")) {
a.testNames.iterator.toList
}
val b = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
}
assertResult(List[String]()) {
b.testNames.iterator.toList
}
val c = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"Something" - {
"should do this" in { fixture =>
}
"should do that" in { fixture =>
}
}
}
assertResult(List("Something should do this", "Something should do that")) {
c.testNames.iterator.toList
}
}
it("should throw DuplicateTestNameException if a duplicate test name registration is attempted") {
intercept[DuplicateTestNameException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"should test this" in { fixture => }
"should test this" in { fixture => }
}
}
intercept[DuplicateTestNameException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"should test this" in { fixture => }
"should test this" ignore { fixture => }
}
}
intercept[DuplicateTestNameException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"should test this" ignore { fixture => }
"should test this" ignore { fixture => }
}
}
intercept[DuplicateTestNameException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"should test this" ignore { fixture => }
"should test this" in { fixture => }
}
}
}
it("should pass in the fixture to every test method") {
val a = new FreeSpec {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
"Something" - {
"should do this" in { fixture =>
assert(fixture === hello)
}
"should do that" in { fixture =>
assert(fixture === hello)
}
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
assert(!rep.eventsReceived.exists(_.isInstanceOf[TestFailed]))
}
it("should throw NullPointerException if a null test tag is provided") {
// it
intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"hi" taggedAs(null) in { fixture => }
}
}
val caught = intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"hi" taggedAs(mytags.SlowAsMolasses, null) in { fixture => }
}
}
assert(caught.getMessage == "a test tag was null")
intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"hi" taggedAs(mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) in { fixture => }
}
}
// ignore
intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"hi" taggedAs(null) ignore { fixture => }
}
}
val caught2 = intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"hi" taggedAs(mytags.SlowAsMolasses, null) ignore { fixture => }
}
}
assert(caught2.getMessage == "a test tag was null")
intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"hi" taggedAs(mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) ignore { fixture => }
}
}
// registerTest
intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerTest("hi", null) { fixture => }
}
}
val caught3 = intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerTest("hi", mytags.SlowAsMolasses, null) { fixture => }
}
}
assert(caught3.getMessage == "a test tag was null")
intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerTest("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => }
}
}
// registerIgnoredTest
intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerIgnoredTest("hi", null) { fixture => }
}
}
val caught4 = intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerIgnoredTest("hi", mytags.SlowAsMolasses, null) { fixture => }
}
}
assert(caught4.getMessage == "a test tag was null")
intercept[NullPointerException] {
new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
registerIgnoredTest("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) { fixture => }
}
}
}
it("should return a correct tags map from the tags method using is (pending)") {
val a = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"test this" ignore { fixture => }
"test that" is (pending)
}
assertResult(Map("test this" -> Set("org.scalatest.Ignore"))) {
a.tags
}
val b = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"test this" is (pending)
"test that" ignore { fixture => }
}
assertResult(Map("test that" -> Set("org.scalatest.Ignore"))) {
b.tags
}
val c = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"test this" ignore { fixture => }
"test that" ignore { fixture => }
}
assertResult(Map("test this" -> Set("org.scalatest.Ignore"), "test that" -> Set("org.scalatest.Ignore"))) {
c.tags
}
val d = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"test this" taggedAs(mytags.SlowAsMolasses) is (pending)
"test that" taggedAs(mytags.SlowAsMolasses) ignore { fixture => }
}
assertResult(Map("test this" -> Set("org.scalatest.SlowAsMolasses"), "test that" -> Set("org.scalatest.Ignore", "org.scalatest.SlowAsMolasses"))) {
d.tags
}
val e = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"test this" is (pending)
"test that" is (pending)
}
assertResult(Map()) {
e.tags
}
val f = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"test this" taggedAs(mytags.SlowAsMolasses, mytags.WeakAsAKitten) is (pending)
"test that" taggedAs(mytags.SlowAsMolasses) is (pending)
}
assertResult(Map("test this" -> Set("org.scalatest.SlowAsMolasses", "org.scalatest.WeakAsAKitten"), "test that" -> Set("org.scalatest.SlowAsMolasses"))) {
f.tags
}
val g = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = Succeeded
"test this" taggedAs(mytags.SlowAsMolasses, mytags.WeakAsAKitten) is (pending)
"test that" taggedAs(mytags.SlowAsMolasses) is (pending)
}
assertResult(Map("test this" -> Set("org.scalatest.SlowAsMolasses", "org.scalatest.WeakAsAKitten"), "test that" -> Set("org.scalatest.SlowAsMolasses"))) {
g.tags
}
}
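    // Small helper suite used by the next two tests to record which of its tests were
    // actually invoked for a given testName argument.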
class TestWasCalledSuite extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
"run this" in { fixture => theTestThisCalled = true }
"run that, maybe" in { fixture => theTestThatCalled = true }
}
it("should execute all tests when run is called with testName None") {
val b = new TestWasCalledSuite
b.run(None, Args(SilentReporter))
assert(b.theTestThisCalled)
assert(b.theTestThatCalled)
}
it("should execute one test when run is called with a defined testName") {
val a = new TestWasCalledSuite
a.run(Some("run this"), Args(SilentReporter))
assert(a.theTestThisCalled)
assert(!a.theTestThatCalled)
}
it("should report as ignored, and not run, tests marked ignored") {
val a = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
"test this" in { fixture => theTestThisCalled = true }
"test that" in { fixture => theTestThatCalled = true }
}
import scala.language.reflectiveCalls
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
val b = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
"test this" ignore { fixture => theTestThisCalled = true }
"test that" in { fixture => theTestThatCalled = true }
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB))
assert(repB.testIgnoredReceived)
assert(repB.lastEvent.isDefined)
assert(repB.lastEvent.get.testName endsWith "test this")
assert(!b.theTestThisCalled)
assert(b.theTestThatCalled)
val c = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
"test this" in { fixture => theTestThisCalled = true }
"test that" ignore { fixture => theTestThatCalled = true }
}
val repC = new TestIgnoredTrackingReporter
c.run(None, Args(repC))
assert(repC.testIgnoredReceived)
assert(repC.lastEvent.isDefined)
assert(repC.lastEvent.get.testName endsWith "test that", repC.lastEvent.get.testName)
assert(c.theTestThisCalled)
assert(!c.theTestThatCalled)
// The order I want is order of appearance in the file.
// Will try and implement that tomorrow. Subtypes will be able to change the order.
val d = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
"test this" ignore { fixture => theTestThisCalled = true }
"test that" ignore { fixture => theTestThatCalled = true }
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD))
assert(repD.testIgnoredReceived)
assert(repD.lastEvent.isDefined)
assert(repD.lastEvent.get.testName endsWith "test that") // last because should be in order of appearance
assert(!d.theTestThisCalled)
assert(!d.theTestThatCalled)
}
it("should ignore a test marked as ignored if run is invoked with that testName") {
      // Even when a specific testName is passed to run, a test marked as ignored is still
      // reported as ignored and is not invoked.
val e = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
"test this" ignore { fixture => theTestThisCalled = true }
"test that" in { fixture => theTestThatCalled = true }
}
import scala.language.reflectiveCalls
val repE = new TestIgnoredTrackingReporter
e.run(Some("test this"), Args(repE))
assert(repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(!e.theTestThatCalled)
}
it("should run only those tests selected by the tags to include and exclude sets") {
// Nothing is excluded
val a = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
"test this" taggedAs(mytags.SlowAsMolasses) in { fixture => theTestThisCalled = true }
"test that" in { fixture => theTestThatCalled = true }
}
import scala.language.reflectiveCalls
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
// SlowAsMolasses is included, one test should be excluded
val b = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
"test this" taggedAs(mytags.SlowAsMolasses) in { fixture => theTestThisCalled = true }
"test that" in { fixture => theTestThatCalled = true }
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repB.testIgnoredReceived)
assert(b.theTestThisCalled)
assert(!b.theTestThatCalled)
// SlowAsMolasses is included, and both tests should be included
val c = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
"test this" taggedAs(mytags.SlowAsMolasses) in { fixture => theTestThisCalled = true }
"test that" taggedAs(mytags.SlowAsMolasses) in { fixture => theTestThatCalled = true }
}
val repC = new TestIgnoredTrackingReporter
      c.run(None, Args(repC, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repC.testIgnoredReceived)
assert(c.theTestThisCalled)
assert(c.theTestThatCalled)
      // SlowAsMolasses is included; both tests should be included but one is ignored
val d = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
"test this" taggedAs(mytags.SlowAsMolasses) ignore { fixture => theTestThisCalled = true }
"test that" taggedAs(mytags.SlowAsMolasses) in { fixture => theTestThatCalled = true }
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repD.testIgnoredReceived)
assert(!d.theTestThisCalled)
assert(d.theTestThatCalled)
// SlowAsMolasses included, FastAsLight excluded
val e = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) in { fixture => theTestThisCalled = true }
"test that" taggedAs(mytags.SlowAsMolasses) in { fixture => theTestThatCalled = true }
"test the other" in { fixture => theTestTheOtherCalled = true }
}
val repE = new TestIgnoredTrackingReporter
e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(e.theTestThatCalled)
assert(!e.theTestTheOtherCalled)
// An Ignored test that was both included and excluded should not generate a TestIgnored event
val f = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) ignore { fixture => theTestThisCalled = true }
"test that" taggedAs(mytags.SlowAsMolasses) in { fixture => theTestThatCalled = true }
"test the other" in { fixture => theTestTheOtherCalled = true }
}
val repF = new TestIgnoredTrackingReporter
f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repF.testIgnoredReceived)
assert(!f.theTestThisCalled)
assert(f.theTestThatCalled)
assert(!f.theTestTheOtherCalled)
// An Ignored test that was not included should not generate a TestIgnored event
val g = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) in { fixture => theTestThisCalled = true }
"test that" taggedAs(mytags.SlowAsMolasses) in { fixture => theTestThatCalled = true }
"test the other" ignore { fixture => theTestTheOtherCalled = true }
}
val repG = new TestIgnoredTrackingReporter
g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repG.testIgnoredReceived)
assert(!g.theTestThisCalled)
assert(g.theTestThatCalled)
assert(!g.theTestTheOtherCalled)
// No tagsToInclude set, FastAsLight excluded
val h = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) in { fixture => theTestThisCalled = true }
"test that" taggedAs(mytags.SlowAsMolasses) in { fixture => theTestThatCalled = true }
"test the other" in { fixture => theTestTheOtherCalled = true }
}
val repH = new TestIgnoredTrackingReporter
h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repH.testIgnoredReceived)
assert(!h.theTestThisCalled)
assert(h.theTestThatCalled)
assert(h.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded
val i = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) in { fixture => theTestThisCalled = true }
"test that" taggedAs(mytags.SlowAsMolasses) in { fixture => theTestThatCalled = true }
"test the other" in { fixture => theTestTheOtherCalled = true }
}
val repI = new TestIgnoredTrackingReporter
i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!i.theTestThisCalled)
assert(!i.theTestThatCalled)
assert(i.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
val j = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) ignore { fixture => theTestThisCalled = true }
"test that" taggedAs(mytags.SlowAsMolasses) ignore { fixture => theTestThatCalled = true }
"test the other" in { fixture => theTestTheOtherCalled = true }
}
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
      assert(!repJ.testIgnoredReceived)
assert(!j.theTestThisCalled)
assert(!j.theTestThatCalled)
assert(j.theTestTheOtherCalled)
// Same as previous, except Ignore specifically mentioned in excludes set
val k = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) ignore { fixture => theTestThisCalled = true }
"test that" taggedAs(mytags.SlowAsMolasses) ignore { fixture => theTestThatCalled = true }
"test the other" ignore { fixture => theTestTheOtherCalled = true }
}
val repK = new TestIgnoredTrackingReporter
k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repK.testIgnoredReceived)
assert(!k.theTestThisCalled)
assert(!k.theTestThatCalled)
assert(!k.theTestTheOtherCalled)
}
it("should run only those registered tests selected by the tags to include and exclude sets") {
// Nothing is excluded
val a = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
registerTest("test that") { fixture => theTestThatCalled = true }
}
import scala.language.reflectiveCalls
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
// SlowAsMolasses is included, one test should be excluded
val b = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
registerTest("test that") { fixture => theTestThatCalled = true }
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repB.testIgnoredReceived)
assert(b.theTestThisCalled)
assert(!b.theTestThatCalled)
// SlowAsMolasses is included, and both tests should be included
val c = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
}
val repC = new TestIgnoredTrackingReporter
      c.run(None, Args(repC, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repC.testIgnoredReceived)
assert(c.theTestThisCalled)
assert(c.theTestThatCalled)
      // SlowAsMolasses is included; both tests should be included but one is ignored
val d = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repD.testIgnoredReceived)
assert(!d.theTestThisCalled)
assert(d.theTestThatCalled)
// SlowAsMolasses included, FastAsLight excluded
val e = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repE = new TestIgnoredTrackingReporter
e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(e.theTestThatCalled)
assert(!e.theTestTheOtherCalled)
// An Ignored test that was both included and excluded should not generate a TestIgnored event
val f = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repF = new TestIgnoredTrackingReporter
f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repF.testIgnoredReceived)
assert(!f.theTestThisCalled)
assert(f.theTestThatCalled)
assert(!f.theTestTheOtherCalled)
// An Ignored test that was not included should not generate a TestIgnored event
val g = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerIgnoredTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repG = new TestIgnoredTrackingReporter
g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repG.testIgnoredReceived)
assert(!g.theTestThisCalled)
assert(g.theTestThatCalled)
assert(!g.theTestTheOtherCalled)
// No tagsToInclude set, FastAsLight excluded
val h = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repH = new TestIgnoredTrackingReporter
h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repH.testIgnoredReceived)
assert(!h.theTestThisCalled)
assert(h.theTestThatCalled)
assert(h.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded
val i = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repI = new TestIgnoredTrackingReporter
i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!i.theTestThisCalled)
assert(!i.theTestThatCalled)
assert(i.theTestTheOtherCalled)
// No tagsToInclude set, SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
val j = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerIgnoredTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
      assert(!repJ.testIgnoredReceived)
assert(!j.theTestThisCalled)
assert(!j.theTestThatCalled)
assert(j.theTestTheOtherCalled)
// Same as previous, except Ignore specifically mentioned in excludes set
val k = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { fixture => theTestThisCalled = true }
registerIgnoredTest("test that", mytags.SlowAsMolasses) { fixture => theTestThatCalled = true }
registerIgnoredTest("test the other") { fixture => theTestTheOtherCalled = true }
}
val repK = new TestIgnoredTrackingReporter
k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repK.testIgnoredReceived)
assert(!k.theTestThisCalled)
assert(!k.theTestThatCalled)
assert(!k.theTestTheOtherCalled)
}
it("should return the correct test count from its expectedTestCount method") {
val a = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"test this" in { fixture => }
"test that" in { fixture => }
}
assert(a.expectedTestCount(Filter()) == 2)
val b = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"test this" ignore { fixture => }
"test that" in { fixture => }
}
      assert(b.expectedTestCount(Filter()) === 1)
val c = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"test this" taggedAs(mytags.FastAsLight) in { fixture => }
"test that" in { fixture => }
}
assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) == 1)
val d = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"test this" taggedAs(mytags.FastAsLight, mytags.SlowAsMolasses) in { fixture => }
"test that" taggedAs(mytags.SlowAsMolasses) in { fixture => }
"test the other thing" in { fixture => }
}
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 1)
assert(d.expectedTestCount(Filter()) == 3)
val e = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"test this" taggedAs(mytags.FastAsLight, mytags.SlowAsMolasses) in { fixture => }
"test that" taggedAs(mytags.SlowAsMolasses) in { fixture => }
"test the other thing" ignore { fixture => }
}
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 0)
assert(e.expectedTestCount(Filter()) == 2)
val f = new Suites(a, b, c, d, e)
assert(f.expectedTestCount(Filter()) == 10)
}
it("should return the correct test count from its expectedTestCount method when uses registerTest and registerIgnoredTest to register tests") {
val a = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this") { fixture => }
registerTest("test that") { fixture => }
}
assert(a.expectedTestCount(Filter()) == 2)
val b = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerIgnoredTest("test this") { fixture => }
registerTest("test that") { fixture => }
}
assert(b.expectedTestCount(Filter()) == 1)
val c = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this", mytags.FastAsLight) { fixture => }
registerTest("test that") { fixture => }
}
assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) == 1)
val d = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => }
registerTest("test that", mytags.SlowAsMolasses) { fixture => }
registerTest("test the other thing") { fixture => }
}
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 1)
assert(d.expectedTestCount(Filter()) == 3)
val e = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) { fixture => }
registerTest("test that", mytags.SlowAsMolasses) { fixture => }
registerIgnoredTest("test the other thing") { fixture => }
}
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 0)
assert(e.expectedTestCount(Filter()) == 2)
val f = new Suites(a, b, c, d, e)
assert(f.expectedTestCount(Filter()) == 10)
}
it("should generate a TestPending message when the test body is (pending)") {
val a = new FreeSpec {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
"should do this" is (pending)
"should do that" in { fixture =>
assert(fixture === hello)
}
"should do something else" in { fixture =>
assert(fixture === hello)
pending
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testPendingEventsReceived
assert(tp.size === 2)
}
it("should generate a TestCanceled message when the test body includes a cancel invocation") {
val a = new FreeSpec {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
"should do this" in { fixture => cancel() }
"should do that" in { fixture =>
assert(fixture === hello)
}
"should do something else" in { fixture =>
assert(fixture === hello)
cancel("i changed my mind")
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testCanceledEventsReceived
assert(tp.size === 2)
}
it("should generate a TestCanceled message when the test body includes an assume invocation") {
val a = new FreeSpec {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
"should do this" in { fixture => assume(1 + 1 === 3, "ho") }
"should do that" in { fixture =>
assert(fixture === hello)
}
"should do something else" in { fixture =>
assert(fixture === hello)
assume(3 === 4)
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testCanceledEventsReceived
assert(tp.size === 2)
}
it("should generate a test failure if a Throwable, or an Error other than direct Error subtypes " +
"known in JDK 1.5, excluding AssertionError") {
val a = new FreeSpec {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
"This FreeSpec" - {
"should throw AssertionError" in { s => throw new AssertionError }
"should throw plain old Error" in { s => throw new Error }
"should throw Throwable" in { s => throw new Throwable }
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tf = rep.testFailedEventsReceived
assert(tf.size === 3)
}
it("should propagate out Errors that are direct subtypes of Error in JDK 1.5, other than " +
"AssertionError, causing Suites and Runs to abort.") {
val a = new FreeSpec {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
"This FreeSpec" - {
"should throw AssertionError" in { s => throw new OutOfMemoryError }
}
}
intercept[OutOfMemoryError] {
a.run(None, Args(SilentReporter))
}
}
/*
it("should send InfoProvided events with aboutAPendingTest set to true and aboutACanceledTest set to false for info " +
"calls made from a test that is pending and not canceled") {
val a = new FreeSpec with GivenWhenThen {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
"A FreeSpec" - {
"should do something" in { s =>
given("two integers")
when("one is subracted from the other")
then("the result is the difference between the two numbers")
pending
}
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testPending = rep.testPendingEventsReceived
assert(testPending.size === 1)
val recordedEvents = testPending(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && ip.aboutAPendingTest.get)
assert(ip.aboutACanceledTest.isDefined && !ip.aboutACanceledTest.get)
}
val so = rep.scopeOpenedEventsReceived
assert(so.size === 1)
for (event <- so) {
assert(event.message == "A FreeSpec")
}
val sc = rep.scopeClosedEventsReceived
assert(so.size === 1)
for (event <- sc) {
assert(event.message == "A FreeSpec")
}
}
it("should send InfoProvided events with aboutAPendingTest and aboutACanceledTest both set to false for info " +
"calls made from a test that is neither pending nor canceled") {
val a = new FreeSpec with GivenWhenThen {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
"A FreeSpec" - {
"should do something" in { s =>
given("two integers")
when("one is subracted from the other")
then("the result is the difference between the two numbers")
assert(1 + 1 === 2)
}
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testSucceeded = rep.testSucceededEventsReceived
assert(testSucceeded.size === 1)
val recordedEvents = testSucceeded(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && !ip.aboutAPendingTest.get)
assert(ip.aboutACanceledTest.isDefined && !ip.aboutACanceledTest.get)
}
val so = rep.scopeOpenedEventsReceived
assert(so.size === 1)
for (event <- so) {
assert(event.message == "A FreeSpec")
}
val sc = rep.scopeClosedEventsReceived
assert(so.size === 1)
for (event <- sc) {
assert(event.message == "A FreeSpec")
}
}
it("should send InfoProvided events with aboutAPendingTest set to false and aboutACanceledTest set to true for info " +
"calls made from a test that is pending and not canceled") {
val a = new FreeSpec with GivenWhenThen {
type FixtureParam = String
val hello = "Hello, world!"
def withFixture(test: OneArgTest): Outcome = {
test(hello)
}
"A FreeSpec" - {
"should do something" in { s =>
given("two integers")
when("one is subracted from the other")
then("the result is the difference between the two numbers")
cancel()
}
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testCanceled = rep.testCanceledEventsReceived
assert(testCanceled.size === 1)
val recordedEvents = testCanceled(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && !ip.aboutAPendingTest.get)
assert(ip.aboutACanceledTest.isDefined && ip.aboutACanceledTest.get)
}
val so = rep.scopeOpenedEventsReceived
assert(so.size === 1)
for (event <- so) {
assert(event.message == "A FreeSpec")
}
val sc = rep.scopeClosedEventsReceived
assert(so.size === 1)
for (event <- sc) {
assert(event.message == "A FreeSpec")
}
}
*/
it("should allow both tests that take fixtures and tests that don't") {
val a = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = {
test("Hello, world!")
}
var takesNoArgsInvoked = false
var takesAFixtureInvoked = false
"A FreeSpec" - {
"should take no args" in { () => takesNoArgsInvoked = true }
"should take a fixture" in { s => takesAFixtureInvoked = true }
}
}
import scala.language.reflectiveCalls
a.run(None, Args(SilentReporter))
assert(a.testNames.size === 2, a.testNames)
assert(a.takesNoArgsInvoked)
assert(a.takesAFixtureInvoked)
}
it("should work with test functions whose inferred result type is not Unit") {
val a = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = {
test("Hello, world!")
}
var takesNoArgsInvoked = false
var takesAFixtureInvoked = false
"A FreeSpec" - {
"should take no args" in { () => takesNoArgsInvoked = true; true }
"should take a fixture" in { s => takesAFixtureInvoked = true; true }
}
}
import scala.language.reflectiveCalls
assert(!a.takesNoArgsInvoked)
assert(!a.takesAFixtureInvoked)
a.run(None, Args(SilentReporter))
assert(a.testNames.size === 2, a.testNames)
assert(a.takesNoArgsInvoked)
assert(a.takesAFixtureInvoked)
}
it("should work with ignored tests whose inferred result type is not Unit") {
val a = new FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
var takeNoArgsInvoked = false
var takeAFixtureInvoked = false
"A FreeSpec" - {
"should take no args" ignore { () => takeNoArgsInvoked = true; "hi" }
"should take a fixture" ignore { s => takeAFixtureInvoked = true; 42 }
}
}
import scala.language.reflectiveCalls
assert(!a.takeNoArgsInvoked)
assert(!a.takeAFixtureInvoked)
val reporter = new EventRecordingReporter
a.run(None, Args(reporter))
assert(reporter.testIgnoredEventsReceived.size === 2)
assert(!a.takeNoArgsInvoked)
assert(!a.takeAFixtureInvoked)
}
it("should pass a NoArgTest to withFixture for tests that take no fixture") {
class MySpec extends FreeSpec {
type FixtureParam = String
var aNoArgTestWasPassed = false
var aOneArgTestWasPassed = false
override def withFixture(test: NoArgTest): Outcome = {
aNoArgTestWasPassed = true
Succeeded
}
def withFixture(test: OneArgTest): Outcome = {
aOneArgTestWasPassed = true
Succeeded
}
"do something" in { () =>
assert(1 + 1 === 2)
}
}
val s = new MySpec
s.run(None, Args(SilentReporter))
assert(s.aNoArgTestWasPassed)
assert(!s.aOneArgTestWasPassed)
}
it("should not pass a NoArgTest to withFixture for tests that take a Fixture") {
class MySpec extends FreeSpec {
type FixtureParam = String
var aNoArgTestWasPassed = false
var aOneArgTestWasPassed = false
override def withFixture(test: NoArgTest): Outcome = {
aNoArgTestWasPassed = true
Succeeded
}
def withFixture(test: OneArgTest): Outcome = {
aOneArgTestWasPassed = true
Succeeded
}
"do something" in { fixture =>
assert(1 + 1 === 2)
}
}
val s = new MySpec
s.run(None, Args(SilentReporter))
assert(!s.aNoArgTestWasPassed)
assert(s.aOneArgTestWasPassed)
}
it("should pass a NoArgTest that invokes the no-arg test when the " +
"NoArgTest's no-arg apply method is invoked") {
class MySpec extends FreeSpec {
type FixtureParam = String
var theNoArgTestWasInvoked = false
def withFixture(test: OneArgTest): Outcome = {
// Shouldn't be called, but just in case don't invoke a OneArgTest
Succeeded
}
"do something" in { () =>
theNoArgTestWasInvoked = true
}
}
val s = new MySpec
s.run(None, Args(SilentReporter))
assert(s.theNoArgTestWasInvoked)
}
it("should pass the correct test name in the OneArgTest passed to withFixture") {
val a = new FreeSpec {
type FixtureParam = String
var correctTestNameWasPassed = false
def withFixture(test: OneArgTest): Outcome = {
correctTestNameWasPassed = test.name == "do something"
test("hi")
}
"do something" in { fixture => }
}
import scala.language.reflectiveCalls
a.run(None, Args(SilentReporter))
assert(a.correctTestNameWasPassed)
}
it("should pass the correct config map in the OneArgTest passed to withFixture") {
val a = new FreeSpec {
type FixtureParam = String
var correctConfigMapWasPassed = false
def withFixture(test: OneArgTest): Outcome = {
correctConfigMapWasPassed = (test.configMap == ConfigMap("hi" -> 7))
test("hi")
}
"do something" in { fixture => }
}
import scala.language.reflectiveCalls
a.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap("hi" -> 7), None, new Tracker(), Set.empty))
assert(a.correctConfigMapWasPassed)
}
describe("(when a nesting rule has been violated)") {
it("should, if they call a - from within an in clause, result in a TestFailedException when running the test") {
class MySpec extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"should blow up" in { fixture =>
"in the wrong place, at the wrong time" - {
}
}
}
val spec = new MySpec
        ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \"-\" clause may not appear inside an \"in\" clause")
}
it("should, if they call a - with a nested in from within an in clause, result in a TestFailedException when running the test") {
class MySpec extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"should blow up" in { fixture =>
"in the wrong place, at the wrong time" - {
"should never run" in { fixture =>
assert(1 === 1)
}
}
}
}
val spec = new MySpec
ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \\"-\\" clause may not appear inside an \\"in\\" clause")
}
it("should, if they call a nested it from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"should blow up" in { fixture =>
"should never run" in { fixture =>
assert(1 === 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a nested it with tags from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"should blow up" in { fixture =>
"should never run" taggedAs(mytags.SlowAsMolasses) in { fixture =>
assert(1 == 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a nested registerTest with tags from within a registerTest clause, result in a TestFailedException when running the test") {
class MySpec extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("should blow up") { fixture =>
registerTest("should never run", mytags.SlowAsMolasses) { fixture =>
assert(1 == 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a describe with a nested ignore from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"should blow up" in { fixture =>
"in the wrong place, at the wrong time" - {
"should never run" ignore { fixture =>
assert(1 === 1)
}
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a nested ignore from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"should blow up" in { fixture =>
"should never run" ignore { fixture =>
assert(1 === 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a nested ignore with tags from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"should blow up" in { fixture =>
"should never run" taggedAs(mytags.SlowAsMolasses) ignore { fixture =>
assert(1 == 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a nested registerIgnoredTest with tags from within a registerTest clause, result in a TestFailedException when running the test") {
class MySpec extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
registerTest("should blow up") { fixture =>
registerIgnoredTest("should never run", mytags.SlowAsMolasses) { fixture =>
assert(1 == 1)
}
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
}
it("should allow test registration with registerTest and registerIgnoredTest") {
class TestSpec extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
val a = 1
registerTest("test 1") { fixture =>
val e = intercept[TestFailedException] {
assert(a == 2)
}
assert(e.message == Some("1 did not equal 2"))
assert(e.failedCodeFileName == Some("FreeSpecSpec.scala"))
assert(e.failedCodeLineNumber == Some(thisLineNumber - 4))
}
registerTest("test 2") { fixture =>
assert(a == 2)
}
registerTest("test 3") { fixture =>
pending
}
registerTest("test 4") { fixture =>
cancel
}
registerIgnoredTest("test 5") { fixture =>
assert(a == 2)
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
}
}
describe("when failure happens") {
it("should fire TestFailed event with correct stack depth info when test failed") {
class TestSpec extends FreeSpec {
type FixtureParam = String
def withFixture(test: OneArgTest): Outcome = { test("hi") }
"fail scenario" in { fixture =>
assert(1 === 2)
}
"a feature" - {
"nested fail scenario" in { fixture =>
assert(1 === 2)
}
}
}
val rep = new EventRecordingReporter
val s1 = new TestSpec
s1.run(None, Args(rep))
assert(rep.testFailedEventsReceived.size === 2)
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FreeSpecSpec.scala")
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 13)
assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "FreeSpecSpec.scala")
assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 11)
}
it("should generate TestRegistrationClosedException with correct stack depth info when has an in nested inside an in") {
class TestSpec extends FreeSpec {
type FixtureParam = String
var registrationClosedThrown = false
"a feature" - {
"a scenario" in { fixture =>
"nested scenario" in { fixture =>
}
}
}
override def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("hi")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FreeSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("An in clause may not appear inside another in clause."))
}
it("should generate TestRegistrationClosedException with correct stack depth info when has an ignore nested inside an in") {
class TestSpec extends FreeSpec {
type FixtureParam = String
var registrationClosedThrown = false
"a feature" - {
"a scenario" in { fixture =>
"nested scenario" ignore { fixture =>
}
}
}
override def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("hi")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FreeSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("An ignore clause may not appear inside an in clause."))
}
it("should generate TestRegistrationClosedException with correct stack depth info when has a registerTest nested inside a registerTest") {
class TestSpec extends FreeSpec {
type FixtureParam = String
var registrationClosedThrown = false
"a feature" - {
registerTest("a scenario") { fixture =>
registerTest("nested scenario") { fixture =>
}
}
}
override def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("hi")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FreeSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("Test cannot be nested inside another test."))
}
it("should generate TestRegistrationClosedException with correct stack depth info when has a registerIgnoredTest nested inside a registerTest") {
class TestSpec extends FreeSpec {
type FixtureParam = String
var registrationClosedThrown = false
"a feature" - {
registerTest("a scenario") { fixture =>
registerIgnoredTest("nested scenario") { fixture =>
}
}
}
override def withFixture(test: OneArgTest): Outcome = {
val outcome = test.apply("hi")
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("FreeSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("Test cannot be nested inside another test."))
}
}
}
| travisbrown/scalatest | src/test/scala/org/scalatest/fixture/FreeSpecSpec.scala | Scala | apache-2.0 | 67,742 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import java.nio.ByteBuffer
import java.text.SimpleDateFormat
import java.util.{Date, HashMap => JHashMap}
import scala.collection.{Map, mutable}
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
import scala.util.DynamicVariable
import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus
import org.apache.hadoop.conf.{Configurable, Configuration}
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.io.SequenceFile.CompressionType
import org.apache.hadoop.io.compress.CompressionCodec
import org.apache.hadoop.mapred.{FileOutputCommitter, FileOutputFormat, JobConf, OutputFormat}
import org.apache.hadoop.mapreduce.{Job => NewAPIHadoopJob, OutputFormat => NewOutputFormat,
RecordWriter => NewRecordWriter}
import org.apache.spark._
import org.apache.spark.Partitioner.defaultPartitioner
import org.apache.spark.annotation.Experimental
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.executor.{DataWriteMethod, OutputMetrics}
import org.apache.spark.mapreduce.SparkHadoopMapReduceUtil
import org.apache.spark.partial.{BoundedDouble, PartialResult}
import org.apache.spark.serializer.Serializer
import org.apache.spark.util.{SerializableConfiguration, Utils}
import org.apache.spark.util.collection.CompactBuffer
import org.apache.spark.util.random.StratifiedSamplingUtils
/**
* Extra functions available on RDDs of (key, value) pairs through an implicit conversion.
*/
class PairRDDFunctions[K, V](self: RDD[(K, V)])
(implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null)
extends Logging
with SparkHadoopMapReduceUtil
with Serializable
{
/**
* :: Experimental ::
* Generic function to combine the elements for each key using a custom set of aggregation
* functions. Turns an RDD[(K, V)] into a result of type RDD[(K, C)], for a "combined type" C
* Note that V and C can be different -- for example, one might group an RDD of type
* (Int, Int) into an RDD of type (Int, Seq[Int]). Users provide three functions:
*
* - `createCombiner`, which turns a V into a C (e.g., creates a one-element list)
* - `mergeValue`, to merge a V into a C (e.g., adds it to the end of a list)
* - `mergeCombiners`, to combine two C's into a single one.
*
* In addition, users can control the partitioning of the output RDD, and whether to perform
* map-side aggregation (if a mapper can produce multiple items with the same key).
*/
@Experimental
def combineByKeyWithClassTag[C](
createCombiner: V => C,
mergeValue: (C, V) => C,
mergeCombiners: (C, C) => C,
partitioner: Partitioner,
mapSideCombine: Boolean = true,
serializer: Serializer = null)(implicit ct: ClassTag[C]): RDD[(K, C)] = self.withScope {
require(mergeCombiners != null, "mergeCombiners must be defined") // required as of Spark 0.9.0
if (keyClass.isArray) {
if (mapSideCombine) {
throw new SparkException("Cannot use map-side combining with array keys.")
}
if (partitioner.isInstanceOf[HashPartitioner]) {
throw new SparkException("Default partitioner cannot partition array keys.")
}
}
val aggregator = new Aggregator[K, V, C](
self.context.clean(createCombiner),
self.context.clean(mergeValue),
self.context.clean(mergeCombiners))
if (self.partitioner == Some(partitioner)) {
self.mapPartitions(iter => {
val context = TaskContext.get()
new InterruptibleIterator(context, aggregator.combineValuesByKey(iter, context))
}, preservesPartitioning = true)
} else {
new ShuffledRDD[K, V, C](self, partitioner)
.setSerializer(serializer)
.setAggregator(aggregator)
.setMapSideCombine(mapSideCombine)
}
}
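  // Illustrative usage sketch (not part of the original source): collecting the values for each
  // key into a List with combineByKeyWithClassTag. The SparkContext `sc` and the sample data are
  // assumptions made for the example.
  //
  //   val pairs: RDD[(String, Int)] = sc.parallelize(Seq(("a", 1), ("a", 2), ("b", 3)))
  //   val byKey: RDD[(String, List[Int])] = pairs.combineByKeyWithClassTag[List[Int]](
  //     (v: Int) => List(v),                      // createCombiner: start a one-element list
  //     (acc: List[Int], v: Int) => v :: acc,     // mergeValue: fold a value in within a partition
  //     (a: List[Int], b: List[Int]) => a ::: b,  // mergeCombiners: merge lists across partitions
  //     new HashPartitioner(2))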
/**
* Generic function to combine the elements for each key using a custom set of aggregation
* functions. This method is here for backward compatibility. It does not provide combiner
* classtag information to the shuffle.
*
* @see [[combineByKeyWithClassTag]]
*/
def combineByKey[C](
createCombiner: V => C,
mergeValue: (C, V) => C,
mergeCombiners: (C, C) => C,
partitioner: Partitioner,
mapSideCombine: Boolean = true,
serializer: Serializer = null): RDD[(K, C)] = self.withScope {
combineByKeyWithClassTag(createCombiner, mergeValue, mergeCombiners,
partitioner, mapSideCombine, serializer)(null)
}
/**
* Simplified version of combineByKeyWithClassTag that hash-partitions the output RDD.
* This method is here for backward compatibility. It does not provide combiner
* classtag information to the shuffle.
*
* @see [[combineByKeyWithClassTag]]
*/
def combineByKey[C](
createCombiner: V => C,
mergeValue: (C, V) => C,
mergeCombiners: (C, C) => C,
numPartitions: Int): RDD[(K, C)] = self.withScope {
combineByKeyWithClassTag(createCombiner, mergeValue, mergeCombiners, numPartitions)(null)
}
/**
* :: Experimental ::
* Simplified version of combineByKeyWithClassTag that hash-partitions the output RDD.
*/
@Experimental
def combineByKeyWithClassTag[C](
createCombiner: V => C,
mergeValue: (C, V) => C,
mergeCombiners: (C, C) => C,
numPartitions: Int)(implicit ct: ClassTag[C]): RDD[(K, C)] = self.withScope {
combineByKeyWithClassTag(createCombiner, mergeValue, mergeCombiners,
new HashPartitioner(numPartitions))
}
/**
* Aggregate the values of each key, using given combine functions and a neutral "zero value".
* This function can return a different result type, U, than the type of the values in this RDD,
* V. Thus, we need one operation for merging a V into a U and one operation for merging two U's,
* as in scala.TraversableOnce. The former operation is used for merging values within a
* partition, and the latter is used for merging values between partitions. To avoid memory
* allocation, both of these functions are allowed to modify and return their first argument
* instead of creating a new U.
*/
def aggregateByKey[U: ClassTag](zeroValue: U, partitioner: Partitioner)(seqOp: (U, V) => U,
combOp: (U, U) => U): RDD[(K, U)] = self.withScope {
// Serialize the zero value to a byte array so that we can get a new clone of it on each key
val zeroBuffer = SparkEnv.get.serializer.newInstance().serialize(zeroValue)
val zeroArray = new Array[Byte](zeroBuffer.limit)
zeroBuffer.get(zeroArray)
lazy val cachedSerializer = SparkEnv.get.serializer.newInstance()
val createZero = () => cachedSerializer.deserialize[U](ByteBuffer.wrap(zeroArray))
// We will clean the combiner closure later in `combineByKey`
val cleanedSeqOp = self.context.clean(seqOp)
combineByKeyWithClassTag[U]((v: V) => cleanedSeqOp(createZero(), v),
cleanedSeqOp, combOp, partitioner)
}
/**
* Aggregate the values of each key, using given combine functions and a neutral "zero value".
* This function can return a different result type, U, than the type of the values in this RDD,
* V. Thus, we need one operation for merging a V into a U and one operation for merging two U's,
* as in scala.TraversableOnce. The former operation is used for merging values within a
* partition, and the latter is used for merging values between partitions. To avoid memory
* allocation, both of these functions are allowed to modify and return their first argument
* instead of creating a new U.
*/
def aggregateByKey[U: ClassTag](zeroValue: U, numPartitions: Int)(seqOp: (U, V) => U,
combOp: (U, U) => U): RDD[(K, U)] = self.withScope {
aggregateByKey(zeroValue, new HashPartitioner(numPartitions))(seqOp, combOp)
}
/**
* Aggregate the values of each key, using given combine functions and a neutral "zero value".
* This function can return a different result type, U, than the type of the values in this RDD,
* V. Thus, we need one operation for merging a V into a U and one operation for merging two U's,
* as in scala.TraversableOnce. The former operation is used for merging values within a
* partition, and the latter is used for merging values between partitions. To avoid memory
* allocation, both of these functions are allowed to modify and return their first argument
* instead of creating a new U.
*/
def aggregateByKey[U: ClassTag](zeroValue: U)(seqOp: (U, V) => U,
combOp: (U, U) => U): RDD[(K, U)] = self.withScope {
aggregateByKey(zeroValue, defaultPartitioner(self))(seqOp, combOp)
}
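  // Illustrative usage sketch (not part of the original source): per-key averages via
  // aggregateByKey, accumulating a (sum, count) pair whose type differs from the value type.
  // The SparkContext `sc` and the sample data are assumptions.
  //
  //   val scores: RDD[(String, Double)] = sc.parallelize(Seq(("a", 1.0), ("a", 3.0), ("b", 4.0)))
  //   val sumCount: RDD[(String, (Double, Long))] = scores.aggregateByKey((0.0, 0L))(
  //     (acc, v) => (acc._1 + v, acc._2 + 1L),    // seqOp: merge a value into the accumulator
  //     (x, y) => (x._1 + y._1, x._2 + y._2))     // combOp: merge accumulators across partitions
  //   val averages: RDD[(String, Double)] = sumCount.mapValues { case (sum, n) => sum / n }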
/**
* Merge the values for each key using an associative function and a neutral "zero value" which
* may be added to the result an arbitrary number of times, and must not change the result
* (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication.).
*/
def foldByKey(
zeroValue: V,
partitioner: Partitioner)(func: (V, V) => V): RDD[(K, V)] = self.withScope {
// Serialize the zero value to a byte array so that we can get a new clone of it on each key
val zeroBuffer = SparkEnv.get.serializer.newInstance().serialize(zeroValue)
val zeroArray = new Array[Byte](zeroBuffer.limit)
zeroBuffer.get(zeroArray)
// When deserializing, use a lazy val to create just one instance of the serializer per task
lazy val cachedSerializer = SparkEnv.get.serializer.newInstance()
val createZero = () => cachedSerializer.deserialize[V](ByteBuffer.wrap(zeroArray))
val cleanedFunc = self.context.clean(func)
combineByKeyWithClassTag[V]((v: V) => cleanedFunc(createZero(), v),
cleanedFunc, cleanedFunc, partitioner)
}
/**
* Merge the values for each key using an associative function and a neutral "zero value" which
* may be added to the result an arbitrary number of times, and must not change the result
* (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication.).
*/
def foldByKey(zeroValue: V, numPartitions: Int)(func: (V, V) => V): RDD[(K, V)] = self.withScope {
foldByKey(zeroValue, new HashPartitioner(numPartitions))(func)
}
/**
* Merge the values for each key using an associative function and a neutral "zero value" which
* may be added to the result an arbitrary number of times, and must not change the result
* (e.g., Nil for list concatenation, 0 for addition, or 1 for multiplication.).
*/
def foldByKey(zeroValue: V)(func: (V, V) => V): RDD[(K, V)] = self.withScope {
foldByKey(zeroValue, defaultPartitioner(self))(func)
}
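  // Illustrative usage sketch (not part of the original source): foldByKey with 0 as the neutral
  // element sums the values per key. The SparkContext `sc` and the data are assumptions.
  //
  //   val pairs: RDD[(String, Int)] = sc.parallelize(Seq(("a", 1), ("a", 2), ("b", 3)))
  //   val sums: RDD[(String, Int)] = pairs.foldByKey(0)(_ + _)   // ("a", 3), ("b", 3)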
/**
* Return a subset of this RDD sampled by key (via stratified sampling).
*
* Create a sample of this RDD using variable sampling rates for different keys as specified by
* `fractions`, a key to sampling rate map, via simple random sampling with one pass over the
* RDD, to produce a sample of size that's approximately equal to the sum of
* math.ceil(numItems * samplingRate) over all key values.
*
* @param withReplacement whether to sample with or without replacement
* @param fractions map of specific keys to sampling rates
* @param seed seed for the random number generator
* @return RDD containing the sampled subset
*/
def sampleByKey(withReplacement: Boolean,
fractions: Map[K, Double],
seed: Long = Utils.random.nextLong): RDD[(K, V)] = self.withScope {
require(fractions.values.forall(v => v >= 0.0), "Negative sampling rates.")
val samplingFunc = if (withReplacement) {
StratifiedSamplingUtils.getPoissonSamplingFunction(self, fractions, false, seed)
} else {
StratifiedSamplingUtils.getBernoulliSamplingFunction(self, fractions, false, seed)
}
self.mapPartitionsWithIndex(samplingFunc, preservesPartitioning = true)
}
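  // Illustrative usage sketch (not part of the original source): stratified sampling with a
  // different sampling rate per key. The input data and the rates are assumptions.
  //
  //   val events: RDD[(String, Int)] = sc.parallelize(Seq(("common", 1), ("rare", 2)))
  //   val fractions = Map("common" -> 0.01, "rare" -> 1.0)   // keep ~1% of "common", all of "rare"
  //   val sample: RDD[(String, Int)] = events.sampleByKey(false, fractions, seed = 7L)  // without replacement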
/**
* ::Experimental::
* Return a subset of this RDD sampled by key (via stratified sampling) containing exactly
* math.ceil(numItems * samplingRate) for each stratum (group of pairs with the same key).
*
* This method differs from [[sampleByKey]] in that we make additional passes over the RDD to
* create a sample size that's exactly equal to the sum of math.ceil(numItems * samplingRate)
* over all key values with a 99.99% confidence. When sampling without replacement, we need one
* additional pass over the RDD to guarantee sample size; when sampling with replacement, we need
* two additional passes.
*
* @param withReplacement whether to sample with or without replacement
* @param fractions map of specific keys to sampling rates
* @param seed seed for the random number generator
* @return RDD containing the sampled subset
*/
@Experimental
def sampleByKeyExact(
withReplacement: Boolean,
fractions: Map[K, Double],
seed: Long = Utils.random.nextLong): RDD[(K, V)] = self.withScope {
require(fractions.values.forall(v => v >= 0.0), "Negative sampling rates.")
val samplingFunc = if (withReplacement) {
StratifiedSamplingUtils.getPoissonSamplingFunction(self, fractions, true, seed)
} else {
StratifiedSamplingUtils.getBernoulliSamplingFunction(self, fractions, true, seed)
}
self.mapPartitionsWithIndex(samplingFunc, preservesPartitioning = true)
}
/**
* Merge the values for each key using an associative reduce function. This will also perform
* the merging locally on each mapper before sending results to a reducer, similarly to a
* "combiner" in MapReduce.
*/
def reduceByKey(partitioner: Partitioner, func: (V, V) => V): RDD[(K, V)] = self.withScope {
combineByKeyWithClassTag[V]((v: V) => v, func, func, partitioner)
}
/**
* Merge the values for each key using an associative reduce function. This will also perform
* the merging locally on each mapper before sending results to a reducer, similarly to a
* "combiner" in MapReduce. Output will be hash-partitioned with numPartitions partitions.
*/
def reduceByKey(func: (V, V) => V, numPartitions: Int): RDD[(K, V)] = self.withScope {
reduceByKey(new HashPartitioner(numPartitions), func)
}
/**
* Merge the values for each key using an associative reduce function. This will also perform
* the merging locally on each mapper before sending results to a reducer, similarly to a
* "combiner" in MapReduce. Output will be hash-partitioned with the existing partitioner/
* parallelism level.
*/
def reduceByKey(func: (V, V) => V): RDD[(K, V)] = self.withScope {
reduceByKey(defaultPartitioner(self), func)
}
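  // Illustrative usage sketch (not part of the original source): the classic word count with
  // reduceByKey; the input path is a placeholder.
  //
  //   val counts: RDD[(String, Int)] = sc.textFile("hdfs:///data/input.txt")
  //     .flatMap(_.split("\\s+"))
  //     .map(word => (word, 1))
  //     .reduceByKey(_ + _)                       // partial sums are computed map-side first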
/**
* Merge the values for each key using an associative reduce function, but return the results
* immediately to the master as a Map. This will also perform the merging locally on each mapper
* before sending results to a reducer, similarly to a "combiner" in MapReduce.
*/
def reduceByKeyLocally(func: (V, V) => V): Map[K, V] = self.withScope {
val cleanedF = self.sparkContext.clean(func)
if (keyClass.isArray) {
throw new SparkException("reduceByKeyLocally() does not support array keys")
}
val reducePartition = (iter: Iterator[(K, V)]) => {
val map = new JHashMap[K, V]
iter.foreach { pair =>
val old = map.get(pair._1)
map.put(pair._1, if (old == null) pair._2 else cleanedF(old, pair._2))
}
Iterator(map)
} : Iterator[JHashMap[K, V]]
val mergeMaps = (m1: JHashMap[K, V], m2: JHashMap[K, V]) => {
m2.asScala.foreach { pair =>
val old = m1.get(pair._1)
m1.put(pair._1, if (old == null) pair._2 else cleanedF(old, pair._2))
}
m1
} : JHashMap[K, V]
self.mapPartitions(reducePartition).reduce(mergeMaps).asScala
}
/** Alias for reduceByKeyLocally */
@deprecated("Use reduceByKeyLocally", "1.0.0")
def reduceByKeyToDriver(func: (V, V) => V): Map[K, V] = self.withScope {
reduceByKeyLocally(func)
}
/**
* Count the number of elements for each key, collecting the results to a local Map.
*
* Note that this method should only be used if the resulting map is expected to be small, as
* the whole thing is loaded into the driver's memory.
* To handle very large results, consider using rdd.mapValues(_ => 1L).reduceByKey(_ + _), which
   * returns an RDD[(K, Long)] instead of a map.
*/
def countByKey(): Map[K, Long] = self.withScope {
self.mapValues(_ => 1L).reduceByKey(_ + _).collect().toMap
}
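  // Illustrative usage sketch (not part of the original source): countByKey for small result
  // sets versus the distributed alternative mentioned above. `pairs` is an assumed RDD[(String, Int)].
  //
  //   val small: Map[String, Long] = pairs.countByKey()                             // collected to the driver
  //   val large: RDD[(String, Long)] = pairs.mapValues(_ => 1L).reduceByKey(_ + _)  // stays distributed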
/**
* :: Experimental ::
* Approximate version of countByKey that can return a partial result if it does
* not finish within a timeout.
*/
@Experimental
def countByKeyApprox(timeout: Long, confidence: Double = 0.95)
: PartialResult[Map[K, BoundedDouble]] = self.withScope {
self.map(_._1).countByValueApprox(timeout, confidence)
}
/**
* :: Experimental ::
*
* Return approximate number of distinct values for each key in this RDD.
*
* The algorithm used is based on streamlib's implementation of "HyperLogLog in Practice:
* Algorithmic Engineering of a State of The Art Cardinality Estimation Algorithm", available
* <a href="http://dx.doi.org/10.1145/2452376.2452456">here</a>.
*
* The relative accuracy is approximately `1.054 / sqrt(2^p)`. Setting a nonzero `sp > p`
* would trigger sparse representation of registers, which may reduce the memory consumption
* and increase accuracy when the cardinality is small.
*
* @param p The precision value for the normal set.
* `p` must be a value between 4 and `sp` if `sp` is not zero (32 max).
* @param sp The precision value for the sparse set, between 0 and 32.
* If `sp` equals 0, the sparse representation is skipped.
* @param partitioner Partitioner to use for the resulting RDD.
*/
@Experimental
def countApproxDistinctByKey(
p: Int,
sp: Int,
partitioner: Partitioner): RDD[(K, Long)] = self.withScope {
require(p >= 4, s"p ($p) must be >= 4")
require(sp <= 32, s"sp ($sp) must be <= 32")
require(sp == 0 || p <= sp, s"p ($p) cannot be greater than sp ($sp)")
val createHLL = (v: V) => {
val hll = new HyperLogLogPlus(p, sp)
hll.offer(v)
hll
}
val mergeValueHLL = (hll: HyperLogLogPlus, v: V) => {
hll.offer(v)
hll
}
val mergeHLL = (h1: HyperLogLogPlus, h2: HyperLogLogPlus) => {
h1.addAll(h2)
h1
}
combineByKeyWithClassTag(createHLL, mergeValueHLL, mergeHLL, partitioner)
.mapValues(_.cardinality())
}
/**
* Return approximate number of distinct values for each key in this RDD.
*
* The algorithm used is based on streamlib's implementation of "HyperLogLog in Practice:
* Algorithmic Engineering of a State of The Art Cardinality Estimation Algorithm", available
* <a href="http://dx.doi.org/10.1145/2452376.2452456">here</a>.
*
* @param relativeSD Relative accuracy. Smaller values create counters that require more space.
* It must be greater than 0.000017.
* @param partitioner partitioner of the resulting RDD
*/
def countApproxDistinctByKey(
relativeSD: Double,
partitioner: Partitioner): RDD[(K, Long)] = self.withScope {
require(relativeSD > 0.000017, s"accuracy ($relativeSD) must be greater than 0.000017")
val p = math.ceil(2.0 * math.log(1.054 / relativeSD) / math.log(2)).toInt
assert(p <= 32)
countApproxDistinctByKey(if (p < 4) 4 else p, 0, partitioner)
}
/**
* Return approximate number of distinct values for each key in this RDD.
*
* The algorithm used is based on streamlib's implementation of "HyperLogLog in Practice:
* Algorithmic Engineering of a State of The Art Cardinality Estimation Algorithm", available
* <a href="http://dx.doi.org/10.1145/2452376.2452456">here</a>.
*
* @param relativeSD Relative accuracy. Smaller values create counters that require more space.
* It must be greater than 0.000017.
* @param numPartitions number of partitions of the resulting RDD
*/
def countApproxDistinctByKey(
relativeSD: Double,
numPartitions: Int): RDD[(K, Long)] = self.withScope {
countApproxDistinctByKey(relativeSD, new HashPartitioner(numPartitions))
}
/**
* Return approximate number of distinct values for each key in this RDD.
*
* The algorithm used is based on streamlib's implementation of "HyperLogLog in Practice:
* Algorithmic Engineering of a State of The Art Cardinality Estimation Algorithm", available
* <a href="http://dx.doi.org/10.1145/2452376.2452456">here</a>.
*
* @param relativeSD Relative accuracy. Smaller values create counters that require more space.
* It must be greater than 0.000017.
*/
def countApproxDistinctByKey(relativeSD: Double = 0.05): RDD[(K, Long)] = self.withScope {
countApproxDistinctByKey(relativeSD, defaultPartitioner(self))
}
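  // Illustrative usage sketch (not part of the original source): approximate distinct users per
  // site with a 1% relative error target; the input data is an assumption.
  //
  //   val visits: RDD[(String, String)] =
  //     sc.parallelize(Seq(("site1", "userA"), ("site1", "userB"), ("site1", "userA")))
  //   val approxUsers: RDD[(String, Long)] = visits.countApproxDistinctByKey(relativeSD = 0.01)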
/**
* Group the values for each key in the RDD into a single sequence. Allows controlling the
* partitioning of the resulting key-value pair RDD by passing a Partitioner.
* The ordering of elements within each group is not guaranteed, and may even differ
* each time the resulting RDD is evaluated.
*
* Note: This operation may be very expensive. If you are grouping in order to perform an
* aggregation (such as a sum or average) over each key, using [[PairRDDFunctions.aggregateByKey]]
* or [[PairRDDFunctions.reduceByKey]] will provide much better performance.
*
* Note: As currently implemented, groupByKey must be able to hold all the key-value pairs for any
* key in memory. If a key has too many values, it can result in an [[OutOfMemoryError]].
*/
def groupByKey(partitioner: Partitioner): RDD[(K, Iterable[V])] = self.withScope {
// groupByKey shouldn't use map side combine because map side combine does not
// reduce the amount of data shuffled and requires all map side data be inserted
// into a hash table, leading to more objects in the old gen.
val createCombiner = (v: V) => CompactBuffer(v)
val mergeValue = (buf: CompactBuffer[V], v: V) => buf += v
val mergeCombiners = (c1: CompactBuffer[V], c2: CompactBuffer[V]) => c1 ++= c2
val bufs = combineByKeyWithClassTag[CompactBuffer[V]](
createCombiner, mergeValue, mergeCombiners, partitioner, mapSideCombine = false)
bufs.asInstanceOf[RDD[(K, Iterable[V])]]
}
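  // Illustrative usage sketch (not part of the original source): grouping values per key.
  // As noted above, prefer reduceByKey/aggregateByKey when the goal is an aggregation.
  // `pairs` is an assumed RDD[(String, Int)].
  //
  //   val grouped: RDD[(String, Iterable[Int])] = pairs.groupByKey(new HashPartitioner(4))
  //   val summed: RDD[(String, Int)] = pairs.reduceByKey(_ + _)   // cheaper when only a sum is needed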
/**
* Group the values for each key in the RDD into a single sequence. Hash-partitions the
   * resulting RDD into `numPartitions` partitions. The ordering of elements within
* each group is not guaranteed, and may even differ each time the resulting RDD is evaluated.
*
* Note: This operation may be very expensive. If you are grouping in order to perform an
* aggregation (such as a sum or average) over each key, using [[PairRDDFunctions.aggregateByKey]]
* or [[PairRDDFunctions.reduceByKey]] will provide much better performance.
*
* Note: As currently implemented, groupByKey must be able to hold all the key-value pairs for any
* key in memory. If a key has too many values, it can result in an [[OutOfMemoryError]].
*/
def groupByKey(numPartitions: Int): RDD[(K, Iterable[V])] = self.withScope {
groupByKey(new HashPartitioner(numPartitions))
}
/**
* Return a copy of the RDD partitioned using the specified partitioner.
*/
def partitionBy(partitioner: Partitioner): RDD[(K, V)] = self.withScope {
if (keyClass.isArray && partitioner.isInstanceOf[HashPartitioner]) {
throw new SparkException("Default partitioner cannot partition array keys.")
}
if (self.partitioner == Some(partitioner)) {
self
} else {
new ShuffledRDD[K, V, V](self, partitioner)
}
}
/**
* Return an RDD containing all pairs of elements with matching keys in `this` and `other`. Each
* pair of elements will be returned as a (k, (v1, v2)) tuple, where (k, v1) is in `this` and
* (k, v2) is in `other`. Uses the given Partitioner to partition the output RDD.
*/
def join[W](other: RDD[(K, W)], partitioner: Partitioner): RDD[(K, (V, W))] = self.withScope {
this.cogroup(other, partitioner).flatMapValues( pair =>
for (v <- pair._1.iterator; w <- pair._2.iterator) yield (v, w)
)
}
/**
* Perform a left outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (v, Some(w))) for w in `other`, or the
* pair (k, (v, None)) if no elements in `other` have key k. Uses the given Partitioner to
* partition the output RDD.
*/
def leftOuterJoin[W](
other: RDD[(K, W)],
partitioner: Partitioner): RDD[(K, (V, Option[W]))] = self.withScope {
this.cogroup(other, partitioner).flatMapValues { pair =>
if (pair._2.isEmpty) {
pair._1.iterator.map(v => (v, None))
} else {
for (v <- pair._1.iterator; w <- pair._2.iterator) yield (v, Some(w))
}
}
}
/**
* Perform a right outer join of `this` and `other`. For each element (k, w) in `other`, the
* resulting RDD will either contain all pairs (k, (Some(v), w)) for v in `this`, or the
* pair (k, (None, w)) if no elements in `this` have key k. Uses the given Partitioner to
* partition the output RDD.
*/
def rightOuterJoin[W](other: RDD[(K, W)], partitioner: Partitioner)
: RDD[(K, (Option[V], W))] = self.withScope {
this.cogroup(other, partitioner).flatMapValues { pair =>
if (pair._1.isEmpty) {
pair._2.iterator.map(w => (None, w))
} else {
for (v <- pair._1.iterator; w <- pair._2.iterator) yield (Some(v), w)
}
}
}
/**
* Perform a full outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (Some(v), Some(w))) for w in `other`, or
* the pair (k, (Some(v), None)) if no elements in `other` have key k. Similarly, for each
* element (k, w) in `other`, the resulting RDD will either contain all pairs
* (k, (Some(v), Some(w))) for v in `this`, or the pair (k, (None, Some(w))) if no elements
* in `this` have key k. Uses the given Partitioner to partition the output RDD.
*/
def fullOuterJoin[W](other: RDD[(K, W)], partitioner: Partitioner)
: RDD[(K, (Option[V], Option[W]))] = self.withScope {
this.cogroup(other, partitioner).flatMapValues {
case (vs, Seq()) => vs.iterator.map(v => (Some(v), None))
case (Seq(), ws) => ws.iterator.map(w => (None, Some(w)))
case (vs, ws) => for (v <- vs.iterator; w <- ws.iterator) yield (Some(v), Some(w))
}
}
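  // Illustrative usage sketch (not part of the original source): how the join variants differ on
  // keys present in only one side. The data and the (conceptual) results are assumptions.
  //
  //   val left  = sc.parallelize(Seq(("a", 1), ("b", 2)))
  //   val right = sc.parallelize(Seq(("a", "x"), ("c", "y")))
  //   left.join(right)           // ("a", (1, "x")) only
  //   left.leftOuterJoin(right)  // ("a", (1, Some("x"))), ("b", (2, None))
  //   left.fullOuterJoin(right)  // adds ("c", (None, Some("y"))) as well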
/**
* Simplified version of combineByKeyWithClassTag that hash-partitions the resulting RDD using the
* existing partitioner/parallelism level. This method is here for backward compatibility. It
* does not provide combiner classtag information to the shuffle.
*
* @see [[combineByKeyWithClassTag]]
*/
def combineByKey[C](
createCombiner: V => C,
mergeValue: (C, V) => C,
mergeCombiners: (C, C) => C): RDD[(K, C)] = self.withScope {
combineByKeyWithClassTag(createCombiner, mergeValue, mergeCombiners)(null)
}
/**
* :: Experimental ::
* Simplified version of combineByKeyWithClassTag that hash-partitions the resulting RDD using the
* existing partitioner/parallelism level.
*/
@Experimental
def combineByKeyWithClassTag[C](
createCombiner: V => C,
mergeValue: (C, V) => C,
mergeCombiners: (C, C) => C)(implicit ct: ClassTag[C]): RDD[(K, C)] = self.withScope {
combineByKeyWithClassTag(createCombiner, mergeValue, mergeCombiners, defaultPartitioner(self))
}
/**
* Group the values for each key in the RDD into a single sequence. Hash-partitions the
* resulting RDD with the existing partitioner/parallelism level. The ordering of elements
* within each group is not guaranteed, and may even differ each time the resulting RDD is
* evaluated.
*
* Note: This operation may be very expensive. If you are grouping in order to perform an
* aggregation (such as a sum or average) over each key, using [[PairRDDFunctions.aggregateByKey]]
* or [[PairRDDFunctions.reduceByKey]] will provide much better performance.
*/
def groupByKey(): RDD[(K, Iterable[V])] = self.withScope {
groupByKey(defaultPartitioner(self))
}
/**
* Return an RDD containing all pairs of elements with matching keys in `this` and `other`. Each
* pair of elements will be returned as a (k, (v1, v2)) tuple, where (k, v1) is in `this` and
* (k, v2) is in `other`. Performs a hash join across the cluster.
*/
def join[W](other: RDD[(K, W)]): RDD[(K, (V, W))] = self.withScope {
join(other, defaultPartitioner(self, other))
}
/**
* Return an RDD containing all pairs of elements with matching keys in `this` and `other`. Each
* pair of elements will be returned as a (k, (v1, v2)) tuple, where (k, v1) is in `this` and
* (k, v2) is in `other`. Performs a hash join across the cluster.
*/
def join[W](other: RDD[(K, W)], numPartitions: Int): RDD[(K, (V, W))] = self.withScope {
join(other, new HashPartitioner(numPartitions))
}
/**
* Perform a left outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (v, Some(w))) for w in `other`, or the
* pair (k, (v, None)) if no elements in `other` have key k. Hash-partitions the output
* using the existing partitioner/parallelism level.
*/
def leftOuterJoin[W](other: RDD[(K, W)]): RDD[(K, (V, Option[W]))] = self.withScope {
leftOuterJoin(other, defaultPartitioner(self, other))
}
/**
* Perform a left outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (v, Some(w))) for w in `other`, or the
* pair (k, (v, None)) if no elements in `other` have key k. Hash-partitions the output
* into `numPartitions` partitions.
*/
def leftOuterJoin[W](
other: RDD[(K, W)],
numPartitions: Int): RDD[(K, (V, Option[W]))] = self.withScope {
leftOuterJoin(other, new HashPartitioner(numPartitions))
}
/**
* Perform a right outer join of `this` and `other`. For each element (k, w) in `other`, the
* resulting RDD will either contain all pairs (k, (Some(v), w)) for v in `this`, or the
* pair (k, (None, w)) if no elements in `this` have key k. Hash-partitions the resulting
* RDD using the existing partitioner/parallelism level.
*/
def rightOuterJoin[W](other: RDD[(K, W)]): RDD[(K, (Option[V], W))] = self.withScope {
rightOuterJoin(other, defaultPartitioner(self, other))
}
/**
* Perform a right outer join of `this` and `other`. For each element (k, w) in `other`, the
* resulting RDD will either contain all pairs (k, (Some(v), w)) for v in `this`, or the
* pair (k, (None, w)) if no elements in `this` have key k. Hash-partitions the resulting
* RDD into the given number of partitions.
*/
def rightOuterJoin[W](
other: RDD[(K, W)],
numPartitions: Int): RDD[(K, (Option[V], W))] = self.withScope {
rightOuterJoin(other, new HashPartitioner(numPartitions))
}
/**
* Perform a full outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (Some(v), Some(w))) for w in `other`, or
* the pair (k, (Some(v), None)) if no elements in `other` have key k. Similarly, for each
* element (k, w) in `other`, the resulting RDD will either contain all pairs
* (k, (Some(v), Some(w))) for v in `this`, or the pair (k, (None, Some(w))) if no elements
* in `this` have key k. Hash-partitions the resulting RDD using the existing partitioner/
* parallelism level.
*/
def fullOuterJoin[W](other: RDD[(K, W)]): RDD[(K, (Option[V], Option[W]))] = self.withScope {
fullOuterJoin(other, defaultPartitioner(self, other))
}
/**
* Perform a full outer join of `this` and `other`. For each element (k, v) in `this`, the
* resulting RDD will either contain all pairs (k, (Some(v), Some(w))) for w in `other`, or
* the pair (k, (Some(v), None)) if no elements in `other` have key k. Similarly, for each
* element (k, w) in `other`, the resulting RDD will either contain all pairs
* (k, (Some(v), Some(w))) for v in `this`, or the pair (k, (None, Some(w))) if no elements
* in `this` have key k. Hash-partitions the resulting RDD into the given number of partitions.
*/
def fullOuterJoin[W](
other: RDD[(K, W)],
numPartitions: Int): RDD[(K, (Option[V], Option[W]))] = self.withScope {
fullOuterJoin(other, new HashPartitioner(numPartitions))
}
/**
* Return the key-value pairs in this RDD to the master as a Map.
*
* Warning: this doesn't return a multimap (so if you have multiple values to the same key, only
* one value per key is preserved in the map returned)
*/
def collectAsMap(): Map[K, V] = self.withScope {
val data = self.collect()
val map = new mutable.HashMap[K, V]
map.sizeHint(data.length)
data.foreach { pair => map.put(pair._1, pair._2) }
map
}
/**
* Pass each value in the key-value pair RDD through a map function without changing the keys;
* this also retains the original RDD's partitioning.
*/
def mapValues[U](f: V => U): RDD[(K, U)] = self.withScope {
val cleanF = self.context.clean(f)
new MapPartitionsRDD[(K, U), (K, V)](self,
(context, pid, iter) => iter.map { case (k, v) => (k, cleanF(v)) },
preservesPartitioning = true)
}
/**
* Pass each value in the key-value pair RDD through a flatMap function without changing the
* keys; this also retains the original RDD's partitioning.
*/
def flatMapValues[U](f: V => TraversableOnce[U]): RDD[(K, U)] = self.withScope {
val cleanF = self.context.clean(f)
new MapPartitionsRDD[(K, U), (K, V)](self,
(context, pid, iter) => iter.flatMap { case (k, v) =>
cleanF(v).map(x => (k, x))
},
preservesPartitioning = true)
}
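  // Illustrative usage sketch (not part of the original source): mapValues and flatMapValues keep
  // the keys (and the partitioning) intact; the input RDD is an assumption.
  //
  //   val kv: RDD[(Int, String)] = sc.parallelize(Seq(1 -> "ab", 2 -> "c"))
  //   val upper: RDD[(Int, String)] = kv.mapValues(_.toUpperCase)   // (1, "AB"), (2, "C")
  //   val chars: RDD[(Int, Char)]   = kv.flatMapValues(_.toSeq)     // (1, 'a'), (1, 'b'), (2, 'c')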
/**
* For each key k in `this` or `other1` or `other2` or `other3`,
* return a resulting RDD that contains a tuple with the list of values
* for that key in `this`, `other1`, `other2` and `other3`.
*/
def cogroup[W1, W2, W3](other1: RDD[(K, W1)],
other2: RDD[(K, W2)],
other3: RDD[(K, W3)],
partitioner: Partitioner)
: RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2], Iterable[W3]))] = self.withScope {
if (partitioner.isInstanceOf[HashPartitioner] && keyClass.isArray) {
throw new SparkException("Default partitioner cannot partition array keys.")
}
val cg = new CoGroupedRDD[K](Seq(self, other1, other2, other3), partitioner)
cg.mapValues { case Array(vs, w1s, w2s, w3s) =>
(vs.asInstanceOf[Iterable[V]],
w1s.asInstanceOf[Iterable[W1]],
w2s.asInstanceOf[Iterable[W2]],
w3s.asInstanceOf[Iterable[W3]])
}
}
/**
* For each key k in `this` or `other`, return a resulting RDD that contains a tuple with the
* list of values for that key in `this` as well as `other`.
*/
def cogroup[W](other: RDD[(K, W)], partitioner: Partitioner)
: RDD[(K, (Iterable[V], Iterable[W]))] = self.withScope {
if (partitioner.isInstanceOf[HashPartitioner] && keyClass.isArray) {
throw new SparkException("Default partitioner cannot partition array keys.")
}
val cg = new CoGroupedRDD[K](Seq(self, other), partitioner)
cg.mapValues { case Array(vs, w1s) =>
(vs.asInstanceOf[Iterable[V]], w1s.asInstanceOf[Iterable[W]])
}
}
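  // Illustrative usage sketch (not part of the original source): cogroup pairs every key with the
  // full collection of values from each side; the data is an assumption.
  //
  //   val a = sc.parallelize(Seq(("k", 1), ("k", 2)))
  //   val b = sc.parallelize(Seq(("k", "x"), ("m", "y")))
  //   a.cogroup(b)   // conceptually: ("k", (Iterable(1, 2), Iterable("x"))), ("m", (Iterable(), Iterable("y")))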
/**
* For each key k in `this` or `other1` or `other2`, return a resulting RDD that contains a
* tuple with the list of values for that key in `this`, `other1` and `other2`.
*/
def cogroup[W1, W2](other1: RDD[(K, W1)], other2: RDD[(K, W2)], partitioner: Partitioner)
: RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2]))] = self.withScope {
if (partitioner.isInstanceOf[HashPartitioner] && keyClass.isArray) {
throw new SparkException("Default partitioner cannot partition array keys.")
}
val cg = new CoGroupedRDD[K](Seq(self, other1, other2), partitioner)
cg.mapValues { case Array(vs, w1s, w2s) =>
(vs.asInstanceOf[Iterable[V]],
w1s.asInstanceOf[Iterable[W1]],
w2s.asInstanceOf[Iterable[W2]])
}
}
/**
* For each key k in `this` or `other1` or `other2` or `other3`,
* return a resulting RDD that contains a tuple with the list of values
* for that key in `this`, `other1`, `other2` and `other3`.
*/
def cogroup[W1, W2, W3](other1: RDD[(K, W1)], other2: RDD[(K, W2)], other3: RDD[(K, W3)])
: RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2], Iterable[W3]))] = self.withScope {
cogroup(other1, other2, other3, defaultPartitioner(self, other1, other2, other3))
}
/**
* For each key k in `this` or `other`, return a resulting RDD that contains a tuple with the
* list of values for that key in `this` as well as `other`.
*/
def cogroup[W](other: RDD[(K, W)]): RDD[(K, (Iterable[V], Iterable[W]))] = self.withScope {
cogroup(other, defaultPartitioner(self, other))
}
/**
* For each key k in `this` or `other1` or `other2`, return a resulting RDD that contains a
* tuple with the list of values for that key in `this`, `other1` and `other2`.
*/
def cogroup[W1, W2](other1: RDD[(K, W1)], other2: RDD[(K, W2)])
: RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2]))] = self.withScope {
cogroup(other1, other2, defaultPartitioner(self, other1, other2))
}
/**
* For each key k in `this` or `other`, return a resulting RDD that contains a tuple with the
* list of values for that key in `this` as well as `other`.
*/
def cogroup[W](
other: RDD[(K, W)],
numPartitions: Int): RDD[(K, (Iterable[V], Iterable[W]))] = self.withScope {
cogroup(other, new HashPartitioner(numPartitions))
}
/**
* For each key k in `this` or `other1` or `other2`, return a resulting RDD that contains a
* tuple with the list of values for that key in `this`, `other1` and `other2`.
*/
def cogroup[W1, W2](other1: RDD[(K, W1)], other2: RDD[(K, W2)], numPartitions: Int)
: RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2]))] = self.withScope {
cogroup(other1, other2, new HashPartitioner(numPartitions))
}
/**
* For each key k in `this` or `other1` or `other2` or `other3`,
* return a resulting RDD that contains a tuple with the list of values
* for that key in `this`, `other1`, `other2` and `other3`.
*/
def cogroup[W1, W2, W3](other1: RDD[(K, W1)],
other2: RDD[(K, W2)],
other3: RDD[(K, W3)],
numPartitions: Int)
: RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2], Iterable[W3]))] = self.withScope {
cogroup(other1, other2, other3, new HashPartitioner(numPartitions))
}
/** Alias for cogroup. */
def groupWith[W](other: RDD[(K, W)]): RDD[(K, (Iterable[V], Iterable[W]))] = self.withScope {
cogroup(other, defaultPartitioner(self, other))
}
/** Alias for cogroup. */
def groupWith[W1, W2](other1: RDD[(K, W1)], other2: RDD[(K, W2)])
: RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2]))] = self.withScope {
cogroup(other1, other2, defaultPartitioner(self, other1, other2))
}
/** Alias for cogroup. */
def groupWith[W1, W2, W3](other1: RDD[(K, W1)], other2: RDD[(K, W2)], other3: RDD[(K, W3)])
: RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2], Iterable[W3]))] = self.withScope {
cogroup(other1, other2, other3, defaultPartitioner(self, other1, other2, other3))
}
/**
* Return an RDD with the pairs from `this` whose keys are not in `other`.
*
* Uses `this` partitioner/partition size, because even if `other` is huge, the resulting
* RDD will be <= us.
*/
def subtractByKey[W: ClassTag](other: RDD[(K, W)]): RDD[(K, V)] = self.withScope {
subtractByKey(other, self.partitioner.getOrElse(new HashPartitioner(self.partitions.length)))
}
/** Return an RDD with the pairs from `this` whose keys are not in `other`. */
def subtractByKey[W: ClassTag](
other: RDD[(K, W)],
numPartitions: Int): RDD[(K, V)] = self.withScope {
subtractByKey(other, new HashPartitioner(numPartitions))
}
/** Return an RDD with the pairs from `this` whose keys are not in `other`. */
def subtractByKey[W: ClassTag](other: RDD[(K, W)], p: Partitioner): RDD[(K, V)] = self.withScope {
new SubtractedRDD[K, V, W](self, other, p)
}
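  // Illustrative usage sketch (not part of the original source): dropping every pair whose key
  // appears in a second RDD; the data is an assumption.
  //
  //   val all     = sc.parallelize(Seq(("a", 1), ("b", 2), ("c", 3)))
  //   val exclude = sc.parallelize(Seq(("b", ()), ("c", ())))
  //   all.subtractByKey(exclude)   // keeps ("a", 1) only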
/**
* Return the list of values in the RDD for key `key`. This operation is done efficiently if the
* RDD has a known partitioner by only searching the partition that the key maps to.
*/
def lookup(key: K): Seq[V] = self.withScope {
self.partitioner match {
case Some(p) =>
val index = p.getPartition(key)
val process = (it: Iterator[(K, V)]) => {
val buf = new ArrayBuffer[V]
for (pair <- it if pair._1 == key) {
buf += pair._2
}
buf
} : Seq[V]
val res = self.context.runJob(self, process, Array(index))
res(0)
case None =>
self.filter(_._1 == key).map(_._2).collect()
}
}
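  // Illustrative usage sketch (not part of the original source): lookup is cheap when the RDD has
  // a known partitioner, since only one partition is scanned. `pairs` is an assumed RDD[(String, Int)].
  //
  //   val indexed = pairs.partitionBy(new HashPartitioner(8)).cache()
  //   val vs: Seq[Int] = indexed.lookup("a")   // values for key "a" from the matching partition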
/**
* Output the RDD to any Hadoop-supported file system, using a Hadoop `OutputFormat` class
* supporting the key and value types K and V in this RDD.
*/
def saveAsHadoopFile[F <: OutputFormat[K, V]](
path: String)(implicit fm: ClassTag[F]): Unit = self.withScope {
saveAsHadoopFile(path, keyClass, valueClass, fm.runtimeClass.asInstanceOf[Class[F]])
}
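  // Illustrative usage sketch (not part of the original source): writing pairs as text with the
  // old-API TextOutputFormat; the import and the output path are assumptions for the example.
  //
  //   import org.apache.hadoop.mapred.TextOutputFormat
  //   pairs.saveAsHadoopFile[TextOutputFormat[String, Int]]("hdfs:///tmp/pairs-out")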
/**
* Output the RDD to any Hadoop-supported file system, using a Hadoop `OutputFormat` class
* supporting the key and value types K and V in this RDD. Compress the result with the
* supplied codec.
*/
def saveAsHadoopFile[F <: OutputFormat[K, V]](
path: String,
codec: Class[_ <: CompressionCodec])(implicit fm: ClassTag[F]): Unit = self.withScope {
val runtimeClass = fm.runtimeClass
saveAsHadoopFile(path, keyClass, valueClass, runtimeClass.asInstanceOf[Class[F]], codec)
}
/**
* Output the RDD to any Hadoop-supported file system, using a new Hadoop API `OutputFormat`
* (mapreduce.OutputFormat) object supporting the key and value types K and V in this RDD.
*/
def saveAsNewAPIHadoopFile[F <: NewOutputFormat[K, V]](
path: String)(implicit fm: ClassTag[F]): Unit = self.withScope {
saveAsNewAPIHadoopFile(path, keyClass, valueClass, fm.runtimeClass.asInstanceOf[Class[F]])
}
/**
* Output the RDD to any Hadoop-supported file system, using a new Hadoop API `OutputFormat`
* (mapreduce.OutputFormat) object supporting the key and value types K and V in this RDD.
*/
def saveAsNewAPIHadoopFile(
path: String,
keyClass: Class[_],
valueClass: Class[_],
outputFormatClass: Class[_ <: NewOutputFormat[_, _]],
conf: Configuration = self.context.hadoopConfiguration): Unit = self.withScope {
// Rename this as hadoopConf internally to avoid shadowing (see SPARK-2038).
val hadoopConf = conf
val job = new NewAPIHadoopJob(hadoopConf)
job.setOutputKeyClass(keyClass)
job.setOutputValueClass(valueClass)
job.setOutputFormatClass(outputFormatClass)
val jobConfiguration = SparkHadoopUtil.get.getConfigurationFromJobContext(job)
jobConfiguration.set("mapred.output.dir", path)
saveAsNewAPIHadoopDataset(jobConfiguration)
}
/**
* Output the RDD to any Hadoop-supported file system, using a Hadoop `OutputFormat` class
* supporting the key and value types K and V in this RDD. Compress with the supplied codec.
*/
def saveAsHadoopFile(
path: String,
keyClass: Class[_],
valueClass: Class[_],
outputFormatClass: Class[_ <: OutputFormat[_, _]],
codec: Class[_ <: CompressionCodec]): Unit = self.withScope {
saveAsHadoopFile(path, keyClass, valueClass, outputFormatClass,
new JobConf(self.context.hadoopConfiguration), Some(codec))
}
/**
* Output the RDD to any Hadoop-supported file system, using a Hadoop `OutputFormat` class
* supporting the key and value types K and V in this RDD.
*
   * Note that we should make sure our tasks are idempotent when speculation is enabled, i.e. do
   * not use an output committer that writes data directly.
* There is an example in https://issues.apache.org/jira/browse/SPARK-10063 to show the bad
* result of using direct output committer with speculation enabled.
*/
def saveAsHadoopFile(
path: String,
keyClass: Class[_],
valueClass: Class[_],
outputFormatClass: Class[_ <: OutputFormat[_, _]],
conf: JobConf = new JobConf(self.context.hadoopConfiguration),
codec: Option[Class[_ <: CompressionCodec]] = None): Unit = self.withScope {
// Rename this as hadoopConf internally to avoid shadowing (see SPARK-2038).
val hadoopConf = conf
hadoopConf.setOutputKeyClass(keyClass)
hadoopConf.setOutputValueClass(valueClass)
conf.setOutputFormat(outputFormatClass)
for (c <- codec) {
hadoopConf.setCompressMapOutput(true)
hadoopConf.set("mapred.output.compress", "true")
hadoopConf.setMapOutputCompressorClass(c)
hadoopConf.set("mapred.output.compression.codec", c.getCanonicalName)
hadoopConf.set("mapred.output.compression.type", CompressionType.BLOCK.toString)
}
// Use configured output committer if already set
if (conf.getOutputCommitter == null) {
hadoopConf.setOutputCommitter(classOf[FileOutputCommitter])
}
// When speculation is on and output committer class name contains "Direct", we should warn
    // users that they may lose data if they are using a direct output committer.
val speculationEnabled = self.conf.getBoolean("spark.speculation", false)
val outputCommitterClass = hadoopConf.get("mapred.output.committer.class", "")
if (speculationEnabled && outputCommitterClass.contains("Direct")) {
val warningMessage =
s"$outputCommitterClass may be an output committer that writes data directly to " +
"the final location. Because speculation is enabled, this output committer may " +
"cause data loss (see the case in SPARK-10063). If possible, please use a output " +
"committer that does not have this behavior (e.g. FileOutputCommitter)."
logWarning(warningMessage)
}
FileOutputFormat.setOutputPath(hadoopConf,
SparkHadoopWriter.createPathFromString(path, hadoopConf))
saveAsHadoopDataset(hadoopConf)
}
/**
* Output the RDD to any Hadoop-supported storage system with new Hadoop API, using a Hadoop
* Configuration object for that storage system. The Conf should set an OutputFormat and any
* output paths required (e.g. a table name to write to) in the same way as it would be
* configured for a Hadoop MapReduce job.
*
   * Note that we should make sure our tasks are idempotent when speculation is enabled, i.e. do
   * not use an output committer that writes data directly.
* There is an example in https://issues.apache.org/jira/browse/SPARK-10063 to show the bad
* result of using direct output committer with speculation enabled.
*/
def saveAsNewAPIHadoopDataset(conf: Configuration): Unit = self.withScope {
// Rename this as hadoopConf internally to avoid shadowing (see SPARK-2038).
val hadoopConf = conf
val job = new NewAPIHadoopJob(hadoopConf)
val formatter = new SimpleDateFormat("yyyyMMddHHmm")
val jobtrackerID = formatter.format(new Date())
val stageId = self.id
val jobConfiguration = SparkHadoopUtil.get.getConfigurationFromJobContext(job)
val wrappedConf = new SerializableConfiguration(jobConfiguration)
val outfmt = job.getOutputFormatClass
val jobFormat = outfmt.newInstance
if (isOutputSpecValidationEnabled) {
// FileOutputFormat ignores the filesystem parameter
jobFormat.checkOutputSpecs(job)
}
val writeShard = (context: TaskContext, iter: Iterator[(K, V)]) => {
val config = wrappedConf.value
/* "reduce task" <split #> <attempt # = spark task #> */
val attemptId = newTaskAttemptID(jobtrackerID, stageId, isMap = false, context.partitionId,
context.attemptNumber)
val hadoopContext = newTaskAttemptContext(config, attemptId)
val format = outfmt.newInstance
format match {
case c: Configurable => c.setConf(config)
case _ => ()
}
val committer = format.getOutputCommitter(hadoopContext)
committer.setupTask(hadoopContext)
val (outputMetrics, bytesWrittenCallback) = initHadoopOutputMetrics(context)
val writer = format.getRecordWriter(hadoopContext).asInstanceOf[NewRecordWriter[K, V]]
require(writer != null, "Unable to obtain RecordWriter")
var recordsWritten = 0L
Utils.tryWithSafeFinally {
while (iter.hasNext) {
val pair = iter.next()
writer.write(pair._1, pair._2)
// Update bytes written metric every few records
maybeUpdateOutputMetrics(bytesWrittenCallback, outputMetrics, recordsWritten)
recordsWritten += 1
}
} {
writer.close(hadoopContext)
}
committer.commitTask(hadoopContext)
bytesWrittenCallback.foreach { fn => outputMetrics.setBytesWritten(fn()) }
outputMetrics.setRecordsWritten(recordsWritten)
1
} : Int
val jobAttemptId = newTaskAttemptID(jobtrackerID, stageId, isMap = true, 0, 0)
val jobTaskContext = newTaskAttemptContext(wrappedConf.value, jobAttemptId)
val jobCommitter = jobFormat.getOutputCommitter(jobTaskContext)
// When speculation is on and output committer class name contains "Direct", we should warn
    // users that they may lose data if they are using a direct output committer.
val speculationEnabled = self.conf.getBoolean("spark.speculation", false)
val outputCommitterClass = jobCommitter.getClass.getSimpleName
if (speculationEnabled && outputCommitterClass.contains("Direct")) {
val warningMessage =
s"$outputCommitterClass may be an output committer that writes data directly to " +
"the final location. Because speculation is enabled, this output committer may " +
"cause data loss (see the case in SPARK-10063). If possible, please use a output " +
"committer that does not have this behavior (e.g. FileOutputCommitter)."
logWarning(warningMessage)
}
jobCommitter.setupJob(jobTaskContext)
self.context.runJob(self, writeShard)
jobCommitter.commitJob(jobTaskContext)
}
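  // Illustrative usage sketch added by the editor (not part of the Spark source); class names and
  // the output path below are placeholders. With the new Hadoop API, a Hadoop `Job` is typically
  // used only to assemble the Configuration that gets passed in:
  //
  //   import org.apache.hadoop.fs.Path
  //   import org.apache.hadoop.io.{LongWritable, Text}
  //   import org.apache.hadoop.mapreduce.Job
  //   import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat, TextOutputFormat}
  //
  //   val job = Job.getInstance(sc.hadoopConfiguration)
  //   job.setOutputKeyClass(classOf[LongWritable])
  //   job.setOutputValueClass(classOf[Text])
  //   job.setOutputFormatClass(classOf[TextOutputFormat[LongWritable, Text]])
  //   FileOutputFormat.setOutputPath(job, new Path("/tmp/output"))
  //   pairRdd.saveAsNewAPIHadoopDataset(job.getConfiguration)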
/**
* Output the RDD to any Hadoop-supported storage system, using a Hadoop JobConf object for
* that storage system. The JobConf should set an OutputFormat and any output paths required
* (e.g. a table name to write to) in the same way as it would be configured for a Hadoop
* MapReduce job.
*/
def saveAsHadoopDataset(conf: JobConf): Unit = self.withScope {
// Rename this as hadoopConf internally to avoid shadowing (see SPARK-2038).
val hadoopConf = conf
val outputFormatInstance = hadoopConf.getOutputFormat
val keyClass = hadoopConf.getOutputKeyClass
val valueClass = hadoopConf.getOutputValueClass
if (outputFormatInstance == null) {
throw new SparkException("Output format class not set")
}
if (keyClass == null) {
throw new SparkException("Output key class not set")
}
if (valueClass == null) {
throw new SparkException("Output value class not set")
}
SparkHadoopUtil.get.addCredentials(hadoopConf)
logDebug("Saving as hadoop file of type (" + keyClass.getSimpleName + ", " +
valueClass.getSimpleName + ")")
if (isOutputSpecValidationEnabled) {
// FileOutputFormat ignores the filesystem parameter
val ignoredFs = FileSystem.get(hadoopConf)
hadoopConf.getOutputFormat.checkOutputSpecs(ignoredFs, hadoopConf)
}
val writer = new SparkHadoopWriter(hadoopConf)
writer.preSetup()
val writeToFile = (context: TaskContext, iter: Iterator[(K, V)]) => {
// Hadoop wants a 32-bit task attempt ID, so if ours is bigger than Int.MaxValue, roll it
// around by taking a mod. We expect that no task will be attempted 2 billion times.
val taskAttemptId = (context.taskAttemptId % Int.MaxValue).toInt
val (outputMetrics, bytesWrittenCallback) = initHadoopOutputMetrics(context)
writer.setup(context.stageId, context.partitionId, taskAttemptId)
writer.open()
var recordsWritten = 0L
Utils.tryWithSafeFinally {
while (iter.hasNext) {
val record = iter.next()
writer.write(record._1.asInstanceOf[AnyRef], record._2.asInstanceOf[AnyRef])
// Update bytes written metric every few records
maybeUpdateOutputMetrics(bytesWrittenCallback, outputMetrics, recordsWritten)
recordsWritten += 1
}
} {
writer.close()
}
writer.commit()
bytesWrittenCallback.foreach { fn => outputMetrics.setBytesWritten(fn()) }
outputMetrics.setRecordsWritten(recordsWritten)
}
self.context.runJob(self, writeToFile)
writer.commitJob()
}
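  // Illustrative usage sketch added by the editor (not part of the Spark source); class names and
  // the output path are placeholders. The old-API variant takes a JobConf directly:
  //
  //   import org.apache.hadoop.fs.Path
  //   import org.apache.hadoop.io.{IntWritable, Text}
  //   import org.apache.hadoop.mapred.{FileOutputFormat, JobConf, TextOutputFormat}
  //
  //   val jobConf = new JobConf(sc.hadoopConfiguration)
  //   jobConf.setOutputKeyClass(classOf[Text])
  //   jobConf.setOutputValueClass(classOf[IntWritable])
  //   jobConf.setOutputFormat(classOf[TextOutputFormat[Text, IntWritable]])
  //   FileOutputFormat.setOutputPath(jobConf, new Path("/tmp/output-old-api"))
  //   pairRdd.saveAsHadoopDataset(jobConf)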
private def initHadoopOutputMetrics(context: TaskContext): (OutputMetrics, Option[() => Long]) = {
val bytesWrittenCallback = SparkHadoopUtil.get.getFSBytesWrittenOnThreadCallback()
val outputMetrics = new OutputMetrics(DataWriteMethod.Hadoop)
if (bytesWrittenCallback.isDefined) {
context.taskMetrics.outputMetrics = Some(outputMetrics)
}
(outputMetrics, bytesWrittenCallback)
}
private def maybeUpdateOutputMetrics(bytesWrittenCallback: Option[() => Long],
outputMetrics: OutputMetrics, recordsWritten: Long): Unit = {
if (recordsWritten % PairRDDFunctions.RECORDS_BETWEEN_BYTES_WRITTEN_METRIC_UPDATES == 0) {
bytesWrittenCallback.foreach { fn => outputMetrics.setBytesWritten(fn()) }
outputMetrics.setRecordsWritten(recordsWritten)
}
}
/**
* Return an RDD with the keys of each tuple.
*/
def keys: RDD[K] = self.map(_._1)
/**
* Return an RDD with the values of each tuple.
*/
def values: RDD[V] = self.map(_._2)
private[spark] def keyClass: Class[_] = kt.runtimeClass
private[spark] def valueClass: Class[_] = vt.runtimeClass
private[spark] def keyOrdering: Option[Ordering[K]] = Option(ord)
// Note: this needs to be a function instead of a 'val' so that the disableOutputSpecValidation
// setting can take effect:
private def isOutputSpecValidationEnabled: Boolean = {
val validationDisabled = PairRDDFunctions.disableOutputSpecValidation.value
val enabledInConf = self.conf.getBoolean("spark.hadoop.validateOutputSpecs", true)
enabledInConf && !validationDisabled
}
}
private[spark] object PairRDDFunctions {
val RECORDS_BETWEEN_BYTES_WRITTEN_METRIC_UPDATES = 256
/**
* Allows for the `spark.hadoop.validateOutputSpecs` checks to be disabled on a case-by-case
* basis; see SPARK-4835 for more details.
*/
val disableOutputSpecValidation: DynamicVariable[Boolean] = new DynamicVariable[Boolean](false)
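  // Illustrative sketch added by the editor (not part of the Spark source): since this is a
  // scala.util.DynamicVariable, internal callers scope the override to a block instead of
  // mutating a global flag, e.g.
  //
  //   PairRDDFunctions.disableOutputSpecValidation.withValue(true) {
  //     pairRdd.saveAsHadoopDataset(jobConf)  // output-spec validation is skipped on this thread inside the block
  //   }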
}
|
pronix/spark
|
core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
|
Scala
|
apache-2.0
| 56,093
|
// Copyright (C) 2015 ENSIME Authors
// License: GPL 3.0
package org.ensime.core
import Predef.{any2stringadd => _}
import scala.reflect.internal.util.Position
/**
 * Simulates methods that were added in later versions of the scalac
 * API, or generates fake methods that we can use in both versions.
*/
trait PresentationCompilerBackCompat
trait PositionBackCompat {
implicit class RichPosition(pos: Position) {
// annoyingly, {start, end}OrPoint is deprecated
def startOrCursor: Int = pos.start
def endOrCursor: Int = pos.end
}
}
|
jacobono/ensime-server
|
core/src/main/scala-2.11/org/ensime/core/PresentationCompilerBackCompat.scala
|
Scala
|
gpl-3.0
| 554
|
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5.content
import com.ibm.spark.kernel.protocol.v5.KernelMessageContent
import play.api.libs.json.Json
case class ConnectReply(
shell_port: Int,
iopub_port: Int,
stdin_port: Int,
hb_port: Int
) extends KernelMessageContent {
override def content : String =
Json.toJson(this)(ConnectReply.connectReplyWrites).toString
}
object ConnectReply {
implicit val connectReplyReads = Json.reads[ConnectReply]
implicit val connectReplyWrites = Json.writes[ConnectReply]
}
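// Illustrative sketch added by the editor (not part of the original file); the port numbers are
// placeholders. Serializing a reply via `content` yields the wire form, roughly:
//
//   val reply = ConnectReply(shell_port = 5000, iopub_port = 5001, stdin_port = 5002, hb_port = 5003)
//   reply.content  // {"shell_port":5000,"iopub_port":5001,"stdin_port":5002,"hb_port":5003}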
|
bpburns/spark-kernel
|
protocol/src/main/scala/com/ibm/spark/kernel/protocol/v5/content/ConnectReply.scala
|
Scala
|
apache-2.0
| 1,114
|
package net.kwas.impatient.ch5
import org.scalatest._
class CounterSpec extends FlatSpec with Matchers {
behavior of "A Counter"
it should "should initalize to zero" in {
val counter = new Counter
counter.current() should be (0)
}
it should "be one after increment" in {
val counter = new Counter
counter.increment()
counter.current() should be (1)
}
it should "not overflow when greater than Int.MaxValue" in {
val counter = new Counter
counter.increment(Int.MaxValue)
counter.increment()
counter.current() should be (1L + Int.MaxValue)
}
}
|
dkwasny/ScalaImpatient
|
src/test/scala/net/kwas/impatient/ch5/CounterSpec.scala
|
Scala
|
mit
| 599
|
/*
 * Copyright 2014–2020 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.run
import slamdata.Predef.{Exception, Product, Serializable, Throwable}
import quasar.api.datasource.DatasourceError.CreateError
import quasar.api.destination.DestinationError.{CreateError => DestCreateError}
import quasar.compile.SemanticErrors
import quasar.connector.ResourceError
import quasar.qscript.PlannerError
import quasar.run.store.StoreError
import quasar.sql.ParsingError
import argonaut.Json
import argonaut.JsonScalaz._
import monocle.Prism
import scalaz.Show
import scalaz.syntax.show._
import shims.{showToCats, showToScalaz}
sealed abstract trait QuasarError extends Product with Serializable
object QuasarError {
final case class Pushing(error: DestCreateError[Json]) extends QuasarError
final case class Compiling(errors: SemanticErrors) extends QuasarError
final case class Connecting(error: CreateError[Json]) extends QuasarError
final case class Evaluating(error: ResourceError) extends QuasarError
final case class Parsing(error: ParsingError) extends QuasarError
final case class Planning(error: PlannerError) extends QuasarError
final case class Storing(error: StoreError) extends QuasarError
val pushing: Prism[QuasarError, DestCreateError[Json]] =
Prism.partial[QuasarError, DestCreateError[Json]] {
case Pushing(err) => err
} (Pushing(_))
val compiling: Prism[QuasarError, SemanticErrors] =
Prism.partial[QuasarError, SemanticErrors] {
case Compiling(errs) => errs
} (Compiling(_))
val connecting: Prism[QuasarError, CreateError[Json]] =
Prism.partial[QuasarError, CreateError[Json]] {
case Connecting(err) => err
} (Connecting(_))
val evaluating: Prism[QuasarError, ResourceError] =
Prism.partial[QuasarError, ResourceError] {
case Evaluating(err) => err
} (Evaluating(_))
val parsing: Prism[QuasarError, ParsingError] =
Prism.partial[QuasarError, ParsingError] {
case Parsing(err) => err
} (Parsing(_))
val planning: Prism[QuasarError, PlannerError] =
Prism.partial[QuasarError, PlannerError] {
case Planning(err) => err
} (Planning(_))
val storing: Prism[QuasarError, StoreError] =
Prism.partial[QuasarError, StoreError] {
case Storing(err) => err
} (Storing(_))
val throwableP: Prism[Throwable, QuasarError] =
Prism.partial[Throwable, QuasarError] {
case QuasarException(qe) => qe
} (QuasarException(_))
implicit val show: Show[QuasarError] =
Show show {
case Compiling(e) => e.show
case Connecting(e) => e.show
case Evaluating(e) => e.show
case Parsing(e) => e.show
case Planning(e) => e.show
case Storing(e) => e.show
case Pushing(e) => e.show
}
////
private final case class QuasarException(qe: QuasarError) extends Exception(qe.shows)
}
|
slamdata/quasar
|
run/src/main/scala/quasar/run/QuasarError.scala
|
Scala
|
apache-2.0
| 3,410
|
package com.wavesplatform.http
import akka.http.scaladsl.model.{ContentTypes, HttpEntity, StatusCodes}
import com.wavesplatform.account.Alias
import com.wavesplatform.api.common.CommonTransactionsApi
import com.wavesplatform.api.common.CommonTransactionsApi.TransactionMeta
import com.wavesplatform.api.http.ApiError.ApiKeyNotValid
import com.wavesplatform.api.http.DebugApiRoute
import com.wavesplatform.block.SignedBlockHeader
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils._
import com.wavesplatform.db.WithDomain
import com.wavesplatform.db.WithState.AddrWithBalance
import com.wavesplatform.features.BlockchainFeatures
import com.wavesplatform.lagonaki.mocks.TestBlock
import com.wavesplatform.lang.script.v1.ExprScript
import com.wavesplatform.lang.v1.compiler.Terms.TRUE
import com.wavesplatform.lang.v1.estimator.v3.ScriptEstimatorV3
import com.wavesplatform.lang.v1.evaluator.ctx.impl.PureContext
import com.wavesplatform.lang.v1.traits.domain.{Issue, Lease, LeaseCancel, Recipient}
import com.wavesplatform.network.PeerDatabase
import com.wavesplatform.settings.{TestFunctionalitySettings, WavesSettings}
import com.wavesplatform.state.StateHash.SectionId
import com.wavesplatform.state.diffs.ENOUGH_AMT
import com.wavesplatform.state.reader.LeaseDetails
import com.wavesplatform.state.{AccountScriptInfo, AssetDescription, AssetScriptInfo, Blockchain, Height, InvokeScriptResult, NG, StateHash, TxMeta}
import com.wavesplatform.test._
import com.wavesplatform.transaction.assets.exchange.OrderType
import com.wavesplatform.transaction.smart.InvokeScriptTransaction
import com.wavesplatform.transaction.smart.InvokeScriptTransaction.Payment
import com.wavesplatform.transaction.smart.script.ScriptCompiler
import com.wavesplatform.transaction.transfer.TransferTransaction
import com.wavesplatform.transaction.{TxHelpers, TxVersion}
import com.wavesplatform.{BlockchainStubHelpers, NTPTime, TestValues, TestWallet}
import monix.eval.Task
import org.scalamock.scalatest.PathMockFactory
import play.api.libs.json.{JsArray, JsObject, JsValue, Json}
import scala.util.Random
//noinspection ScalaStyle
class DebugApiRouteSpec
extends RouteSpec("/debug")
with RestAPISettingsHelper
with TestWallet
with NTPTime
with PathMockFactory
with BlockchainStubHelpers
with WithDomain {
val wavesSettings = WavesSettings.default()
val configObject = wavesSettings.config.root()
trait Blockchain1 extends Blockchain with NG
val blockchain = stub[Blockchain1]
val block = TestBlock.create(Nil)
val testStateHash = {
def randomHash: ByteStr = ByteStr(Array.fill(32)(Random.nextInt(256).toByte))
val hashes = SectionId.values.map((_, randomHash)).toMap
StateHash(randomHash, hashes)
}
val debugApiRoute =
DebugApiRoute(
wavesSettings,
ntpTime,
blockchain,
null,
null,
stub[CommonTransactionsApi],
null,
PeerDatabase.NoOp,
null,
(_, _) => Task.raiseError(new NotImplementedError("")),
null,
null,
null,
null,
null,
null,
configObject,
_ => Seq.empty, {
case 2 => Some(testStateHash)
case _ => None
},
() => blockchain
)
import debugApiRoute._
routePath("/configInfo") - {
"requires api-key header" in {
Get(routePath("/configInfo?full=true")) ~> route should produce(ApiKeyNotValid)
Get(routePath("/configInfo?full=false")) ~> route should produce(ApiKeyNotValid)
}
}
routePath("/stateHash") - {
"works" in {
(blockchain.blockHeader(_: Int)).when(*).returning(Some(SignedBlockHeader(block.header, block.signature)))
Get(routePath("/stateHash/2")) ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[JsObject] shouldBe (Json.toJson(testStateHash).as[JsObject] ++ Json.obj("blockId" -> block.id().toString))
}
Get(routePath("/stateHash/3")) ~> route ~> check {
status shouldBe StatusCodes.NotFound
}
}
}
routePath("/validate") - {
def routeWithBlockchain(blockchain: Blockchain with NG) =
debugApiRoute.copy(blockchain = blockchain, priorityPoolBlockchain = () => blockchain).route
def validatePost(tx: TransferTransaction) =
Post(routePath("/validate"), HttpEntity(ContentTypes.`application/json`, tx.json().toString()))
"takes the priority pool into account" in withDomain(balances = Seq(AddrWithBalance(TxHelpers.defaultAddress))) { d =>
d.appendBlock(TxHelpers.transfer(to = TxHelpers.secondAddress, amount = 1.waves + TestValues.fee))
val route = routeWithBlockchain(d.blockchain)
val tx = TxHelpers.transfer(TxHelpers.secondSigner, TestValues.address, 1.waves)
validatePost(tx) ~> route ~> check {
val json = Json.parse(responseAs[String])
(json \\ "valid").as[Boolean] shouldBe true
(json \\ "validationTime").as[Int] shouldBe 1000 +- 1000
}
}
"valid tx" in {
val blockchain = createBlockchainStub()
(blockchain.balance _).when(TxHelpers.defaultSigner.publicKey.toAddress, *).returns(Long.MaxValue)
val route = routeWithBlockchain(blockchain)
val tx = TxHelpers.transfer(TxHelpers.defaultSigner, TestValues.address, 1.waves)
validatePost(tx) ~> route ~> check {
val json = Json.parse(responseAs[String])
(json \\ "valid").as[Boolean] shouldBe true
(json \\ "validationTime").as[Int] shouldBe 1000 +- 1000
}
}
"invalid tx" in {
val blockchain = createBlockchainStub()
(blockchain.balance _).when(TxHelpers.defaultSigner.publicKey.toAddress, *).returns(0)
val route = routeWithBlockchain(blockchain)
val tx = TxHelpers.transfer(TxHelpers.defaultSigner, TestValues.address, Long.MaxValue)
validatePost(tx) ~> route ~> check {
val json = Json.parse(responseAs[String])
(json \\ "valid").as[Boolean] shouldBe false
(json \\ "validationTime").as[Int] shouldBe 1000 +- 1000
(json \\ "error").as[String] should include("Attempt to transfer unavailable funds")
}
}
"exchange tx with fail script" in {
val blockchain = createBlockchainStub { blockchain =>
(blockchain.balance _).when(TxHelpers.defaultAddress, *).returns(Long.MaxValue)
val (assetScript, comp) =
ScriptCompiler.compile("if true then throw(\\"error\\") else false", ScriptEstimatorV3(fixOverflow = true)).explicitGet()
(blockchain.assetScript _).when(TestValues.asset).returns(Some(AssetScriptInfo(assetScript, comp)))
(blockchain.assetDescription _)
.when(TestValues.asset)
.returns(
Some(
AssetDescription(
null,
null,
null,
null,
0,
reissuable = false,
null,
Height(1),
Some(AssetScriptInfo(assetScript, comp)),
0,
nft = false
)
)
)
}
val route = routeWithBlockchain(blockchain)
val tx = TxHelpers.exchangeFromOrders(TxHelpers.orderV3(OrderType.BUY, TestValues.asset), TxHelpers.orderV3(OrderType.SELL, TestValues.asset))
jsonPost(routePath("/validate"), tx.json()) ~> route ~> check {
val json = Json.parse(responseAs[String])
(json \\ "valid").as[Boolean] shouldBe false
(json \\ "validationTime").as[Int] shouldBe 1000 +- 1000
(json \\ "error").as[String] should include("not allowed by script of the asset")
(json \\ "trace").as[JsArray] shouldBe Json.parse(
"[{\\"type\\":\\"asset\\",\\"context\\":\\"orderAmount\\",\\"id\\":\\"5PjDJaGfSPJj4tFzMRCiuuAasKg5n8dJKXKenhuwZexx\\",\\"result\\":\\"failure\\",\\"vars\\":[],\\"error\\":\\"error\\"}]"
)
}
}
"invoke tx with asset failing" in {
val blockchain = createBlockchainStub { blockchain =>
(blockchain.balance _).when(*, *).returns(Long.MaxValue / 2)
val (assetScript, assetScriptComplexity) = ScriptCompiler
.compile(
"let test = true\\n" +
"if test then throw(\\"error\\") else !test",
ScriptEstimatorV3(fixOverflow = true)
)
.explicitGet()
(blockchain.assetScript _).when(TestValues.asset).returns(Some(AssetScriptInfo(assetScript, assetScriptComplexity)))
(blockchain.assetDescription _)
.when(TestValues.asset)
.returns(
Some(
AssetDescription(
TestValues.asset.id,
TxHelpers.defaultSigner.publicKey,
null,
null,
0,
reissuable = true,
BigInt(1),
Height(1),
Some(AssetScriptInfo(assetScript, assetScriptComplexity)),
0,
nft = false
)
)
)
val (dAppScript, _) = ScriptCompiler
.compile(
s"""
|{-# STDLIB_VERSION 4 #-}
|{-# SCRIPT_TYPE ACCOUNT #-}
|{-# CONTENT_TYPE DAPP #-}
|
|@Callable(i)
|func default() = []
|
|@Callable(i)
|func dataAndTransfer() = [
| IntegerEntry("key", 1),
| BooleanEntry("key", true),
| StringEntry("key", "str"),
| BinaryEntry("key", base58''),
| DeleteEntry("key"),
| ScriptTransfer(Address(base58'${TxHelpers.secondAddress}'), 1, base58'${TestValues.asset}')
|]
|
|@Callable(i)
|func issue() = {
| let decimals = 4
| [Issue("name", "description", 1000, decimals, true, unit, 0)]
|}
|
|@Callable(i)
|func reissue() = [Reissue(base58'${TestValues.asset}', 1, false)]
|
|@Callable(i)
|func burn() = [Burn(base58'${TestValues.asset}', 1)]
|""".stripMargin,
ScriptEstimatorV3(fixOverflow = true)
)
.explicitGet()
(blockchain.accountScript _)
.when(*)
.returns(
Some(
AccountScriptInfo(
TxHelpers.defaultSigner.publicKey,
dAppScript,
0L,
Map(3 -> Seq("default", "dataAndTransfer", "issue", "reissue", "burn", "sponsorFee").map(_ -> 1L).toMap)
)
)
)
(blockchain.hasAccountScript _).when(*).returns(true)
}
val route = routeWithBlockchain(blockchain)
def testFunction(name: String, result: InvokeScriptTransaction => String) = withClue(s"function $name") {
val tx = TxHelpers.invoke(TxHelpers.defaultAddress, func = Some(name), fee = 102500000)
jsonPost(routePath("/validate"), tx.json()) ~> route ~> check {
val json = Json.parse(responseAs[String])
if ((json \\ "valid").as[Boolean])
assert(tx.json().fieldSet subsetOf json.as[JsObject].fieldSet)
else
(json \\ "transaction").as[JsObject] should matchJson(tx.json())
(json \\ "trace").as[JsArray] should matchJson(result(tx))
}
}
def testPayment(result: String) = withClue("payment") {
val tx = TxHelpers.invoke(TxHelpers.secondAddress, fee = 1300000, payments = Seq(Payment(1L, TestValues.asset)))
jsonPost(routePath("/validate"), tx.json()) ~> route ~> check {
val json = Json.parse(responseAs[String])
if ((json \\ "valid").as[Boolean])
assert(tx.json().fieldSet subsetOf json.as[JsObject].fieldSet)
else
(json \\ "transaction").as[JsObject] should matchJson(tx.json())
(json \\ "trace").as[JsArray] should matchJson(Json.parse(result))
}
}
testPayment("""[ {
| "type" : "verifier",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "result" : "success",
| "error" : null
|}, {
| "type" : "dApp",
| "id" : "3MuVqVJGmFsHeuFni5RbjRmALuGCkEwzZtC",
| "function" : "default",
| "args" : [ ],
| "invocations" : [ ],
| "result" : {
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ ]
| },
| "error" : null,
| "vars" : [ ]
|}, {
| "type" : "asset",
| "context" : "payment",
| "id" : "5PjDJaGfSPJj4tFzMRCiuuAasKg5n8dJKXKenhuwZexx",
| "result" : "failure",
| "vars" : [ {
| "name" : "test",
| "type" : "Boolean",
| "value" : true
| } ],
| "error" : "error"
|} ]""".stripMargin)
testFunction(
"dataAndTransfer",
_ => """[ {
| "type" : "verifier",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "result" : "success",
| "error" : null
|}, {
| "type" : "dApp",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "function" : "dataAndTransfer",
| "args" : [ ],
| "invocations" : [ ],
| "result" : {
| "data" : [ {
| "key" : "key",
| "type" : "integer",
| "value" : 1
| }, {
| "key" : "key",
| "type" : "boolean",
| "value" : true
| }, {
| "key" : "key",
| "type" : "string",
| "value" : "str"
| }, {
| "key" : "key",
| "type" : "binary",
| "value" : "base64:"
| }, {
| "key" : "key",
| "value" : null
| } ],
| "transfers" : [ {
| "address" : "3MuVqVJGmFsHeuFni5RbjRmALuGCkEwzZtC",
| "asset" : "5PjDJaGfSPJj4tFzMRCiuuAasKg5n8dJKXKenhuwZexx",
| "amount" : 1
| } ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ ]
| },
| "error" : null,
| "vars" : [ ]
|}, {
| "type" : "asset",
| "context" : "transfer",
| "id" : "5PjDJaGfSPJj4tFzMRCiuuAasKg5n8dJKXKenhuwZexx",
| "result" : "failure",
| "vars" : [ {
| "name" : "test",
| "type" : "Boolean",
| "value" : true
| } ],
| "error" : "error"
|} ]""".stripMargin
)
testFunction(
"issue",
tx => s"""[ {
| "type" : "verifier",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "result" : "success",
| "error" : null
|}, {
| "type" : "dApp",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "function" : "issue",
| "args" : [ ],
| "invocations" : [ ],
| "result" : {
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ {
| "assetId" : "${Issue.calculateId(4, "description", isReissuable = true, "name", 1000, 0, tx.id())}",
| "name" : "name",
| "description" : "description",
| "quantity" : 1000,
| "decimals" : 4,
| "isReissuable" : true,
| "compiledScript" : null,
| "nonce" : 0
| } ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ ]
| },
| "error" : null,
| "vars" : [ {
| "name" : "decimals",
| "type" : "Int",
| "value" : 4
| } ]
|} ]""".stripMargin
)
testFunction(
"reissue",
_ => """[ {
| "type" : "verifier",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "result" : "success",
| "error" : null
|}, {
| "type" : "dApp",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "function" : "reissue",
| "args" : [ ],
| "invocations" : [ ],
| "result" : {
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ {
| "assetId" : "5PjDJaGfSPJj4tFzMRCiuuAasKg5n8dJKXKenhuwZexx",
| "isReissuable" : false,
| "quantity" : 1
| } ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ ]
| },
| "error" : null,
| "vars" : [ ]
|}, {
| "type" : "asset",
| "context" : "reissue",
| "id" : "5PjDJaGfSPJj4tFzMRCiuuAasKg5n8dJKXKenhuwZexx",
| "result" : "failure",
| "vars" : [ {
| "name" : "test",
| "type" : "Boolean",
| "value" : true
| } ],
| "error" : "error"
|} ]""".stripMargin
)
testFunction(
"burn",
_ => """[ {
| "type" : "verifier",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "result" : "success",
| "error" : null
|}, {
| "type" : "dApp",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "function" : "burn",
| "args" : [ ],
| "invocations" : [ ],
| "result" : {
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ {
| "assetId" : "5PjDJaGfSPJj4tFzMRCiuuAasKg5n8dJKXKenhuwZexx",
| "quantity" : 1
| } ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ ]
| },
| "error" : null,
| "vars" : [ ]
|}, {
| "type" : "asset",
| "context" : "burn",
| "id" : "5PjDJaGfSPJj4tFzMRCiuuAasKg5n8dJKXKenhuwZexx",
| "result" : "failure",
| "vars" : [ {
| "name" : "test",
| "type" : "Boolean",
| "value" : true
| } ],
| "error" : "error"
|} ]""".stripMargin
)
}
"invoke tx returning leases" in {
val dAppPk = TxHelpers.defaultSigner.publicKey
val dAppAddress = dAppPk.toAddress
val invoke = TxHelpers.invoke(dAppPk.toAddress)
val leaseCancelId = ByteStr(bytes32gen.sample.get)
val amount1 = 100
val recipient1 = Recipient.Address(ByteStr.decodeBase58("3NAgxLPGnw3RGv9JT6NTDaG5D1iLUehg2xd").get)
val nonce1 = 0
val leaseId1 = Lease.calculateId(Lease(recipient1, amount1, nonce1), invoke.id())
val amount2 = 20
val recipient2 = Recipient.Alias("some_alias")
val nonce2 = 2
val leaseId2 = Lease.calculateId(Lease(recipient2, amount2, nonce2), invoke.id())
val blockchain = createBlockchainStub { blockchain =>
(blockchain.balance _).when(*, *).returns(Long.MaxValue)
(blockchain.resolveAlias _).when(Alias.create(recipient2.name).explicitGet()).returning(Right(TxHelpers.secondAddress))
val (dAppScript, _) = ScriptCompiler
.compile(
s"""
|{-# STDLIB_VERSION 5 #-}
|{-# SCRIPT_TYPE ACCOUNT #-}
|{-# CONTENT_TYPE DAPP #-}
|
|@Callable(i)
|func default() = {
| strict a = parseBigIntValue("${PureContext.BigIntMax}")
| let test = 1
| if (test == 1)
| then
| [
| Lease(Address(base58'${recipient1.bytes}'), $amount1, $nonce1),
| Lease(Alias("${recipient2.name}"), $amount2, $nonce2),
| LeaseCancel(base58'$leaseCancelId')
| ]
| else []
|}
|""".stripMargin,
ScriptEstimatorV3(fixOverflow = true)
)
.explicitGet()
(blockchain.accountScript _)
.when(*)
.returns(
Some(
AccountScriptInfo(
dAppPk,
dAppScript,
0L,
Map(3 -> Seq("default", "test1").map(_ -> 0L).toMap)
)
)
)
(blockchain.hasAccountScript _).when(*).returns(true)
(blockchain.transactionMeta _)
.when(leaseCancelId)
.returns(Some(TxMeta(Height(1), true, 0L)))
.anyNumberOfTimes()
(blockchain.leaseDetails _)
.when(leaseCancelId)
.returns(Some(LeaseDetails(dAppPk, TxHelpers.defaultAddress, 100, LeaseDetails.Status.Active, leaseCancelId, 1)))
.anyNumberOfTimes()
(blockchain.leaseDetails _)
.when(*)
.returns(None)
.anyNumberOfTimes()
(blockchain.resolveAlias _)
.when(*)
.returns(Right(accountGen.sample.get.toAddress))
.anyNumberOfTimes()
}
val route = debugApiRoute
.copy(
blockchain = blockchain,
priorityPoolBlockchain = () => blockchain
)
.route
Post(routePath("/validate"), HttpEntity(ContentTypes.`application/json`, invoke.json().toString())) ~> route ~> check {
val json = Json.parse(responseAs[String])
(json \\ "valid").as[Boolean] shouldBe true
(json \\ "stateChanges").as[JsObject] should matchJson(s"""{
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ {
| "id" : "$leaseId1",
| "originTransactionId" : "${invoke.id()}",
| "sender" : "$dAppAddress",
| "recipient" : "${recipient1.bytes}",
| "amount" : 100,
| "height" : 1,
| "status" : "active",
| "cancelHeight" : null,
| "cancelTransactionId" : null
| }, {
| "id" : "$leaseId2",
| "originTransactionId" : "${invoke.id()}",
| "sender" : "$dAppAddress",
| "recipient" : "${TxHelpers.secondAddress}",
| "amount" : 20,
| "height" : 1,
| "status" : "active",
| "cancelHeight" : null,
| "cancelTransactionId" : null
| } ],
| "leaseCancels" : [ {
| "id" : "$leaseCancelId",
| "originTransactionId" : "$leaseCancelId",
| "sender" : "$dAppAddress",
| "recipient" : "${TxHelpers.defaultAddress}",
| "amount" : 100,
| "height" : 1,
| "status" : "canceled",
| "cancelHeight" : 1,
| "cancelTransactionId" : "${invoke.id()}"
| } ],
| "invokes" : [ ]
|}""".stripMargin)
(json \\ "trace").as[JsArray] should matchJson(
s"""
|[ {
| "type" : "verifier",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "result" : "success",
| "error" : null
|}, {
| "type" : "dApp",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "function" : "default",
| "args" : [ ],
| "invocations" : [ ],
| "result" : {
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ {
| "id" : "$leaseId1",
| "originTransactionId" : "${invoke.id()}",
| "sender" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "recipient" : "3NAgxLPGnw3RGv9JT6NTDaG5D1iLUehg2xd",
| "amount" : 100,
| "height" : 1,
| "status" : "active",
| "cancelHeight" : null,
| "cancelTransactionId" : null
| }, {
| "id" : "$leaseId2",
| "originTransactionId" : "${invoke.id()}",
| "sender" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "recipient" : "3MuVqVJGmFsHeuFni5RbjRmALuGCkEwzZtC",
| "amount" : 20,
| "height" : 1,
| "status" : "active",
| "cancelHeight" : null,
| "cancelTransactionId" : null
| } ],
| "leaseCancels" : [ {
| "id" : "$leaseCancelId",
| "originTransactionId" : "$leaseCancelId",
| "sender" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "recipient" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "amount" : 100,
| "height" : 1,
| "status" : "canceled",
| "cancelHeight" : 1,
| "cancelTransactionId" : "${invoke.id()}"
| } ],
| "invokes" : [ ]
| },
| "error" : null,
| "vars" : [ {
| "name" : "a",
| "type" : "BigInt",
| "value" : 6.703903964971298549787012499102923E+153
| }, {
| "name" : "test",
| "type" : "Int",
| "value" : 1
| } ]
|} ]
|
""".stripMargin
)
(json \\ "height").as[Int] shouldBe 1
}
}
"invoke tx with nested call" in {
val dAppPk = TxHelpers.defaultSigner.publicKey
val dAppAddress = dAppPk.toAddress
val invoke = TxHelpers.invoke(dAppPk.toAddress, func = Some("test1"))
val blockchain = createBlockchainStub { blockchain =>
(blockchain.balance _).when(*, *).returns(Long.MaxValue)
val (dAppScript, _) = ScriptCompiler
.compile(
s"""
|{-# STDLIB_VERSION 5 #-}
|{-# SCRIPT_TYPE ACCOUNT #-}
|{-# CONTENT_TYPE DAPP #-}
|
|@Callable(i)
|func test() = {
| strict a = parseBigIntValue("${PureContext.BigIntMax}")
| let test = 1
| if (test == 1)
| then [IntegerEntry("key", 1)]
| else []
|}
|
|@Callable(i)
|func test1() = {
| strict result = reentrantInvoke(this, "test", [], [])
| if (result == unit) then [] else []
|}
|""".stripMargin,
ScriptEstimatorV3(fixOverflow = true)
)
.explicitGet()
(blockchain.accountScript _)
.when(*)
.returns(
Some(
AccountScriptInfo(
dAppPk,
dAppScript,
0L,
Map(3 -> Seq("test", "test1").map(_ -> 0L).toMap)
)
)
)
(blockchain.hasAccountScript _).when(dAppAddress).returns(true)
}
val route = debugApiRoute
.copy(
blockchain = blockchain,
priorityPoolBlockchain = () => blockchain
)
.route
Post(routePath("/validate"), HttpEntity(ContentTypes.`application/json`, invoke.json().toString())) ~> route ~> check {
val json = Json.parse(responseAs[String])
(json \\ "valid").as[Boolean] shouldBe true
(json \\ "stateChanges").as[JsObject] should matchJson(s"""{
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ {
| "dApp" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "call" : {
| "function" : "test",
| "args" : [ ]
| },
| "payment" : [ ],
| "stateChanges" : {
| "data" : [ {
| "key" : "key",
| "type" : "integer",
| "value" : 1
| } ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ ]
| }
| } ]
|}
|""".stripMargin)
(json \\ "trace").as[JsArray] should matchJson(
s"""
|[ {
| "type" : "verifier",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "result" : "success",
| "error" : null
|}, {
| "type" : "dApp",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "function" : "test1",
| "args" : [ ],
| "invocations" : [ {
| "type" : "dApp",
| "id" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "function" : "test",
| "args" : [ ],
| "invocations" : [ ],
| "result" : {
| "data" : [ {
| "key" : "key",
| "type" : "integer",
| "value" : 1
| } ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ ]
| },
| "error" : null,
| "vars" : [ {
| "name" : "a",
| "type" : "BigInt",
| "value" : 6.703903964971298549787012499102923E+153
| }, {
| "name" : "test",
| "type" : "Int",
| "value" : 1
| } ]
| } ],
| "result" : {
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ ],
| "leaseCancels" : [ ],
| "invokes" : [ ]
| },
| "error" : null,
| "vars" : [ {
| "name" : "result",
| "type" : "Unit",
| "value" : { }
| } ]
|} ]
""".stripMargin
)
(json \\ "height").as[Int] shouldBe 1
}
}
"transfer transaction with asset fail" in {
val blockchain = createBlockchainStub { blockchain =>
(blockchain.balance _).when(*, *).returns(Long.MaxValue / 2)
val (assetScript, assetScriptComplexity) = ScriptCompiler.compile("false", ScriptEstimatorV3(fixOverflow = true)).explicitGet()
(blockchain.assetScript _).when(TestValues.asset).returns(Some(AssetScriptInfo(assetScript, assetScriptComplexity)))
(blockchain.assetDescription _)
.when(TestValues.asset)
.returns(
Some(
AssetDescription(
TestValues.asset.id,
TxHelpers.defaultSigner.publicKey,
null,
null,
0,
reissuable = true,
BigInt(1),
Height(1),
Some(AssetScriptInfo(assetScript, assetScriptComplexity)),
0,
nft = false
)
)
)
}
val route = routeWithBlockchain(blockchain)
val tx = TxHelpers.transfer(TxHelpers.defaultSigner, TxHelpers.defaultAddress, 1, TestValues.asset)
jsonPost(routePath("/validate"), tx.json()) ~> route ~> check {
val json = responseAs[JsObject]
(json \\ "trace").as[JsArray] should matchJson("""[ {
| "type" : "asset",
| "context" : "transfer",
| "id" : "5PjDJaGfSPJj4tFzMRCiuuAasKg5n8dJKXKenhuwZexx",
| "result" : "failure",
| "vars" : [ ],
| "error" : null
| } ]""".stripMargin)
(json \\ "valid").as[Boolean] shouldBe false
(json \\ "transaction").as[JsObject] shouldBe tx.json()
}
}
"txs with empty and small verifier" in {
val blockchain = createBlockchainStub { blockchain =>
val settings = TestFunctionalitySettings.Enabled.copy(
preActivatedFeatures = Map(
BlockchainFeatures.SmartAccounts.id -> 0,
BlockchainFeatures.SmartAssets.id -> 0,
BlockchainFeatures.Ride4DApps.id -> 0,
BlockchainFeatures.FeeSponsorship.id -> 0,
BlockchainFeatures.DataTransaction.id -> 0,
BlockchainFeatures.BlockReward.id -> 0,
BlockchainFeatures.BlockV5.id -> 0,
BlockchainFeatures.SynchronousCalls.id -> 0
),
featureCheckBlocksPeriod = 1,
blocksForFeatureActivation = 1
)
(() => blockchain.settings).when().returns(WavesSettings.default().blockchainSettings.copy(functionalitySettings = settings))
(() => blockchain.activatedFeatures).when().returns(settings.preActivatedFeatures)
(blockchain.balance _).when(*, *).returns(ENOUGH_AMT)
val script = ExprScript(TRUE).explicitGet()
def info(complexity: Int) = Some(AccountScriptInfo(TxHelpers.secondSigner.publicKey, script, complexity))
(blockchain.accountScript _).when(TxHelpers.defaultSigner.toAddress).returns(info(199))
(blockchain.accountScript _).when(TxHelpers.secondSigner.toAddress).returns(info(201))
(blockchain.accountScript _).when(TxHelpers.signer(3).toAddress).returns(None)
}
val route = routeWithBlockchain(blockchain)
val transferFee = 100000
val tx = TxHelpers.transfer(TxHelpers.defaultSigner, TxHelpers.secondSigner.toAddress, 1.waves, fee = transferFee, version = TxVersion.V2)
validatePost(tx) ~> route ~> check {
val json = Json.parse(responseAs[String])
(json \\ "valid").as[Boolean] shouldBe true
}
val tx2 = TxHelpers.transfer(TxHelpers.secondSigner, TestValues.address, 1.waves, fee = transferFee, version = TxVersion.V2)
validatePost(tx2) ~> route ~> check {
val json = Json.parse(responseAs[String])
(json \\ "valid").as[Boolean] shouldBe false
(json \\ "error").as[String] should include("Requires 400000 extra fee")
}
val tx3 = TxHelpers.transfer(TxHelpers.signer(3), TestValues.address, 1.waves, fee = transferFee, version = TxVersion.V2)
validatePost(tx3) ~> route ~> check {
val json = Json.parse(responseAs[String])
(json \\ "valid").as[Boolean] shouldBe true
}
}
}
routePath("/stateChanges/info/") - {
"provides lease and lease cancel actions stateChanges" in {
val invokeAddress = accountGen.sample.get.toAddress
val leaseId1 = ByteStr(bytes32gen.sample.get)
val leaseId2 = ByteStr(bytes32gen.sample.get)
val leaseCancelId = ByteStr(bytes32gen.sample.get)
val recipientAddress = accountGen.sample.get.toAddress
val recipientAlias = aliasGen.sample.get
val invoke = TxHelpers.invoke(invokeAddress)
val scriptResult = InvokeScriptResult(
leases = Seq(InvokeScriptResult.Lease(recipientAddress, 100, 1, leaseId1), InvokeScriptResult.Lease(recipientAlias, 200, 3, leaseId2)),
leaseCancels = Seq(LeaseCancel(leaseCancelId))
)
(() => blockchain.activatedFeatures).when().returning(Map.empty).anyNumberOfTimes()
(transactionsApi.transactionById _)
.when(invoke.id())
.returning(Some(TransactionMeta.Invoke(Height(1), invoke, succeeded = true, 0L, Some(scriptResult))))
.once()
(blockchain.leaseDetails _)
.when(leaseId1)
.returning(Some(LeaseDetails(invoke.sender, recipientAddress, 100, LeaseDetails.Status.Active, invoke.id(), 1)))
(blockchain.leaseDetails _)
.when(leaseId2)
.returning(Some(LeaseDetails(invoke.sender, recipientAddress, 100, LeaseDetails.Status.Active, invoke.id(), 1)))
(blockchain.leaseDetails _)
.when(leaseCancelId)
.returning(Some(LeaseDetails(invoke.sender, recipientAddress, 100, LeaseDetails.Status.Cancelled(2, Some(leaseCancelId)), invoke.id(), 1)))
(blockchain.transactionMeta _).when(invoke.id()).returning(Some(TxMeta(Height(1), true, 1L)))
Get(routePath(s"/stateChanges/info/${invoke.id()}")) ~> route ~> check {
status shouldEqual StatusCodes.OK
val json = (responseAs[JsObject] \\ "stateChanges").as[JsObject]
json should matchJson(s"""
|{
| "data" : [ ],
| "transfers" : [ ],
| "issues" : [ ],
| "reissues" : [ ],
| "burns" : [ ],
| "sponsorFees" : [ ],
| "leases" : [ {
| "id" : "$leaseId1",
| "originTransactionId" : "${invoke.id()}",
| "sender" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "recipient" : "$recipientAddress",
| "amount" : 100,
| "height" : 1,
| "status" : "active",
| "cancelHeight" : null,
| "cancelTransactionId" : null
| }, {
| "id" : "$leaseId2",
| "originTransactionId" : "${invoke.id()}",
| "sender" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "recipient" : "$recipientAddress",
| "amount" : 100,
| "height" : 1,
| "status" : "active",
| "cancelHeight" : null,
| "cancelTransactionId" : null
| } ],
| "leaseCancels" : [ {
| "id" : "$leaseCancelId",
| "originTransactionId" : "${invoke.id()}",
| "sender" : "3MtGzgmNa5fMjGCcPi5nqMTdtZkfojyWHL9",
| "recipient" : "$recipientAddress",
| "amount" : 100,
| "height" : 1,
| "status" : "canceled",
| "cancelHeight" : 2,
| "cancelTransactionId" : "$leaseCancelId"
| } ],
| "invokes" : [ ]
|}""".stripMargin)
}
}
}
private[this] def jsonPost(path: String, json: JsValue) = {
Post(path, HttpEntity(ContentTypes.`application/json`, json.toString()))
}
}
|
wavesplatform/Waves
|
node/src/test/scala/com/wavesplatform/http/DebugApiRouteSpec.scala
|
Scala
|
mit
| 46,464
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.producer
import java.util.Properties
import kafka.utils.VerifiableProperties
import org.apache.kafka.common.protocol.SecurityProtocol
import org.apache.kafka.common.config.SaslConfigs
class SyncProducerConfig private (val props: VerifiableProperties) extends SyncProducerConfigShared {
def this(originalProps: Properties) {
this(new VerifiableProperties(originalProps))
// no need to verify the property since SyncProducerConfig is supposed to be used internally
}
/** the broker to which the producer sends events */
val host = props.getString("host")
/** the port on which the broker is running */
val port = props.getInt("port")
}
trait SyncProducerConfigShared {
val props: VerifiableProperties
val sendBufferBytes = props.getInt("send.buffer.bytes", 100*1024)
/* the client application sending the producer requests */
val clientId = props.getString("client.id", SyncProducerConfig.DefaultClientId)
/*
* The number of acknowledgments the producer requires the leader to have received before considering a request complete.
* This controls the durability of the messages sent by the producer.
*
* request.required.acks = 0 - means the producer will not wait for any acknowledgement from the leader.
* request.required.acks = 1 - means the leader will write the message to its local log and immediately acknowledge
* request.required.acks = -1 - means the leader will wait for acknowledgement from all in-sync replicas before acknowledging the write
*/
val requestRequiredAcks = props.getShortInRange("request.required.acks", SyncProducerConfig.DefaultRequiredAcks,(-1,1))
/*
   * The ack timeout of the producer requests, in milliseconds. Value must be positive.
*/
val requestTimeoutMs = props.getIntInRange("request.timeout.ms", SyncProducerConfig.DefaultAckTimeoutMs,
(1, Integer.MAX_VALUE))
val securityProtocol = props.getString("security.protocol", SyncProducerConfig.DefaultSecurityProtocol)
val saslKerberosKinitCmd = props.getString(SaslConfigs.SASL_KERBEROS_KINIT_CMD, SaslConfigs.DEFAULT_KERBEROS_KINIT_CMD)
val saslKerberosTicketRenewWindowFactor = props.getString(SaslConfigs.SASL_KERBEROS_TICKET_RENEW_WINDOW_FACTOR, SaslConfigs.DEFAULT_KERBEROS_TICKET_RENEW_WINDOW_FACTOR.toString)
val saslKerberosTicketRenewJitter = props.getString(SaslConfigs.SASL_KERBEROS_TICKET_RENEW_JITTER, SaslConfigs.DEFAULT_KERBEROS_TICKET_RENEW_JITTER.toString)
val saslKerberosMinTimeBeforeRelogin = props.getString(SaslConfigs.SASL_KERBEROS_MIN_TIME_BEFORE_RELOGIN, SaslConfigs.DEFAULT_KERBEROS_MIN_TIME_BEFORE_RELOGIN.toString)
}
object SyncProducerConfig {
val DefaultClientId = ""
val DefaultRequiredAcks : Short = 0
val DefaultAckTimeoutMs = 10000
val DefaultSecurityProtocol = SecurityProtocol.PLAINTEXT.toString
}
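// Illustrative sketch added by the editor (not part of the original file): building the config
// from Properties; the host and port values are placeholders.
//
//   import java.util.Properties
//
//   val props = new Properties()
//   props.put("host", "broker-1.example.com")
//   props.put("port", "9092")
//   props.put("request.required.acks", "1") // leader-only acknowledgement, see the comment above
//   val config = new SyncProducerConfig(props)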
|
OpenPOWER-BigData/HDP-kafka
|
core/src/main/scala/kafka/producer/SyncProducerConfig.scala
|
Scala
|
apache-2.0
| 3,627
|
package org.odfi.indesign.core.module.webdraw.superchart.formats
import java.net.URL
import scala.io.Source
import java.net.URI
import scala.collection.mutable.ArrayBuffer
class CSVFormat(var source: URI) {
var series = new scala.collection.mutable.ArrayBuffer[(String, scala.collection.mutable.ArrayBuffer[Double])]
  // Index of the parametric series
var parametricSourceIndex: Option[Int] = None
var parametricSeries: Option[
scala.collection.mutable.ArrayBuffer[(Double, ArrayBuffer[(String, ArrayBuffer[(Double, Double)] )]) ]] = None
var pointsCount = 0
def isParametric = this.parametricSourceIndex.isDefined
def getParametricValuesImmutable = {
this.getParametricValues.toIterable
}
def getParametricValues = {
parametricSeries.getOrElse {
println(s"Sorting Parametric")
//-- Get Number of parameter values
var parametersCount = this.series(this.parametricSourceIndex.get)._2.distinct.size
      //-- Prepare a list with the parameter value as the first element and the per-series (index, value) pairs as the second
var seriesByParameter = this.series(this.parametricSourceIndex.get)._2.distinct.map {
pv =>
          // For each parameter, create the corresponding lists for each series in the document
var sortedSeries = series.drop(2).map {
case (name,values) => (name,new scala.collection.mutable.ArrayBuffer[(Double, Double)]())
}
(pv, sortedSeries)
}
series.drop(2).zipWithIndex.foreach {
case ((name, values),serieIndex) =>
values.grouped(parametersCount).zipWithIndex.foreach {
case (valuesForParameters, groupIteration) =>
valuesForParameters.zipWithIndex.foreach {
case (v, localIteration) =>
var globalIndex = (groupIteration * parametersCount) + localIteration
seriesByParameter(localIteration)._2.apply(serieIndex)._2 += (series(0)._2.apply(globalIndex) -> v)
}
}
}
/*var first = seriesByParameter.apply(0)
println(s"For: ${first._1}")
first._2.foreach {
case (i,v)=>
println(s"$i -> $v");
}
*/
/*this.series(this.parametricSourceIndex.get)._2.grouped(parametersCount).foreach {
parametersGroup =>
parametersGroup.foreach {
parameterValue =>
}
}
this.series(this.parametricSourceIndex.get)._2.distinct.zipWithIndex.map {
case (parameterValue, iteration) =>
var pointIndexValue = series(0)._2.apply(iteration)
//-- For each remaining series:
//-- Take values parametersCount times and add each value to matching series in rebuild list
series.drop(2).foreach {
case (name, values) =>
values.grouped(parametersCount).foreach {
gv =>
gv.zipWithIndex.foreach {
case (v, i) => seriesByParameter(i)._2 += (pointIndexValue -> v)
}
}
}
}*/
parametricSeries = Some(seriesByParameter)
seriesByParameter
}
}
def parse = {
//-- Get Content
var src = Source.fromFile(source)
var reader = src.bufferedReader()
//var lines = src.getLines
//println(s"Number of lines: "+lines.size)
//-- Get Header
var headers = reader.readLine().split(",")
headers.foreach {
h =>
// println(s"Found header: " + h)
series += (h.trim -> new scala.collection.mutable.ArrayBuffer[Double]())
}
var line = reader.readLine
while (line != null) {
line.split(",").zipWithIndex.foreach {
case (vString, i) =>
series.apply(i)._2 += vString.trim.toDouble * 100.0 / 100.0
}
line = reader.readLine()
}
//-- Parse Rest
/*reader.re
lines.drop(1).foreach {
line =>
println(s"Found Line: "+line)
line.split(",").zipWithIndex.foreach {
case (vString,i) =>
series.apply(i)._2.append(vString.toDouble)
}
}*/
//-- Analyse
//----------------------
println(s"Done Parsing")
// Parametric
//-------------------
//-- Get Index Series
println(s"Index column is: " + series(0)._1)
var groupByIndex = series(0)._2.groupBy { index => index }
// For every index, the number of times it is repeated
var indexCounts = groupByIndex.values.map { v => v.size }
var maxAndMin = (indexCounts.max, indexCounts.min)
maxAndMin match {
//-- Indexes are repeated, and more than one -> parametric
case (max, min) if (max > 1 && min == max) =>
println(s"Parametric")
parametricSourceIndex = Some(1)
pointsCount = max
// Checks
if (series.size < 3) {
sys.error("Parametric File requires at least 3 columns: Index,Parameter values,Measurement Values")
}
case (max, min) =>
println(s"Normal")
pointsCount = series(0)._2.size
}
}
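  // Illustrative sketch added by the editor (not part of the original file): the column layout the
  // parsing logic above appears to expect for a parametric file -- an index column, a parameter
  // column whose distinct values repeat for every index, then one or more measurement columns.
  // Column names and numbers are hypothetical.
  //
  //   time, vdd, current
  //   0.0,  1.0, 0.12
  //   0.0,  1.2, 0.15
  //   1.0,  1.0, 0.13
  //   1.0,  1.2, 0.16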
}
|
opendesignflow/indesign
|
indesign-webdraw/src/main/scala/org/odfi/indesign/core/module/webdraw/superchart/formats/CSVFormat.scala
|
Scala
|
gpl-3.0
| 5,079
|
package com.s21g.rubyist
import java.security.MessageDigest
object Digest {
object MD5 {
// code is derived from
// http://code-redefined.blogspot.com/2009/05/md5-sum-in-scala.html
def hexdigest(str:String): String = {
val md5 = MessageDigest.getInstance("MD5")
md5.reset()
md5.update(str.getBytes)
return md5.digest().map(0xFF & _).map { "%02x".format(_) }.foldLeft(""){_ + _}
}
}
}
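// Illustrative sketch added by the editor (not part of the original file):
//
//   Digest.MD5.hexdigest("hello")  // "5d41402abc4b2a76b9719d911017c592"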
|
s21g/rubyist
|
src/main/scala/com/s21g/rubyist/Digest.scala
|
Scala
|
mit
| 430
|
package org.jetbrains.plugins.scala
package codeInspection
package suppression
import java.util
import com.intellij.codeInsight.daemon.HighlightDisplayKey
import com.intellij.codeInsight.daemon.impl.actions.SuppressByCommentFix
import com.intellij.codeInspection.{InspectionsBundle, SuppressionUtil, SuppressionUtilCore}
import com.intellij.openapi.project.Project
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.psi.{PsiComment, PsiElement}
import org.jetbrains.annotations.Nls
import org.jetbrains.plugins.scala.ScalaLanguage
import org.jetbrains.plugins.scala.lang.psi.api.ScalaPsiElement
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScDocCommentOwner, ScTypeDefinition}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createNewLine
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaRefactoringUtil
import scala.jdk.CollectionConverters._
/**
* @author Nikolay.Tropin
*/
abstract class ScalaSuppressByLineCommentFix(key: HighlightDisplayKey) extends SuppressByCommentFix(key, classOf[ScalaPsiElement]) {
override def createSuppression(project: Project, element: PsiElement, container: PsiElement): Unit = {
val text: String = SuppressionUtilCore.SUPPRESS_INSPECTIONS_TAG_NAME + " " + key.getID
val comment: PsiComment = SuppressionUtil.createComment(project, text, ScalaLanguage.INSTANCE)
val newLine = createNewLine()(element.getManager)
container match {
case owner: ScDocCommentOwner if owner.docComment.isDefined =>
val docComment = owner.docComment.get
container.addAfter(comment, docComment)
container.addAfter(newLine, docComment)
case owner: ScCommentOwner =>
val firstChild = owner.getFirstChild
owner.addBefore(comment, firstChild)
owner.addBefore(newLine, firstChild)
case _ =>
val parent = container.getParent
parent.addBefore(comment, container)
parent.addBefore(newLine, container)
}
}
override def getCommentsFor(container: PsiElement): util.List[_ <: PsiElement] = {
ScalaSuppressableInspectionTool.commentsFor(container).asJava
}
}
class ScalaSuppressForStatementFix(key: HighlightDisplayKey) extends ScalaSuppressByLineCommentFix(key) {
override def getText: String = InspectionsBundle.message("suppress.inspection.statement")
override def getContainer(context: PsiElement): PsiElement = ScalaRefactoringUtil.findEnclosingBlockStatement(context) match {
case None => null
case Some(_: ScDefinitionWithAssignment) => null
case Some(stmt) => stmt
}
}
abstract class ScalaSuppressForDefinitionFix(key: HighlightDisplayKey, @Nls text: String, defClasses: Class[_ <: PsiElement]*)
extends ScalaSuppressByLineCommentFix(key) {
override def getText: String = text
override def getContainer(context: PsiElement): PsiElement = PsiTreeUtil.getParentOfType(context, defClasses: _*)
}
class ScalaSuppressForClassFix(key: HighlightDisplayKey)
extends ScalaSuppressForDefinitionFix(key, InspectionsBundle.message("suppress.inspection.class"), classOf[ScTypeDefinition])
class ScalaSuppressForFunctionFix(key: HighlightDisplayKey)
extends ScalaSuppressForDefinitionFix(key, ScalaInspectionBundle.message("suppress.inspection.function"), classOf[ScFunctionDefinition], classOf[ScMacroDefinition])
class ScalaSuppressForVariableFix(key: HighlightDisplayKey)
extends ScalaSuppressForDefinitionFix(key, ScalaInspectionBundle.message("suppress.inspection.variable"), classOf[ScVariableDefinition], classOf[ScPatternDefinition])
class ScalaSuppressForTypeAliasFix(key: HighlightDisplayKey)
extends ScalaSuppressForDefinitionFix(key, ScalaInspectionBundle.message("suppress.inspection.typeAlias"), classOf[ScTypeAliasDefinition])
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/suppression/ScalaSuppressForStatementFix.scala
|
Scala
|
apache-2.0
| 3,862
|
package exercises.ch03
object Ex14 {
def append[A](as: List[A], x: A): List[A] = List.foldRight(as, List(x))((a,b) => Cons(a, b))
def main(args: Array[String]): Unit = {
println(append(List(), 1))
println(append(List(1), 2))
println(append(List(1,2), 3))
println(append(List(1,2,3), 4))
}
}
|
VladMinzatu/fpinscala-exercises
|
src/main/scala/exercises/ch03/Ex14.scala
|
Scala
|
mit
| 316
|
package model.dto
import model.Sponsor
import model.Resource
import org.squeryl.dsl.ast.LogicalBoolean
import org.squeryl.PrimitiveTypeMode._
case class SponsorWithLogo(sponsor: Sponsor, logo: Option[Resource]) {
def this() = this(new Sponsor(), None)
}
object SponsorWithLogo {
private def sponsorsWithLogo(whereLogic: (Sponsor, Option[Resource]) => LogicalBoolean): Seq[SponsorWithLogo] = {
join(model.Sponsor.sponsors, model.Resource.resources.leftOuter)((s, r) =>
where(whereLogic(s, r))
select (s, r)
orderBy (s.order)
on (s.logoResourceId === r.map(_.id)))
.toSeq.map { t => new SponsorWithLogo(t._1, t._2) }.toSeq
}
def lookup(id: Long): Option[SponsorWithLogo] = {
sponsorsWithLogo((s, r) => s.id === id).headOption
}
def hackathonSponsors(hackathonId: Long): Seq[SponsorWithLogo] = {
sponsorsWithLogo((s, r) => (s.hackathonId === hackathonId))
}
def portalSponsors(): Seq[SponsorWithLogo] = {
sponsorsWithLogo((s, r) => 1 === 1)
}
}
|
lukaszbudnik/hackaton-portal
|
app/model/dto/SponsorWithLogo.scala
|
Scala
|
apache-2.0
| 1,021
|
package com.rainm.scanote.ui
import android.os.Bundle
import android.support.v7.app.AppCompatActivity
import android.text.{Editable, TextWatcher}
import android.view.{MenuItem, Menu, View}
import android.view.View.OnClickListener
import android.widget.ListView
import com.rainm.scanote.db.DBManager
import com.rainm.scanote.{R, TR, TypedFindView}
/**
* Created by hackeris on 16/1/14.
*/
class SearchActivity extends AppCompatActivity with TypedFindView {
lazy val searchResult = findViewById(R.id.list_search_result).asInstanceOf[ListView]
override def onCreate(bundle: Bundle): Unit = {
super.onCreate(bundle)
setContentView(R.layout.search_note)
val toolbar = findView(TR.toolbar)
setSupportActionBar(toolbar)
getSupportActionBar.setDisplayHomeAsUpEnabled(true)
getSupportActionBar.setDisplayShowHomeEnabled(true)
toolbar.setNavigationOnClickListener(new OnClickListener {
override def onClick(v: View): Unit = {
finish()
}
})
doSearchAction()
setOnInputSearch()
}
def setOnInputSearch(): Unit = {
findView(TR.text_key_words).addTextChangedListener(new TextWatcher {
override def beforeTextChanged(s: CharSequence, start: Int, count: Int, after: Int): Unit = {}
override def onTextChanged(s: CharSequence, start: Int, before: Int, count: Int): Unit = doSearchAction()
override def afterTextChanged(s: Editable): Unit = doSearchAction()
})
}
def doSearchAction(): Unit = {
val keyWord = findView(TR.text_key_words).getText.toString
val manager = new DBManager(this)
val notes = manager.searchForSimpleNote(keyWord)
manager.close()
val adapter = new NotesListAdapter(this)
adapter.setNotes(notes)
searchResult.setAdapter(adapter)
}
override def onCreateOptionsMenu(menu: Menu): Boolean = {
getMenuInflater.inflate(R.menu.menu_search, menu)
true
}
override def onOptionsItemSelected(item: MenuItem): Boolean = {
item.getItemId match {
case R.id.action_clear => {
findView(TR.text_key_words).setText("")
return true
}
case _ =>
}
super.onOptionsItemSelected(item)
}
}
|
hackeris/ScaNote
|
src/main/scala/com/rainm/scanote/ui/SearchActivity.scala
|
Scala
|
apache-2.0
| 2,173
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.logical
import org.apache.flink.table.api.{BatchTableEnvironment, StreamTableEnvironment, TableEnvironment}
import org.apache.flink.table.expressions.ExpressionUtils.{isRowCountLiteral, isRowtimeAttribute, isTimeAttribute, isTimeIntervalLiteral}
import org.apache.flink.table.expressions._
import org.apache.flink.table.typeutils.TypeCheckUtils.{isTimePoint, isLong}
import org.apache.flink.table.validate.{ValidationFailure, ValidationResult, ValidationSuccess}
// ------------------------------------------------------------------------------------------------
// Tumbling group windows
// ------------------------------------------------------------------------------------------------
case class TumblingGroupWindow(
alias: Expression,
timeField: Expression,
size: Expression)
extends LogicalWindow(
alias,
timeField) {
override def resolveExpressions(resolve: (Expression) => Expression): LogicalWindow =
TumblingGroupWindow(
resolve(alias),
resolve(timeField),
resolve(size))
override def validate(tableEnv: TableEnvironment): ValidationResult =
super.validate(tableEnv).orElse(
tableEnv match {
// check size
case _ if !isTimeIntervalLiteral(size) && !isRowCountLiteral(size) =>
ValidationFailure(
"Tumbling window expects size literal of type Interval of Milliseconds " +
"or Interval of Rows.")
// check time attribute
case _: StreamTableEnvironment if !isTimeAttribute(timeField) =>
ValidationFailure(
"Tumbling window expects a time attribute for grouping in a stream environment.")
case _: BatchTableEnvironment
if !(isTimePoint(timeField.resultType) || isLong(timeField.resultType)) =>
ValidationFailure(
"Tumbling window expects a time attribute for grouping in a stream environment.")
// check row intervals on event-time
case _: StreamTableEnvironment
if isRowCountLiteral(size) && isRowtimeAttribute(timeField) =>
ValidationFailure(
"Event-time grouping windows on row intervals in a stream environment " +
"are currently not supported.")
case _ =>
ValidationSuccess
}
)
override def toString: String = s"TumblingGroupWindow($alias, $timeField, $size)"
}
// ------------------------------------------------------------------------------------------------
// Sliding group windows
// ------------------------------------------------------------------------------------------------
case class SlidingGroupWindow(
alias: Expression,
timeField: Expression,
size: Expression,
slide: Expression)
extends LogicalWindow(
alias,
timeField) {
override def resolveExpressions(resolve: (Expression) => Expression): LogicalWindow =
SlidingGroupWindow(
resolve(alias),
resolve(timeField),
resolve(size),
resolve(slide))
override def validate(tableEnv: TableEnvironment): ValidationResult =
super.validate(tableEnv).orElse(
tableEnv match {
// check size
case _ if !isTimeIntervalLiteral(size) && !isRowCountLiteral(size) =>
ValidationFailure(
"Sliding window expects size literal of type Interval of Milliseconds " +
"or Interval of Rows.")
// check slide
case _ if !isTimeIntervalLiteral(slide) && !isRowCountLiteral(slide) =>
ValidationFailure(
"Sliding window expects slide literal of type Interval of Milliseconds " +
"or Interval of Rows.")
// check same type of intervals
case _ if isTimeIntervalLiteral(size) != isTimeIntervalLiteral(slide) =>
ValidationFailure("Sliding window expects same type of size and slide.")
// check time attribute
case _: StreamTableEnvironment if !isTimeAttribute(timeField) =>
ValidationFailure(
"Sliding window expects a time attribute for grouping in a stream environment.")
case _: BatchTableEnvironment
if !(isTimePoint(timeField.resultType) || isLong(timeField.resultType)) =>
ValidationFailure(
"Sliding window expects a time attribute for grouping in a stream environment.")
// check row intervals on event-time
case _: StreamTableEnvironment
if isRowCountLiteral(size) && isRowtimeAttribute(timeField) =>
ValidationFailure(
"Event-time grouping windows on row intervals in a stream environment " +
"are currently not supported.")
case _ =>
ValidationSuccess
}
)
override def toString: String = s"SlidingGroupWindow($alias, $timeField, $size, $slide)"
}
// ------------------------------------------------------------------------------------------------
// Session group windows
// ------------------------------------------------------------------------------------------------
case class SessionGroupWindow(
alias: Expression,
timeField: Expression,
gap: Expression)
extends LogicalWindow(
alias,
timeField) {
override def resolveExpressions(resolve: (Expression) => Expression): LogicalWindow =
SessionGroupWindow(
resolve(alias),
resolve(timeField),
resolve(gap))
override def validate(tableEnv: TableEnvironment): ValidationResult =
super.validate(tableEnv).orElse(
tableEnv match {
// check size
case _ if !isTimeIntervalLiteral(gap) =>
ValidationFailure(
"Session window expects size literal of type Interval of Milliseconds.")
// check time attribute
case _: StreamTableEnvironment if !isTimeAttribute(timeField) =>
ValidationFailure(
"Session window expects a time attribute for grouping in a stream environment.")
case _: BatchTableEnvironment
if !(isTimePoint(timeField.resultType) || isLong(timeField.resultType)) =>
ValidationFailure(
"Session window expects a time attribute for grouping in a stream environment.")
case _ =>
ValidationSuccess
}
)
override def toString: String = s"SessionGroupWindow($alias, $timeField, $gap)"
}
|
hongyuhong/flink
|
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/logical/groupWindows.scala
|
Scala
|
apache-2.0
| 7,117
|
/**
* Copyright (C) 2012 - 101loops.com <dev@101loops.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.crashnote.test.web.defs
import org.specs2.specification.Scope
import com.crashnote.core.Lifecycle
import com.crashnote.core.model.log.LogReport
import com.crashnote.core.model.data.DataObject
import com.crashnote.test.base.defs.BaseMockSpec
import com.crashnote.test.web.util.FactoryUtil
abstract class TargetMockSpec[T: Manifest]
extends BaseMockSpec[T] with WebEnv with FactoryUtil {
// ==== CONTEXTS
// stage #1: config the target
def configure(config: C): T
class Configured(fns: (C) => _*) extends Scope {
doSetup()
def doSetup() {
val m_conf = mockConfig()
fns.foreach(fn => fn.apply(m_conf))
target = configure(m_conf)
}
}
// stage #2: mock the target's components
def mock() {}
class Mock(fns: (C) => _*) extends Configured(fns: _*) {
mock()
}
// stage #3: start the target
def start() {
if (target.isInstanceOf[Lifecycle])
target.asInstanceOf[Lifecycle].start()
afterStarted()
}
def afterStarted() {}
class Started(fns: (C) => _*) extends Mock(fns: _*) {
start()
}
// ==== MATCHERS
def anyReport = any[LogReport]
def anyDataObj = any[DataObject]
// ==== CONFIGS
lazy val DISABLED = (config: C) => config.isEnabled returns false
lazy val ENABLED = (config: C) => config.isEnabled returns true
lazy val SYNC = (config: C) => config.isSync returns true
lazy val ASYNC = (config: C) => config.isSync returns false
lazy val DEBUG = (config: C) => config.isDebug returns true
lazy val WITH_IP = (config: C) => config.getHashRemoteIP returns false
lazy val WITHOUT_HEADER = (config: C) => config.getSkipHeaderData returns true
}
|
crashnote/crashnote-java
|
modules/web/src/test/scala/com/crashnote/test/web/defs/TargetMockSpec.scala
|
Scala
|
apache-2.0
| 2,289
|
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package xml
import com.intellij.psi.xml.XmlTokenType
import org.jetbrains.plugins.scala.lang.lexer.ScalaXmlTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
/**
* @author Alexander Podkhalyuzin
* Date: 18.04.2008
*/
/*
* STag ::= < Name {Attribute} [S] >
*/
object STag {
def parse(builder: ScalaPsiBuilder): Boolean = {
val tagMarker = builder.mark()
builder.getTokenType match {
case ScalaXmlTokenTypes.XML_START_TAG_START =>
builder.advanceLexer()
case _ =>
tagMarker.drop()
return false
}
builder.getTokenType match {
case ScalaXmlTokenTypes.XML_NAME =>
builder.advanceLexer()
case _ => builder error ErrMsg("xml.name.expected")
}
while (Attribute.parse(builder)) {}
builder.getTokenType match {
case XmlTokenType.XML_WHITE_SPACE => builder.advanceLexer()
case _ =>
}
builder.getTokenType match {
case ScalaXmlTokenTypes.XML_TAG_END =>
builder.advanceLexer()
case _ =>
builder error ErrMsg("xml.tag.end.expected")
}
tagMarker.done(ScalaElementTypes.XML_START_TAG)
true
}
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/parser/parsing/xml/STag.scala
|
Scala
|
apache-2.0
| 1,250
|
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin.storage
import com.twitter.util.{Duration, Future}
import com.twitter.zipkin.common.Span
trait Storage {
/**
* Close the storage
*/
def close()
/**
* Store the span in the underlying storage for later retrieval.
* @return a future for the operation
*/
def storeSpan(span: Span) : Future[Unit]
/**
* Set the ttl of a trace. Used to store a particular trace longer than the
* default. It must be oh so interesting!
*/
def setTimeToLive(traceId: String, ttl: Duration): Future[Unit]
/**
* Get the time to live for a specific trace.
* If there are multiple ttl entries for one trace, pick the lowest one.
*/
def getTimeToLive(traceId: String): Future[Duration]
def tracesExist(traceIds: Seq[String]): Future[Set[String]]
/**
* Get the available trace information from the storage system.
* Spans in trace should be sorted by the first annotation timestamp
* in that span. First event should be first in the spans list.
*
* The return list will contain only spans that have been found, thus
* the return list may not match the provided list of ids.
*/
def getSpansByTraceIds(traceIds: Seq[String]): Future[Seq[Seq[Span]]]
def getSpansByTraceId(traceId: String): Future[Seq[Span]]
/**
* How long do we store the data before we delete it? In seconds.
*/
def getDataTimeToLive: Int
}
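// Illustrative usage sketch (not part of the original file): how a caller might
// persist a span and then read the whole trace back. The concrete Storage
// implementation, the Span instance and the trace id are assumed to come from elsewhere.
object StorageUsageSketch {
  def storeAndReload(storage: Storage, span: Span, traceId: String): Future[Seq[Span]] =
    storage.storeSpan(span).flatMap { _ =>
      storage.getSpansByTraceId(traceId)
    }
}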
|
cogitate/twitter-zipkin-uuid
|
zipkin-common/src/main/scala/com/twitter/zipkin/storage/Storage.scala
|
Scala
|
apache-2.0
| 2,006
|
package play.api.libs.iteratee
import play.api.libs.iteratee.Execution.Implicits.{ defaultExecutionContext => dec }
import play.api.libs.iteratee.internal.{ executeIteratee, executeFuture }
import scala.language.reflectiveCalls
import scala.concurrent.{ ExecutionContext, Future }
/**
 * Combines the roles of an Iteratee[From] and an Enumerator[To]. This allows adapting streams by modifying the input
 * produced by an Enumerator, or the input consumed by an Iteratee.
*/
trait Enumeratee[From, To] {
parent =>
/**
* Create a new Iteratee that feeds its input, potentially modifying it along the way, into the inner Iteratee, and
* produces that Iteratee as its result.
*/
def applyOn[A](inner: Iteratee[To, A]): Iteratee[From, Iteratee[To, A]]
/**
* Alias for `applyOn`
*/
def apply[A](inner: Iteratee[To, A]): Iteratee[From, Iteratee[To, A]] = applyOn[A](inner)
/**
* Transform the given iteratee into an iteratee that accepts the input type that this enumeratee maps.
*/
def transform[A](inner: Iteratee[To, A]): Iteratee[From, A] = apply(inner).joinI
/**
* Alias for `transform`
*/
def &>>[A](inner: Iteratee[To, A]): Iteratee[From, A] = transform(inner)
/**
* Alias for `apply`
*/
def &>[A](inner: Iteratee[To, A]): Iteratee[From, Iteratee[To, A]] = apply(inner)
/**
* Compose this Enumeratee with another Enumeratee
*/
def compose[To2](other: Enumeratee[To, To2]): Enumeratee[From, To2] = {
new Enumeratee[From, To2] {
def applyOn[A](iteratee: Iteratee[To2, A]): Iteratee[From, Iteratee[To2, A]] = {
parent.applyOn(other.applyOn(iteratee)).joinI
}
}
}
/**
* Compose this Enumeratee with another Enumeratee
*/
def ><>[To2](other: Enumeratee[To, To2]): Enumeratee[From, To2] = compose(other)
/**
* Compose this Enumeratee with another Enumeratee, concatenating any input left by both Enumeratees when they
* are done.
*/
def composeConcat[X](other: Enumeratee[To, To])(implicit p: To => scala.collection.TraversableLike[X, To], bf: scala.collection.generic.CanBuildFrom[To, X, To]): Enumeratee[From, To] = {
new Enumeratee[From, To] {
def applyOn[A](iteratee: Iteratee[To, A]): Iteratee[From, Iteratee[To, A]] = {
parent.applyOn(other.applyOn(iteratee).joinConcatI)
}
}
}
/**
* Alias for `composeConcat`
*/
def >+>[X](other: Enumeratee[To, To])(implicit p: To => scala.collection.TraversableLike[X, To], bf: scala.collection.generic.CanBuildFrom[To, X, To]): Enumeratee[From, To] = composeConcat[X](other)
}
/**
* @define paramEcSingle @param ec The context to execute the supplied function with. The context is prepared on the calling thread before being used.
* @define paramEcMultiple @param ec The context to execute the supplied functions with. The context is prepared on the calling thread before being used.
*/
object Enumeratee {
/**
* An Enumeratee that checks to ensure that the passed in Iteratee is not done before doing any work.
*/
trait CheckDone[From, To] extends Enumeratee[From, To] {
def continue[A](k: Input[To] => Iteratee[To, A]): Iteratee[From, Iteratee[To, A]]
def applyOn[A](it: Iteratee[To, A]): Iteratee[From, Iteratee[To, A]] =
it.pureFlatFold[From, Iteratee[To, A]] {
case Step.Cont(k) => continue(k)
case _ => Done(it, Input.Empty)
}(dec)
}
/**
   * Flatten a [[scala.concurrent.Future]] of [[play.api.libs.iteratee.Enumeratee]] into an Enumeratee
*
* @param futureOfEnumeratee a future of enumeratee
*/
def flatten[From, To](futureOfEnumeratee: Future[Enumeratee[From, To]]) = new Enumeratee[From, To] {
def applyOn[A](it: Iteratee[To, A]): Iteratee[From, Iteratee[To, A]] =
Iteratee.flatten(futureOfEnumeratee.map(_.applyOn[A](it))(dec))
}
/**
* Create an Enumeratee that zips two Iteratees together.
*
* Each input gets passed to each Iteratee, and the result is a tuple of both of their results.
*
* If either Iteratee encounters an error, the result will be an error.
*
* The Enumeratee will continue consuming input until both inner Iteratees are done. If one inner Iteratee finishes
   * before the other, the result of that Iteratee is held, and the other continues by itself until it too is finished.
*/
def zip[E, A, B](inner1: Iteratee[E, A], inner2: Iteratee[E, B]): Iteratee[E, (A, B)] = zipWith(inner1, inner2)((_, _))(dec)
/**
* Create an Enumeratee that zips two Iteratees together, using the passed in zipper function to combine the results
* of the two.
*
* @param inner1 The first Iteratee to combine.
* @param inner2 The second Iteratee to combine.
* @param zipper Used to combine the results of each Iteratee.
* $paramEcSingle
*/
def zipWith[E, A, B, C](inner1: Iteratee[E, A], inner2: Iteratee[E, B])(zipper: (A, B) => C)(implicit ec: ExecutionContext): Iteratee[E, C] = {
val pec = ec.prepare()
import Execution.Implicits.{ defaultExecutionContext => ec } // Shadow ec to make this the only implicit EC in scope
def getNext(it1: Iteratee[E, A], it2: Iteratee[E, B]): Iteratee[E, C] = {
val eventuallyIter =
for (
(a1, it1_) <- getInside(it1);
(a2, it2_) <- getInside(it2)
) yield checkDone(a1, a2) match {
case Left((msg, in)) => Error(msg, in)
case Right(None) => Cont(step(it1_, it2_))
case Right(Some(Left(Left(a)))) => it2_.map(b => zipper(a, b))(pec)
case Right(Some(Left(Right(b)))) => it1_.map(a => zipper(a, b))(pec)
case Right(Some(Right(((a, b), e)))) => executeIteratee(Done(zipper(a, b), e))(pec)
}
Iteratee.flatten(eventuallyIter)
}
def step(it1: Iteratee[E, A], it2: Iteratee[E, B])(in: Input[E]) = {
Iteratee.flatten(
for (
it1_ <- it1.feed(in);
it2_ <- it2.feed(in)
) yield getNext(it1_, it2_))
}
def getInside[T](it: Iteratee[E, T]): Future[(Option[Either[(String, Input[E]), (T, Input[E])]], Iteratee[E, T])] = {
it.pureFold {
case Step.Done(a, e) => Some(Right((a, e)))
case Step.Cont(k) => None
case Step.Error(msg, e) => Some(Left((msg, e)))
}(dec).map(r => (r, it))(dec)
}
def checkDone(x: Option[Either[(String, Input[E]), (A, Input[E])]], y: Option[Either[(String, Input[E]), (B, Input[E])]]): Either[(String, Input[E]), Option[Either[Either[A, B], ((A, B), Input[E])]]] =
(x, y) match {
        case (Some(Right((a, e1))), Some(Right((b, e2)))) => Right(Some(Right(((a, b), e1 /* FIXME: should pick the smaller of e1 and e2 here */ ))))
case (Some(Left((msg, e))), _) => Left((msg, e))
case (_, Some(Left((msg, e)))) => Left((msg, e))
case (Some(Right((a, _))), None) => Right(Some(Left(Left(a))))
case (None, Some(Right((b, _)))) => Right(Some(Left(Right(b))))
case (None, None) => Right(None)
}
getNext(inner1, inner2)
}
/**
* A partially-applied function returned by the `mapInput` method.
*/
trait MapInput[From] {
/**
* @param f Used to transform each input element.
* $paramEcSingle
*/
def apply[To](f: Input[From] => Input[To])(implicit ec: ExecutionContext): Enumeratee[From, To]
}
/**
* Create an Enumeratee that transforms its input using the given function.
*
* This is like the `map` function, except that it allows the Enumeratee to, for example, send EOF to the inner
* iteratee before EOF is encountered.
*/
def mapInput[From] = new MapInput[From] {
def apply[To](f: Input[From] => Input[To])(implicit ec: ExecutionContext) = new CheckDone[From, To] {
val pec = ec.prepare()
def step[A](k: K[To, A]): K[From, Iteratee[To, A]] = {
case in @ (Input.El(_) | Input.Empty) => new CheckDone[From, To] {
def continue[A](k: K[To, A]) = Cont(step(k))
} &> Iteratee.flatten(Future(f(in))(pec).map(in => k(in))(dec))
case Input.EOF => Done(Cont(k), Input.EOF)
}
def continue[A](k: K[To, A]) = Cont(step(k))
}
}
/**
* A partially-applied function returned by the `mapConcatInput` method.
*/
trait MapConcatInput[From] {
/**
* @param f Used to transform each input element into a sequence of inputs.
* $paramEcSingle
*/
def apply[To](f: From => Seq[Input[To]])(implicit ec: ExecutionContext): Enumeratee[From, To]
}
/**
* Create an enumeratee that transforms its input into a sequence of inputs for the target iteratee.
*/
def mapConcatInput[From] = new MapConcatInput[From] {
def apply[To](f: From => Seq[Input[To]])(implicit ec: ExecutionContext) = mapFlatten[From](in => Enumerator.enumerateSeq2(f(in)))(ec)
}
/**
* A partially-applied function returned by the `mapConcat` method.
*/
trait MapConcat[From] {
/**
* @param f Used to transform each input element into a sequence of input elements.
* $paramEcSingle
*/
def apply[To](f: From => Seq[To])(implicit ec: ExecutionContext): Enumeratee[From, To]
}
/**
* Create an Enumeratee that transforms its input elements into a sequence of input elements for the target Iteratee.
*/
def mapConcat[From] = new MapConcat[From] {
def apply[To](f: From => Seq[To])(implicit ec: ExecutionContext) = mapFlatten[From](in => Enumerator.enumerateSeq1(f(in)))(ec)
}
/**
* A partially-applied function returned by the `mapFlatten` method.
*/
trait MapFlatten[From] {
/**
* @param f Used to transform each input element into an Enumerator.
* $paramEcSingle
*/
def apply[To](f: From => Enumerator[To])(implicit ec: ExecutionContext): Enumeratee[From, To]
}
/**
* Create an Enumeratee that transforms its input elements into an Enumerator that is fed into the target Iteratee.
*/
def mapFlatten[From] = new MapFlatten[From] {
def apply[To](f: From => Enumerator[To])(implicit ec: ExecutionContext) = new CheckDone[From, To] {
val pec = ec.prepare()
def step[A](k: K[To, A]): K[From, Iteratee[To, A]] = {
case Input.El(e) =>
new CheckDone[From, To] { def continue[A](k: K[To, A]) = Cont(step(k)) } &> Iteratee.flatten(Future(f(e))(pec).flatMap(_.apply(Cont(k)))(dec))
case in @ Input.Empty =>
new CheckDone[From, To] { def continue[A](k: K[To, A]) = Cont(step(k)) } &> k(in)
case Input.EOF => Done(Cont(k), Input.EOF)
}
def continue[A](k: K[To, A]) = Cont(step(k))
}
}
/**
* A partially-applied function returned by the `mapInputFlatten` method.
*/
trait MapInputFlatten[From] {
/**
* @param f Used to transform each input into an Enumerator.
* $paramEcSingle
*/
def apply[To](f: Input[From] => Enumerator[To])(implicit ec: ExecutionContext): Enumeratee[From, To]
}
/**
* Create an Enumeratee that transforms its input into an Enumerator that is fed into the target Iteratee.
*/
def mapInputFlatten[From] = new MapInputFlatten[From] {
def apply[To](f: Input[From] => Enumerator[To])(implicit ec: ExecutionContext) = new CheckDone[From, To] {
val pec = ec.prepare()
def step[A](k: K[To, A]): K[From, Iteratee[To, A]] = {
case in =>
new CheckDone[From, To] { def continue[A](k: K[To, A]) = Cont(step(k)) } &> Iteratee.flatten(Future(f(in))(pec).flatMap(_.apply(Cont(k)))(dec))
}
def continue[A](k: K[To, A]) = Cont(step(k))
}
}
/**
* A partially-applied function returned by the `mapInputM` method.
*/
trait MapInputM[From] {
/**
* @param f Used to transform each input.
* $paramEcSingle
*/
def apply[To](f: Input[From] => Future[Input[To]])(implicit ec: ExecutionContext): Enumeratee[From, To]
}
/**
* Like `mapInput`, but allows the map function to asynchronously return the mapped input.
*/
def mapInputM[From] = new MapInputM[From] {
def apply[To](f: Input[From] => Future[Input[To]])(implicit ec: ExecutionContext) = new CheckDone[From, To] {
val pec = ec.prepare()
def step[A](k: K[To, A]): K[From, Iteratee[To, A]] = {
case in @ (Input.El(_) | Input.Empty) =>
new CheckDone[From, To] { def continue[A](k: K[To, A]) = Cont(step(k)) } &> Iteratee.flatten(executeFuture(f(in))(pec).map(k(_))(dec))
case Input.EOF => Done(Cont(k), Input.EOF)
}
def continue[A](k: K[To, A]) = Cont(step(k))
}
}
/**
* A partially-applied function returned by the `mapM` method.
*/
trait MapM[E] {
/**
* @param f Used to transform each input element.
* $paramEcSingle
*/
def apply[NE](f: E => Future[NE])(implicit ec: ExecutionContext): Enumeratee[E, NE]
}
/**
* Like `map`, but allows the map function to asynchronously return the mapped element.
*/
def mapM[E] = new MapM[E] {
def apply[NE](f: E => Future[NE])(implicit ec: ExecutionContext): Enumeratee[E, NE] = mapInputM[E] {
case Input.Empty => Future.successful(Input.Empty)
case Input.EOF => Future.successful(Input.EOF)
case Input.El(e) => f(e).map(Input.El(_))(dec)
}(ec)
}
/**
* A partially-applied function returned by the `map` method.
*/
trait Map[E] {
/**
* @param f A function to transform input elements.
* $paramEcSingle
*/
def apply[NE](f: E => NE)(implicit ec: ExecutionContext): Enumeratee[E, NE]
}
/**
* Create an Enumeratee which transforms its input using a given function
*/
def map[E] = new Map[E] {
def apply[NE](f: E => NE)(implicit ec: ExecutionContext): Enumeratee[E, NE] = mapInput[E](in => in.map(f))(ec)
}
/**
* Create an Enumeratee that will take `count` input elements to pass to the target Iteratee, and then be done
*
* @param count The number of elements to take
*/
def take[E](count: Int): Enumeratee[E, E] = new CheckDone[E, E] {
def step[A](remaining: Int)(k: K[E, A]): K[E, Iteratee[E, A]] = {
case in @ Input.El(_) if remaining == 1 => Done(k(in), Input.Empty)
case in @ Input.El(_) if remaining > 1 =>
new CheckDone[E, E] { def continue[A](k: K[E, A]) = Cont(step(remaining - 1)(k)) } &> k(in)
case in @ Input.Empty if remaining > 0 =>
new CheckDone[E, E] { def continue[A](k: K[E, A]) = Cont(step(remaining)(k)) } &> k(in)
case Input.EOF => Done(Cont(k), Input.EOF)
case in => Done(Cont(k), in)
}
def continue[A](k: K[E, A]) = if (count <= 0) Done(Cont(k), Input.EOF) else Cont(step(count)(k))
}
/**
* A partially-applied function returned by the `scanLeft` method.
*/
trait ScanLeft[From] {
def apply[To](seed: To)(f: (To, From) => To): Enumeratee[From, To]
}
def scanLeft[From] = new ScanLeft[From] {
def apply[To](seed: To)(f: (To, From) => To): Enumeratee[From, To] = new CheckDone[From, To] {
def step[A](lastTo: To)(k: K[To, A]): K[From, Iteratee[To, A]] = {
case in @ Input.El(e) =>
val next = f(lastTo, e)
new CheckDone[From, To] { def continue[A](k: K[To, A]) = Cont(step(next)(k)) } &> k(Input.El(next))
case in @ Input.Empty =>
new CheckDone[From, To] { def continue[A](k: K[To, A]) = Cont(step(lastTo)(k)) } &> k(in)
case Input.EOF => Done(Cont(k), Input.EOF)
}
def continue[A](k: K[To, A]) = Cont(step(seed)(k))
}
}
/**
* A partially-applied function returned by the `grouped` method.
*/
trait Grouped[From] {
def apply[To](folder: Iteratee[From, To]): Enumeratee[From, To]
}
/**
* Create an Enumeratee that groups input using the given Iteratee.
*
* This will apply that Iteratee over and over, passing the result each time as the input for the target Iteratee,
* until EOF is reached. For example, let's say you had an Iteratee that took a stream of characters and parsed a
* single line:
*
* {{{
* def takeLine = for {
* line <- Enumeratee.takeWhile[Char](_ != '\n') &>> Iteratee.getChunks
* _ <- Enumeratee.take(1) &>> Iteratee.ignore[Char]
* } yield line.mkString
* }}}
*
* This could be used to build an Enumeratee that converts a stream of characters into a stream of lines:
*
* {{{
* def asLines = Enumeratee.grouped(takeLine)
* }}}
*/
def grouped[From] = new Grouped[From] {
def apply[To](folder: Iteratee[From, To]): Enumeratee[From, To] = new CheckDone[From, To] {
def step[A](f: Iteratee[From, To])(k: K[To, A]): K[From, Iteratee[To, A]] = {
case in @ (Input.El(_) | Input.Empty) =>
Iteratee.flatten(f.feed(in)).pureFlatFold {
case Step.Done(a, left) => new CheckDone[From, To] {
def continue[A](k: K[To, A]) =
(left match {
case Input.El(_) => step(folder)(k)(left)
case _ => Cont(step(folder)(k))
})
} &> k(Input.El(a))
case Step.Cont(kF) => Cont(step(Cont(kF))(k))
case Step.Error(msg, e) => Error(msg, in)
}(dec)
case Input.EOF => Iteratee.flatten(f.run.map[Iteratee[From, Iteratee[To, A]]]((c: To) => Done(k(Input.El(c)), Input.EOF))(dec))
}
def continue[A](k: K[To, A]) = Cont(step(folder)(k))
}
}
/**
* Create an Enumeratee that filters the inputs using the given predicate
*
* @param predicate A function to filter the input elements.
* $paramEcSingle
*/
def filter[E](predicate: E => Boolean)(implicit ec: ExecutionContext): Enumeratee[E, E] = new CheckDone[E, E] {
val pec = ec.prepare()
def step[A](k: K[E, A]): K[E, Iteratee[E, A]] = {
case in @ Input.El(e) => Iteratee.flatten(Future(predicate(e))(pec).map { b =>
if (b) (new CheckDone[E, E] { def continue[A](k: K[E, A]) = Cont(step(k)) } &> k(in)) else Cont(step(k))
}(dec))
case in @ Input.Empty =>
new CheckDone[E, E] { def continue[A](k: K[E, A]) = Cont(step(k)) } &> k(in)
case Input.EOF => Done(Cont(k), Input.EOF)
}
def continue[A](k: K[E, A]) = Cont(step(k))
}
/**
* Create an Enumeratee that filters the inputs using the negation of the given predicate
*
* @param predicate A function to filter the input elements.
* $paramEcSingle
*/
def filterNot[E](predicate: E => Boolean)(implicit ec: ExecutionContext): Enumeratee[E, E] = filter[E](e => !predicate(e))(ec)
/**
* A partially-applied function returned by the `collect` method.
*/
trait Collect[From] {
/**
* @param transformer A function to transform and filter the input elements with.
     * $paramEcSingle
*/
def apply[To](transformer: PartialFunction[From, To])(implicit ec: ExecutionContext): Enumeratee[From, To]
}
/**
* Create an Enumeratee that both filters and transforms its input. The input is transformed by the given
* PartialFunction. If the PartialFunction isn't defined for an input element then that element is discarded.
*/
def collect[From] = new Collect[From] {
def apply[To](transformer: PartialFunction[From, To])(implicit ec: ExecutionContext): Enumeratee[From, To] = new CheckDone[From, To] {
val pec = ec.prepare()
def step[A](k: K[To, A]): K[From, Iteratee[To, A]] = {
case in @ Input.El(e) => Iteratee.flatten(Future {
if (transformer.isDefinedAt(e)) {
new CheckDone[From, To] { def continue[A](k: K[To, A]) = Cont(step(k)) } &> k(Input.El(transformer(e)))
} else {
Cont(step(k))
}
}(pec))
case in @ Input.Empty =>
new CheckDone[From, To] { def continue[A](k: K[To, A]) = Cont(step(k)) } &> k(in)
case Input.EOF => Done(Cont(k), Input.EOF)
}
def continue[A](k: K[To, A]) = Cont(step(k))
}
}
def drop[E](count: Int): Enumeratee[E, E] = new CheckDone[E, E] {
def step[A](remaining: Int)(k: K[E, A]): K[E, Iteratee[E, A]] = {
case in @ Input.El(_) if remaining == 1 => passAlong[E](Cont(k))
case in @ Input.El(_) if remaining > 1 => Cont(step(remaining - 1)(k))
case in @ Input.Empty if remaining > 0 => Cont(step(remaining)(k))
case Input.EOF => Done(Cont(k), Input.EOF)
case in => passAlong[E] &> k(in)
}
def continue[A](k: K[E, A]) = Cont(step(count)(k))
}
/**
   * Create an Enumeratee that drops input elements while a predicate is satisfied. Once the predicate
   * fails, the remaining input is passed through.
   *
   * @param p A predicate to test the input with.
* $paramEcSingle
*/
def dropWhile[E](p: E => Boolean)(implicit ec: ExecutionContext): Enumeratee[E, E] = {
val pec = ec.prepare()
new CheckDone[E, E] {
def step[A](k: K[E, A]): K[E, Iteratee[E, A]] = {
case in @ Input.El(e) => Iteratee.flatten(Future(p(e))(pec).map {
b => if (b) Cont(step(k)) else (passAlong[E] &> k(in))
}(dec))
case in @ Input.Empty => Cont(step(k))
case Input.EOF => Done(Cont(k), Input.EOF)
}
def continue[A](k: K[E, A]) = Cont(step(k))
}
}
/**
* Create an Enumeratee that passes input through while a predicate is satisfied. Once the predicate
* fails, no more input is passed through.
*
   * @param p A predicate to test the input with.
* $paramEcSingle
*/
def takeWhile[E](p: E => Boolean)(implicit ec: ExecutionContext): Enumeratee[E, E] = {
val pec = ec.prepare()
new CheckDone[E, E] {
def step[A](k: K[E, A]): K[E, Iteratee[E, A]] = {
case in @ Input.El(e) => Iteratee.flatten(Future(p(e))(pec).map {
b => if (b) (new CheckDone[E, E] { def continue[A](k: K[E, A]) = Cont(step(k)) } &> k(in)) else Done(Cont(k), in)
}(dec))
case in @ Input.Empty =>
new CheckDone[E, E] { def continue[A](k: K[E, A]) = Cont(step(k)) } &> k(in)
case Input.EOF => Done(Cont(k), Input.EOF)
}
def continue[A](k: K[E, A]) = Cont(step(k))
}
}
/**
* Create an Enumeratee that passes input through until a predicate is satisfied. Once the predicate
* is satisfied, no more input is passed through.
*
   * @param p A predicate to test the input with.
* $paramEcSingle
*/
def breakE[E](p: E => Boolean)(implicit ec: ExecutionContext) = new Enumeratee[E, E] {
val pec = ec.prepare()
def applyOn[A](inner: Iteratee[E, A]): Iteratee[E, Iteratee[E, A]] = {
def step(inner: Iteratee[E, A])(in: Input[E]): Iteratee[E, Iteratee[E, A]] = in match {
case Input.El(e) => Iteratee.flatten(Future(p(e))(pec).map(b => if (b) Done(inner, in) else stepNoBreak(inner)(in))(dec))
case _ => stepNoBreak(inner)(in)
}
def stepNoBreak(inner: Iteratee[E, A])(in: Input[E]): Iteratee[E, Iteratee[E, A]] =
inner.pureFlatFold {
case Step.Cont(k) => {
val next = k(in)
next.pureFlatFold {
case Step.Cont(k) => Cont(step(next))
case _ => Done(inner, in)
}(dec)
}
case _ => Done(inner, in)
}(dec)
Cont(step(inner))
}
}
def passAlong[M] = new Enumeratee.CheckDone[M, M] {
def step[A](k: K[M, A]): K[M, Iteratee[M, A]] = {
case in @ (Input.El(_) | Input.Empty) => new Enumeratee.CheckDone[M, M] { def continue[A](k: K[M, A]) = Cont(step(k)) } &> k(in)
case Input.EOF => Done(Cont(k), Input.EOF)
}
def continue[A](k: K[M, A]) = Cont(step(k))
}
def heading[E](es: Enumerator[E]) = new Enumeratee[E, E] {
def applyOn[A](it: Iteratee[E, A]): Iteratee[E, Iteratee[E, A]] = passAlong[E] &> Iteratee.flatten(es(it))
}
def trailing[M](es: Enumerator[M]) = new Enumeratee.CheckDone[M, M] {
def step[A](k: K[M, A]): K[M, Iteratee[M, A]] = {
case in @ (Input.El(_) | Input.Empty) => new Enumeratee.CheckDone[M, M] { def continue[A](k: K[M, A]) = Cont(step(k)) } &> k(in)
case Input.EOF => Iteratee.flatten((es |>> Cont(k)).map[Iteratee[M, Iteratee[M, A]]](it => Done(it, Input.EOF))(dec))
}
def continue[A](k: K[M, A]) = Cont(step(k))
}
/**
* Create an Enumeratee that performs an action when its Iteratee is done.
*
* @param action The action to perform.
* $paramEcSingle
*/
def onIterateeDone[E](action: () => Unit)(implicit ec: ExecutionContext): Enumeratee[E, E] = new Enumeratee[E, E] {
val pec = ec.prepare()
def applyOn[A](iteratee: Iteratee[E, A]): Iteratee[E, Iteratee[E, A]] = passAlong[E](iteratee).map(_.map { a => action(); a }(pec))(dec)
}
/**
* Create an Enumeratee that performs an action on EOF.
*
* @param action The action to perform.
* $paramEcSingle
*/
def onEOF[E](action: () => Unit)(implicit ec: ExecutionContext): Enumeratee[E, E] = new CheckDone[E, E] {
val pec = ec.prepare()
def step[A](k: K[E, A]): K[E, Iteratee[E, A]] = {
case Input.EOF =>
Iteratee.flatten(Future(action())(pec).map(_ => Done[E, Iteratee[E, A]](Cont(k), Input.EOF))(dec))
case in =>
new CheckDone[E, E] { def continue[A](k: K[E, A]) = Cont(step(k)) } &> k(in)
}
def continue[A](k: K[E, A]) = Cont(step(k))
}
/**
* Create an Enumeratee that recovers an iteratee in Error state.
*
* This will ignore the input that caused the iteratee's error state
* and use the previous state of the iteratee to handle the next input.
*
* {{{
* Enumerator(0, 2, 4) &> Enumeratee.recover { (error, input) =>
* Logger.error(f"oops failure occured with input: $input", error)
* } &> Enumeratee.map { i =>
* 8 / i
* } |>>> Iteratee.getChunks // => List(4, 2)
* }}}
*
* @param f Called when an error occurs with the cause of the error and the input associated with the error.
* $paramEcSingle
*/
def recover[E](f: (Throwable, Input[E]) => Unit = (_: Throwable, _: Input[E]) => ())(implicit ec: ExecutionContext): Enumeratee[E, E] = {
val pec = ec.prepare()
new Enumeratee[E, E] {
def applyOn[A](it: Iteratee[E, A]): Iteratee[E, Iteratee[E, A]] = {
def step(it: Iteratee[E, A])(input: Input[E]): Iteratee[E, Iteratee[E, A]] = input match {
case in @ (Input.El(_) | Input.Empty) =>
val next: Future[Iteratee[E, Iteratee[E, A]]] = it.pureFlatFold[E, Iteratee[E, A]] {
case Step.Cont(k) =>
val n = k(in)
n.pureFlatFold[E, Iteratee[E, A]] {
case Step.Cont(k) => Cont(step(n))
case _ => Done(n, Input.Empty)
}(dec)
case other => Done(other.it, in)
}(dec).unflatten.map({ s =>
s.it
})(dec).recover({
case e: Throwable =>
f(e, in)
Cont(step(it))
})(pec)
Iteratee.flatten(next)
case Input.EOF =>
Done(it, Input.Empty)
}
Cont(step(it))
}
}
}
}
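// Illustrative usage sketch (not part of the original file): composing Enumeratees with an
// Enumerator and an Iteratee. Execution contexts are passed explicitly via the file's `dec`
// import; the input values and the fold are illustrative only.
object EnumerateeUsageSketch extends App {
  import scala.concurrent.Await
  import scala.concurrent.duration._

  // keep even numbers, then square them
  val evenSquares: Enumeratee[Int, Int] =
    Enumeratee.filter[Int](_ % 2 == 0)(dec) ><> Enumeratee.map[Int](n => n * n)(dec)

  // feed a simple Enumerator through the composed Enumeratee into a folding Iteratee
  val sum: Future[Int] =
    Enumerator(1, 2, 3, 4, 5) &> evenSquares |>>> Iteratee.fold(0)((acc: Int, n: Int) => acc + n)(dec)

  println(Await.result(sum, 5.seconds)) // 2*2 + 4*4 = 20
}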
|
michaelahlers/team-awesome-wedding
|
vendor/play-2.2.1/framework/src/iteratees/src/main/scala/play/api/libs/iteratee/Enumeratee.scala
|
Scala
|
mit
| 26,884
|
/*
* Copyright Β© 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi.twirl
import org.knora.webapi._
import org.knora.webapi.messages.v1.responder.searchmessages.SearchComparisonOperatorV1
/**
* The extended search template's representation of an extended search criterion.
*
* @param propertyIri the IRI of the property to be searched.
* @param comparisonOperator the comparison operator.
* @param valueType the type of value to search for.
* @param searchValue the value to compare with, if we are comparing strings or numbers.
* @param dateStart the start of the date range to compare with, if we are comparing dates.
* @param dateEnd the end of the date range to compare with, if we are comparing dates.
* @param matchBooleanPositiveTerms the terms to include if we are using MATCH BOOLEAN.
* @param matchBooleanNegativeTerms the terms to exclude if we are using MATCH BOOLEAN.
*/
case class SearchCriterion(propertyIri: IRI,
comparisonOperator: SearchComparisonOperatorV1.Value,
valueType: IRI,
searchValue: Option[String] = None,
dateStart: Option[Int] = None,
dateEnd: Option[Int] = None,
matchBooleanPositiveTerms: Set[String] = Set.empty[String],
matchBooleanNegativeTerms: Set[String] = Set.empty[String])
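// Illustrative usage sketch (not part of the original file): building a criterion for a
// date-range search. The IRIs are placeholders, the integer dates assume the encoding
// implied by dateStart/dateEnd, and the comparison operator is taken as a parameter so
// that no particular enumeration member is assumed.
object SearchCriterionSketch {
  def dateRangeCriterion(op: SearchComparisonOperatorV1.Value): SearchCriterion =
    SearchCriterion(
      propertyIri = "http://www.knora.org/ontology/knora-base#hasSomeDate", // placeholder IRI
      comparisonOperator = op,
      valueType = "http://www.knora.org/ontology/knora-base#DateValue", // placeholder IRI
      dateStart = Some(2457000),
      dateEnd = Some(2457100))
}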
|
musicEnfanthen/Knora
|
webapi/src/main/scala/org/knora/webapi/twirl/SearchCriterion.scala
|
Scala
|
agpl-3.0
| 2,227
|
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.phoenix.spark
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.NullWritable
import org.apache.phoenix.mapreduce.PhoenixOutputFormat
import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil
import org.apache.spark.rdd.RDD
import scala.collection.JavaConversions._
class ProductRDDFunctions[A <: Product](data: RDD[A]) extends Serializable {
def saveToPhoenix(tableName: String, cols: Seq[String],
conf: Configuration = new Configuration, zkUrl: Option[String] = None, tenantId: Option[String] = None)
: Unit = {
// Create a configuration object to use for saving
@transient val outConfig = ConfigurationUtil.getOutputConfiguration(tableName, cols, zkUrl, tenantId, Some(conf))
// Retrieve the zookeeper URL
val zkUrlFinal = ConfigurationUtil.getZookeeperURL(outConfig)
// Map the row objects into PhoenixRecordWritable
val phxRDD = data.mapPartitions{ rows =>
// Create a within-partition config to retrieve the ColumnInfo list
@transient val partitionConfig = ConfigurationUtil.getOutputConfiguration(tableName, cols, zkUrlFinal, tenantId)
@transient val columns = PhoenixConfigurationUtil.getUpsertColumnMetadataList(partitionConfig).toList
rows.map { row =>
val rec = new PhoenixRecordWritable(columns)
row.productIterator.foreach { e => rec.add(e) }
(null, rec)
}
}
// Save it
phxRDD.saveAsNewAPIHadoopFile(
"",
classOf[NullWritable],
classOf[PhoenixRecordWritable],
classOf[PhoenixOutputFormat[PhoenixRecordWritable]],
outConfig
)
}
}
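// Illustrative usage sketch (not part of the original file): saving an RDD of tuples to a
// Phoenix table. The table name, column names and ZooKeeper URL are placeholders, not
// values taken from this module.
object SaveToPhoenixSketch {
  import org.apache.spark.SparkContext

  def save(sc: SparkContext): Unit = {
    val rows = sc.parallelize(Seq((1L, "alice"), (2L, "bob")))
    new ProductRDDFunctions(rows).saveToPhoenix(
      "OUTPUT_TABLE",
      Seq("ID", "NAME"),
      zkUrl = Some("localhost:2181"))
  }
}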
|
Guavus/phoenix
|
phoenix-spark/src/main/scala/org/apache/phoenix/spark/ProductRDDFunctions.scala
|
Scala
|
apache-2.0
| 2,225
|
package ex021
class Queens(val n: Int) {
def check(r: Int, c: Int, pat: List[(Int, Int)]): Boolean = {
    // check whether column c in row r conflicts with any existing placement (same column or same diagonal)
pat.forall(p => c != p._2 && r-p._1 != math.abs(c-p._2))
}
  def queen(r: Int): List[List[(Int, Int)]] = { // all valid placements for rows 1..r
if (r==0) List(Nil)
else for (p <- queen(r-1); c <- 1 to n if check(r, c, p))
yield (r, c)::p
}
def start() {
queen(n).foreach(pat => println(pat.map(p =>
"+"*(p._2-1) + "Q" + "+"*(n-p._2)+"\\n").mkString))
}
}
object QueensApp extends App {
new Queens(8).start
}
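// Illustrative usage sketch (not part of the original file): counting solutions instead of
// printing boards. 4-queens has 2 solutions and 8-queens has 92.
object QueensCount extends App {
  println(new Queens(4).queen(4).size) // 2
  println(new Queens(8).queen(8).size) // 92
}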
|
0shimax/learn-ai-algo
|
scala/Queens.scala
|
Scala
|
mit
| 623
|
package org.apache.spark.ml.feature
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.scalatest.junit.JUnitRunner
import TestHelper._
/**
* Test information theoretic feature selection on datasets from Peng's webpage
*
* @author Sergio Ramirez
*/
@RunWith(classOf[JUnitRunner])
class ITSelectorSuite extends FunSuite with BeforeAndAfterAll {
var sqlContext: SQLContext = null
override def beforeAll(): Unit = {
sqlContext = new SQLContext(SPARK_CTX)
}
/** Do mRMR feature selection on COLON data. */
test("Run ITFS on colon data (nPart = 10, nfeat = 10)") {
val df = readCSVData(sqlContext, "test_colon_s3.csv")
val cols = df.columns
val pad = 2
val allVectorsDense = true
val model = getSelectorModel(sqlContext, df, cols.drop(1), cols.head,
10, 10, allVectorsDense, pad)
assertResult("512, 764, 1324, 1380, 1411, 1422, 1581, 1670, 1671, 1971") {
model.selectedFeatures.mkString(", ")
}
}
/** Do mRMR feature selection on LEUKEMIA data. */
test("Run ITFS on leukemia data (nPart = 10, nfeat = 10)") {
val df = readCSVData(sqlContext, "test_leukemia_s3.csv")
val cols = df.columns
val pad = 2
val allVectorsDense = true
val model = getSelectorModel(sqlContext, df, cols.drop(1), cols.head,
10, 10, allVectorsDense, pad)
assertResult("1084, 1719, 1774, 1822, 2061, 2294, 3192, 4387, 4787, 6795") {
model.selectedFeatures.mkString(", ")
}
}
/** Do mRMR feature selection on LUNG data. */
test("Run ITFS on lung data (nPart = 10, nfeat = 10)") {
val df = readCSVData(sqlContext, "test_lung_s3.csv")
val cols = df.columns
val pad = 2
val allVectorsDense = true
val model = getSelectorModel(sqlContext, df, cols.drop(1), cols.head,
10, 10, allVectorsDense, pad)
assertResult("18, 22, 29, 125, 132, 150, 166, 242, 243, 269") {
model.selectedFeatures.mkString(", ")
}
}
/** Do mRMR feature selection on LYMPHOMA data. */
test("Run ITFS on lymphoma data (nPart = 10, nfeat = 10)") {
val df = readCSVData(sqlContext, "test_lymphoma_s3.csv")
val cols = df.columns
val pad = 2
val allVectorsDense = true
val model = getSelectorModel(sqlContext, df, cols.drop(1), cols.head,
10, 10, allVectorsDense, pad)
assertResult("236, 393, 759, 2747, 2818, 2841, 2862, 3014, 3702, 3792") {
model.selectedFeatures.mkString(", ")
}
}
/** Do mRMR feature selection on NCI data. */
test("Run ITFS on nci data (nPart = 10, nfeat = 10)") {
val df = readCSVData(sqlContext, "test_nci9_s3.csv")
val cols = df.columns
val pad = 2
val allVectorsDense = true
val model = getSelectorModel(sqlContext, df, cols.drop(1), cols.head,
10, 10, allVectorsDense, pad)
assertResult("443, 755, 1369, 1699, 3483, 5641, 6290, 7674, 9399, 9576") {
model.selectedFeatures.mkString(", ")
}
}
}
|
sramirez/spark-infotheoretic-feature-selection
|
src/test/scala/org/apache/spark/ml/feature/ITSelectorSuite.scala
|
Scala
|
apache-2.0
| 3,024
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.microsoft.spark.streaming.examples.directdstream
import org.apache.spark.SparkContext
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.eventhubs.EventHubsUtils
object MultiStreamsJoin {
private def createNewStreamingContext(
sparkCheckpointDir: String,
progressDir: String,
policyNames: String,
policyKeys: String,
namespaces: String,
names: String,
batchDuration: Int,
rate: Int): StreamingContext = {
val ssc = new StreamingContext(new SparkContext(), Seconds(batchDuration))
ssc.checkpoint(sparkCheckpointDir)
val Array(policyName1, policyName2) = policyNames.split(",")
val Array(policykey1, policykey2) = policyKeys.split(",")
val Array(namespace1, namespace2) = namespaces.split(",")
val Array(name1, name2) = names.split(",")
val eventhubParameters = (name: String,
namespace: String,
policyName: String,
policyKey: String) => Map(name -> Map[String, String] (
"eventhubs.policyname" -> policyName,
"eventhubs.policykey" -> policyKey,
"eventhubs.namespace" -> namespace,
"eventhubs.name" -> name,
"eventhubs.partition.count" -> "32",
"eventhubs.maxRate" -> s"$rate",
"eventhubs.consumergroup" -> "$Default"
))
val inputDirectStream1 = EventHubsUtils.createDirectStreams(
ssc,
namespace1,
progressDir,
eventhubParameters(name1, namespace1, policyName1, policykey1))
val inputDirectStream2 = EventHubsUtils.createDirectStreams(
ssc,
namespace2,
progressDir,
eventhubParameters(name2, namespace2, policyName2, policykey2))
val kv1 = inputDirectStream1.map(receivedRecord => (new String(receivedRecord.getBody), 1))
.reduceByKey(_ + _)
val kv2 = inputDirectStream2.map(receivedRecord => (new String(receivedRecord.getBody), 1))
.reduceByKey(_ + _)
kv1.join(kv2).map {
case (k, (count1, count2)) =>
(k, count1 + count2)
}.print()
ssc
}
def main(args: Array[String]): Unit = {
if (args.length != 8) {
println("Usage: program progressDir PolicyName1,PolicyName2 PolicyKey1,PolicyKey2" +
" EventHubNamespace1,EventHubNamespace2 EventHubName1,EventHubName2" +
" BatchDuration(seconds)")
sys.exit(1)
}
val progressDir = args(0)
val policyNames = args(1)
val policyKeys = args(2)
val namespaces = args(3)
val names = args(4)
val batchDuration = args(5).toInt
val sparkCheckpointDir = args(6)
val rate = args(7).toInt
val ssc = StreamingContext.getOrCreate(sparkCheckpointDir, () =>
createNewStreamingContext(sparkCheckpointDir, progressDir, policyNames, policyKeys,
namespaces, names, batchDuration, rate))
ssc.start()
ssc.awaitTermination()
}
}
|
CodingCat/spark-eventhubs
|
examples/src/main/scala/com/microsoft/spark/streaming/examples/directdstream/MultiStreamsJoin.scala
|
Scala
|
apache-2.0
| 3,734
|
package xyz.hyperreal.dal
import xyz.hyperreal.numbers.BigDecimalMath
import xyz.hyperreal.numbers.BigDecimalMath.decimal128._
import java.{lang => boxed}
import scala.math.{BigInt, pow}
object ComplexDAL extends DAL {
special(DoubleType, ComplexBigIntType, ComplexDoubleType)
special(BigDecType, ComplexBigIntType, ComplexBigDecType)
special(RationalType, ComplexBigIntType, ComplexRationalType)
special(DoubleType, ComplexRationalType, ComplexDoubleType)
special(BigDecType, ComplexRationalType, ComplexBigDecType)
special(BigDecType, ComplexDoubleType, ComplexBigDecType)
operation(
Symbol("+"),
IntType -> ((l: Number, r: Number) => maybePromote(l.longValue + r.longValue)),
LongType -> ((l: Number, r: Number) => maybeDemote(toBigInt(l) + toBigInt(r))),
BigIntType -> ((l: Number, r: Number) => maybeDemote(toBigInt(l) + toBigInt(r))),
RationalType -> ((l: Number, r: Number) => maybeDemote(toRational(l) + toRational(r))),
DoubleType -> ((l: Number, r: Number) => (DoubleType, l.doubleValue + r.doubleValue: Number)),
BigDecType -> ((a: Number, b: Number) => (BigDecType, toBigDecimal(a) + toBigDecimal(b))),
ComplexBigIntType -> ((l: Number, r: Number) => maybeDemote(toComplexBigInt(l) + toComplexBigInt(r))),
ComplexRationalType -> ((l: Number, r: Number) => maybeDemote(toComplexRational(l) + toComplexRational(r))),
ComplexDoubleType -> ((l: Number, r: Number) => (ComplexDoubleType, toComplexDouble(l) + toComplexDouble(r))),
ComplexBigDecType -> ((l: Number, r: Number) => (ComplexBigDecType, toComplexBigDecimal(l) + toComplexBigDecimal(r)))
)
operation(
Symbol("-"),
IntType -> ((l: Number, r: Number) => maybePromote(l.longValue - r.longValue)),
LongType -> ((l: Number, r: Number) => maybeDemote(toBigInt(l) - toBigInt(r))),
BigIntType -> ((l: Number, r: Number) => maybeDemote(toBigInt(l) - toBigInt(r))),
RationalType -> ((l: Number, r: Number) => maybeDemote(toRational(l) - toRational(r))),
DoubleType -> ((l: Number, r: Number) => (DoubleType, l.doubleValue - r.doubleValue: Number)),
BigDecType -> ((a: Number, b: Number) => (BigDecType, toBigDecimal(a) - toBigDecimal(b))),
ComplexBigIntType -> ((l: Number, r: Number) => maybeDemote(toComplexBigInt(l) - toComplexBigInt(r))),
ComplexRationalType -> ((l: Number, r: Number) => maybeDemote(toComplexRational(l) - toComplexRational(r))),
ComplexDoubleType -> ((l: Number, r: Number) => (ComplexDoubleType, toComplexDouble(l) - toComplexDouble(r))),
ComplexBigDecType -> ((l: Number, r: Number) => (ComplexBigDecType, toComplexBigDecimal(l) - toComplexBigDecimal(r)))
)
operation(
Symbol("*"),
IntType -> ((l: Number, r: Number) => maybePromote(l.longValue * r.longValue)),
LongType -> ((l: Number, r: Number) => maybeDemote(toBigInt(l) * toBigInt(r))),
BigIntType -> ((l: Number, r: Number) => maybeDemote(toBigInt(l) * toBigInt(r))),
RationalType -> ((l: Number, r: Number) => maybeDemote(toRational(l) * toRational(r))),
DoubleType -> ((l: Number, r: Number) => (DoubleType, l.doubleValue * r.doubleValue: Number)),
BigDecType -> ((a: Number, b: Number) => (BigDecType, toBigDecimal(a) * toBigDecimal(b))),
ComplexBigIntType -> ((l: Number, r: Number) => maybeDemote(toComplexBigInt(l) * toComplexBigInt(r))),
ComplexRationalType -> ((l: Number, r: Number) => maybeDemote(toComplexRational(l) * toComplexRational(r))),
ComplexDoubleType -> ((l: Number, r: Number) => (ComplexDoubleType, toComplexDouble(l) * toComplexDouble(r))),
ComplexBigDecType -> ((l: Number, r: Number) => (ComplexBigDecType, toComplexBigDecimal(l) * toComplexBigDecimal(r)))
)
operation(
Symbol("/"),
IntType -> ((l: Number, r: Number) => maybeDemote(toRational(l) / toRational(r))),
LongType -> ((l: Number, r: Number) => maybeDemote(toRational(l) / toRational(r))),
BigIntType -> ((l: Number, r: Number) => maybeDemote(toRational(l) / toRational(r))),
RationalType -> ((l: Number, r: Number) => maybeDemote(toRational(l) / toRational(r))),
DoubleType -> ((l: Number, r: Number) => (DoubleType, l.doubleValue / r.doubleValue: Number)),
BigDecType -> ((a: Number, b: Number) => (BigDecType, toBigDecimal(a) / toBigDecimal(b))),
ComplexBigIntType -> ((l: Number, r: Number) => maybeDemote(toComplexRational(l) / toComplexRational(r))),
ComplexRationalType -> ((l: Number, r: Number) => maybeDemote(toComplexRational(l) / toComplexRational(r))),
ComplexDoubleType -> ((l: Number, r: Number) => (ComplexDoubleType, toComplexDouble(l) / toComplexDouble(r))),
ComplexBigDecType -> ((l: Number, r: Number) => (ComplexBigDecType, toComplexBigDecimal(l) / toComplexBigDecimal(r)))
)
operation(
Symbol("//"),
IntType -> ((l: Number, r: Number) => (DoubleType, l.doubleValue / r.doubleValue: Number)),
LongType -> ((l: Number, r: Number) => (DoubleType, l.doubleValue / r.doubleValue: Number)),
BigIntType -> ((l: Number, r: Number) => (DoubleType, l.doubleValue / r.doubleValue: Number)),
RationalType -> ((l: Number, r: Number) => (DoubleType, l.doubleValue / r.doubleValue: Number)),
DoubleType -> ((l: Number, r: Number) => (DoubleType, l.doubleValue / r.doubleValue: Number)),
BigDecType -> ((a: Number, b: Number) => (BigDecType, toBigDecimal(a) / toBigDecimal(b))),
ComplexBigIntType -> ((l: Number, r: Number) => (ComplexDoubleType, toComplexDouble(l) / toComplexDouble(r))),
ComplexRationalType -> ((l: Number, r: Number) => (ComplexDoubleType, toComplexDouble(l) / toComplexDouble(r))),
ComplexDoubleType -> ((l: Number, r: Number) => (ComplexDoubleType, toComplexDouble(l) / toComplexDouble(r))),
ComplexBigDecType -> ((l: Number, r: Number) => (ComplexBigDecType, toComplexBigDecimal(l) / toComplexBigDecimal(r)))
)
operation(
Symbol("^"),
IntType -> ((l: Number, r: Number) => bigIntPow(l, r)),
LongType -> ((l: Number, r: Number) => bigIntPow(l, r)),
BigIntType -> ((l: Number, r: Number) => bigIntPow(l, r)),
RationalType -> ((l: Number, r: Number) => {
r match {
case i: boxed.Integer => maybeDemote(toRational(l) ^ i)
case bi: BigInt => (RationalType, toRational(l) ^ bi)
case _ => (DoubleType, pow(l.doubleValue, r.doubleValue): Number)
}
}),
DoubleType -> ((l: Number, r: Number) => (DoubleType, pow(l.doubleValue, r.doubleValue): Number)),
BigDecType -> ((l: Number, r: Number) => (BigDecType, BigDecimalMath.pow(toBigDecimal(l), toBigDecimal(r)))),
ComplexBigIntType -> ((l: Number, r: Number) => (ComplexDoubleType, toComplexDouble(l) ^ toComplexDouble(r))),
ComplexRationalType -> ((l: Number, r: Number) => (ComplexDoubleType, toComplexDouble(l) ^ toComplexDouble(r))),
ComplexDoubleType -> ((l: Number, r: Number) => (ComplexDoubleType, toComplexDouble(l) ^ toComplexDouble(r))),
ComplexBigDecType -> ((l: Number, r: Number) => (ComplexBigDecType, toComplexBigDecimal(l) ^ toComplexBigDecimal(r)))
)
operation(
Symbol("mod"),
IntType -> ((l: Number, r: Number) => (IntType, l.intValue % r.intValue: Number)),
LongType -> ((l: Number, r: Number) => maybeDemote(toBigInt(l) % toBigInt(r))),
BigIntType -> ((l: Number, r: Number) => maybeDemote(toBigInt(l) % toBigInt(r))),
// todo: rational
DoubleType -> ((l: Number, r: Number) => (DoubleType, l.doubleValue % r.doubleValue: Number)),
BigDecType -> ((l: Number, r: Number) => (BigDecType, toBigDecimal(l) % toBigDecimal(r)))
)
relation(
Symbol("="),
IntType -> ((l: Number, r: Number) => boolean(l == r)),
LongType -> ((l: Number, r: Number) => boolean(l.longValue == r.longValue)),
BigIntType -> ((l: Number, r: Number) => boolean(toBigInt(l) == toBigInt(r))),
RationalType -> ((l: Number, r: Number) => boolean(toRational(l) == toRational(r))),
DoubleType -> ((l: Number, r: Number) => boolean(l.doubleValue == r.doubleValue)),
BigDecType -> ((l: Number, r: Number) => boolean(toBigDecimal(l) == toBigDecimal(r))),
ComplexBigIntType -> ((l: Number, r: Number) => boolean(toComplexDouble(l) == toComplexDouble(r))),
ComplexRationalType -> ((l: Number, r: Number) => boolean(toComplexDouble(l) == toComplexDouble(r))),
ComplexDoubleType -> ((l: Number, r: Number) => boolean(toComplexDouble(l) == toComplexDouble(r))),
ComplexBigDecType -> ((l: Number, r: Number) => boolean(toComplexBigDecimal(l) == toComplexBigDecimal(r)))
)
relation(
Symbol("!="),
IntType -> ((l: Number, r: Number) => boolean(l != r)),
LongType -> ((l: Number, r: Number) => boolean(l.longValue != r.longValue)),
BigIntType -> ((l: Number, r: Number) => boolean(toBigInt(l) != toBigInt(r))),
RationalType -> ((l: Number, r: Number) => boolean(toRational(l) != toRational(r))),
DoubleType -> ((l: Number, r: Number) => boolean(l.doubleValue != r.doubleValue)),
BigDecType -> ((l: Number, r: Number) => boolean(toBigDecimal(l) != toBigDecimal(r))),
ComplexBigIntType -> ((l: Number, r: Number) => boolean(toComplexDouble(l) != toComplexDouble(r))),
ComplexRationalType -> ((l: Number, r: Number) => boolean(toComplexDouble(l) != toComplexDouble(r))),
ComplexDoubleType -> ((l: Number, r: Number) => boolean(toComplexDouble(l) != toComplexDouble(r))),
ComplexBigDecType -> ((l: Number, r: Number) => boolean(toComplexBigDecimal(l) != toComplexBigDecimal(r)))
)
relation(
Symbol("<"),
IntType -> ((l: Number, r: Number) => boolean(l.intValue < r.intValue)),
LongType -> ((l: Number, r: Number) => boolean(l.longValue < r.longValue)),
BigIntType -> ((l: Number, r: Number) => boolean(toBigInt(l) < toBigInt(r))),
RationalType -> ((l: Number, r: Number) => boolean(toRational(l) < toRational(r))),
DoubleType -> ((l: Number, r: Number) => boolean(l.doubleValue < r.doubleValue)),
BigDecType -> ((l: Number, r: Number) => boolean(toBigDecimal(l) < toBigDecimal(r)))
)
relation(
Symbol(">"),
IntType -> ((l: Number, r: Number) => boolean(l.intValue > r.intValue)),
LongType -> ((l: Number, r: Number) => boolean(l.longValue > r.longValue)),
BigIntType -> ((l: Number, r: Number) => boolean(toBigInt(l) > toBigInt(r))),
RationalType -> ((l: Number, r: Number) => boolean(toRational(l) > toRational(r))),
DoubleType -> ((l: Number, r: Number) => boolean(l.doubleValue > r.doubleValue)),
BigDecType -> ((l: Number, r: Number) => boolean(toBigDecimal(l) > toBigDecimal(r)))
)
relation(
Symbol("<="),
IntType -> ((l: Number, r: Number) => boolean(l.intValue <= r.intValue)),
LongType -> ((l: Number, r: Number) => boolean(l.longValue <= r.longValue)),
BigIntType -> ((l: Number, r: Number) => boolean(toBigInt(l) <= toBigInt(r))),
RationalType -> ((l: Number, r: Number) => boolean(toRational(l) <= toRational(r))),
DoubleType -> ((l: Number, r: Number) => boolean(l.doubleValue <= r.doubleValue)),
BigDecType -> ((l: Number, r: Number) => boolean(toBigDecimal(l) <= toBigDecimal(r)))
)
relation(
Symbol(">="),
IntType -> ((l: Number, r: Number) => boolean(l.intValue >= r.intValue)),
LongType -> ((l: Number, r: Number) => boolean(l.longValue >= r.longValue)),
BigIntType -> ((l: Number, r: Number) => boolean(toBigInt(l) >= toBigInt(r))),
RationalType -> ((l: Number, r: Number) => boolean(toRational(l) >= toRational(r))),
DoubleType -> ((l: Number, r: Number) => boolean(l.doubleValue >= r.doubleValue)),
BigDecType -> ((l: Number, r: Number) => boolean(toBigDecimal(l) >= toBigDecimal(r)))
)
relation(
Symbol("div"),
IntType -> ((l, r) => boolean(r.intValue % l.intValue == 0)),
LongType -> ((l: Number, r: Number) => boolean(r.longValue % l.longValue == 0)),
BigIntType -> ((l: Number, r: Number) => boolean(toBigInt(r) % toBigInt(l) == 0)),
// complex int types
)
operation(
Symbol("\\"),
IntType -> ((l, r) => (IntType, l.intValue / r.intValue: Number)),
LongType -> ((l: Number, r: Number) => (LongType, l.longValue / r.longValue: Number)),
BigIntType -> ((l: Number, r: Number) => (BigIntType, toBigInt(l) / toBigInt(r))),
// complex int types
)
operation(
Symbol("and"),
IntType -> ((l: Number, r: Number) => (IntType, l.intValue & r.intValue: Number)),
    LongType -> ((l: Number, r: Number) => (LongType, l.longValue & r.longValue: Number)),
BigIntType -> ((l: Number, r: Number) => maybeDemote(toBigInt(l) & toBigInt(r)))
)
operation(
Symbol("or"),
IntType -> ((l: Number, r: Number) => (IntType, l.intValue | r.intValue: Number)),
    LongType -> ((l: Number, r: Number) => (LongType, l.longValue | r.longValue: Number)),
BigIntType -> ((l: Number, r: Number) => maybeDemote(toBigInt(l) | toBigInt(r)))
)
operation(
Symbol("xor"),
IntType -> ((l: Number, r: Number) => (IntType, l.intValue ^ r.intValue: Number)),
    LongType -> ((l: Number, r: Number) => (LongType, l.longValue ^ r.longValue: Number)),
BigIntType -> ((l: Number, r: Number) => maybeDemote(toBigInt(l) ^ toBigInt(r)))
)
operation(
Symbol("compare"),
IntType -> ((l: Number, r: Number) => (IntType, l.intValue compare r.intValue: Number)),
LongType -> ((l: Number, r: Number) => (IntType, l.longValue compare r.longValue: Number)),
BigIntType -> ((l: Number, r: Number) => (IntType, toBigInt(l) compare toBigInt(r): Number)),
RationalType -> ((l: Number, r: Number) => (IntType, toRational(l) compare toRational(r): Number)),
DoubleType -> ((l: Number, r: Number) => (IntType, l.doubleValue compare r.doubleValue: Number)),
BigDecType -> ((l: Number, r: Number) => (IntType, toBigDecimal(l) compare toBigDecimal(r): Number))
)
}
|
edadma/funl
|
dal/src/main/scala/xyz/hyperreal/dal/ComplexDAL.scala
|
Scala
|
mit
| 13,748
|
// code-examples/TypeLessDoMore/string-util.scala
object StringUtil {
def joiner(strings: List[String]) = strings.mkString(" ")
def joiner(strings: String*):String = joiner(strings.toList)
def toCollection(string: String) = string.split(' ')
}
|
XClouded/t4f-core
|
scala/src/tmp/TypeLessDoMore/string-util.scala
|
Scala
|
apache-2.0
| 254
|
// #4608
object Test {
def main(args: Array[String]) {
((1 to 100) sliding 10).toList.par.map{_.map{i => i * i}}.flatten
}
}
|
felixmulder/scala
|
test/files/run/t4608.scala
|
Scala
|
bsd-3-clause
| 135
|
package cqrs.publicserver
import endpoints.documented.openapi.model.OpenApi
import endpoints.play.server.circe.JsonEntities
import endpoints.play.server.{Assets, Endpoints, PlayComponents}
import play.api.routing.{Router => PlayRouter}
import play.twirl.api.{Html, StringInterpolation}
/**
* These endpoints serve the web page and the assets.
*/
class BootstrapEndpoints(protected val playComponents: PlayComponents) extends Endpoints with Assets with JsonEntities {
val index: Endpoint[Unit, Html] =
endpoint(get(path), htmlResponse)
val assets: Endpoint[AssetRequest, AssetResponse] =
assetsEndpoint(path / "assets" / assetSegments)
val documentation: Endpoint[Unit, OpenApi] =
endpoint(get(path / "documentation"), jsonResponse[OpenApi])
val routes: PlayRouter.Routes =
routesFromEndpoints(
index.implementedBy(_ => indexHtml),
assets.implementedBy(assetsResources(/*pathPrefix = Some("/public")*/))
)
lazy val digests = BootstrapDigests.digests
lazy val indexHtml =
html"""<!DOCTYPE html>
<html>
<head>
<script src="${assets.call(asset("example-cqrs-web-client-fastopt.js")).url}" defer></script>
<title>Meters</title>
</head>
<body></body>
</html>
"""
}
|
Krever/endpoints
|
documentation/examples/cqrs/public-server/src/main/scala/cqrs/publicserver/BootstrapEndpoints.scala
|
Scala
|
mit
| 1,312
|
package org.http4s
import cats.implicits._
import cats.kernel.laws.discipline.OrderTests
import org.http4s.laws.discipline.HttpCodecTests
class QValueSpec extends Http4sSpec {
import QValue._
checkAll("Order[QValue]", OrderTests[QValue].order)
checkAll("HttpCodec[QValue]", HttpCodecTests[QValue].httpCodec)
"sort by descending q-value" in {
prop { (x: QValue, y: QValue) =>
x.thousandths > y.thousandths ==> (x > y)
}
}
"fromDouble should be consistent with fromThousandths" in {
forall(0 to 1000) { i =>
fromDouble(i / 1000.0) must_== fromThousandths(i)
}
}
"fromString should be consistent with fromThousandths" in {
forall(0 to 1000) { i =>
fromString((i / 1000.0).toString) must_== fromThousandths(i)
}
}
"literal syntax should be consistent with successful fromDouble" in {
Right(q(1.0)) must_== fromDouble(1.0)
Right(q(0.5)) must_== fromDouble(0.5)
Right(q(0.0)) must_== fromDouble(0.0)
Right(qValue"1.0") must_== fromDouble(1.0)
Right(qValue"0.5") must_== fromDouble(0.5)
Right(qValue"0.0") must_== fromDouble(0.0)
Right(q(0.5 + 0.1)) must_== fromDouble(0.6)
}
"literal syntax should reject invalid values" in {
import org.specs2.execute._, Typecheck._
import org.specs2.matcher.TypecheckMatchers._
typecheck {
"""
q(2.0) // doesn't compile: out of range
"""
} should not succeed
typecheck {
"""
val d: Double = 0.5 + 0.1
q(d) // doesn't compile, not a literal
"""
} should not succeed
typecheck {
"""
qValue"2.0" // doesn't compile: out of range
"""
} should not succeed
typecheck {
"""
qValue"invalid" // doesn't compile, not parsable as a double
"""
} should not succeed
}
}
|
ChristopherDavenport/http4s
|
tests/src/test/scala/org/http4s/QValueSpec.scala
|
Scala
|
apache-2.0
| 1,824
|
package de.ust.skill.scala.generic
import java.io.ByteArrayOutputStream
import java.io.File
import java.nio.ByteBuffer
import java.nio.channels.FileChannel
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardOpenOption
import scala.collection.mutable.HashMap
/**
* The interface to fully generic access of skill files.
*
* @author Timm Felden
*/
final class State {
  type ⇀[A, B] = HashMap[A, B]
  /**
   * References are stored as (name:String, index:Long).
   */
  type refType = (String, Long)
  /**
   * α : typeName ⇀ instance index ⇀ field index ⇀ data
   */
  var fieldData = new (String ⇀ (Long ⇀ (Long ⇀ Any)))
  /**
   * Contains all type definitions by name.
   */
  var typeDefinitions = new (String ⇀ TypeDefinition)
/**
* field data bypass
*/
def getString(index: Long): String = fieldData("string")(0)(index).asInstanceOf[String]
/**
* little helper for updating fehu
*/
private[generic] def newMap(name: String) = {
val result = new HashMap[Long, HashMap[Long, Any]]
fieldData(name) = result
result
}
override def toString = s"""α :
${
fieldData.map {
      case ("string", v) ⇒ s"string pool⇒${
        v(0).map { case (k, v) ⇒ s"""#$k:"$v"""" }.mkString("{", ", ", "}")
      }}"
      case (k, v) ⇒ s"$k: ${
      v.map {
        case (k, v) ⇒ s"#$k: [${
          v.values.map {
            case (n: String, i: Long) ⇒ s"$n#$i"
            case s: String ⇒ s""""$s""""
            case u ⇒ u.toString
}.mkString(", ")
}]"
}.mkString("{\\n ", "\\n ", "\\n}")
}"
}.mkString("\\n")
}
Ο:
${typeDefinitions.mkString("\\n")}"""
import State.v64
/**
* writes the state to a file, not using any append operations
*
* @note if one were ever to implement "append changes", fieldData and typeDefinitions need to be hidden behind a facade
*/
def write(target: Path) {
val file = Files.newByteChannel(target,
StandardOpenOption.CREATE,
StandardOpenOption.READ,
StandardOpenOption.WRITE,
StandardOpenOption.TRUNCATE_EXISTING).asInstanceOf[FileChannel]
@inline def put(value: Long) = file.write(ByteBuffer.wrap(v64(value)))
// write string pool
writeStrings(file)
// number of type definitions
put(typeDefinitions.size)
// write header to file and field data to out
val out = new ByteArrayOutputStream
    val reverseStrings = fieldData("string")(0).toList.map { case (k, v: String) ⇒ (v, k) }.toMap
    // TODO type definitions have to be stored in type order!!
    typeDefinitions.values.foreach { td ⇒
      val fields = fieldData(td.name)
      put(reverseStrings(td.name))
      td.superName match {
        case None ⇒ put(0)
        case Some(name) ⇒
put(reverseStrings(name))
// TODO LBPSI
put(0)
}
put(fields.size)
put(0)
put(td.fields.size)
td.fields.foreach {
        case (index, f) ⇒
put(0)
put(f.t.typeId)
put(reverseStrings(f.name))
// [[data]]
f.t match {
            case I8Info ⇒
              out.write(fields.values.map { fmap ⇒ fmap(index) }.collect { case b: Byte ⇒ b }.toArray)
            case I16Info ⇒
              val bb = ByteBuffer.allocate(2 * fields.size)
              val output: PartialFunction[Any, Unit] = { case s: Short ⇒ bb.putShort(s) };
              fields.values.foreach { fmap ⇒ output(fmap(index)) }
              bb.rewind
              out.write(bb.array)
            case I32Info ⇒
              val bb = ByteBuffer.allocate(4 * fields.size)
              val output: PartialFunction[Any, Unit] = { case i: Int ⇒ bb.putInt(i) };
              fields.values.foreach { fmap ⇒ output(fmap(index)) }
              bb.rewind
              out.write(bb.array)
            case I64Info ⇒
              val bb = ByteBuffer.allocate(8 * fields.size)
              val output: PartialFunction[Any, Unit] = { case l: Long ⇒ bb.putLong(l) };
              fields.values.foreach { fmap ⇒ output(fmap(index)) }
              bb.rewind
              out.write(bb.array)
            case StringInfo ⇒
              val output: PartialFunction[Any, Unit] = { case s: String ⇒ out.write(v64(reverseStrings(s))) };
              fields.values.foreach { fmap ⇒ output(fmap(index)) }
}
// end-offset
put(out.size)
}
}
// append data
file.write(ByteBuffer.wrap(out.toByteArray()))
// done:)
file.close()
}
private[this] def writeStrings(file: FileChannel) {
implicit val order = Ordering.by[(Long, Any), Long](_._1)
    val strings = fieldData("string")(0).toBuffer.sorted.toList.unzip._2.collect { case s: String ⇒ s }
    val out = new ByteArrayOutputStream
    // number of instances
    file.write(ByteBuffer.wrap(v64(strings.size)))
    val header = ByteBuffer.allocate(4 * strings.length)
    // offsets & data
    for (s ← strings) {
out.write(s.getBytes())
header.putInt(out.size)
}
// append data
header.rewind()
file.write(header)
file.write(ByteBuffer.wrap(out.toByteArray()))
}
}
/**
* Provides basic read capabilities for states.
*
* @author Timm Felden
*/
object State {
def read(path: String): State = FileParser.read(new File(path).toPath);
/**
* encode a v64 value into a stream
*/
def v64(v: Long): Array[Byte] = {
// calculate effective size
var size = 0;
{
var q = v;
while (q != 0) {
q >>>= 7;
size += 1;
}
}
if (0 == size) {
val rval = new Array[Byte](1);
rval(0) = 0;
return rval;
} else if (10 == size)
size = 9;
// split
val rval = new Array[Byte](size);
var count = 0;
while (count < 8 && count < size - 1) {
rval(count) = (v >> (7 * count)).asInstanceOf[Byte];
rval(count) = (rval(count) | 0x80).asInstanceOf[Byte];
count += 1;
}
rval(count) = (v >> (7 * count)).asInstanceOf[Byte];
return rval;
}
def v64(in: Array[Byte]): Long = {
var next = 0
var count = 0
var rval: Long = 0
var r: Long = in(next)
next += 1
while (count < 8 && 0 != (r & 0x80)) {
rval |= (r & 0x7f) << (7 * count);
count += 1;
r = in(next)
next += 1
}
rval = (rval | (count match {
      case 8 ⇒ r
      case _ ⇒ (r & 0x7f)
}) << (7 * count));
return rval
}
}
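// Editor's sketch (not part of the upstream file): the two v64 overloads above implement a
// variable-length integer encoding (1 to 9 bytes, 7 payload bits per byte). A quick round-trip
// check over a few representative values could look like this.
object V64RoundTripDemo extends App {
  for (value <- Seq(0L, 1L, 127L, 128L, 300L, 1L << 40)) {
    val bytes   = State.v64(value)  // encode the Long into its variable-length byte form
    val decoded = State.v64(bytes)  // decode the byte array back into a Long
    println(s"$value -> ${bytes.length} byte(s) -> $decoded")
    assert(decoded == value)
  }
}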
|
lordd/ScalaTFGenericBinding
|
src/de/ust/skill/scala/generic/State.scala
|
Scala
|
bsd-3-clause
| 6,557
|
package net.itadinanta.rnkr.engine
import scala.concurrent.Future
import scala.concurrent.Promise
import akka.actor._
import akka.pattern._
import net.itadinanta.rnkr.backend._
import Leaderboard._
import net.itadinanta.rnkr.core.arbiter.Gate
import scala.concurrent.ExecutionContext
import net.itadinanta.rnkr.util.SetOnce
import akka.util.Timeout
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
import scala.language.implicitConversions
object PersistentLeaderboard {
case class Get()
implicit val timeout = Timeout(1 minute)
private class PersistentLeaderboardDecorator(
override val target: Leaderboard,
walLength: Int,
metadata: Metadata,
writer: ActorRef,
parent: ActorRef,
implicit val executionContext: ExecutionContext) extends Leaderboard.Decorator {
var flushCount: Int = walLength
/**
* Writes changes to the WriteAheadLog (event log)
*
     * Once an update has been applied in memory we persist it onto the WriteAheadLog (event log).
     * Once the write-ahead log overflows its size limit, we make a snapshot and flush the log.
     * Snapshot writing is done in the background, so we can carry on with other operations once
     * the snapshot of the state has been taken.
*
* Failure to save the snapshot is not catastrophic, as the old snapshot is deleted
* and the new one marked as valid only on success.
*/
private def writeAheadLog(cmd: Write, replayMode: ReplayMode.Value, post: Post) =
target -> cmd flatMap { update =>
if (update.hasChanged) {
(writer ? WriteAheadLog(replayMode, update.timestamp, post)) map { _ =>
flushCount += 1
if (flushCount > metadata.walSizeLimit) {
flush()
flushCount = 0
}
update
}
} else {
// no changes, don't bother updating
Future.successful(update)
}
}
def flush() = target -> Export() onSuccess { case snapshot => parent ! Flush(snapshot) }
override def decorate[T] = {
case c @ PostScore(post, updateMode) => writeAheadLog(c, ReplayMode(updateMode), post)
case c @ Remove(entrant) => writeAheadLog(c, ReplayMode.Delete, Storage.tombstone(entrant))
case c @ Clear() => writeAheadLog(c, ReplayMode.Clear, Storage.tombstone())
}
}
/**
* Kernel of the persistence engine.
*
* In the constructor we fire up a reader and delegate the loading of an existing state
* (snapshot + log) to the loader.
*
* Once the loader is done, it will send back a Loaded(...) message containing a pre-populated
* leaderboard buffer and its storage metadata.
*
   * The data is then used to instantiate a writer and a persistent Leaderboard.
* The PersistentLeaderboard adds WAL (event) logging and snapshotting functionality on top
* of the transient ConcurrentLeaderboard
*
* In order to retrieve the Leaderboard instance this actor responds to Get() messages.
* While the leaderboard is being created we are unable to respond to the message straight away
* so we accumulate the incoming requests into a temporary list which is consumed
* once the leaderboard has been created and loaded and becomes available.
*
   * TODO: timeout management and error recovery are nonexistent
*/
private class PersistentLeaderboardManager(name: String, datastore: Datastore) extends Actor {
val writer = SetOnce[ActorRef]
val leaderboard = SetOnce[Leaderboard]
var receivers: List[ActorRef] = Nil
context.actorOf(datastore.readerProps(name), "reader_" + name) ! Load
// TODO timeout if read doesn't complete
def receive = {
case Loaded(buffer, watermark, walLength, metadata) =>
writer := context.actorOf(datastore.writerProps(name, watermark, metadata), "writer_" + name)
leaderboard := new PersistentLeaderboardDecorator(
ConcurrentLeaderboard(buffer, "gate_" + name),
walLength,
metadata,
writer.get,
self,
context.dispatcher)
receivers.reverse foreach (_ ! leaderboard.get)
receivers = Nil
case Flush(snapshot) =>
if (writer.isSet) writer.get ! Save(snapshot)
case Get() =>
if (leaderboard.isSet) sender ! leaderboard.get
else receivers = sender :: receivers
}
}
/** Cameo for the persistent leaderboard manager */
private def managerProps(name: String, datastore: Datastore) =
Props(new PersistentLeaderboardManager(name: String, datastore: Datastore))
/**
* Creates a persistent "name" leaderboard using the given datastore to persist its state
*
* @param name the leaderboard's name in this partition. Must be unique within the datastore.
* @param datastore where to store the leaderboard's state (snapshot + wal)
* @param context parent actor context
*/
def apply(name: String, datastore: Datastore)(implicit context: ActorRefFactory): Future[Leaderboard] = {
implicit val executionContext = context.dispatcher
(context.actorOf(managerProps(name, datastore), "persistent_" + name) ? Get()).mapTo[Leaderboard]
}
}
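// Editor's sketch (hypothetical names, not rnkr code): the manager above queues Get() senders
// until the leaderboard has been loaded, then answers them all. The same "hold requests until
// ready" pattern in isolation, using classic Akka actors:
import akka.actor.{Actor, ActorRef}

object ReadyGate {
  case object Get
  final case class Ready(value: String)
}

class ReadyGate extends Actor {
  import ReadyGate._
  private var value: Option[String] = None
  private var waiting: List[ActorRef] = Nil

  override def receive: Receive = {
    case Ready(v) =>
      value = Some(v)
      waiting.reverse foreach (_ ! v) // reply to queued requesters in arrival order
      waiting = Nil
    case Get =>
      value match {
        case Some(v) => sender() ! v                  // resource available: answer immediately
        case None    => waiting = sender() :: waiting // not ready yet: remember the requester
      }
  }
}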
|
itadinanta/rnkr
|
rnkr-engine/src/main/scala/net/itadinanta/rnkr/engine/PersistentLeaderboard.scala
|
Scala
|
gpl-2.0
| 4,941
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.functions
import org.apache.flink.api.common.functions.InvalidTypesException
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.TypeExtractor
import org.apache.flink.table.api.ValidationException
import org.apache.flink.table.expressions.{Expression, ScalarFunctionCall}
/**
 * Base class for a user-defined scalar function. A user-defined scalar function maps zero, one,
* or multiple scalar values to a new scalar value.
*
* The behavior of a [[ScalarFunction]] can be defined by implementing a custom evaluation
* method. An evaluation method must be declared publicly and named "eval". Evaluation methods
* can also be overloaded by implementing multiple methods named "eval".
*
* User-defined functions must have a default constructor and must be instantiable during runtime.
*
* By default the result type of an evaluation method is determined by Flink's type extraction
* facilities. This is sufficient for basic types or simple POJOs but might be wrong for more
* complex, custom, or composite types. In these cases [[TypeInformation]] of the result type
* can be manually defined by overriding [[getResultType()]].
*
* Internally, the Table/SQL API code generation works with primitive values as much as possible.
* If a user-defined scalar function should not introduce much overhead during runtime, it is
* recommended to declare parameters and result types as primitive types instead of their boxed
* classes. DATE/TIME is equal to int, TIMESTAMP is equal to long.
*/
abstract class ScalarFunction extends UserDefinedFunction {
/**
* Creates a call to a [[ScalarFunction]] in Scala Table API.
*
* @param params actual parameters of function
* @return [[Expression]] in form of a [[ScalarFunctionCall]]
*/
final def apply(params: Expression*): Expression = {
ScalarFunctionCall(this, params)
}
// ----------------------------------------------------------------------------------------------
/**
* Returns the result type of the evaluation method with a given signature.
*
* This method needs to be overridden in case Flink's type extraction facilities are not
* sufficient to extract the [[TypeInformation]] based on the return type of the evaluation
* method. Flink's type extraction facilities can handle basic types or
* simple POJOs but might be wrong for more complex, custom, or composite types.
*
* @param signature signature of the method the return type needs to be determined
* @return [[TypeInformation]] of result type or null if Flink should determine the type
*/
def getResultType(signature: Array[Class[_]]): TypeInformation[_] = null
/**
* Returns [[TypeInformation]] about the operands of the evaluation method with a given
* signature.
*
* In order to perform operand type inference in SQL (especially when NULL is used) it might be
* necessary to determine the parameter [[TypeInformation]] of an evaluation method.
* By default Flink's type extraction facilities are used for this but might be wrong for
* more complex, custom, or composite types.
*
* @param signature signature of the method the operand types need to be determined
* @return [[TypeInformation]] of operand types
*/
def getParameterTypes(signature: Array[Class[_]]): Array[TypeInformation[_]] = {
signature.map { c =>
try {
TypeExtractor.getForClass(c)
} catch {
case ite: InvalidTypesException =>
throw new ValidationException(
s"Parameter types of scalar function '${this.getClass.getCanonicalName}' cannot be " +
s"automatically determined. Please provide type information manually.")
}
}
}
}
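// Editor's sketch (hypothetical user code, not part of Flink): a minimal ScalarFunction with an
// "eval" method as described above. getResultType is overridden only to illustrate the hook;
// for a primitive Int result the default type extraction would normally suffice.
import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation}

class HashCodeFunction extends ScalarFunction {
  // Evaluation method resolved by argument types at code-generation time.
  def eval(s: String): Int = if (s == null) 0 else s.hashCode

  override def getResultType(signature: Array[Class[_]]): TypeInformation[_] =
    BasicTypeInfo.INT_TYPE_INFO
}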
|
yew1eb/flink
|
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/ScalarFunction.scala
|
Scala
|
apache-2.0
| 4,631
|
package debop4s.redis.spring
import debop4s.redis.model.User
import org.slf4j.LoggerFactory
import org.springframework.cache.annotation.{ CacheEvict, Cacheable }
import org.springframework.stereotype.Repository
/**
* debop4s.redis.spring.UserRepository
*
 * @author Sunghyouk Bae (sunghyouk.bae@gmail.com)
 * @since 2014. 2. 24. 11:00 AM
*/
@Repository
class UserRepository {
private lazy val log = LoggerFactory.getLogger(getClass)
@Cacheable(value = Array("user"), key = "'user'.concat(':').concat(#id)")
def getUser(id: String, favoriteMovieSize: Int = 1000): User = {
    log.info(s"Creating a new user. id=$id")
val user = User(favoriteMovieSize)
user.setId(id)
user
}
/**
   * The id value must have a Java-style getter.
*/
@CacheEvict(value = Array("user"), key = "'user'.concat(':').concat(#user.id)")
def updateUser(user: User) {
    log.info("Updating user information; the cached entry will be evicted...")
}
}
|
debop/debop4s
|
debop4s-redis/src/test/scala/debop4s/redis/spring/UserRepository.scala
|
Scala
|
apache-2.0
| 994
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.tstreams.agents.consumer
import com.bwsw.tstreamstransactionserver.rpc.TransactionStates
import scala.collection.mutable.ListBuffer
/**
* Created by Ivan Kudryavtsev on 22.08.16.
* Abstract type for Consumer
*/
trait TransactionOperator {
def getLastTransaction(partition: Int): Option[ConsumerTransaction]
def getTransactionById(partition: Int, transactionID: Long): Option[ConsumerTransaction]
def buildTransactionObject(partition: Int, transactionID: Long, state: TransactionStates, count: Int): Option[ConsumerTransaction]
def setStreamPartitionOffset(partition: Int, transactionID: Long): Unit
def loadTransactionFromDB(partition: Int, transactionID: Long): Option[ConsumerTransaction]
def getTransactionsFromTo(partition: Int, from: Long, to: Long): ListBuffer[ConsumerTransaction]
def checkpoint(): Unit
def getPartitions: Set[Int]
def getCurrentOffset(partition: Int): Long
def getProposedTransactionId: Long
}
|
bwsw/t-streams
|
src/main/scala/com/bwsw/tstreams/agents/consumer/TransactionOperator.scala
|
Scala
|
apache-2.0
| 1,783
|
package org.scalaide.core.internal
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.tools.nsc.settings.ScalaVersion
import org.eclipse.core.resources.IFile
import org.eclipse.core.resources.IProject
import org.eclipse.core.resources.IResourceChangeEvent
import org.eclipse.core.resources.IResourceChangeListener
import org.eclipse.core.resources.IResourceDelta
import org.eclipse.core.resources.IResourceDeltaVisitor
import org.eclipse.core.resources.ResourcesPlugin
import org.eclipse.core.runtime.Platform
import org.eclipse.core.runtime.content.IContentType
import org.eclipse.jdt.core.ElementChangedEvent
import org.eclipse.jdt.core.IClassFile
import org.eclipse.jdt.core.ICompilationUnit
import org.eclipse.jdt.core.IElementChangedListener
import org.eclipse.jdt.core.IJavaElement
import org.eclipse.jdt.core.IJavaElementDelta
import org.eclipse.jdt.core.IJavaProject
import org.eclipse.jdt.core.JavaCore
import org.eclipse.ui.IEditorInput
import org.eclipse.ui.PlatformUI
import org.osgi.framework.BundleContext
import org.scalaide.core.IScalaInstallation
import org.scalaide.core.IScalaPlugin
import org.scalaide.core.SdtConstants
import org.scalaide.core.internal.builder.zinc.CompilerInterfaceStore
import org.scalaide.core.internal.jdt.model.ScalaClassFile
import org.scalaide.core.internal.jdt.model.ScalaCompilationUnit
import org.scalaide.core.internal.jdt.model.ScalaSourceFile
import org.scalaide.core.internal.project.ScalaInstallation.platformInstallation
import org.scalaide.core.internal.project.ScalaProject
import org.scalaide.logging.HasLogger
import org.scalaide.logging.PluginLogConfigurator
import org.scalaide.ui.internal.diagnostic
import org.scalaide.ui.internal.editor.ScalaDocumentProvider
import org.scalaide.ui.internal.migration.RegistryExtender
import org.scalaide.ui.internal.templates.ScalaTemplateManager
import org.scalaide.util.Utils.WithAsInstanceOfOpt
import org.scalaide.util.eclipse.OSGiUtils
import org.scalaide.util.internal.CompilerUtils
import org.scalaide.util.internal.FixedSizeCache
object ScalaPlugin {
@volatile private var plugin: ScalaPlugin = _
def apply(): ScalaPlugin = plugin
}
class ScalaPlugin extends IScalaPlugin with PluginLogConfigurator with IResourceChangeListener with IElementChangedListener with HasLogger {
import CompilerUtils.ShortScalaVersion
import CompilerUtils.isBinaryPrevious
import CompilerUtils.isBinarySame
import org.scalaide.core.SdtConstants._
/** Check if the given version is compatible with the current plug-in version.
* Check on the major/minor number, discard the maintenance number.
*
* For example 2.9.1 and 2.9.2-SNAPSHOT are compatible versions whereas
* 2.8.1 and 2.9.0 aren't.
*/
def isCompatibleVersion(version: ScalaVersion, project: ScalaProject): Boolean = {
if (project.isUsingCompatibilityMode())
isBinaryPrevious(ScalaVersion.current, version)
else
isBinarySame(ScalaVersion.current, version)// don't treat 2 unknown versions as equal
}
private lazy val sdtCoreBundle = getBundle()
lazy val sbtCompilerBundle = Platform.getBundle(SbtPluginId)
lazy val sbtCompilerInterfaceBundle = Platform.getBundle(SbtCompilerInterfacePluginId)
lazy val sbtCompilerInterface = OSGiUtils.pathInBundle(sbtCompilerInterfaceBundle, "/")
lazy val templateManager = new ScalaTemplateManager()
lazy val scalaSourceFileContentType: IContentType =
Platform.getContentTypeManager().getContentType("scala.tools.eclipse.scalaSource")
lazy val scalaClassFileContentType: IContentType =
Platform.getContentTypeManager().getContentType("scala.tools.eclipse.scalaClass")
/**
* The document provider needs to exist only a single time because it caches
* compilation units (their working copies). Each `ScalaSourceFileEditor` is
* associated with this document provider.
*/
private[scalaide] lazy val documentProvider = new ScalaDocumentProvider
override def start(context: BundleContext) = {
ScalaPlugin.plugin = this
super.start(context)
if (!headlessMode) {
PlatformUI.getWorkbench.getEditorRegistry.setDefaultEditor("*.scala", SdtConstants.EditorId)
diagnostic.StartupDiagnostics.run
new RegistryExtender().perform()
}
ResourcesPlugin.getWorkspace.addResourceChangeListener(this, IResourceChangeEvent.PRE_CLOSE | IResourceChangeEvent.POST_CHANGE)
JavaCore.addElementChangedListener(this)
logger.info("Scala compiler bundle: " + platformInstallation.compiler.classJar.toOSString() )
}
override def stop(context: BundleContext) = {
ResourcesPlugin.getWorkspace.removeResourceChangeListener(this)
for {
iProject <- ResourcesPlugin.getWorkspace.getRoot.getProjects
if iProject.isOpen
scalaProject <- asScalaProject(iProject)
} scalaProject.projectSpecificStorage.save()
super.stop(context)
ScalaPlugin.plugin = null
}
/** The compiler-interface store, located in this plugin configuration area (usually inside the metadata directory */
lazy val compilerInterfaceStore: CompilerInterfaceStore = new CompilerInterfaceStore(Platform.getStateLocation(sdtCoreBundle), this)
/** A LRU cache of class loaders for Scala builders */
lazy val classLoaderStore: FixedSizeCache[IScalaInstallation,ClassLoader] = new FixedSizeCache(initSize = 2, maxSize = 3)
// Scala project instances
private val projects = new mutable.HashMap[IProject, ScalaProject]
override def scalaCompilationUnit(input: IEditorInput): Option[ScalaCompilationUnit] = {
def unitOfSourceFile = Option(documentProvider.getWorkingCopy(input)) map (ScalaCompilationUnit.castFrom)
def unitOfClassFile = input.getAdapter(classOf[IClassFile]) match {
case tr: ScalaClassFile => Some(tr)
case _ => None
}
unitOfSourceFile orElse unitOfClassFile
}
def getJavaProject(project: IProject) = JavaCore.create(project)
override def getScalaProject(project: IProject): ScalaProject = projects.synchronized {
projects.get(project) getOrElse {
val scalaProject = ScalaProject(project)
projects(project) = scalaProject
scalaProject
}
}
override def asScalaProject(project: IProject): Option[ScalaProject] = {
if (ScalaProject.isScalaProject(project)) {
Some(getScalaProject(project))
} else {
None
}
}
def disposeProject(project: IProject): Unit = {
projects.synchronized {
projects.get(project) foreach { (scalaProject) =>
projects.remove(project)
scalaProject.dispose()
}
}
}
/** Restart all presentation compilers in the workspace. Need to do it in order
* for them to pick up the new std out/err streams.
*/
def resetAllPresentationCompilers(): Unit = {
for {
iProject <- ResourcesPlugin.getWorkspace.getRoot.getProjects
if iProject.isOpen
scalaProject <- asScalaProject(iProject)
} scalaProject.presentationCompiler.askRestart()
}
override def resourceChanged(event: IResourceChangeEvent): Unit = {
(event.getResource, event.getType) match {
case (project: IProject, IResourceChangeEvent.PRE_CLOSE) =>
disposeProject(project)
case _ =>
}
(Option(event.getDelta()) foreach (_.accept(new IResourceDeltaVisitor() {
override def visit(delta: IResourceDelta): Boolean = {
// This is obtained at project opening or closing, meaning the 'openness' state changed
if (delta.getFlags == IResourceDelta.OPEN){
val resource = delta.getResource().asInstanceOfOpt[IProject]
resource foreach {(r) =>
// that particular classpath check can set the Installation (used, e.g., for sbt-eclipse imports)
// setting the Installation triggers a recursive check
asScalaProject(r) foreach { (p) =>
try {
// It's important to save this /before/ checking classpath : classpath
// checks create their own preference modifications under some conditions.
// Doing them concurrently can wreak havoc.
p.projectSpecificStorage.save()
} finally {
p.checkClasspath(true)
}
}
}
false
} else
true
}
})))
}
override def elementChanged(event: ElementChangedEvent): Unit = {
import scala.collection.mutable.ListBuffer
import IJavaElement._
import IJavaElementDelta._
// check if the changes are linked with the build path
val modelDelta = event.getDelta()
// check that the notification is about a change (CHANGE) of some elements (F_CHILDREN) of the java model (JAVA_MODEL)
if (modelDelta.getElement().getElementType() == JAVA_MODEL && modelDelta.getKind() == CHANGED && (modelDelta.getFlags() & F_CHILDREN) != 0) {
for (innerDelta <- modelDelta.getAffectedChildren()) {
      // check that the notification on the child is about a change (CHANGED) relative to a resolved classpath change (F_RESOLVED_CLASSPATH_CHANGED)
if (innerDelta.getKind() == CHANGED && (innerDelta.getFlags() & IJavaElementDelta.F_RESOLVED_CLASSPATH_CHANGED) != 0) {
innerDelta.getElement() match {
// classpath change should only impact projects
case javaProject: IJavaProject => {
asScalaProject(javaProject.getProject()).foreach{ (p) => p.classpathHasChanged(false) }
}
case _ =>
}
}
}
}
// process deleted files
val buff = new ListBuffer[ScalaSourceFile]
val changed = new ListBuffer[ICompilationUnit]
val projectsToReset = new mutable.HashSet[ScalaProject]
def findRemovedSources(delta: IJavaElementDelta): Unit = {
val isChanged = delta.getKind == CHANGED
val isRemoved = delta.getKind == REMOVED
val isAdded = delta.getKind == ADDED
def hasFlag(flag: Int) = (delta.getFlags & flag) != 0
val elem = delta.getElement
val processChildren: Boolean = elem.getElementType match {
case JAVA_MODEL =>
true
case JAVA_PROJECT if isRemoved =>
disposeProject(elem.getJavaProject.getProject)
false
case JAVA_PROJECT if !hasFlag(F_CLOSED) =>
true
case PACKAGE_FRAGMENT_ROOT =>
val hasContentChanged = isRemoved || hasFlag(F_REMOVED_FROM_CLASSPATH | F_ADDED_TO_CLASSPATH | F_ARCHIVE_CONTENT_CHANGED)
if (hasContentChanged) {
logger.info("package fragment root changed (resetting presentation compiler): " + elem.getElementName())
asScalaProject(elem.getJavaProject().getProject).foreach(projectsToReset += _)
}
!hasContentChanged
case PACKAGE_FRAGMENT =>
val hasContentChanged = isAdded || isRemoved
if (hasContentChanged) {
logger.debug("package fragment added or removed: " + elem.getElementName())
asScalaProject(elem.getJavaProject().getProject).foreach(projectsToReset += _)
}
// stop recursion here, we need to reset the PC anyway
!hasContentChanged
// TODO: the check should be done with isInstanceOf[ScalaSourceFile] instead of
// endsWith(scalaFileExtn), but it is not working for Play 2.0 because of #1000434
case COMPILATION_UNIT if isChanged && elem.getResource != null && elem.getResource.getName.endsWith(ScalaFileExtn) =>
val hasContentChanged = hasFlag(IJavaElementDelta.F_CONTENT)
if (hasContentChanged)
// mark the changed Scala files to be refreshed in the presentation compiler if needed
changed += elem.asInstanceOf[ICompilationUnit]
false
case COMPILATION_UNIT if elem.isInstanceOf[ScalaSourceFile] && isRemoved =>
buff += elem.asInstanceOf[ScalaSourceFile]
false
case COMPILATION_UNIT if isAdded =>
logger.debug("added compilation unit " + elem.getElementName())
asScalaProject(elem.getJavaProject().getProject).foreach(projectsToReset += _)
false
case _ =>
false
}
if (processChildren)
delta.getAffectedChildren foreach findRemovedSources
}
findRemovedSources(event.getDelta)
// ask for the changed scala files to be refreshed in each project presentation compiler if needed
if (changed.nonEmpty) {
changed.toList groupBy (_.getJavaProject.getProject) foreach {
case (project, units) =>
asScalaProject(project) foreach { p =>
if (project.isOpen && !projectsToReset(p)) {
p.presentationCompiler(_.refreshChangedFiles(units.map(_.getResource.asInstanceOf[IFile])))
}
}
}
}
projectsToReset.foreach(_.presentationCompiler.askRestart())
if (buff.nonEmpty) {
buff.toList groupBy (_.getJavaProject.getProject) foreach {
case (project, srcs) =>
asScalaProject(project) foreach { p =>
if (project.isOpen && !projectsToReset(p))
p.presentationCompiler.internal (_.filesDeleted(srcs))
}
}
}
}
}
|
Kwestor/scala-ide
|
org.scala-ide.sdt.core/src/org/scalaide/core/internal/ScalaPlugin.scala
|
Scala
|
bsd-3-clause
| 13,198
|
package io.sweetheart.examples.pis.abstractMembers
object Misc {
object Obj1 {
trait RationalTrait {
val numerArg: Int
val denomArg: Int
}
new RationalTrait {
val numerArg = 1
val denomArg = 2
}
val expr1 = 1
val expr2 = 2
new RationalTrait {
val numerArg = expr1
val denomArg = expr2
}
object Color extends Enumeration {
val Red = Value
val Green = Value
val Blue = Value
}
import Color._
val color1 = Red
object Direction extends Enumeration {
val North, East, South, West = Value
}
abstract class Currency {
val amount: Long
def designation: String
override def toString = amount + " " + designation
}
new Currency {
val amount = 79L
def designation = "USD"
}
abstract class Dollar extends Currency {
def designation = "USD"
}
abstract class Euro extends Currency {
def designation = "EUR"
}
abstract class CurrencyZone {
type Currency <: AbstractCurrency
def make(x: Long): Currency
abstract class AbstractCurrency {
val amount: Long
def designation: String
def + (that: Currency): Currency =
make(this.amount + that.amount)
def * (x: Double): Currency =
make((this.amount * x).toLong)
}
}
object US extends CurrencyZone {
abstract class Dollar extends AbstractCurrency {
def designation = "USD"
}
type Currency = Dollar
def make(x: Long) = new Dollar { val amount = x }
}
}
object Obj2 {
trait RationalTrait {
val numerArg: Int
val denomArg: Int
require(denomArg != 0)
private val g = gcd(numerArg, denomArg)
val numer = numerArg / g
val denom = denomArg / g
private def gcd(a: Int, b: Int): Int =
if (b == 0) a else gcd(b, a % b)
override def toString = numer +"/"+ denom
}
object twoThirds extends {
val numerArg = 2
val denomArg = 3
} with RationalTrait
class RationalClass(n: Int, d: Int) extends {
val numerArg = n
val denomArg = d
} with RationalTrait {
def + (that: RationalClass) = new RationalClass(
numer * that.denom + that.numer * denom,
denom * that.denom
)
}
object Color extends Enumeration {
val Red, Green, Blue = Value
}
abstract class CurrencyZone {
type Currency <: AbstractCurrency
def make(x: Long): Currency
abstract class AbstractCurrency {
val amount: Long
def designation: String
def + (that: Currency): Currency =
make(this.amount + that.amount)
def * (x: Double): Currency =
make((this.amount * x).toLong)
def - (that: Currency): Currency =
make(this.amount - that.amount)
def / (that: Double) =
make((this.amount / that).toLong)
def / (that: Currency) =
this.amount.toDouble / that.amount
def from(other: CurrencyZone#AbstractCurrency): Currency =
make(math.round(
other.amount.toDouble * Converter.exchangeRate
(other.designation)(this.designation)))
private def decimals(n: Long): Int =
if (n == 1) 0 else 1 + decimals(n / 10)
override def toString =
((amount.toDouble / CurrencyUnit.amount.toDouble)
formatted ("%."+ decimals(CurrencyUnit.amount) +"f")
+" "+ designation)
}
val CurrencyUnit: Currency
}
object Converter {
var exchangeRate = Map(
"USD" -> Map("USD" -> 1.0 , "EUR" -> 0.7596,
"JPY" -> 1.211 , "CHF" -> 1.223),
"EUR" -> Map("USD" -> 1.316 , "EUR" -> 1.0 ,
"JPY" -> 1.594 , "CHF" -> 1.623),
"JPY" -> Map("USD" -> 0.8257, "EUR" -> 0.6272,
"JPY" -> 1.0 , "CHF" -> 1.018),
"CHF" -> Map("USD" -> 0.8108, "EUR" -> 0.6160,
"JPY" -> 0.982 , "CHF" -> 1.0 )
)
}
}
class Outer {
class Inner
}
}
|
zzragida/ScalaExamples
|
ProgrammingInScala/src/io/sweetheart/examples/pis/abstractMembers/Misc.scala
|
Scala
|
apache-2.0
| 4,089
|
package org.tuubes.core.entities
import org.tuubes.core.{Type, TypeRegistry}
/**
* A type of entity.
*
* @author TheElectronWill
*/
abstract class EntityType(n: String) extends Type[EntityType](n, EntityType) {}
/**
* Companion object and registry of entity types.
*/
object EntityType extends TypeRegistry[EntityType] {}
|
mcphoton/Photon-Server
|
core/src/main/scala/org/tuubes/core/entities/EntityType.scala
|
Scala
|
lgpl-3.0
| 331
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.schema
import org.apache.flink.annotation.Internal
import org.apache.flink.table.plan.stats.FlinkStatistic
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.`type`.{RelDataType, RelDataTypeFactory}
/**
* An intermediate Table to wrap a optimized RelNode inside. The input data of this Table is
* generated by the underlying optimized RelNode.
*
* @param relNode underlying optimized RelNode
* @param isAccRetract true if input data of table contain retraction messages.
* @param statistic statistics of current Table
*/
@Internal
class IntermediateRelTable(
val relNode: RelNode,
val isAccRetract: Boolean,
val statistic: FlinkStatistic = FlinkStatistic.UNKNOWN)
extends FlinkTable {
def this(relNode: RelNode) {
this(relNode, false)
}
override def getRowType(typeFactory: RelDataTypeFactory): RelDataType = relNode.getRowType
/**
* Creates a copy of this table, changing statistic.
*
* @param statistic A new FlinkStatistic.
* @return Copy of this table, substituting statistic.
*/
override def copy(statistic: FlinkStatistic): FlinkTable =
new IntermediateRelTable(relNode, isAccRetract, statistic)
/**
* Returns statistics of current table
*
* @return statistics of current table
*/
override def getStatistic: FlinkStatistic = statistic
}
|
shaoxuan-wang/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/schema/IntermediateRelTable.scala
|
Scala
|
apache-2.0
| 2,208
|
import language.experimental.fewerBraces
val xs = List(1, 2, 3)
val ys = xs.map x =>
x + 1
val x = ys.foldLeft(0) (x, y) =>
x + y
val y = ys.foldLeft(0) (x: Int, y: Int) =>
val z = x + y
z * z
val as: Int = xs
.map x =>
x * x
.filter y =>
y > 0
(0)
|
dotty-staging/dotty
|
tests/pos/closure-args.scala
|
Scala
|
apache-2.0
| 272
|
package com.github.mrpowers.spark.daria.sql
import org.apache.spark.sql.DataFrame
/**
* spark-daria can be used as a lightweight framework for running ETL analyses in Spark.
*
* You can define `EtlDefinitions`, group them in a collection, and run the etls via jobs.
*
* '''Components of an ETL'''
*
* An ETL starts with a DataFrame, runs a series of transformations (filter, custom transformations, repartition), and writes out data.
*
* The `EtlDefinition` class is generic and can be molded to suit all ETL situations. For example, it can read a CSV file from S3, run transformations, and write out Parquet files on your local filesystem.
*/
case class EtlDefinition(
sourceDF: DataFrame,
transform: (DataFrame => DataFrame),
write: (DataFrame => Unit),
metadata: scala.collection.mutable.Map[String, Any] = scala.collection.mutable.Map[String, Any]()
) {
/**
* Runs an ETL process
*
* {{{
* val sourceDF = spark.createDF(
* List(
* ("bob", 14),
* ("liz", 20)
* ), List(
* ("name", StringType, true),
* ("age", IntegerType, true)
* )
* )
*
* def someTransform()(df: DataFrame): DataFrame = {
* df.withColumn("cool", lit("dude"))
* }
*
* def someWriter()(df: DataFrame): Unit = {
* val path = new java.io.File("./tmp/example").getCanonicalPath
* df.repartition(1).write.csv(path)
* }
*
   *  val etlDefinition = new EtlDefinition(
   *    sourceDF = sourceDF,
   *    transform = someTransform(),
   *    write = someWriter()
   *  )
*
* etlDefinition.process()
* }}}
*/
def process(): Unit = {
write(sourceDF.transform(transform))
}
}
|
MrPowers/spark-daria
|
src/main/scala/com/github/mrpowers/spark/daria/sql/EtlDefinition.scala
|
Scala
|
mit
| 1,716
|
/*
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird
import org.scalatest._
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen.choose
import org.scalacheck.Properties
import org.scalatest.prop.PropertyChecks
import org.scalatest.{Matchers, PropSpec}
import java.util.Arrays
class QTreeLaws extends CheckProperties {
import BaseProperties._
implicit val qtSemigroup = new QTreeSemigroup[Long](4)
implicit val qtGen = Arbitrary {
for (v <- choose(0L, 10000L)) yield (QTree(v))
}
property("QTree is associative") {
isAssociative[QTree[Long]]
}
}
class QTreeTest extends WordSpec with Matchers {
def randomList(n: Long) =
(1L to n).map { i =>
math.random
}
def buildQTree(k: Int, list: Seq[Double]) = {
val qtSemigroup = new QTreeSemigroup[Double](k)
qtSemigroup.sumOption(list.map { QTree(_) }).get
}
def trueQuantile[T: Ordering](list: Seq[T], q: Double): T = {
val rank = math.floor(q * list.size).toInt
val sorted = list.toList.sorted
sorted(rank)
}
def trueRangeSum(list: Seq[Double], from: Double, to: Double) =
list.filter { _ >= from }.filter { _ < to }.sum
for (k <- Seq(3, 11, 51, 101)) {
s"QTree with elements (1 to $k)" should {
val trueMedian = (1 + k) / 2
s"have median $trueMedian" in {
implicit val sg = new QTreeSemigroup[Unit](6)
val list = (1 to k).map(_.toDouble)
val qtree = sg.sumOption(list.map(QTree.value(_))).get
val (lower, upper) = qtree.quantileBounds(0.5)
assert(lower <= trueMedian && trueMedian <= upper)
}
}
}
for (k <- (1 to 6))
("QTree with sizeHint 2^" + k) should {
"always contain the true quantile within its bounds" in {
val list = randomList(10000)
val qt = buildQTree(k, list)
val quantile = math.random
val (lower, upper) = qt.quantileBounds(quantile)
val truth = trueQuantile(list, quantile)
assert(truth >= lower)
assert(truth <= upper)
}
"return correct quantile bounds for two percentile extremes" in {
val list = randomList(10000)
val qt = buildQTree(k, list)
val (lower, _) = qt.quantileBounds(0.0)
val (_, upper) = qt.quantileBounds(1.0)
assert(lower == 0.0)
assert(upper == 1.0)
}
"always contain the true range sum within its bounds" in {
val list = randomList(10000)
val qt = buildQTree(k, list)
val from = math.random
val to = math.random
val (lower, upper) = qt.rangeSumBounds(from, to)
val truth = trueRangeSum(list, from, to)
assert(truth >= lower)
assert(truth <= upper)
}
"have size bounded by 2^(k+2)" in {
val list = randomList(10000)
val qt = buildQTree(k, list)
assert(qt.size <= (1 << (k + 2)))
}
}
for (quantile <- List(0, .05, .5, .777777777, .95))
("A QTreeAggregator with quantile set as " + quantile) should {
"work as an aggregator for doubles with a small stream" in {
val list = randomList(10000).map(i => math.round(i * 100).toDouble)
val agg = QTreeAggregator(quantile)(implicitly[Numeric[Double]])
val interval = agg(list)
val truth = trueQuantile(list, quantile)
assert(interval.contains(truth))
}
"work as an aggregator for longs with a small stream" in {
val list = randomList(10000).map(i => (i * 1000l).toLong)
val agg = QTreeAggregator(quantile)(implicitly[Numeric[Long]])
val interval = agg(list)
val truth = trueQuantile(list, quantile)
assert(interval.contains(truth))
}
}
}
|
nevillelyh/algebird
|
algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala
|
Scala
|
apache-2.0
| 4,257
|
/*
* Copyright 2016 sadikovi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.sadikovi.spark.benchmark
import java.util.{HashMap => JHashMap}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col
/** Configuration option for cli */
private case class ConfOption(name: String)
/** Configuration map for cli */
private case class Conf() {
private val conf: JHashMap[ConfOption, String] = new JHashMap[ConfOption, String]()
def addOption(opt: ConfOption, value: String): Unit = conf.put(opt, value)
def get(opt: ConfOption): Option[String] = Option(conf.get(opt))
}
/**
 * NetFlow benchmarks are divided into several categories:
 * - full scan without any predicate, with and without field conversion
 * (result is cached and counted)
 * - predicate scan with and without predicate pushdown (result is aggregated by protocol)
 * - aggregated scan with predicate pushdown, trying to replicate a report
* (result is cached and counted)
*/
object NetFlowReadBenchmark {
// Required options
private val ITERATIONS = ConfOption("--iterations")
private val FILES = ConfOption("--files")
private val VERSION = ConfOption("--version")
// Initialize Spark context
val sparkConf = new SparkConf().
setMaster("local[1]").
setAppName("spark-netflow-benchmark")
val spark = SparkSession.builder().config(sparkConf).getOrCreate()
def main(args: Array[String]): Unit = {
val conf = process(args.toList, Conf())
// Extract options
val iterations = conf.get(ITERATIONS).getOrElse(
sys.error("Number of iterations must be specified, e.g. '--iterations 3'")).toInt
val files = conf.get(FILES).getOrElse(
sys.error("Files / directory must be specified, e.g. '--files /tmp/files'"))
val version = conf.get(VERSION).getOrElse(
sys.error("NetFlow version must be specified, e.g. '--version 5'"))
// scalastyle:off
println(s"- Iterations: $iterations")
println(s"- Files: $files")
println(s"- Version: $version")
// scalastyle:on
// Defined benchmarks
fullScanBenchmark(iterations, version, files)
predicateScanBenchmark(iterations, version, files)
aggregatedScanBenchmark(iterations, version, files)
}
private def process(args: List[String], conf: Conf): Conf = args match {
case ITERATIONS.name :: value :: tail =>
conf.addOption(ITERATIONS, value)
process(tail, conf)
case FILES.name :: value :: tail =>
conf.addOption(FILES, value)
process(tail, conf)
case VERSION.name :: value :: tail =>
conf.addOption(VERSION, value)
process(tail, conf)
case other :: tail => process(tail, conf)
case Nil => conf
}
/** Test full read of files provided with or without `stringify` option */
def fullScanBenchmark(iters: Int, version: String, files: String): Unit = {
val sqlBenchmark = new Benchmark("NetFlow full scan", 10000, iters)
sqlBenchmark.addCase("Scan, stringify = F") { iter =>
val df = spark.read.format("com.github.sadikovi.spark.netflow").
option("version", version).option("stringify", "false").load(files)
df.foreach(_ => ())
}
sqlBenchmark.addCase("Scan, stringify = T") { iter =>
val df = spark.read.format("com.github.sadikovi.spark.netflow").
option("version", version).option("stringify", "true").load(files)
df.foreach(_ => ())
}
sqlBenchmark.run()
}
/** Predicate scan benchmark, test high and low selectivity */
def predicateScanBenchmark(iters: Int, version: String, files: String): Unit = {
val sqlBenchmark = new Benchmark("NetFlow predicate scan", 10000, iters)
sqlBenchmark.addCase("Predicate pushdown = F, high") { iter =>
val df = spark.read.format("com.github.sadikovi.spark.netflow").
option("version", version).option("predicate-pushdown", "false").load(files).
filter(col("srcport") =!= 10)
df.foreach(_ => ())
}
sqlBenchmark.addCase("Predicate pushdown = T, high") { iter =>
val df = spark.read.format("com.github.sadikovi.spark.netflow").
option("version", version).option("predicate-pushdown", "true").load(files).
filter(col("srcport") =!= 10)
df.foreach(_ => ())
}
sqlBenchmark.addCase("Predicate pushdown = F, low") { iter =>
val df = spark.read.format("com.github.sadikovi.spark.netflow").
option("version", version).option("predicate-pushdown", "false").load(files).
filter(col("srcip") === "127.0.0.1")
df.foreach(_ => ())
}
sqlBenchmark.addCase("Predicate pushdown = T, low") { iter =>
val df = spark.read.format("com.github.sadikovi.spark.netflow").
option("version", version).option("predicate-pushdown", "true").load(files).
filter(col("srcip") === "127.0.0.1")
df.foreach(_ => ())
}
sqlBenchmark.run()
}
/** Run simple aggregation based with filtering */
def aggregatedScanBenchmark(iters: Int, version: String, files: String): Unit = {
val sqlBenchmark = new Benchmark("NetFlow aggregated report", 10000, iters)
sqlBenchmark.addCase("Aggregated report") { iter =>
val df = spark.read.format("com.github.sadikovi.spark.netflow").
option("version", version).load(files).
filter(col("srcport") > 10).
select("srcip", "dstip", "srcport", "dstport", "packets", "octets")
val agg = df.groupBy(col("srcip"), col("dstip"), col("srcport"), col("dstport")).count()
agg.foreach(_ => ())
}
sqlBenchmark.run()
}
}
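// Editor's sketch: a tiny in-process driver showing the options the parser above expects.
// The option names and example values are taken from the error messages in main(); the file
// glob below is an assumed sample path.
object NetFlowReadBenchmarkDemo {
  def main(args: Array[String]): Unit = {
    NetFlowReadBenchmark.main(Array(
      "--iterations", "3",
      "--files", "/tmp/netflow/ft-v05.*",
      "--version", "5"))
  }
}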
|
sadikovi/spark-netflow
|
src/main/scala/com/github/sadikovi/spark/benchmark/NetFlowReadBenchmark.scala
|
Scala
|
apache-2.0
| 6,090
|
/*
* ____ ____ _____ ____ ___ ____
* | _ \\ | _ \\ | ____| / ___| / _/ / ___| Precog (R)
* | |_) | | |_) | | _| | | | | /| | | _ Advanced Analytics Engine for NoSQL Data
* | __/ | _ < | |___ | |___ |/ _| | | |_| | Copyright (C) 2010 - 2013 SlamData, Inc.
* |_| |_| \\_\\ |_____| \\____| /__/ \\____| All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version
* 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this
* program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.precog.performance
import com.precog.common.util.IOUtils
import org.specs2.mutable.Specification
import org.specs2.specification._
import org.specs2.execute._
import java.io.File
trait PerformanceSpec extends ExamplesFactory {
def perform(iterations: Int, time: Long)(test: => Any): Result = {
def batchTest(iterations: Int) = {
var cnt = 0
while(cnt < iterations) {
test
cnt += 1
}
}
performBatch(iterations, time)(batchTest _)
}
def performBatch(iterations: Int, time: Long)(batchTest: Int => Any): Result = {
test("warmup", batchTest(iterations))
val testTime = test("measure", batchTest(iterations))
val millis = testTime / 1000000
if(millis <= time) {
new Success("Nailed it! %.02f%% of %d".format( millis * 100.0 / time, time))
} else {
new Failure("Wiff! %.02f times goal of %d".format( millis.toDouble / time, time))
}
}
def test(msg: String, test: => Any): Long = {
val start = System.nanoTime
test
System.nanoTime - start
}
  def newTempDir(): File = IOUtils.createTmpDir("performance_test").unsafePerformIO
def cleanupTempDir(dir: File) = IOUtils.recursiveDelete(dir).unsafePerformIO
}
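// Editor's sketch (hypothetical usage, not part of the platform): a specs2 specification mixing
// in PerformanceSpec and asserting that 100 iterations of some work finish within 500 ms.
import org.specs2.mutable.Specification

class ExampleSortPerformanceSpec extends Specification with PerformanceSpec {
  "sorting 10k random ints" should {
    "run 100 iterations within 500 ms" in {
      val data = Array.fill(10000)(scala.util.Random.nextInt())
      perform(100, 500) {
        data.clone().sorted // the work under measurement
      }
    }
  }
}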
|
precog/platform
|
performance/src/test/scala/com/precog/performance/PerformanceSpec.scala
|
Scala
|
agpl-3.0
| 2,302
|
package scalaz.stream
import scalaz.{Equal, Nondeterminism}
import scalaz.syntax.equal._
import scalaz.std.anyVal._
import scalaz.std.list._
import scalaz.std.list.listSyntax._
import org.scalacheck._
import Prop._
import scalaz.concurrent.Task
object AsyncSpec extends Properties("async") {
property("queue") = forAll { l: List[Int] =>
val (q, s) = async.queue[Int]
val t1 = Task {
l.foreach(i => q.enqueue(i))
q.close
}
val t2 = s.runLog
Nondeterminism[Task].both(t1, t2).run._2.toList == l
}
property("ref") = forAll { l: List[Int] =>
val v = async.ref[Int]
val s = v.signal.continuous
val t1 = Task {
l.foreach { i => v.set(i); Thread.sleep(1) }
v.close
}
val t2 = s.takeWhile(_ % 23 != 0).runLog
Nondeterminism[Task].both(t1, t2).run._2.toList.forall(_ % 23 != 0)
}
}
|
bmjames/scalaz-stream
|
src/test/scala/scalaz/stream/AsyncSpec.scala
|
Scala
|
mit
| 862
|
package models
/**
* Author: matthijs
* Created on: 29 Dec 2013.
*/
import java.security.MessageDigest
import play.api.db.DB
import play.api.Play.current
import anorm._
import anorm.SqlParser._
import models.util.AnormExtension._
import anorm.~
import play.Logger
case class Account(id: Long, firstname: String, lastname: String, fullname: Option[String], email: String, fever_api_key: String, password: String)
//creationdate
//updateddate
//lastlogindate
object Account {
val account = {
get[Long]("id") ~
get[String]("firstname") ~
get[String]("lastname") ~
get[Option[String]]("fullname") ~
get[String]("email") ~
get[String]("fever_api_key") ~
get[String]("password") map {
case id ~ firstname ~ lastname ~ fullname ~ email ~ fever_api_key ~ password =>
Account(id, firstname, lastname, fullname, email, fever_api_key, password)
}
}
val digest = MessageDigest.getInstance("MD5")
def create(firstname: String, lastname: String, fullname: Option[String], email: String, password: String)
: Account = DB.withConnection {
implicit c =>
//val hashedPassword = hashMD5(password)
val feverApiKey = createFeverApiKey(email, password)
SQL( """
insert into account (firstname, lastname, fullname, email, password, fever_api_key, creationdate)
values({firstname}, {lastname}, {fullname},{email},{password},{fever_api_key}, current_timestamp)
""".stripMargin).on(
'firstname -> firstname,
'lastname -> lastname,
'fullname -> fullname,
'email -> email,
'password -> password,
'fever_api_key -> feverApiKey
).executeUpdate()
this.findByEmail(email).getOrElse(null)
}
def updateLogin(email: String) = DB.withConnection {
implicit c =>
SQL(
"""
update account set lastlogindate = CURRENT_TIMESTAMP
where email={email}
""".stripMargin
).on(
'email -> email
).executeUpdate()
}
def findByEmail(email: String): Option[Account] = DB.withConnection {
implicit c =>
try {
Some(
SQL(
"""
select id, firstname, lastname, fullname, email, fever_api_key, password
from account
where email={email}
""".stripMargin
).on(
'email -> email
).using(account).single()
)
}
catch {
case e: RuntimeException => None
}
}
def findByFeverApiKey(key: String): Option[Account] = DB.withConnection {
implicit c =>
try {
Some(
SQL(
"""
select id, firstname, lastname, fullname, email, fever_api_key, password
from account
where fever_api_key={key}
""".stripMargin
).on(
'key -> key
).using(account).single()
)
}
catch {
case e: RuntimeException => None
}
}
def setFeverApiKey(email: String, password: String) = DB.withConnection {
implicit c =>
Logger.info("setting fever api key with password[" + password + "]: result[" + createFeverApiKey(email, password) + "]")
SQL(
"""
update account set fever_api_key ={fever_api_key}
where email={email}
""".stripMargin
).on(
'email -> email,
'fever_api_key -> createFeverApiKey(email, password)
).executeUpdate()
}
private def createFeverApiKey(email: String, password: String): String = {
hashMD5(email + ":" + password)
}
private def hashMD5(toHash: String): String = {
digest.digest(toHash.getBytes).map("%02x".format(_)).mkString
}
}
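// Illustrative sketch of the key derivation used above: the Fever API key is the MD5 hex digest
// of "email:password". The object name and the sample credentials are made up for demonstration.
object FeverApiKeyExample {
  def main(args: Array[String]): Unit = {
    val email = "user@example.com"
    val password = "secret"
    val key = MessageDigest.getInstance("MD5")
      .digest(s"$email:$password".getBytes)
      .map("%02x".format(_))
      .mkString
    println(s"fever_api_key for $email would be $key")
  }
}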
|
plamola/FeedR-V1
|
app/models/Account.scala
|
Scala
|
gpl-2.0
| 3,755
|
/**
* CPNetSolver
* Copyright (C) 2013 Francesco Burato, Simone Carriero
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see [http://www.gnu.org/licenses/].
*
* File: GraphPanel.scala
 * Package: gui
 * Author: Francesco Burato
 * Created: 03 Jul 2013
*/
package gui
import java.awt.Color;
import java.awt.event.MouseEvent;
import java.util.Iterator;
import org.apache.commons.collections15.Transformer
import edu.uci.ics.jung.algorithms.layout._
import edu.uci.ics.jung.graph.{ Graph, DirectedSparseGraph }
import edu.uci.ics.jung.visualization.{ GraphZoomScrollPane, VisualizationViewer }
import edu.uci.ics.jung.visualization.control.{ DefaultModalGraphMouse, GraphMouseListener, ModalGraphMouse }
import edu.uci.ics.jung.visualization.decorators.{ EdgeShape, PickableEdgePaintTransformer, PickableVertexPaintTransformer, ToStringLabeller }
import edu.uci.ics.jung.visualization.picking.PickedState
import scala.collection.JavaConversions.asScalaIterator
import scala.swing.Panel
/**
 * Panel used to draw graphs
* @author Francesco Burato
*
*/
class GraphPanel {
/**
   * The constructor fully initializes the panel's components
*/
private val graphMouse = new DefaultModalGraphMouse[Int, Int]
private val graph = new DirectedSparseGraph[String, String]
private val layout = new DAGLayout[String, String](graph)
private val viewer = new VisualizationViewer[String, String](layout)
initialize
val panel = new Panel{ peer add new GraphZoomScrollPane(viewer)}
  // panel initialization
private def initialize = {
graphMouse.setMode(ModalGraphMouse.Mode.PICKING)
viewer.setGraphMouse(graphMouse)
viewer.setBackground(Color.WHITE)
    // set the edge and vertex labelers
viewer.getRenderContext().setEdgeLabelTransformer(new Transformer[String, String] { def transform(s: String) = "" })
viewer.getRenderContext().setVertexLabelTransformer(new Transformer[String, String] { def transform(s: String) = s })
    // set the look and feel of vertices and edges
viewer.getRenderContext().setEdgeDrawPaintTransformer(new PickableEdgePaintTransformer[String](viewer.getPickedEdgeState(), Color.black, Color.cyan))
viewer.getRenderContext().setEdgeShapeTransformer(new EdgeShape.Line[String, String])
viewer.getRenderContext().setVertexFillPaintTransformer(new PickableVertexPaintTransformer[String](viewer.getPickedVertexState(), Color.red, Color.yellow));
viewer.setVertexToolTipTransformer(new ToStringLabeller[String]());
    // TODO: possibly not needed
//viewer.setGraphMouse(graphMouse);
    // install the mouse listener
viewer.addGraphMouseListener(new GraphMouseListener[String] {
override def graphClicked(a1: String, a2: MouseEvent) {}
override def graphPressed(a1: String, a2: MouseEvent) {}
override def graphReleased(a1: String, a2: MouseEvent) {}
})
}
  // public interface methods
def cleanAll() {
clearGraph
layout.reset
viewer.revalidate
viewer.repaint()
}
def paintGraph(g: Graph[String, String]) {
clearGraph
    // collect all the vertices
g.getVertices.iterator foreach {vertex => graph.addVertex(vertex) }
    // add all the edges
for (vertex <- g.getVertices.iterator) {
g.getOutEdges(vertex).iterator foreach { edge => graph.addEdge(edge, g.getEndpoints(edge))}
}
// Refresh
layout.reset();
viewer.revalidate();
viewer.repaint();
}
private def clearGraph() =
graph.getVertices.toArray foreach { vertex => graph.removeVertex(vertex.asInstanceOf[String]) }
}
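// Illustrative usage sketch: builds a tiny directed graph and renders it through GraphPanel in a
// scala.swing frame. The vertex names, edge name and frame title are arbitrary placeholders.
object GraphPanelDemo extends scala.swing.SimpleSwingApplication {
  def top = new scala.swing.MainFrame {
    title = "GraphPanel demo"
    val g = new DirectedSparseGraph[String, String]
    g.addVertex("A")
    g.addVertex("B")
    g.addEdge("A->B", "A", "B")
    val graphPanel = new GraphPanel
    graphPanel.paintGraph(g)
    contents = graphPanel.panel
  }
}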
|
fburato/CPNetSolver
|
CPNetSolver/src/gui/GraphPanel.scala
|
Scala
|
gpl-3.0
| 4,169
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.models.lenet
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dataset.DataSet
import com.intel.analytics.bigdl.dataset.image.{BytesToGreyImg, GreyImgNormalizer, GreyImgToBatch}
import com.intel.analytics.bigdl.nn.{ClassNLLCriterion, Module}
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.optim._
import com.intel.analytics.bigdl.utils.{Engine, LoggerFilter, T, Table}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
object Train {
LoggerFilter.redirectSparkInfoLogs()
import Utils._
def main(args: Array[String]): Unit = {
trainParser.parse(args, new TrainParams()).map(param => {
val conf = Engine.createSparkConf()
.setAppName("Train Lenet on MNIST")
.set("spark.task.maxFailures", "1")
val sc = new SparkContext(conf)
Engine.init
val trainData = param.folder + "/train-images-idx3-ubyte"
val trainLabel = param.folder + "/train-labels-idx1-ubyte"
val validationData = param.folder + "/t10k-images-idx3-ubyte"
val validationLabel = param.folder + "/t10k-labels-idx1-ubyte"
val model = if (param.modelSnapshot.isDefined) {
Module.load[Float](param.modelSnapshot.get)
} else {
if (param.graphModel) LeNet5.graph(classNum = 10) else LeNet5(classNum = 10)
}
val optimMethod = if (param.stateSnapshot.isDefined) {
OptimMethod.load[Float](param.stateSnapshot.get)
} else {
new SGD[Float](learningRate = param.learningRate,
learningRateDecay = param.learningRateDecay)
}
val trainSet = DataSet.array(load(trainData, trainLabel), sc) ->
BytesToGreyImg(28, 28) -> GreyImgNormalizer(trainMean, trainStd) -> GreyImgToBatch(
param.batchSize)
val optimizer = Optimizer(
model = model,
dataset = trainSet,
criterion = ClassNLLCriterion[Float]())
if (param.checkpoint.isDefined) {
optimizer.setCheckpoint(param.checkpoint.get, Trigger.everyEpoch)
}
if(param.overWriteCheckpoint) {
optimizer.overWriteCheckpoint()
}
val validationSet = DataSet.array(load(validationData, validationLabel), sc) ->
BytesToGreyImg(28, 28) -> GreyImgNormalizer(testMean, testStd) -> GreyImgToBatch(
param.batchSize)
optimizer
.setValidation(
trigger = Trigger.everyEpoch,
dataset = validationSet,
vMethods = Array(new Top1Accuracy, new Top5Accuracy[Float], new Loss[Float]))
.setOptimMethod(optimMethod)
.setEndWhen(Trigger.maxEpoch(param.maxEpoch))
.optimize()
sc.stop()
})
}
}
|
jenniew/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/models/lenet/Train.scala
|
Scala
|
apache-2.0
| 3,322
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.religions.UndyingCourt
import io.truthencode.ddo.support.requisite.{FeatRequisiteImpl, RequiresAllOfFeat}
/**
* Created by adarr on 4/7/2017.
*/
trait ChildOfTheUndyingCourt
extends FeatRequisiteImpl with EberronReligionNonWarforged with ChildLevelBase
with RequiresAllOfFeat with UndyingCourt with TheUndyingCourtFeatBase { self: DeityFeat =>
override def allOfFeats: Seq[Feat] =
List(DeityFeat.FollowerOfTheUndyingCourt)
}
|
adarro/ddo-calc
|
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/ChildOfTheUndyingCourt.scala
|
Scala
|
apache-2.0
| 1,187
|
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package swave.core.impl.util
import scala.annotation.tailrec
import org.jctools.util.UnsafeAccess.UNSAFE
import swave.core.impl.util.UnsafeArrayAccess.calcRefArrayElementOffset
import swave.core.macros._
import swave.core.util._
/**
* A mutable RingBuffer with a fixed capacity.
* The `cap` must be a positive power of two.
*/
private[swave] final class RingBuffer[T](cap: Int) {
requireArg(isPowerOf2(cap) && cap > 0) // automatically implies cap <= 0x40000000
private[this] val array = new Array[AnyRef](cap)
private[this] def mask = array.length - 1 // bit mask for converting a cursor into an array index
/*
* two counters counting the number of elements ever written and read; wrap-around is
* handled by always looking at differences or masked values
*/
private[this] var writeIx = 0
private[this] var readIx = 0
/**
* The maximum number of elements the buffer can hold.
*/
def capacity: Int = array.length
/**
* The number of elements currently in the buffer.
*/
def count: Int = writeIx - readIx
/**
* The number of elements the buffer can still take in.
*/
def available: Int = capacity - count
/**
* True if the next write will succeed.
*/
def canWrite: Boolean = capacity > count
/**
* True if no elements are currently in the buffer.
*/
def isEmpty: Boolean = writeIx == readIx
/**
* True if at least one elements is currently in the buffer.
*/
def nonEmpty: Boolean = writeIx != readIx
/**
* Tries to write the given value into the buffer.
* Returns `true` if the write was successful and false if the buffer is full.
*/
def write(value: T): Boolean =
canWrite && {
unsafeWrite(value)
true
}
/**
* Writes the given value into the buffer without any buffer overflow protection.
*/
def unsafeWrite(value: T): Unit = {
val w = writeIx
UNSAFE.putObject(array, calcRefArrayElementOffset((w & mask).toLong), value)
writeIx = w + 1
}
/**
* Reads the next value from the buffer.
* Throws a NoSuchElementException if the buffer is empty.
*/
def read(): T =
if (nonEmpty) unsafeRead()
else throw new NoSuchElementException
/**
* Reads the next value from the buffer without any buffer underrun protection.
*/
def unsafeRead(): T = {
val r = readIx
readIx = r + 1
val ix = calcRefArrayElementOffset((r & mask).toLong)
val res = UNSAFE.getObject(array, ix).asInstanceOf[T]
UNSAFE.putObject(array, ix, null)
res
}
/**
* Drops the element that would otherwise be read next.
* CAUTION: Must not be used if buffer is empty! This precondition is not verified!
*/
def unsafeDropHead(): Unit = {
val r = readIx
UNSAFE.putObject(array, calcRefArrayElementOffset((r & mask).toLong), null)
readIx = r + 1
}
/**
* Drops the element that was written last.
* CAUTION: Must not be used if buffer is empty! This precondition is not verified!
*/
def unsafeDropTail(): Unit = {
val w = writeIx - 1
UNSAFE.putObject(array, calcRefArrayElementOffset((w & mask).toLong), null)
writeIx = w
}
/**
* Resets the buffer to "is empty" status and nulls out all references.
*/
def clear(): Unit = {
readIx = 0
writeIx = 0
java.util.Arrays.fill(array, null)
}
/**
* Adds a traversable of elements to the buffer
*/
def ++=(elems: Traversable[T]): Boolean = elems.forall(write)
/**
* Iterates (in FIFO order) over all elements currently in the buffer
* changing neither the read- nor the write cursor.
*/
def foreach[U](f: T => U): Unit = {
@tailrec def rec(i: Int): Unit =
if (i < writeIx) {
val ix = calcRefArrayElementOffset((i & mask).toLong)
f(UNSAFE.getObject(array, ix).asInstanceOf[T])
rec(i + 1)
}
rec(readIx)
}
override def toString: String = s"RingBuffer(len=${array.length}, size=$count, writeIx=$writeIx, readIx=$readIx)"
}
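// Illustrative usage sketch exercising the basic write/read cycle. The capacity and values are
// arbitrary; the capacity must be a positive power of two, as required by the constructor.
private[swave] object RingBufferDemo {
  def main(args: Array[String]): Unit = {
    val buf = new RingBuffer[Int](4)
    (1 to 4).foreach(buf.write)  // fills the buffer completely
    println(buf.write(5))        // false: the buffer is full
    println(buf.read())          // 1, elements come out in FIFO order
    println(buf.count)           // 3 elements remain
    buf.clear()
    println(buf.isEmpty)         // true
  }
}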
|
sirthias/swave
|
core/src/main/scala/swave/core/impl/util/RingBuffer.scala
|
Scala
|
mpl-2.0
| 4,225
|
package com.atanana.checkers
import java.time.{LocalDateTime, ZoneId}
import com.atanana.data.{Requisition, RequisitionData, RequisitionsCheckResult}
class RequisitionsChecker {
def check(oldRequisitions: Set[Requisition], newRequisitions: Set[RequisitionData]): RequisitionsCheckResult = {
RequisitionsCheckResult(
getNewRequisitions(oldRequisitions, newRequisitions),
getCancelledRequisitions(getNotFinishedRequisitions(oldRequisitions), newRequisitions)
)
}
private def getNotFinishedRequisitions(oldRequisitions: Set[Requisition]) = {
val finishTime = LocalDateTime.now(ZoneId.of("Europe/Moscow")).plusHours(1)
oldRequisitions.filter(requisition => requisition.dateTime.isAfter(finishTime))
}
//noinspection FilterOtherContains
def getNewRequisitions(oldRequisitions: Set[Requisition], newRequisitions: Set[RequisitionData]): Set[RequisitionData] = {
newRequisitions.filter(newRequisition => !oldRequisitions.contains(newRequisition))
}
def getCancelledRequisitions(oldRequisitions: Set[Requisition], newRequisitions: Set[Requisition]): Set[Requisition] = {
oldRequisitions -- newRequisitions
}
}
object RequisitionsChecker {
def apply(): RequisitionsChecker = new RequisitionsChecker()
}
|
atanana/rating-bot
|
src/main/scala/com/atanana/checkers/RequisitionsChecker.scala
|
Scala
|
mit
| 1,255
|
//
// Scaled - a scalable editor extensible via JVM languages
// http://github.com/scaled/scaled/blob/master/LICENSE
package scaled.code
import org.junit.Assert._
import org.junit._
import scaled._
class CommenterTest {
import CodeConfig._
def toLine (syntax :Syntax)(tup :(String,Int)) = {
val (tx, ci) = tup
Line.builder(tx).withSyntax(syntax, ci, tx.length).build()
}
val lineText = Seq(("//", 0),
("// this is a line comment", 0),
("// with a couple of lines and a prefix", 0),
("foo(); // and this one has code in front of it", 7))
@Test def testCommentStart () {
val buf = Buffer("lines", lineText map toLine(Syntax.LineComment))
val cm = new Commenter() {
override def linePrefix = "//"
override def docPrefix = "*"
}
assertEquals(2, cm.commentStart(buf.line(0)))
assertEquals(3, cm.commentStart(buf.line(1)))
assertEquals(3, cm.commentStart(buf.line(2)))
assertEquals(10, cm.commentStart(buf.line(3)))
}
val fillText = Seq(("// this is a line comment", 0),
("// with a few lines that should be long enough", 0),
("// to necessitate multiple lines for a refill", 0))
@Test def testRefill () {
val buf = Buffer("lines", fillText map toLine(Syntax.LineComment))
val cm = new Commenter() {
override def linePrefix = "//"
override def docPrefix = "*"
}
val filled = cm.refilled(buf, 80, Loc(0, 0), Loc(2, buf.line(2).length))
assertEquals(2, filled.length)
assertEquals(72, filled(0).length)
assertEquals(42, filled(1).length)
}
val extraSpaces = Seq(("// this is a line comment", 0),
("// with a few lines that should be long enough", 0),
("// to necessitate multiple lines for a refill", 0))
@Test def testRefillMatchesWhitespace () {
val buf = Buffer("lines", extraSpaces map toLine(Syntax.LineComment))
val cm = new Commenter() {
override def linePrefix = "//"
override def docPrefix = "*"
}
val filled = cm.refilled(buf, 80, Loc(0, 0), Loc(2, buf.line(2).length))
assertEquals(2, filled.length)
assertEquals(73, filled(0).length)
assertTrue(filled(0).toString startsWith "// ")
assertEquals(43, filled(1).length)
assertTrue(filled(1).toString startsWith "// ")
}
}
|
swhgoon/scaled
|
api/src/test/scala/scaled/code/CommenterTest.scala
|
Scala
|
bsd-3-clause
| 2,401
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.tradingpremises
import cats.data.Validated.{Invalid, Valid}
import jto.validation.forms.UrlFormEncoded
import jto.validation.{Rule, ValidationError, _}
import models.DateOfChange
import models.businessmatching.{ChequeCashingNotScrapMetal => BMChequeCashingNotScrapMetal, ChequeCashingScrapMetal => BMChequeCashingScrapMetal, CurrencyExchange => BMCurrencyExchange, ForeignExchange => BMForeignExchange, TransmittingMoney => BMTransmittingMoney}
import play.api.i18n.Messages
import play.api.libs.json.{Reads, Writes, _}
import utils.TraversableValidators
sealed trait TradingPremisesMsbService{
val message = "msb.services.list.lbl."
def getMessage(implicit messages: Messages): String =
this match {
case TransmittingMoney => messages(s"${message}01")
case CurrencyExchange => messages(s"${message}02")
case ChequeCashingNotScrapMetal => messages(s"${message}03")
case ChequeCashingScrapMetal => messages(s"${message}04")
case ForeignExchange => messages(s"${message}05")
}
}
case object TransmittingMoney extends TradingPremisesMsbService
case object CurrencyExchange extends TradingPremisesMsbService
case object ChequeCashingNotScrapMetal extends TradingPremisesMsbService
case object ChequeCashingScrapMetal extends TradingPremisesMsbService
case object ForeignExchange extends TradingPremisesMsbService
case class TradingPremisesMsbServices(services : Set[TradingPremisesMsbService])
object TradingPremisesMsbService {
implicit val serviceR = Rule[String, TradingPremisesMsbService] {
case "01" => Valid(TransmittingMoney)
case "02" => Valid(CurrencyExchange)
case "03" => Valid(ChequeCashingNotScrapMetal)
case "04" => Valid(ChequeCashingScrapMetal)
case "05" => Valid(ForeignExchange)
case _ => Invalid(Seq(Path -> Seq(ValidationError("error.invalid"))))
}
implicit val serviceW = Write[TradingPremisesMsbService, String] {
case TransmittingMoney => "01"
case CurrencyExchange => "02"
case ChequeCashingNotScrapMetal => "03"
case ChequeCashingScrapMetal => "04"
case ForeignExchange => "05"
}
implicit val jsonR: Rule[JsValue, TradingPremisesMsbService] = {
import jto.validation.playjson.Rules._
stringR andThen serviceR
}
implicit val jsonW: Write[TradingPremisesMsbService, JsValue] = {
import jto.validation.playjson.Writes._
serviceW andThen string
}
def applyWithoutDateOfChange(services: Set[TradingPremisesMsbService]) = TradingPremisesMsbServices(services)
def unapplyWithoutDateOfChange(s: TradingPremisesMsbServices) = Some(s.services)
}
sealed trait MsbServices0 {
private implicit def rule[A]
(implicit
p: Path => RuleLike[A, Set[TradingPremisesMsbService]]
): Rule[A, TradingPremisesMsbServices] =
From[A] { __ =>
import utils.MappingUtils.Implicits.RichRule
val required =
TraversableValidators.minLengthR[Set[TradingPremisesMsbService]](1) withMessage "error.required.tp.services"
      (__ \ "msbServices").read(required) map TradingPremisesMsbService.applyWithoutDateOfChange
}
private implicit def write[A]
(implicit
p: Path => WriteLike[Set[TradingPremisesMsbService], A]
): Write[TradingPremisesMsbServices, A] =
To[A] { __ =>
import play.api.libs.functional.syntax.unlift
      (__ \ "msbServices").write[Set[TradingPremisesMsbService]] contramap unlift(TradingPremisesMsbService.unapplyWithoutDateOfChange)
}
val formR: Rule[UrlFormEncoded, TradingPremisesMsbServices] = {
import jto.validation.forms.Rules._
implicitly[Rule[UrlFormEncoded, TradingPremisesMsbServices]]
}
val formW: Write[TradingPremisesMsbServices, UrlFormEncoded] = {
import jto.validation.forms.Writes._
import utils.MappingUtils.writeM
implicitly[Write[TradingPremisesMsbServices, UrlFormEncoded]]
}
}
object TradingPremisesMsbServices {
private object Cache extends MsbServices0
def addDateOfChange(doc: Option[DateOfChange], obj: JsObject) =
doc.fold(obj) { dateOfChange => obj + ("dateOfChange" -> DateOfChange.writes.writes(dateOfChange))}
implicit val jsonWrites = new Writes[TradingPremisesMsbServices] {
def writes(s: TradingPremisesMsbServices): JsValue = {
val values = s.services map { x => JsString(TradingPremisesMsbService.serviceW.writes(x)) }
Json.obj(
"msbServices" -> values
)
}
}
implicit val msbServiceReader: Reads[Set[TradingPremisesMsbService]] = {
__.read[JsArray].map(a => a.value.map(TradingPremisesMsbService.jsonR.validate(_).toOption.get).toSet)
}
implicit val jReads: Reads[TradingPremisesMsbServices] = {
    (__ \ "msbServices").read[Set[TradingPremisesMsbService]].map(TradingPremisesMsbServices.apply _)
}
implicit val formR: Rule[UrlFormEncoded, TradingPremisesMsbServices] = Cache.formR
implicit val formW: Write[TradingPremisesMsbServices, UrlFormEncoded] = Cache.formW
implicit def convertServices(msbService: Set[models.businessmatching.BusinessMatchingMsbService]): Set[TradingPremisesMsbService] =
msbService map {s => convertSingleService(s)}
implicit def convertSingleService(msbService: models.businessmatching.BusinessMatchingMsbService): models.tradingpremises.TradingPremisesMsbService = {
msbService match {
case BMTransmittingMoney => TransmittingMoney
case BMCurrencyExchange => CurrencyExchange
case BMChequeCashingNotScrapMetal => ChequeCashingNotScrapMetal
case BMChequeCashingScrapMetal => ChequeCashingScrapMetal
case BMForeignExchange => ForeignExchange
}
}
}
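// Illustrative sketch: serialising a TradingPremisesMsbServices value with the implicit writes
// defined above and reading it back with jReads. The chosen services and the object name are
// arbitrary; the ordering of the emitted JSON array follows the set and is not guaranteed.
object TradingPremisesMsbServicesJsonExample {
  def main(args: Array[String]): Unit = {
    val services = TradingPremisesMsbServices(Set(TransmittingMoney, CurrencyExchange))
    val json = Json.toJson(services) // e.g. {"msbServices":["01","02"]}
    println(json)
    println(json.validate[TradingPremisesMsbServices]) // JsSuccess wrapping the original value
  }
}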
|
hmrc/amls-frontend
|
app/models/tradingpremises/TradingPremisesMsbServices.scala
|
Scala
|
apache-2.0
| 6,190
|
/*
* FirmsTest.scala
* Firms examples tests.
*
* Created By: Avi Pfeffer (apfeffer@cra.com)
* Creation Date: Jan 1, 2009
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.test.example
import org.scalatest.Matchers
import org.scalatest.WordSpec
import com.cra.figaro.algorithm._
import com.cra.figaro.algorithm.sampling._
import com.cra.figaro.algorithm.factored._
import com.cra.figaro.language._
import com.cra.figaro.language.Universe._
import com.cra.figaro.library.compound._
import com.cra.figaro.library.atomic._
import com.cra.figaro.test._
import com.cra.figaro.test.tags.Example
class FirmsTest extends WordSpec with Matchers {
"The firms example" should {
"produce the correct answer under importance sampling" taggedAs (Example) in {
test((e: Element[Boolean]) => Importance(20000, e))
}
"produce the correct answer under Metropolis-Hastings" taggedAs (Example) in {
test((e: Element[Boolean]) => MetropolisHastings(3000000, ProposalScheme.default, 30000, e))
}
"produce the correct answer under variable elimination" taggedAs (Example) in {
test((e: Element[Boolean]) => VariableElimination(e))
}
}
private class Firm(name: String) {
val efficient = Flip(0.3)(name + "efficient", universe)
val bidWhenEfficient = continuous.Uniform(5.0, 15.0)(name + "bidWhenEfficient", universe)
val bidWhenInefficient = continuous.Uniform(10.0, 20.0)(name + "bidWhenInefficient", universe)
val bid = If(efficient, bidWhenEfficient, bidWhenInefficient)("bid", universe)
}
def test(algorithmCreator: Element[Boolean] => ProbQueryAlgorithm) {
Universe.createNew()
val firm1 = new Firm("Firm1")
val firm2 = new Firm("Firm2")
val firms = Array(firm1, firm2)
val winner = discrete.Uniform(firms: _*)("winner", universe)
val winningBid = CachingChain(winner, (f: Firm) => f.bid)("winningBid", universe)
winningBid.setConstraint((d: Double) => 20 - d)
val winningEfficiency = CachingChain(winner, (f: Firm) => f.efficient)("winningEfficiency", universe)
// Expected constraint for efficient firm = 0.1 \int_5^15 (20 - x) dx
// = 0.1 [20x - 0.5 x^2]_5^15 = 0.1 (300 - 112.5 - 100 + 12.5) = 10
// Expected constraint for inefficient firm = 0.1 \int_10^20 (20 - x) dx
// = 0.1 [20x - 0.5 x^2]_10^20 = 0.1 (400 - 200 - 200 + 50) = 5
// Code: pyz = probability 0's efficiency is y, 1's efficiency is z
// Assume wlog 0 is the winner
val pff = 0.7 * 0.7 * 5
val pft = 0.7 * 0.3 * 5
val ptf = 0.3 * 0.7 * 10
val ptt = 0.3 * 0.3 * 10
val answer = (ptf + ptt) / (pff + pft + ptf + ptt)
    val alg = algorithmCreator(winningEfficiency) // use the algorithm supplied by each test case
val bid1WhenEfficient: Element[Double] = universe.get("Firm1bidWhenEfficient")
alg.start()
alg.probability(winningEfficiency, true) should be(answer +- 0.01)
alg.kill
}
}
|
agarbuno/figaro
|
Figaro/src/test/scala/com/cra/figaro/test/example/FirmsTest.scala
|
Scala
|
bsd-3-clause
| 3,100
|
package util.testkit
import engine.api.MapReduce
/**
* Used for testing the MapReduce Interface Unit
*/
class WordCountMapReduceTest extends MapReduce[None.type, String, String, Int, String, Int, (String, Int)] {
override def map(k: None.type, v: String): Iterable[(String, Int)] = {
v.split(" ").map((_, 1))
}
override def reduce(key: String, list: Iterable[Int]): (String, Int) = {
(key, list.fold(0)(_ + _))
}
}
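// Illustrative sketch: calling the map and reduce functions directly, outside any MapReduce
// engine, to show the word-count shapes they produce. The object name and input are made up.
object WordCountMapReduceDemo {
  def main(args: Array[String]): Unit = {
    val wc = new WordCountMapReduceTest
    val mapped = wc.map(None, "to be or not to be")
    println(mapped)                     // pairs such as ("to",1), ("be",1), ...
    println(wc.reduce("to", Seq(1, 1))) // ("to", 2)
  }
}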
|
filipegmiranda/typed-mapreduce
|
src/test/scala/util/testkit/WordCountMapReduceTest.scala
|
Scala
|
apache-2.0
| 438
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.common
/**
* Thrown when a request is made for partition on a broker that is NOT a leader for that partition
*/
class NotLeaderForPartitionException(message: String) extends RuntimeException(message) {
def this() = this(null)
}
|
unix1986/universe
|
tool/kafka-0.8.1.1-src/core/src/main/scala/kafka/common/NotLeaderForPartitionException.scala
|
Scala
|
bsd-2-clause
| 1,049
|
package net.sansa_stack.owl.flink.dataset
import de.javakaffee.kryoserializers.UnmodifiableCollectionsSerializer
import org.apache.flink.api.scala.ExecutionEnvironment
import org.scalatest.FunSuite
import org.semanticweb.owlapi.model.{OWLAsymmetricObjectPropertyAxiom, OWLDataPropertyAssertionAxiom, OWLDisjointObjectPropertiesAxiom, OWLEquivalentObjectPropertiesAxiom, OWLFunctionalObjectPropertyAxiom, OWLInverseFunctionalObjectPropertyAxiom, OWLInverseObjectPropertiesAxiom, OWLIrreflexiveObjectPropertyAxiom, OWLNegativeDataPropertyAssertionAxiom, OWLNegativeObjectPropertyAssertionAxiom, OWLObjectPropertyAssertionAxiom, OWLObjectPropertyDomainAxiom, OWLObjectPropertyRangeAxiom, OWLReflexiveObjectPropertyAxiom, OWLSubObjectPropertyOfAxiom, OWLSubPropertyChainOfAxiom, OWLSymmetricObjectPropertyAxiom, OWLTransitiveObjectPropertyAxiom, SWRLRule, _}
class FunctionalSyntaxOWLAxiomsDataSetBuilderTest extends FunSuite {
import net.sansa_stack.owl.flink.owl._
val env = ExecutionEnvironment.getExecutionEnvironment
// scalastyle:off classforname
env.getConfig.addDefaultKryoSerializer(
Class.forName("java.util.Collections$UnmodifiableCollection"),
classOf[UnmodifiableCollectionsSerializer])
// scalastyle:on classforname
var _dataSet: OWLAxiomsDataSet = null
val syntax = Syntax.FUNCTIONAL
def dataSet: OWLAxiomsDataSet = {
if (_dataSet == null) {
_dataSet = env.owl(syntax)(this.getClass.getClassLoader.getResource("ont_functional.owl").getPath)
}
_dataSet
}
test("The number of axioms should match") {
val expectedNumberOfAxioms = 67 // = 71 - commented out Import(...) - 3 x null
assert(dataSet.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLAnnotationAssertionAxiom objects should be correct") {
// --> AnnotationAssertion(bar:label bar:Cls1 "Class 1")
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLAnnotationAssertionAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLAnnotationPropertyDomainAxiom objects should be correct") {
// --> AnnotationPropertyDomain(bar:annProp1 bar:Cls1)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLAnnotationPropertyDomainAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLAnnotationPropertyRangeAxiom objects should be correct") {
// --> AnnotationPropertyRange(bar:annProp1 bar:Cls2)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLAnnotationPropertyRangeAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLSubAnnotationPropertyOfAxiom objects should be correct") {
// --> SubAnnotationPropertyOf(bar:annProp1 bar:annProp2)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLSubAnnotationPropertyOfAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLDeclarationAxiom objects should be correct") {
// --> Declaration(Annotation(foo:ann "some annotation") Class(bar:Cls1))
// --> Declaration(Class(bar:Cls2))
// --> Declaration(Datatype(bar:dtype1))
// --> Declaration(Datatype(bar:dtype2))
// --> Declaration(ObjectProperty(bar:objProp1))
// --> Declaration(ObjectProperty(bar:objProp2))
// --> Declaration(DataProperty(bar:dataProp1))
// --> Declaration(DataProperty(bar:dataProp2))
// --> Declaration(AnnotationProperty(bar:annProp1))
// --> Declaration(AnnotationProperty(bar:annProp2))
// --> Declaration(NamedIndividual(foo:indivA))
// --> Declaration(NamedIndividual(foo:indivB))
val expectedNumberOfAxioms = 12
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLDeclarationAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLDisjointUnionAxiom objects should be correct") {
// --> DisjointUnion(bar:Cl1OrNegate bar:Cls bar:ComplementCls)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLDisjointUnionAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLDisjointClassesAxiom objects should be correct") {
// --> DisjointClasses(bar:DataMin3Prop1 bar:DataMax2Prop1)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLDisjointClassesAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLEquivalentClassesAxiom objects should be correct") {
// --> EquivalentClasses(bar:IntersectionCls ObjectIntersectionOf(bar:Cls1 bar:Cls2))
// --> EquivalentClasses(bar:UnionCls ObjectUnionOf(bar:Cls1 bar:Cls2))
// --> EquivalentClasses(bar:ComplementCls ObjectComplementOf(bar:Cls1))
// --> EquivalentClasses(bar:AllIndividualsCls ObjectOneOf(foo:indivA foo:indivB))
// --> EquivalentClasses(bar:SomeProp1Cls1 ObjectSomeValuesFrom(bar:objProp1 bar:Cls1))
// --> EquivalentClasses(bar:AllProp1Cls1 ObjectAllValuesFrom(bar:objProp1 bar:Cls1))
// --> EquivalentClasses(bar:HasValProp1IndivB ObjectHasValue(bar:objProp1 foo:indivB))
// --> EquivalentClasses(bar:HasSelfProp1 ObjectHasSelf(bar:objProp1))
// --> EquivalentClasses(bar:Min2Prop1Cls1 ObjectMinCardinality(2 bar:objProp1 bar:Cls1))
// --> EquivalentClasses(bar:Max3Prop1Cls1 ObjectMaxCardinality(3 bar:objProp1 bar:Cls1))
// --> EquivalentClasses(bar:Exact5Prop1Cls1 ObjectExactCardinality(5 bar:objProp1 bar:Cls1))
// --> EquivalentClasses(bar:DataSomeIntLT20 DataSomeValuesFrom(bar:dataProp2 DatatypeRestriction(xsd:integer xsd:maxExclusive "20"^^xsd:integer)))
// --> EquivalentClasses(bar:DataAllIntGT10 DataAllValuesFrom(bar:dataProp2 DatatypeRestriction(xsd:integer xsd:minInclusive "10"^^xsd:integer)))
// --> EquivalentClasses(bar:DataHasVal5 DataHasValue(bar:dataProp2 "5"^^xsd:integer))
// --> EquivalentClasses(bar:DataMin3Prop1 DataMinCardinality(3 bar:dataProp1))
// --> EquivalentClasses(bar:DataMax2Prop1 DataMaxCardinality(2 bar:dataProp1))
// --> EquivalentClasses(bar:DataExact5Prop1 DataExactCardinality(5 bar:dataProp1))
val expectedNumberOfAxioms = 17
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLEquivalentClassesAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLSubClassOfAxiom objects should be correct") {
// --> SubClassOf(bar:Cls1 bar:UnionCls)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLSubClassOfAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLFunctionalDataPropertyAxiom objects should be correct") {
// --> FunctionalDataProperty(bar:dataProp1)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLFunctionalDataPropertyAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLDataPropertyDomainAxiom objects should be correct") {
// --> DataPropertyDomain(bar:dataProp1 bar:Cls1)
// --> DataPropertyDomain(bar:dataProp2 bar:Cls1)
val expectedNumberOfAxioms = 2
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLDataPropertyDomainAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLDataPropertyRangeAxiom objects should be correct") {
// --> DataPropertyRange(bar:dataProp1 xsd:string)
// --> DataPropertyRange(bar:dataProp2 xsd:int)
val expectedNumberOfAxioms = 2
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLDataPropertyRangeAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLDisjointDataPropertiesAxiom objects should be correct") {
// --> DisjointDataProperties(bar:dataProp1 bar:dataProp2)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLDisjointDataPropertiesAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLEquivalentDataPropertiesAxiom objects should be correct") {
// --> EquivalentDataProperties(bar:sameAsDataProp1 bar:dataProp1)
val expectedNumberOfAxioms = 1
    val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLEquivalentDataPropertiesAxiom])
    assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLSubDataPropertyOfAxiom objects should be correct") {
// --> SubDataPropertyOf(bar:subDataProp1 bar:dataProp1)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLSubDataPropertyOfAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLDatatypeDefinitionAxiom objects should be correct") {
val expectedNumberOfAxioms = 0
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLDatatypeDefinitionAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLHasKeyAxiom objects should be correct") {
// --> HasKey(bar:Cls1 () (bar:dataProp1))
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLHasKeyAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLClassAssertionAxiom objects should be correct") {
// --> ClassAssertion(bar:Cls1 foo:indivA)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLClassAssertionAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLDifferentIndividualsAxiom objects should be correct") {
// --> DifferentIndividuals(foo:indivA foo:indivB)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLDifferentIndividualsAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLSameIndividualAxiom objects should be correct") {
// --> SameIndividual(foo:sameAsIndivA foo:indivA)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLSameIndividualAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLNegativeDataPropertyAssertionAxiom objects should be correct") {
// --> NegativeDataPropertyAssertion(bar:dataProp2 foo:indivA "23"^^xsd:integer)
val expectedNumberOfAxioms = 1
    val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLNegativeDataPropertyAssertionAxiom])
    assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLNegativeObjectPropertyAssertionAxiom objects should be correct") {
// --> NegativeObjectPropertyAssertion(bar:Prop2 foo:indivB foo:indivA)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLNegativeObjectPropertyAssertionAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLObjectPropertyAssertionAxiom objects should be correct") {
// --> ObjectPropertyAssertion(bar:objProp1 foo:indivA foo:indivB)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLObjectPropertyAssertionAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLDisjointObjectPropertiesAxiom objects should be correct") {
// --> DisjointObjectProperties(bar:objProp1 bar:objProp2)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLDisjointObjectPropertiesAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLEquivalentObjectPropertiesAxiom objects should be correct") {
// --> EquivalentObjectProperties(bar:invObjProp1 ObjectInverseOf(bar:objProp1))
// --> EquivalentObjectProperties(bar:sameAsObjProp1 bar:objProp1)
val expectedNumberOfAxioms = 2
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLEquivalentObjectPropertiesAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLInverseObjectPropertiesAxiom objects should be correct") {
// --> InverseObjectProperties(bar:invObjProp1 bar:objProp1)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLInverseObjectPropertiesAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLAsymmetricObjectPropertyAxiom objects should be correct") {
// --> AsymmetricObjectProperty(bar:asymmObjProp)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLAsymmetricObjectPropertyAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLFunctionalObjectPropertyAxiom objects should be correct") {
// --> FunctionalObjectProperty(bar:objProp2)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLFunctionalObjectPropertyAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLInverseFunctionalObjectPropertyAxiom objects should be correct") {
// --> InverseFunctionalObjectProperty(bar:invObjProp1)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLInverseFunctionalObjectPropertyAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLIrreflexiveObjectPropertyAxiom objects should be correct") {
// --> IrreflexiveObjectProperty(bar:objProp2)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLIrreflexiveObjectPropertyAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLReflexiveObjectPropertyAxiom objects should be correct") {
// --> ReflexiveObjectProperty(bar:objProp1)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLReflexiveObjectPropertyAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLSymmetricObjectPropertyAxiom objects should be correct") {
// --> SymmetricObjectProperty(bar:objProp2)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLSymmetricObjectPropertyAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLTransitiveObjectPropertyAxiom objects should be correct") {
// --> TransitiveObjectProperty(bar:objProp1)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLTransitiveObjectPropertyAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLObjectPropertyDomainAxiom objects should be correct") {
// --> ObjectPropertyDomain(bar:objProp1 bar:Cls1)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLObjectPropertyDomainAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLObjectPropertyRangeAxiom objects should be correct") {
// --> ObjectPropertyRange(bar:objProp1 bar:AllIndividualsCls)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLObjectPropertyRangeAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLSubObjectPropertyOfAxiom objects should be correct") {
// --> SubObjectPropertyOf(bar:subObjProp1 bar:objProp1)
val expectedNumberOfAxioms = 1
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLSubObjectPropertyOfAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLSubPropertyChainOfAxiom objects should be correct") {
val expectedNumberOfAxioms = 0
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLSubPropertyChainOfAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated SWRLRule objects should be correct") {
val expectedNumberOfAxioms = 0
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[SWRLRule])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("The number of generated OWLDataPropertyAssertionAxiom objects should be correct") {
// --> DataPropertyAssertion(bar:dataProp1 foo:indivA "ABCD")
// --> DataPropertyAssertion(bar:dataProp1 foo:indivB "BCDE")
val expectedNumberOfAxioms = 2
val filteredDataset = dataSet.filter(axiom => axiom.isInstanceOf[OWLDataPropertyAssertionAxiom])
assert(filteredDataset.count() == expectedNumberOfAxioms)
}
test("There should not be any null values") {
assert(dataSet.filter(a => a == null).count() == 0)
}
}
|
SANSA-Stack/SANSA-RDF
|
sansa-owl/sansa-owl-flink/src/test/scala/net/sansa_stack/owl/flink/dataset/FunctionalSyntaxOWLAxiomsDataSetBuilderTest.scala
|
Scala
|
apache-2.0
| 17,789
|
package mesosphere.marathon.metrics
import java.util.concurrent.TimeUnit
import com.codahale.metrics.{ ExponentiallyDecayingReservoir, MetricRegistry }
import com.google.inject.{ Guice, AbstractModule }
import com.google.inject.matcher.{ AbstractMatcher, Matchers }
import mesosphere.marathon.MarathonSpec
import mesosphere.marathon.metrics.Metrics._
import org.aopalliance.intercept.{ MethodInvocation, MethodInterceptor }
import org.mockito.ArgumentCaptor
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
class FooBar {
def dummy(): Unit = {}
}
class MetricsTest
extends MarathonSpec
with MockitoSugar {
private var metrics: Metrics = _
class TestModule extends AbstractModule {
class DummyBehavior extends MethodInterceptor {
override def invoke(invocation: MethodInvocation): AnyRef = {
invocation.proceed()
}
}
object MarathonMatcher extends AbstractMatcher[Class[_]] {
override def matches(t: Class[_]): Boolean = t == classOf[FooBar]
}
override def configure(): Unit = {
bindInterceptor(Matchers.any(), Matchers.any(), new DummyBehavior())
}
}
before {
metrics = new Metrics(new MetricRegistry())
}
test("Metrics#className should strip 'EnhancerByGuice' from the metric names") {
val instance = Guice.createInjector(new TestModule).getInstance(classOf[FooBar])
assert(instance.getClass.getName.contains("EnhancerByGuice"))
assert(metrics.className(instance.getClass) == "mesosphere.marathon.metrics.FooBar")
}
test("Metrics caches the class names") {
val metricsSpy = spy(metrics)
metricsSpy.name("prefix", classOf[FooBar], "method1")
metricsSpy.name("prefix", classOf[FooBar], "method2")
metricsSpy.name("prefix", classOf[MetricsTest], "method1")
verify(metricsSpy, times(1)).stripGuiceMarksFromClassName(classOf[FooBar])
verify(metricsSpy, times(2)).stripGuiceMarksFromClassName(any())
}
test("Metrics#name should use a dot to separate the class name and the method name") {
val expectedName = "service.mesosphere.marathon.tasks.TaskTracker.write-request-time"
val actualName = metrics.name("service", classOf[mesosphere.marathon.tasks.TaskTracker], "write-request-time")
assert(expectedName.equals(actualName))
}
test("The Histogram wrapper should properly proxy updates") {
val origHistogram = new com.codahale.metrics.Histogram(new ExponentiallyDecayingReservoir())
val histogram = new Histogram(origHistogram)
histogram.update(10L)
histogram.update(1)
assert(origHistogram.getSnapshot.getMax == 10)
assert(origHistogram.getSnapshot.getMin == 1)
}
test("The Meter wrapper should properly proxy marks") {
val origMeter = new com.codahale.metrics.Meter
val meter = new Meter(origMeter)
meter.mark()
meter.mark(10)
assert(origMeter.getCount == 11)
}
test("The Timer wrapper should properly time method calls and proxy the updates") {
val origTimer = mock[com.codahale.metrics.Timer]
val timer = new Timer(origTimer)
timer {}
val durationCaptor = ArgumentCaptor.forClass(classOf[Long])
verify(origTimer).update(durationCaptor.capture(), org.mockito.Matchers.eq(TimeUnit.NANOSECONDS))
assert(durationCaptor.getValue > 0)
}
}
|
EasonYi/marathon
|
src/test/scala/mesosphere/marathon/metrics/MetricsTest.scala
|
Scala
|
apache-2.0
| 3,328
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.io.{File, OutputStream, PrintStream}
import java.nio.charset.StandardCharsets
import scala.collection.mutable.ArrayBuffer
import com.google.common.io.Files
import org.apache.ivy.core.module.descriptor.MDArtifact
import org.apache.ivy.core.settings.IvySettings
import org.apache.ivy.plugins.resolver.{AbstractResolver, ChainResolver, FileSystemResolver, IBiblioResolver}
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.SparkFunSuite
import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
import org.apache.spark.util.Utils
class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
private var tempIvyPath: String = _
private val noOpOutputStream = new OutputStream {
def write(b: Int) = {}
}
/** Simple PrintStream that reads data into a buffer */
private class BufferPrintStream extends PrintStream(noOpOutputStream) {
var lineBuffer = ArrayBuffer[String]()
// scalastyle:off println
override def println(line: String) {
lineBuffer += line
}
// scalastyle:on println
}
override def beforeAll() {
super.beforeAll()
// We don't want to write logs during testing
SparkSubmitUtils.printStream = new BufferPrintStream
tempIvyPath = Utils.createTempDir(namePrefix = "ivy").getAbsolutePath()
}
test("incorrect maven coordinate throws error") {
val coordinates = Seq("a:b: ", " :a:b", "a: :b", "a:b:", ":a:b", "a::b", "::", "a:b", "a")
for (coordinate <- coordinates) {
intercept[IllegalArgumentException] {
SparkSubmitUtils.extractMavenCoordinates(coordinate)
}
}
}
test("create repo resolvers") {
val settings = new IvySettings
val res1 = SparkSubmitUtils.createRepoResolvers(settings.getDefaultIvyUserDir)
// should have central and spark-packages by default
assert(res1.getResolvers.size() === 4)
assert(res1.getResolvers.get(0).asInstanceOf[IBiblioResolver].getName === "local-m2-cache")
assert(res1.getResolvers.get(1).asInstanceOf[FileSystemResolver].getName === "local-ivy-cache")
assert(res1.getResolvers.get(2).asInstanceOf[IBiblioResolver].getName === "central")
assert(res1.getResolvers.get(3).asInstanceOf[IBiblioResolver].getName === "spark-packages")
}
test("create additional resolvers") {
val repos = "a/1,b/2,c/3"
val settings = SparkSubmitUtils.buildIvySettings(Option(repos), None)
val resolver = settings.getDefaultResolver.asInstanceOf[ChainResolver]
assert(resolver.getResolvers.size() === 4)
val expected = repos.split(",").map(r => s"$r/")
resolver.getResolvers.toArray.map(_.asInstanceOf[AbstractResolver]).zipWithIndex.foreach {
case (r, i) =>
if (1 < i && i < 3) {
assert(r.getName === s"repo-$i")
assert(r.asInstanceOf[IBiblioResolver].getRoot === expected(i - 1))
}
}
}
test("add dependencies works correctly") {
val md = SparkSubmitUtils.getModuleDescriptor
val artifacts = SparkSubmitUtils.extractMavenCoordinates("com.databricks:spark-csv_2.11:0.1," +
"com.databricks:spark-avro_2.11:0.1")
SparkSubmitUtils.addDependenciesToIvy(md, artifacts, "default")
assert(md.getDependencies.length === 2)
}
test("excludes works correctly") {
val md = SparkSubmitUtils.getModuleDescriptor
val excludes = Seq("a:b", "c:d")
excludes.foreach { e =>
md.addExcludeRule(SparkSubmitUtils.createExclusion(e + ":*", new IvySettings, "default"))
}
val rules = md.getAllExcludeRules
assert(rules.length === 2)
val rule1 = rules(0).getId.getModuleId
assert(rule1.getOrganisation === "a")
assert(rule1.getName === "b")
val rule2 = rules(1).getId.getModuleId
assert(rule2.getOrganisation === "c")
assert(rule2.getName === "d")
intercept[IllegalArgumentException] {
SparkSubmitUtils.createExclusion("e:f:g:h", new IvySettings, "default")
}
}
test("ivy path works correctly") {
val md = SparkSubmitUtils.getModuleDescriptor
val artifacts = for (i <- 0 until 3) yield new MDArtifact(md, s"jar-$i", "jar", "jar")
var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(tempIvyPath))
for (i <- 0 until 3) {
val index = jPaths.indexOf(tempIvyPath)
assert(index >= 0)
jPaths = jPaths.substring(index + tempIvyPath.length)
}
val main = MavenCoordinate("my.awesome.lib", "mylib", "0.1")
IvyTestUtils.withRepository(main, None, None) { repo =>
// end to end
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
main.toString,
SparkSubmitUtils.buildIvySettings(Option(repo), Option(tempIvyPath)),
isTest = true)
assert(jarPath.indexOf(tempIvyPath) >= 0, "should use non-default ivy path")
}
}
test("search for artifact at local repositories") {
val main = new MavenCoordinate("my.great.lib", "mylib", "0.1")
val dep = "my.great.dep:mydep:0.5"
// Local M2 repository
IvyTestUtils.withRepository(main, Some(dep), Some(SparkSubmitUtils.m2Path)) { repo =>
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
main.toString,
SparkSubmitUtils.buildIvySettings(None, None),
isTest = true)
assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
assert(jarPath.indexOf("mydep") >= 0, "should find dependency")
}
// Local Ivy Repository
val settings = new IvySettings
val ivyLocal = new File(settings.getDefaultIvyUserDir, "local" + File.separator)
IvyTestUtils.withRepository(main, Some(dep), Some(ivyLocal), useIvyLayout = true) { repo =>
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
main.toString,
SparkSubmitUtils.buildIvySettings(None, None),
isTest = true)
assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
assert(jarPath.indexOf("mydep") >= 0, "should find dependency")
}
// Local ivy repository with modified home
val dummyIvyLocal = new File(tempIvyPath, "local" + File.separator)
settings.setDefaultIvyUserDir(new File(tempIvyPath))
IvyTestUtils.withRepository(main, Some(dep), Some(dummyIvyLocal), useIvyLayout = true,
ivySettings = settings) { repo =>
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
main.toString,
SparkSubmitUtils.buildIvySettings(None, Some(tempIvyPath)),
isTest = true)
assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
assert(jarPath.indexOf(tempIvyPath) >= 0, "should be in new ivy path")
assert(jarPath.indexOf("mydep") >= 0, "should find dependency")
}
}
test("dependency not found throws RuntimeException") {
intercept[RuntimeException] {
SparkSubmitUtils.resolveMavenCoordinates(
"a:b:c",
SparkSubmitUtils.buildIvySettings(None, None),
isTest = true)
}
}
test("neglects Spark and Spark's dependencies") {
val coordinates = SparkSubmitUtils.IVY_DEFAULT_EXCLUDES
.map(comp => s"org.apache.spark:spark-${comp}2.11:2.1.1")
.mkString(",") + ",org.apache.spark:spark-core_fake:1.2.0"
val path = SparkSubmitUtils.resolveMavenCoordinates(
coordinates,
SparkSubmitUtils.buildIvySettings(None, None),
isTest = true)
assert(path === "", "should return empty path")
val main = MavenCoordinate("org.apache.spark", "spark-streaming-kafka-assembly_2.11", "1.2.0")
IvyTestUtils.withRepository(main, None, None) { repo =>
val files = SparkSubmitUtils.resolveMavenCoordinates(
coordinates + "," + main.toString,
SparkSubmitUtils.buildIvySettings(Some(repo), None),
isTest = true)
assert(files.indexOf(main.artifactId) >= 0, "Did not return artifact")
}
}
test("exclude dependencies end to end") {
val main = new MavenCoordinate("my.great.lib", "mylib", "0.1")
val dep = "my.great.dep:mydep:0.5"
IvyTestUtils.withRepository(main, Some(dep), None) { repo =>
val files = SparkSubmitUtils.resolveMavenCoordinates(
main.toString,
SparkSubmitUtils.buildIvySettings(Some(repo), None),
Seq("my.great.dep:mydep"),
isTest = true)
assert(files.indexOf(main.artifactId) >= 0, "Did not return artifact")
assert(files.indexOf("my.great.dep") < 0, "Returned excluded artifact")
}
}
test("load ivy settings file") {
val main = new MavenCoordinate("my.great.lib", "mylib", "0.1")
val dep = "my.great.dep:mydep:0.5"
val dummyIvyLocal = new File(tempIvyPath, "local" + File.separator)
val settingsText =
s"""
|<ivysettings>
| <caches defaultCacheDir="$tempIvyPath/cache"/>
| <settings defaultResolver="local-ivy-settings-file-test"/>
| <resolvers>
| <filesystem name="local-ivy-settings-file-test">
| <ivy pattern=
| "$dummyIvyLocal/[organisation]/[module]/[revision]/[type]s/[artifact].[ext]"/>
| <artifact pattern=
| "$dummyIvyLocal/[organisation]/[module]/[revision]/[type]s/[artifact].[ext]"/>
| </filesystem>
| </resolvers>
|</ivysettings>
|""".stripMargin
val settingsFile = new File(tempIvyPath, "ivysettings.xml")
Files.write(settingsText, settingsFile, StandardCharsets.UTF_8)
val settings = SparkSubmitUtils.loadIvySettings(settingsFile.toString, None, None)
settings.setDefaultIvyUserDir(new File(tempIvyPath)) // NOTE - can't set this through file
val testUtilSettings = new IvySettings
testUtilSettings.setDefaultIvyUserDir(new File(tempIvyPath))
IvyTestUtils.withRepository(main, Some(dep), Some(dummyIvyLocal), useIvyLayout = true,
ivySettings = testUtilSettings) { repo =>
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, settings, isTest = true)
assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
assert(jarPath.indexOf(tempIvyPath) >= 0, "should be in new ivy path")
assert(jarPath.indexOf("mydep") >= 0, "should find dependency")
}
}
}
|
ddna1021/spark
|
core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
|
Scala
|
apache-2.0
| 10,902
|
package actors
import actors.persistent.staffing.{AddStaffMovements, AddStaffMovementsAck, GetState, RemoveStaffMovements, RemoveStaffMovementsAck, StaffMovementsActorBase, StaffMovementsReadActor}
import java.util.UUID
import akka.actor.{PoisonPill, Props}
import akka.testkit.ImplicitSender
import drt.shared.Terminals.T1
import drt.shared.{MilliDate, SDateLike, StaffMovement, StaffMovements}
import services.SDate
import services.crunch.CrunchTestLike
import scala.concurrent.duration._
object PersistenceHelper {
val dbLocation = "target/test"
}
class StaffMovementsActorSpec extends CrunchTestLike with ImplicitSender {
sequential
isolated
"StaffMovementsActor" should {
"remember a movement added before a shutdown" in {
val movementUuid1: UUID = UUID.randomUUID()
val staffMovements = StaffMovements(Seq(StaffMovement(T1, "lunch start", MilliDate(SDate(s"2017-01-01T00:00").millisSinceEpoch), -1, movementUuid1, createdBy = Some("batman"))))
val now: () => SDateLike = () => SDate("2017-01-01T23:59")
val expireAfterOneDay: () => SDateLike = () => now().addDays(-1)
val actor = system.actorOf(Props(classOf[StaffMovementsActorBase], now, expireAfterOneDay), "movementsActor1")
actor ! AddStaffMovements(staffMovements.movements)
expectMsg(AddStaffMovementsAck(staffMovements.movements))
actor ! PoisonPill
Thread.sleep(100)
val newActor = system.actorOf(Props(classOf[StaffMovementsActorBase], now, expireAfterOneDay), "movementsActor2")
newActor ! GetState
expectMsg(staffMovements)
true
}
"correctly remove a movement after a restart" in {
val movementUuid1: UUID = UUID.randomUUID()
val movementUuid2: UUID = UUID.randomUUID()
val movement1 = StaffMovement(T1, "lunch start", MilliDate(SDate(s"2017-01-01T00:00").millisSinceEpoch), -1, movementUuid1, createdBy = Some("batman"))
val movement2 = StaffMovement(T1, "coffee start", MilliDate(SDate(s"2017-01-01T01:15").millisSinceEpoch), -1, movementUuid2, createdBy = Some("robin"))
val staffMovements = StaffMovements(Seq(movement1, movement2))
val now: () => SDateLike = () => SDate("2017-01-01T23:59")
val expireAfterOneDay: () => SDateLike = () => now().addDays(-1)
val actor = system.actorOf(Props(classOf[StaffMovementsActorBase], now, expireAfterOneDay), "movementsActor1")
actor ! AddStaffMovements(staffMovements.movements)
expectMsg(AddStaffMovementsAck(staffMovements.movements))
actor ! RemoveStaffMovements(movementUuid1)
expectMsg(RemoveStaffMovementsAck(movementUuid1))
actor ! PoisonPill
val newActor = system.actorOf(Props(classOf[StaffMovementsActorBase], now, expireAfterOneDay), "movementsActor2")
newActor ! GetState
val expected = Set(movement2)
val result = expectMsgPF(1 second) {
case StaffMovements(movements) => movements.toSet
}
result === expected
}
"remember multiple added movements and correctly forget removed movements after a restart" in {
val movementUuid1: UUID = UUID.randomUUID()
val movementUuid2: UUID = UUID.randomUUID()
val movementUuid3: UUID = UUID.randomUUID()
val movementUuid4: UUID = UUID.randomUUID()
val movement1 = StaffMovement(T1, "lunch start", MilliDate(SDate("2017-01-01T00:00").millisSinceEpoch), -1, movementUuid1, createdBy = Some("batman"))
val movement2 = StaffMovement(T1, "coffee start", MilliDate(SDate("2017-01-01T01:15").millisSinceEpoch), -1, movementUuid2, createdBy = Some("robin"))
val movement3 = StaffMovement(T1, "supper start", MilliDate(SDate("2017-01-01T21:30").millisSinceEpoch), -1, movementUuid3, createdBy = Some("bruce"))
val movement4 = StaffMovement(T1, "supper start", MilliDate(SDate("2017-01-01T21:40").millisSinceEpoch), -1, movementUuid4, createdBy = Some("bruce"))
val now: () => SDateLike = () => SDate("2017-01-01T23:59")
val expireAfterOneDay: () => SDateLike = () => now().addDays(-1)
val actor = system.actorOf(Props(classOf[StaffMovementsActorBase], now, expireAfterOneDay), "movementsActor1")
actor ! AddStaffMovements(Seq(movement1, movement2))
expectMsg(AddStaffMovementsAck(Seq(movement1, movement2)))
actor ! RemoveStaffMovements(movementUuid1)
expectMsg(RemoveStaffMovementsAck(movementUuid1))
actor ! AddStaffMovements(Seq(movement3, movement4))
expectMsg(AddStaffMovementsAck(Seq(movement3, movement4)))
actor ! RemoveStaffMovements(movementUuid4)
expectMsg(RemoveStaffMovementsAck(movementUuid4))
actor ! PoisonPill
val newActor = system.actorOf(Props(classOf[StaffMovementsActorBase], now, expireAfterOneDay), "movementsActor2")
newActor ! GetState
val expected = Set(movement2, movement3)
val result = expectMsgPF(1 second) {
case StaffMovements(movements) => movements.toSet
}
result === expected
}
"purge movements created more than the specified expiry period ago" in {
val movementUuid1: UUID = UUID.randomUUID()
val movementUuid2: UUID = UUID.randomUUID()
val movementUuid3: UUID = UUID.randomUUID()
val movementUuid4: UUID = UUID.randomUUID()
val expiredMovement1 = StaffMovement(T1, "lunch start", MilliDate(SDate(s"2017-01-01T00:00").millisSinceEpoch), -1, movementUuid1, createdBy = Some("batman"))
val expiredMovement2 = StaffMovement(T1, "coffee start", MilliDate(SDate(s"2017-01-01T01:15").millisSinceEpoch), -1, movementUuid2, createdBy = Some("robin"))
val unexpiredMovement1 = StaffMovement(T1, "supper start", MilliDate(SDate(s"2017-01-01T21:30").millisSinceEpoch), -1, movementUuid3, createdBy = Some("bruce"))
val unexpiredMovement2 = StaffMovement(T1, "supper start", MilliDate(SDate(s"2017-01-01T21:40").millisSinceEpoch), -1, movementUuid4, createdBy = Some("bruce"))
val now_is_20170102_0200: () => SDateLike = () => SDate("2017-01-02T02:00")
val expireAfterOneDay: () => SDateLike = () => now_is_20170102_0200().addDays(-1)
val actor = system.actorOf(Props(classOf[StaffMovementsActorBase], now_is_20170102_0200, expireAfterOneDay), "movementsActor1")
actor ! AddStaffMovements(Seq(expiredMovement1, expiredMovement2))
expectMsg(AddStaffMovementsAck(Seq(expiredMovement1, expiredMovement2)))
actor ! AddStaffMovements(Seq(unexpiredMovement1, unexpiredMovement2))
expectMsg(AddStaffMovementsAck(Seq(unexpiredMovement1, unexpiredMovement2)))
actor ! GetState
val expected = Set(unexpiredMovement1, unexpiredMovement2)
val result = expectMsgPF(1 second) {
case StaffMovements(movements) => movements.toSet
}
result === expected
}
"purge movements created more than the specified expiry period ago when requested via a point in time actor" in {
val expiredMovement1 = StaffMovement(T1, "lunch start", MilliDate(SDate(s"2017-01-01T00:00").millisSinceEpoch), -1, UUID.randomUUID(), createdBy = Some("batman"))
val expiredMovement2 = StaffMovement(T1, "coffee start", MilliDate(SDate(s"2017-01-01T01:15").millisSinceEpoch), -1, UUID.randomUUID(), createdBy = Some("robin"))
val unexpiredMovement1 = StaffMovement(T1, "supper start", MilliDate(SDate(s"2017-01-01T21:30").millisSinceEpoch), -1, UUID.randomUUID(), createdBy = Some("bruce"))
val unexpiredMovement2 = StaffMovement(T1, "supper start", MilliDate(SDate(s"2017-01-01T21:40").millisSinceEpoch), -1, UUID.randomUUID(), createdBy = Some("bruce"))
val now_is_20170102_0200: () => SDateLike = () => SDate("2017-01-02T02:00")
val expireAfterOneDay: () => SDateLike = () => now_is_20170102_0200().addDays(-1)
val actor = system.actorOf(Props(classOf[StaffMovementsActorBase], now_is_20170102_0200, expireAfterOneDay), "movementsActor1")
actor ! AddStaffMovements(Seq(expiredMovement1, expiredMovement2))
expectMsg(AddStaffMovementsAck(Seq(expiredMovement1, expiredMovement2)))
actor ! AddStaffMovements(Seq(unexpiredMovement1, unexpiredMovement2))
expectMsg(AddStaffMovementsAck(Seq(unexpiredMovement1, unexpiredMovement2)))
actor ! PoisonPill
val newActor = system.actorOf(Props(classOf[StaffMovementsReadActor], now_is_20170102_0200(), expireAfterOneDay), "movementsActor2")
newActor ! GetState
val expected = Set(unexpiredMovement1, unexpiredMovement2)
val result = expectMsgPF(1 second) {
case StaffMovements(movements) => movements.toSet
}
result === expected
}
"keep pairs of movements when only one was created more than the specified expiry period ago when requested via a point in time actor" in {
val pair1 = UUID.randomUUID()
val expiredMovement1 = StaffMovement(T1, "lunch start", MilliDate(SDate(s"2017-01-01T00:00").millisSinceEpoch), -1, pair1, createdBy = Some("batman"))
val expiredMovement2 = StaffMovement(T1, "lunch end", MilliDate(SDate(s"2017-01-01T01:15").millisSinceEpoch), 1, pair1, createdBy = Some("robin"))
val pair2 = UUID.randomUUID()
val unexpiredMovement1 = StaffMovement(T1, "supper start", MilliDate(SDate(s"2017-01-01T21:30").millisSinceEpoch), -1, pair2, createdBy = Some("bruce"))
val unexpiredMovement2 = StaffMovement(T1, "supper enmd", MilliDate(SDate(s"2017-01-01T21:40").millisSinceEpoch), 1, pair2, createdBy = Some("ed"))
val now_is_20170102_0200: () => SDateLike = () => SDate("2017-01-02T01:00")
val expireAfterOneDay: () => SDateLike = () => now_is_20170102_0200().addDays(-1)
val actor = system.actorOf(Props(classOf[StaffMovementsActorBase], now_is_20170102_0200, expireAfterOneDay), "movementsActor1")
actor ! AddStaffMovements(Seq(expiredMovement1, expiredMovement2))
expectMsg(AddStaffMovementsAck(Seq(expiredMovement1, expiredMovement2)))
actor ! AddStaffMovements(Seq(unexpiredMovement1, unexpiredMovement2))
expectMsg(AddStaffMovementsAck(Seq(unexpiredMovement1, unexpiredMovement2)))
actor ! PoisonPill
val newActor = system.actorOf(Props(classOf[StaffMovementsReadActor], now_is_20170102_0200(), expireAfterOneDay), "movementsActor2")
newActor ! GetState
val expected = Set(expiredMovement1, expiredMovement2, unexpiredMovement1, unexpiredMovement2)
val result = expectMsgPF(1 second) {
case StaffMovements(movements) => movements.toSet
}
result === expected
}
"purge pairs of movements where both were created more than the specified expiry period ago when requested via a point in time actor" in {
val pair1 = UUID.randomUUID()
val expiredMovement1 = StaffMovement(T1, "lunch start", MilliDate(SDate(s"2017-01-01T00:00").millisSinceEpoch), -1, pair1, createdBy = Some("batman"))
val expiredMovement2 = StaffMovement(T1, "lunch end", MilliDate(SDate(s"2017-01-01T01:15").millisSinceEpoch), 1, pair1, createdBy = Some("robin"))
val pair2 = UUID.randomUUID()
val unexpiredMovement1 = StaffMovement(T1, "supper start", MilliDate(SDate(s"2017-01-01T21:30").millisSinceEpoch), -1, pair2, createdBy = Some("bruce"))
val unexpiredMovement2 = StaffMovement(T1, "supper end", MilliDate(SDate(s"2017-01-01T21:40").millisSinceEpoch), 1, pair2, createdBy = Some("ed"))
val now_is_20170102_0200: () => SDateLike = () => SDate("2017-01-02T21:35")
val expireAfterOneDay: () => SDateLike = () => now_is_20170102_0200().addDays(-1)
val actor = system.actorOf(Props(classOf[StaffMovementsActorBase], now_is_20170102_0200, expireAfterOneDay), "movementsActor1")
actor ! AddStaffMovements(Seq(expiredMovement1, expiredMovement2))
expectMsg(AddStaffMovementsAck(Seq(expiredMovement1, expiredMovement2)))
actor ! AddStaffMovements(Seq(unexpiredMovement1, unexpiredMovement2))
expectMsg(AddStaffMovementsAck(Seq(unexpiredMovement1, unexpiredMovement2)))
actor ! PoisonPill
val newActor = system.actorOf(Props(classOf[StaffMovementsReadActor], now_is_20170102_0200(), expireAfterOneDay), "movementsActor2")
newActor ! GetState
val expected = Set(unexpiredMovement1, unexpiredMovement2)
val result = expectMsgPF(1 second) {
case StaffMovements(movements) => movements.toSet
}
result === expected
}
}
}
|
UKHomeOffice/drt-scalajs-spa-exploration
|
server/src/test/scala/actors/StaffMovementsActorSpec.scala
|
Scala
|
apache-2.0
| 12,379
|
/*
* Copyright 2015-2016 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package system.basic
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import common.JsHelpers
import common.TestHelpers
import common.TestUtils
import common.Wsk
import common.WskProps
import common.WskTestHelpers
import spray.json._
import spray.json.DefaultJsonProtocol._
import spray.json.JsObject
import spray.json.pimpAny
@RunWith(classOf[JUnitRunner])
class WskActionTests
extends TestHelpers
with WskTestHelpers
with JsHelpers {
implicit val wskprops = WskProps()
val wsk = new Wsk
val testString = "this is a test"
val testResult = JsObject("count" -> testString.split(" ").length.toJson)
val guestNamespace = wskprops.namespace
behavior of "Whisk actions"
it should "invoke an action returning a promise" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val name = "hello promise"
assetHelper.withCleaner(wsk.action, name) {
(action, _) => action.create(name, Some(TestUtils.getTestActionFilename("helloPromise.js")))
}
val run = wsk.action.invoke(name)
withActivation(wsk.activation, run) {
activation =>
activation.response.status shouldBe "success"
activation.response.result shouldBe Some(JsObject("done" -> true.toJson))
activation.logs.get.mkString(" ") shouldBe empty
}
}
it should "invoke an action with a space in the name" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val name = "hello Async"
assetHelper.withCleaner(wsk.action, name) {
(action, _) => action.create(name, Some(TestUtils.getTestActionFilename("helloAsync.js")))
}
val run = wsk.action.invoke(name, Map("payload" -> testString.toJson))
withActivation(wsk.activation, run) {
activation =>
activation.response.status shouldBe "success"
activation.response.result shouldBe Some(testResult)
activation.logs.get.mkString(" ") should include(testString)
}
}
it should "pass parameters bound on creation-time to the action" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val name = "printParams"
val params = Map(
"param1" -> "test1",
"param2" -> "test2")
assetHelper.withCleaner(wsk.action, name) {
(action, _) =>
action.create(
name,
Some(TestUtils.getTestActionFilename("printParams.js")),
parameters = params.mapValues(_.toJson))
}
val invokeParams = Map("payload" -> testString)
val run = wsk.action.invoke(name, invokeParams.mapValues(_.toJson))
withActivation(wsk.activation, run) {
activation =>
val logs = activation.logs.get.mkString(" ")
(params ++ invokeParams).foreach {
case (key, value) =>
logs should include(s"params.$key: $value")
}
}
}
it should "copy an action and invoke it successfully" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val name = "copied"
val packageName = "samples"
val actionName = "wordcount"
val fullQualifiedName = s"/$guestNamespace/$packageName/$actionName"
assetHelper.withCleaner(wsk.pkg, packageName) {
(pkg, _) => pkg.create(packageName, shared = Some(true))
}
assetHelper.withCleaner(wsk.action, fullQualifiedName) {
val file = Some(TestUtils.getTestActionFilename("wc.js"))
(action, _) => action.create(fullQualifiedName, file)
}
assetHelper.withCleaner(wsk.action, name) {
(action, _) => action.create(name, Some(fullQualifiedName), Some("copy"))
}
val run = wsk.action.invoke(name, Map("payload" -> testString.toJson))
withActivation(wsk.activation, run) {
activation =>
activation.response.status shouldBe "success"
activation.response.result shouldBe Some(testResult)
activation.logs.get.mkString(" ") should include(testString)
}
}
it should "copy an action and ensure exec, parameters, and annotations copied" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val origActionName = "origAction"
val copiedActionName = "copiedAction"
val params = Map("a" -> "A".toJson)
val annots = Map("b" -> "B".toJson)
assetHelper.withCleaner(wsk.action, origActionName) {
val file = Some(TestUtils.getTestActionFilename("wc.js"))
(action, _) => action.create(origActionName, file, parameters = params, annotations = annots)
}
assetHelper.withCleaner(wsk.action, copiedActionName) {
(action, _) => action.create(copiedActionName, Some(origActionName), Some("copy"))
}
val copiedAction = getJSONFromCLIResponse(wsk.action.get(copiedActionName).stdout)
      val origAction = getJSONFromCLIResponse(wsk.action.get(origActionName).stdout)
copiedAction.fields("annotations") shouldBe origAction.fields("annotations")
copiedAction.fields("parameters") shouldBe origAction.fields("parameters")
copiedAction.fields("exec") shouldBe origAction.fields("exec")
copiedAction.fields("version") shouldBe JsString("0.0.1")
}
it should "add new parameters and annotations while copying an action" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val origName = "origAction"
val copiedName = "copiedAction"
val origParams = Map("origParam1" -> "origParamValue1".toJson, "origParam2" -> 999.toJson)
val copiedParams = Map("copiedParam1" -> "copiedParamValue1".toJson, "copiedParam2" -> 123.toJson)
val origAnnots = Map("origAnnot1" -> "origAnnotValue1".toJson, "origAnnot2" -> true.toJson)
val copiedAnnots = Map("copiedAnnot1" -> "copiedAnnotValue1".toJson, "copiedAnnot2" -> false.toJson)
val resParams = Seq(
JsObject(
"key" -> JsString("copiedParam1"),
"value" -> JsString("copiedParamValue1")
),
JsObject(
"key" -> JsString("copiedParam2"),
"value" -> JsNumber(123)
),
JsObject(
"key" -> JsString("origParam1"),
"value" -> JsString("origParamValue1")
),
JsObject(
"key" -> JsString("origParam2"),
"value" -> JsNumber(999)
)
)
val resAnnots = Seq(
JsObject(
"key" -> JsString("origAnnot1"),
"value" -> JsString("origAnnotValue1")
),
JsObject(
"key" -> JsString("copiedAnnot2"),
"value" -> JsBoolean(false)
),
JsObject(
"key" -> JsString("copiedAnnot1"),
"value" -> JsString("copiedAnnotValue1")
),
JsObject(
"key" -> JsString("origAnnot2"),
"value" -> JsBoolean(true)
),
JsObject(
"key" -> JsString("exec"),
"value" -> JsString("nodejs:6")
)
)
assetHelper.withCleaner(wsk.action, origName) {
val file = Some(TestUtils.getTestActionFilename("echo.js"))
(action, _) => action.create(origName, file, parameters = origParams, annotations = origAnnots)
}
assetHelper.withCleaner(wsk.action, copiedName) {
(action, _) => action.create(copiedName, Some(origName), Some("copy"), parameters = copiedParams, annotations = copiedAnnots)
}
val copiedAction = getJSONFromCLIResponse(wsk.action.get(copiedName).stdout)
// CLI does not guarantee order of annotations and parameters so do a diff to compare the values
copiedAction.fields("parameters").convertTo[Seq[JsObject]] diff resParams shouldBe List()
copiedAction.fields("annotations").convertTo[Seq[JsObject]] diff resAnnots shouldBe List()
}
it should "recreate and invoke a new action with different code" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val name = "recreatedAction"
assetHelper.withCleaner(wsk.action, name, false) {
(action, _) => action.create(name, Some(TestUtils.getTestActionFilename("wc.js")))
}
val run1 = wsk.action.invoke(name, Map("payload" -> testString.toJson))
withActivation(wsk.activation, run1) {
activation =>
activation.response.status shouldBe "success"
activation.logs.get.mkString(" ") should include(s"The message '$testString' has")
}
wsk.action.delete(name)
assetHelper.withCleaner(wsk.action, name) {
(action, _) => action.create(name, Some(TestUtils.getTestActionFilename("hello.js")))
}
val run2 = wsk.action.invoke(name, Map("payload" -> testString.toJson))
withActivation(wsk.activation, run2) {
activation =>
activation.response.status shouldBe "success"
activation.logs.get.mkString(" ") should include(s"hello, $testString")
}
}
it should "fail to invoke an action with an empty file" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val name = "empty"
assetHelper.withCleaner(wsk.action, name) {
(action, _) => action.create(name, Some(TestUtils.getTestActionFilename("empty.js")))
}
val run = wsk.action.invoke(name)
withActivation(wsk.activation, run) {
activation =>
activation.response.status shouldBe "action developer error"
activation.response.result shouldBe Some(JsObject("error" -> "Missing main/no code to execute.".toJson))
}
}
it should "create an action with an empty file" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val name = "empty"
assetHelper.withCleaner(wsk.action, name) {
(action, _) => action.create(name, Some(TestUtils.getTestActionFilename("empty.js")))
}
val rr = wsk.action.get(name)
wsk.parseJsonString(rr.stdout).getFieldPath("exec", "code") shouldBe Some(JsString(""))
}
it should "blocking invoke of nested blocking actions" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val name = "nestedBlockingAction"
val child = "wc"
assetHelper.withCleaner(wsk.action, name) {
(action, _) => action.create(name, Some(TestUtils.getTestActionFilename("wcbin.js")))
}
assetHelper.withCleaner(wsk.action, child) {
(action, _) => action.create(child, Some(TestUtils.getTestActionFilename("wc.js")))
}
val run = wsk.action.invoke(name, Map("payload" -> testString.toJson), blocking = true)
val activation = wsk.parseJsonString(run.stdout).convertTo[CliActivation]
withClue(s"check failed for activation: $activation") {
val wordCount = testString.split(" ").length
activation.response.result.get shouldBe JsObject("binaryCount" -> s"${wordCount.toBinaryString} (base 2)".toJson)
}
}
it should "blocking invoke an asynchronous action" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val name = "helloAsync"
assetHelper.withCleaner(wsk.action, name) {
(action, _) => action.create(name, Some(TestUtils.getTestActionFilename("helloAsync.js")))
}
val run = wsk.action.invoke(name, Map("payload" -> testString.toJson), blocking = true)
val activation = wsk.parseJsonString(run.stdout).convertTo[CliActivation]
withClue(s"check failed for activation: $activation") {
activation.response.status shouldBe "success"
activation.response.result shouldBe Some(testResult)
activation.logs shouldBe Some(List())
}
}
it should "reject an invoke with the wrong parameters set" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val fullQualifiedName = s"/$guestNamespace/samples/helloWorld"
val payload = "bob"
val rr = wsk.cli(Seq("action", "invoke", fullQualifiedName, payload) ++ wskprops.overrides,
expectedExitCode = TestUtils.ERROR_EXIT)
rr.stderr should include("Run 'wsk --help' for usage.")
rr.stderr should include(s"error: Invalid argument(s): $payload")
}
it should "not be able to use 'ping' in an action" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val name = "ping"
assetHelper.withCleaner(wsk.action, name) {
(action, _) => action.create(name, Some(TestUtils.getTestActionFilename("ping.js")))
}
val run = wsk.action.invoke(name, Map("payload" -> "google.com".toJson))
withActivation(wsk.activation, run) {
activation =>
activation.response.result shouldBe Some(JsObject(
"stderr" -> "ping: icmp open socket: Operation not permitted\n".toJson,
"stdout" -> "".toJson))
}
}
ignore should "support UTF-8 as input and output format" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
val name = "utf8Test"
assetHelper.withCleaner(wsk.action, name) {
(action, _) => action.create(name, Some(TestUtils.getTestActionFilename("hello.js")))
}
val utf8 = "Β«ΟΞ±ΠΠ¬βΟΓΆΒ»: 1<2 & 4+1>Β³, now 20%β¬Β§$ off!"
val run = wsk.action.invoke(name, Map("payload" -> utf8.toJson))
withActivation(wsk.activation, run) {
activation =>
activation.response.status shouldBe "success"
activation.logs.get.mkString(" ") should include(s"hello $utf8")
}
}
}
|
CrowdFlower/incubator-openwhisk
|
tests/src/test/scala/system/basic/WskActionTests.scala
|
Scala
|
apache-2.0
| 15,611
|
import scala.tools.partest._
import java.io.{Console => _, _}
object Test extends DirectTest {
override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:method -Ystop-after:uncurry -d " + testOutput.path
override def code = """class Foo {
| def bar(x: => String) = x
|
| def foo = bar("")
|}
|""".stripMargin.trim
override def show(): Unit = {
Console.withErr(System.out) {
compile()
}
}
}
|
felixmulder/scala
|
test/files/run/delambdafy_uncurry_byname_method.scala
|
Scala
|
bsd-3-clause
| 560
|
/**
* Copyright 2015 Thomson Reuters
*
 * Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package security
/**
* Created by yaakov on 6/14/15.
*/
sealed trait Operation
case object Admin extends Operation
case object Overwrite extends Operation
case object PriorityWrite extends Operation
object Operation {
def unapply(op: String): Option[Operation] = {
op match {
case "Admin" => Some(Admin)
case "Overwrite" => Some(Overwrite)
case "PriorityWrite" => Some(PriorityWrite)
case _ => None
}
}
}
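// A minimal usage sketch (added for illustration, not part of the original file): the
// extractor defined above lets raw strings from a request be pattern matched into
// Operation values. The object and method names below are illustrative only.
object OperationUsageSketch {
  def requiredOperation(raw: String): Operation = raw match {
    case Operation(op) => op
    case other         => throw new IllegalArgumentException(s"Unknown operation: $other")
  }
  // requiredOperation("Admin")     == Admin
  // requiredOperation("Overwrite") == Overwrite
}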
|
nruppin/CM-Well
|
server/cmwell-ws/app/security/Operation.scala
|
Scala
|
apache-2.0
| 1,064
|
package com.sksamuel.elastic4s.requests.searches.aggs
import com.sksamuel.elastic4s.requests.common.RefreshPolicy
import com.sksamuel.elastic4s.requests.searches.DateHistogramInterval
import com.sksamuel.elastic4s.testkit.DockerTests
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
import scala.util.Try
class ExtendedStatsBucketPipelineAggHttpTest extends AnyFreeSpec with DockerTests with Matchers {
Try {
client.execute {
deleteIndex("extendedstatsbucketagg")
}.await
}
client.execute {
createIndex("extendedstatsbucketagg") mappings {
mapping("sales") fields(
dateField("date"),
doubleField("value").stored(true)
)
}
}.await
client.execute(
bulk(
indexInto("extendedstatsbucketagg") fields("date" -> "2017-01-01", "value" -> 1000.0),
indexInto("extendedstatsbucketagg") fields("date" -> "2017-01-02", "value" -> 1000.0),
indexInto("extendedstatsbucketagg") fields("date" -> "2017-02-01", "value" -> 2000.0),
indexInto("extendedstatsbucketagg") fields("date" -> "2017-02-01", "value" -> 2000.0),
indexInto("extendedstatsbucketagg") fields("date" -> "2017-03-01", "value" -> 3000.0),
indexInto("extendedstatsbucketagg") fields("date" -> "2017-03-02", "value" -> 3000.0)
).refresh(RefreshPolicy.Immediate)
).await
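  // Worked expectations for the assertions below (added for clarity, derived from the
  // documents indexed above): the monthly sum buckets are Jan 2000, Feb 4000, Mar 6000, so
  // count = 3, min = 2000, max = 6000, avg = 4000, sum = 12000,
  // sumOfSquares = 2000^2 + 4000^2 + 6000^2 = 5.6e7,
  // variance = ((2000-4000)^2 + 0^2 + (6000-4000)^2) / 3 = 2666666.66...,
  // stdDeviation = sqrt(variance) = 1632.99..., and the +/- 2 sigma bounds are
  // 734.01... (lower) and 7265.98... (upper).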
"extended stats bucket pipeline agg" - {
"should return the expected extended stats values" in {
val resp = client.execute {
search("extendedstatsbucketagg").matchAllQuery().aggs(
dateHistogramAgg("sales_per_month", "date")
.interval(DateHistogramInterval.Month)
.subaggs {
sumAgg("sales", "value")
},
extendedStatsBucketAggregation("stats_monthly_sales", "sales_per_month>sales")
)
}.await.result
resp.totalHits shouldBe 6
val agg = resp.aggs.extendedStatsBucket("stats_monthly_sales")
agg.count shouldBe 3
agg.min shouldBe 2000.0
agg.max shouldBe 6000.0
agg.avg shouldBe 4000.0
agg.sum shouldBe 12000.0
agg.sumOfSquares shouldBe 5.6E7
math.abs(agg.variance - 2666666.66) < 0.1 shouldBe true
math.abs(agg.stdDeviation - 1632.99) < 0.1 shouldBe true
math.abs(agg.stdDeviationBoundsLower - 734.01) < 0.1 shouldBe true
math.abs(agg.stdDeviationBoundsUpper - 7265.98) < 0.1 shouldBe true
}
}
}
|
stringbean/elastic4s
|
elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/requests/searches/aggs/ExtendedStatsBucketPipelineAggHttpTest.scala
|
Scala
|
apache-2.0
| 2,445
|
package au.com.dius.pact.server
import io.netty.channel.ChannelHandler.Sharable
import unfiltered.netty.ReceivedMessage
import unfiltered.netty.ServerErrorResponse
import unfiltered.netty.cycle
import unfiltered.request.HttpRequest
import unfiltered.response.ResponseFunction
import scala.collection.immutable.Map
class ServerStateStore {
var state: ServerState = Map()
}
@Sharable
case class RequestHandler(store: ServerStateStore, config: Config) extends cycle.Plan
with cycle.SynchronousExecution
with ServerErrorResponse {
import io.netty.handler.codec.http.{ HttpResponse=>NHttpResponse }
def handle(request: HttpRequest[ReceivedMessage]): ResponseFunction[NHttpResponse] = {
val pactRequest = Conversions.unfilteredRequestToPactRequest(request)
val result = RequestRouter.dispatch(pactRequest, store.state, config)
store.state = result.newState
Conversions.pactToUnfilteredResponse(result.response)
}
def intent = PartialFunction[HttpRequest[ReceivedMessage], ResponseFunction[NHttpResponse]](handle)
}
|
DiUS/pact-jvm
|
pact-jvm-server/src/main/scala/au/com/dius/pact/server/RequestHandler.scala
|
Scala
|
apache-2.0
| 1,059
|
package BIDMat
import java.io.Closeable
case class CLHandle(context: org.jocl.cl_context, queue: org.jocl.cl_command_queue) {
def free():Unit = {
CLKernelCache.free()
org.jocl.CL.clReleaseCommandQueue(queue)
org.jocl.CL.clReleaseContext(context)
}
}
object CLHandle {
def apply():CLHandle = {
val platforms = getCLPlatforms()
val platform = platforms(0)
// Query for available GPUs first, then check CPUs
val devices = try {
getCLDevices(platform, org.jocl.CL.CL_DEVICE_TYPE_GPU)
} catch {
case err:org.jocl.CLException => {
getCLDevices(platform, org.jocl.CL.CL_DEVICE_TYPE_CPU)
}
}
val context = createCLContext(platform, devices)
val queue = createCLQueue(context, devices(0))
CLHandle(context, queue)
}
private def getCLPlatforms():Array[org.jocl.cl_platform_id] = {
val num_platforms_ptr = Array(0)
org.jocl.CL.clGetPlatformIDs(0, null, num_platforms_ptr)
val num_platforms = num_platforms_ptr(0)
val platforms = Array.ofDim[org.jocl.cl_platform_id](num_platforms)
org.jocl.CL.clGetPlatformIDs(platforms.length, platforms, null)
platforms
}
private def getCLDevices(platform: org.jocl.cl_platform_id, device_type: Long):Array[org.jocl.cl_device_id] = {
val num_devices_ptr = Array(0)
org.jocl.CL.clGetDeviceIDs(platform, device_type.toInt, 0, null, num_devices_ptr)
val num_devices = num_devices_ptr(0)
val devices = Array.ofDim[org.jocl.cl_device_id](num_devices)
org.jocl.CL.clGetDeviceIDs(platform, device_type.toInt, num_devices, devices, null)
devices
}
private def createCLContext(platform: org.jocl.cl_platform_id, devices: Array[org.jocl.cl_device_id]):org.jocl.cl_context = {
val properties = new org.jocl.cl_context_properties()
properties.addProperty(org.jocl.CL.CL_CONTEXT_PLATFORM, platform)
org.jocl.CL.clCreateContext(properties, devices.length, devices, null, null, null)
}
private def createCLQueue(context: org.jocl.cl_context, device: org.jocl.cl_device_id):org.jocl.cl_command_queue = {
//val properties = new org.jocl.cl_queue_properties()
//properties.addProperty(org.jocl.CL.CL_QUEUE_PROPERTIES, org.jocl.CL.CL_QUEUE_PROFILING_ENABLE)
//org.jocl.CL.clCreateCommandQueueWithProperties(context, device, properties, null)
org.jocl.CL.clCreateCommandQueue(context, device, 0, null)
}
}
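// Hedged usage sketch (added for illustration, not part of the original file): acquire an
// OpenCL handle, use its context and queue, and release the native resources when done.
object CLHandleUsageSketch {
  def main(args: Array[String]): Unit = {
    val handle = CLHandle()
    try {
      println(s"Created OpenCL context ${handle.context} with queue ${handle.queue}")
    } finally {
      handle.free() // releases the command queue and context via JOCL
    }
  }
}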
|
phlip9/BIDMat
|
src/main/scala/BIDMat/CLHandle.scala
|
Scala
|
bsd-3-clause
| 2,391
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet.module
import org.apache.mxnet._
import org.slf4j.LoggerFactory
import org.slf4j.Logger
import scala.collection.mutable.ArrayBuffer
import org.apache.mxnet.optimizer.SGD
import scala.collection.immutable.ListMap
import org.apache.mxnet.module.BaseModule._
/**
* This module helps to deal efficiently with varying-length inputs.
* @param symGen A function when called with a bucket key, returns a triple
* ``(symbol, dataNames, labelNames)``.
* @param defaultBucketKey The key for the default bucket.
* @param contexts Default is cpu().
* @param workLoadList Default `None`, indicating uniform workload.
* @param fixedParamNames Default `None`, indicating no network parameters are fixed.
*/
class BucketingModule(symGen: AnyRef => (Symbol, IndexedSeq[String], IndexedSeq[String]),
defaultBucketKey: AnyRef, contexts: Array[Context] = Context.cpu(),
workLoadList: Option[IndexedSeq[Float]] = None,
fixedParamNames: Option[Set[String]] = None) extends BaseModule {
private val logger = LoggerFactory.getLogger(classOf[BucketingModule])
{
val (sym, dNames, lNames) = symGen(defaultBucketKey)
val dataNameList = if (dNames == null) IndexedSeq.empty[String] else dNames
val labelNameList = if (lNames == null) IndexedSeq.empty[String] else lNames
val fixedParamNameList = fixedParamNames.getOrElse(IndexedSeq.empty[String]).toIndexedSeq
_checkInputNames(sym, dataNameList, "data", true, logger)
_checkInputNames(sym, labelNameList, "label", false, logger)
_checkInputNames(sym, fixedParamNameList, "fixed_param", true, logger)
}
private val workLoads = workLoadList.getOrElse(contexts.map(_ => 1f).toIndexedSeq)
require(workLoads.size == contexts.length)
private val _buckets = scala.collection.mutable.Map[AnyRef, Module]()
private var _currModule: Module = null
private var _currBucketKey = defaultBucketKey
private var paramsDirty = false
// Internal function to reset binded state.
private def resetBind(): Unit = {
this.binded = false
this._buckets.clear()
this._currModule = null
this._currBucketKey = defaultBucketKey
}
// Symbol information
// A list of names for data required by this module.
override def dataNames: IndexedSeq[String] = {
if (this.binded) this._currModule.dataNames
else this.symGen(this.defaultBucketKey)._2
}
// A list of names for the outputs of this module.
override def outputNames: IndexedSeq[String] = {
if (this.binded) this._currModule.outputNames
else this.symGen(this.defaultBucketKey)._1.listOutputs()
}
// Input/Output information
// A list of (name, shape) pairs specifying the data inputs to this module.
override def dataShapes: IndexedSeq[DataDesc] = {
require(this.binded)
this._currModule.dataShapes
}
/**
* A list of (name, shape) pairs specifying the label inputs to this module.
* If this module does not accept labels -- either it is a module without loss
* function, or it is not binded for training, then this should return an empty
* list `[]`.
*/
override def labelShapes: IndexedSeq[DataDesc] = {
require(this.binded)
this._currModule.labelShapes
}
// A list of (name, shape) pairs specifying the outputs of this module.
override def outputShapes: IndexedSeq[(String, Shape)] = {
require(this.binded)
this._currModule.outputShapes
}
/**
* Get current parameters.
* `(arg_params, aux_params)`, each a dictionary of name to parameters (in
* `NDArray`) mapping.
*/
override def getParams: (Map[String, NDArray], Map[String, NDArray]) = {
require(binded && paramsInitialized)
this._currModule.paramsDirty = this.paramsDirty
val params = this._currModule.getParams
this.paramsDirty = false
params
}
/**
* Assign parameter and aux state values.
* @param argParams Dictionary of name to value (`NDArray`) mapping.
* @param auxParams Dictionary of name to value (`NDArray`) mapping.
* @param allowMissing
* If true, params could contain missing values, and the initializer will be
* called to fill those missing params.
* @param forceInit
* If true, will force re-initialize even if already initialized.
* @param allowExtra
* Whether allow extra parameters that are not needed by symbol.
* If this is True, no error will be thrown when argParams or auxParams
* contain extra parameters that is not needed by the executor.
*/
override def setParams(argParams: Map[String, NDArray],
auxParams: Map[String, NDArray],
allowMissing: Boolean = false,
forceInit: Boolean = true,
allowExtra: Boolean = false): Unit = {
if (!allowMissing) {
this.initParams(null, argParams, auxParams, allowMissing, forceInit, allowExtra)
} else if (this.paramsInitialized && !forceInit) {
logger.warn("Parameters already initialized and forceInit=false. " +
"setParams call ignored.")
} else {
this._currModule.setParams(
argParams, auxParams, allowMissing, forceInit, allowExtra)
// because we didn't update self._arg_params, they are dirty now.
this.paramsDirty = true
this.paramsInitialized = true
}
}
/**
* Initialize the parameters and auxiliary states.
* @param initializer Called to initialize parameters if needed.
* @param argParams If not None, should be a dictionary of existing arg_params.
* Initialization will be copied from that.
* @param auxParams If not None, should be a dictionary of existing aux_params.
* Initialization will be copied from that.
* @param allowMissing If true, params could contain missing values,
* and the initializer will be called to fill those missing params.
* @param forceInit If true, will force re-initialize even if already initialized.
* @param allowExtra Whether allow extra parameters that are not needed by symbol.
* If this is True, no error will be thrown when argParams or auxParams
* contain extra parameters that is not needed by the executor.
*/
override def initParams(initializer: Initializer = new Uniform(0.01f),
argParams: Map[String, NDArray] = null,
auxParams: Map[String, NDArray] = null,
allowMissing: Boolean = false,
forceInit: Boolean = false,
allowExtra: Boolean = false): Unit = {
if (paramsInitialized && !forceInit) {
return
}
require(binded, "call bind before initializing the parameters")
this._currModule.initParams(initializer, argParams, auxParams,
allowMissing, forceInit, allowExtra)
this.paramsDirty = false
this.paramsInitialized = true
}
/**
* Bind the symbols to construct executors. This is necessary before one
* can perform computation with the module.
* @param dataShapes Typically is `dataIter.provideData`.
* @param labelShapes Typically is `dataIter.provideLabel`.
* @param forTraining Default is `true`. Whether the executors should be bind for training.
* @param inputsNeedGrad Default is `false`.
* Whether the gradients to the input data need to be computed.
* Typically this is not needed.
* But this might be needed when implementing composition of modules.
* @param forceRebind Default is `false`.
* This function does nothing if the executors are already binded.
* But with this `true`, the executors will be forced to rebind.
* @param sharedModule Default is `None`. This is used in bucketing.
* When not `None`, the shared module essentially corresponds to
* a different bucket -- a module with different symbol
* but with the same sets of parameters
* (e.g. unrolled RNNs with different lengths).
*/
override def bind(dataShapes: IndexedSeq[DataDesc],
labelShapes: Option[IndexedSeq[DataDesc]] = None,
forTraining: Boolean = true, inputsNeedGrad: Boolean = false,
forceRebind: Boolean = false, sharedModule: Option[BaseModule] = None,
gradReq: String = "write"): Unit = {
// in case we already initialized params, keep it
val (argParams, auxParams) =
if (this.paramsInitialized) this.getParams
else (null, null)
// force rebinding is typically used when one want to switch from
// training to prediction phase.
if (forceRebind) this.resetBind()
if (this.binded) {
logger.warn("Already bound, ignoring bind()")
return
}
require(sharedModule == None,
"shared_module for BucketingModule is not supported")
this.forTraining = forTraining
this.inputsNeedGrad = inputsNeedGrad
this.binded = true
val (sym, dNames, lNames) = this.symGen(this.defaultBucketKey)
val module = new Module(sym, dNames, lNames, this.contexts,
this.workLoadList, this.fixedParamNames)
module.bind(dataShapes, labelShapes, forTraining, inputsNeedGrad,
forceRebind = false, sharedModule = None, gradReq)
this._currModule = module
this._currBucketKey = this.defaultBucketKey
this._buckets(this.defaultBucketKey) = module
// copy back saved params, if already initialized
if (this.paramsInitialized) {
this.setParams(argParams, auxParams)
}
}
/**
* Switches to a different bucket. This will change ``this._currModule``.
* @param bucketKey The key of the target bucket.
* @param dataShapes Typically is `dataIter.provideData`.
* @param labelShapes Typically is `dataIter.provideLabel`.
*/
def switchBucket(bucketKey: AnyRef, dataShapes: IndexedSeq[DataDesc],
labelShapes: Option[IndexedSeq[DataDesc]] = None): Unit = {
require(this.binded, "call bind before switching bucket")
if (!this._buckets.contains(bucketKey)) {
val (sym, dNames, lNames) = this.symGen(bucketKey)
val module = new Module(sym, dNames, lNames, this.contexts,
this.workLoadList, this.fixedParamNames)
module.bind(dataShapes, labelShapes, this._currModule.forTraining,
this._currModule.inputsNeedGrad, forceRebind = false,
sharedModule = Option(this._buckets(this.defaultBucketKey)))
this._buckets(bucketKey) = module
}
this._currModule = this._buckets(bucketKey)
this._currBucketKey = bucketKey
}
/**
* Install and initialize optimizers.
* @param kvstore
* @param optimizer
* @param resetOptimizer Default `True`, indicating whether we should set `rescaleGrad`
* & `idx2name` for optimizer according to executorGroup
* @param forceInit Default `False`, indicating whether we should force re-initializing
* the optimizer in the case an optimizer is already installed.
*/
override def initOptimizer(kvstore: String = "local", optimizer: Optimizer = new SGD(),
resetOptimizer: Boolean = true, forceInit: Boolean = false): Unit = {
require(binded && paramsInitialized)
if (optimizerInitialized && !forceInit) {
logger.warn("optimizer already initialized, ignoring ...")
} else {
this._currModule.initOptimizer(kvstore, optimizer, resetOptimizer, forceInit)
for (mod <- this._buckets.values) {
if (mod != this._currModule) mod.borrowOptimizer(this._currModule)
}
this.optimizerInitialized = true
}
}
/**
* Prepares a data batch for forward.
* @param dataBatch input data
*/
def prepare(dataBatch: DataBatch): Unit = {
// perform bind if haven't done so
require(this.binded && this.paramsInitialized)
val bucketKey = dataBatch.bucketKey
val originalBucketKey = this._currBucketKey
this.switchBucket(bucketKey, dataBatch.provideData, Option(dataBatch.provideLabel))
// switch back
this.switchBucket(originalBucketKey, null, None)
}
/**
* Forward computation.
* @param dataBatch input data
* @param isTrain Default is `None`, which means `is_train` takes the value of `for_training`.
*/
override def forward(dataBatch: DataBatch, isTrain: Option[Boolean] = None): Unit = {
require(binded && paramsInitialized)
this.switchBucket(dataBatch.bucketKey, dataBatch.provideData,
Option(dataBatch.provideLabel))
this._currModule.forward(dataBatch, isTrain)
}
/**
* Backward computation.
* @param outGrads Gradient on the outputs to be propagated back.
* This parameter is only needed when bind is called
* on outputs that are not a loss function.
*/
override def backward(outGrads: Array[NDArray] = null): Unit = {
require(binded && paramsInitialized)
this._currModule.backward(outGrads)
}
// Update parameters according to the installed optimizer and the gradients computed
// in the previous forward-backward cycle.
override def update(): Unit = {
require(binded && paramsInitialized && optimizerInitialized)
this.paramsDirty = true
this._currModule.update()
}
/**
* Get outputs of the previous forward computation.
* @return In the case when data-parallelism is used,
* the outputs will be collected from multiple devices.
* The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
* those `NDArray` might live on different devices.
*/
override def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
require(binded && paramsInitialized)
this._currModule.getOutputs()
}
/**
* Get outputs of the previous forward computation.
* @return In the case when data-parallelism is used,
* the outputs will be merged from multiple devices,
* as they look like from a single executor.
* The results will look like `[out1, out2]`
*/
override def getOutputsMerged(): IndexedSeq[NDArray] = {
require(binded && paramsInitialized)
this._currModule.getOutputsMerged()
}
/**
* Get the gradients to the inputs, computed in the previous backward computation.
* @return In the case when data-parallelism is used,
* the grads will be collected from multiple devices.
* The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
* those `NDArray` might live on different devices.
*/
override def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
require(binded && paramsInitialized && inputsNeedGrad)
this._currModule.getInputGrads()
}
/**
* Get the gradients to the inputs, computed in the previous backward computation.
* @return In the case when data-parallelism is used,
* the grads will be merged from multiple devices,
* as they look like from a single executor.
* The results will look like `[grad1, grad2]`
*/
override def getInputGradsMerged(): IndexedSeq[NDArray] = {
require(binded && paramsInitialized && inputsNeedGrad)
this._currModule.getInputGradsMerged()
}
/**
* Evaluate and accumulate evaluation metric on outputs of the last forward computation.
* @param evalMetric
* @param labels
*/
override def updateMetric(evalMetric: EvalMetric, labels: IndexedSeq[NDArray]): Unit = {
require(binded && paramsInitialized)
this._currModule.updateMetric(evalMetric, labels)
}
override def getSymbol: Symbol = {
require(binded)
this._currModule.symbol
}
// Install monitor on all executors
override def installMonitor(monitor: Monitor): Unit = {
require(binded)
for (mod <- this._buckets.values) mod.installMonitor(monitor)
}
}
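// Hedged usage sketch (added for illustration, not part of the original file): a symGen
// maps a bucket key (here, a boxed sequence length) to an unrolled symbol plus its data
// and label names. `buildUnrolledSymbol` is a hypothetical helper shown only to
// illustrate the shape of the triple; it is not part of the MXNet API.
//
//   val symGen: AnyRef => (Symbol, IndexedSeq[String], IndexedSeq[String]) = key => {
//     val seqLen = key.asInstanceOf[Int]
//     val sym = buildUnrolledSymbol(seqLen)
//     (sym, IndexedSeq("data"), IndexedSeq("label"))
//   }
//   val module = new BucketingModule(symGen, defaultBucketKey = Int.box(32))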
|
indhub/mxnet
|
scala-package/core/src/main/scala/org/apache/mxnet/module/BucketingModule.scala
|
Scala
|
apache-2.0
| 16,772
|
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.parse.hocon
import laika.config.{ConfigParser, ConfigParserErrors}
import munit.FunSuite
/**
* @author Jens Halm
*/
class HoconErrorSpec extends FunSuite {
def run (input: String, expectedMessage: String)(implicit loc: munit.Location): Unit = {
ConfigParser.parse(input).resolve() match {
case Right(result) => fail(s"Unexpected parser success: $result")
case Left(ConfigParserErrors(errors)) =>
assertEquals(errors.size, 1)
assertEquals(errors.head.toString, expectedMessage)
case Left(other) => fail(s"Unexpected parser error: $other")
}
}
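  // Note (added for clarity, not in the original file): each expected message below follows
  // the pattern "[line.column] failure: <description>", followed by the offending source
  // line and a caret marking the failure column.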
test("missing closing quotes in a top level property") {
val input =
"""
|a = "foo bar
|
|b = 9
""".stripMargin
val expectedMessage =
"""[2.13] failure: Expected closing '"'
|
|a = "foo bar
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing quotes in an array property") {
val input =
"""
|a = [
| 3
| 4
| "some text
|]
|
|b = 9
""".stripMargin
val expectedMessage =
"""[5.12] failure: Expected closing '"'
|
| "some text
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing quotes in a nested object property") {
val input =
"""
|a {
| aa = "some text
| bb = 7
|}
|
|b = 9
""".stripMargin
val expectedMessage =
"""[3.17] failure: Expected closing '"'
|
| aa = "some text
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing quotes in a substitution reference") {
val input =
"""
|a = ${"foo.bar}
|
|b = 9
""".stripMargin
val expectedMessage =
"""[2.16] failure: Invalid key: Expected closing '"'
|
|a = ${"foo.bar}
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing quotes in a property key") {
val input =
"""
|"a = 7
|
|b = 9
""".stripMargin
val expectedMessage =
"""[2.7] failure: Invalid key: Expected closing '"'
|
|"a = 7
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid characters for unquoted strings in a top level property value") {
val input =
"""
|a = foo ? bar
|
|b = 9
""".stripMargin
val expectedMessage =
"""[2.9] failure: Illegal character in unquoted string, expected delimiters are one of '#', ',', '\n', '}'
|
|a = foo ? bar
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid characters for unquoted strings in an array property") {
val input =
"""
|a = [
| 3
| 4
| some ? text
|]
|
|b = 9
""".stripMargin
val expectedMessage =
"""[5.7] failure: Illegal character in unquoted string, expected delimiters are one of '#', ',', '\n', ']'
|
| some ? text
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid characters for unquoted strings in a nested object property value") {
val input =
"""
|a {
| aa = some ? text
| bb = 7
|}
|
|b = 9
""".stripMargin
val expectedMessage =
"""[3.12] failure: Illegal character in unquoted string, expected delimiters are one of '#', ',', '\n', '}'
|
| aa = some ? text
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid characters for unquoted strings in a substitution reference") {
val input =
"""
|a = ${foo = bar}
|
|b = 9
""".stripMargin
val expectedMessage =
"""[2.11] failure: Invalid key: Illegal character in unquoted string, expected delimiter is '}'
|
|a = ${foo = bar}
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid characters for unquoted strings in a property key") {
val input =
"""
|a } c = 7
|
|b = 9
""".stripMargin
val expectedMessage =
"""[2.3] failure: Invalid key: Illegal character in unquoted string, expected delimiters are one of '+=', ':', '=', '{'
|
|a } c = 7
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid characters for unquoted strings as a consequence of a missing separator between fields") {
val input =
"""
|a {
| b { x = 5 y = 6 }
|
| c = 9
|}
|
|d = 7
|""".stripMargin
val expectedMessage =
"""[3.15] failure: Illegal character in unquoted string, expected delimiters are one of '#', ',', '\n', '}'
|
| b { x = 5 y = 6 }
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid escape sequences in a top level property") {
val input =
"""
|a = "foo \x bar"
|
|b = 9
""".stripMargin
val expectedMessage =
"""[2.11] failure: Invalid escape sequence: \x
|
|a = "foo \x bar"
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing brackets for arrays in a top level property") {
val input =
"""
|a = [3, 4, 5
|
|b = 9
|""".stripMargin
val expectedMessage =
"""[4.3] failure: Illegal character in unquoted string, expected delimiters are one of '#', ',', '\n', ']'
|
|b = 9
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing brackets for arrays in a top level property in a multiline array") {
val input =
"""
|a = [
| 3
| 4
| 5
|
|b = 9
""".stripMargin
val expectedMessage =
"""[7.3] failure: Illegal character in unquoted string, expected delimiters are one of '#', ',', '\n', ']'
|
|b = 9
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing brackets for arrays in a nested object property") {
val input =
"""
|a {
| b = [
| 3
| 4
| 5
|
| c = 9
|}
|
|d = 7
""".stripMargin
val expectedMessage =
"""[8.5] failure: Illegal character in unquoted string, expected delimiters are one of '#', ',', '\n', ']'
|
| c = 9
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing brackets for objects in a top level property") {
val input =
"""
|a {
| x = 5
|
|b = 9
|""".stripMargin
val expectedMessage =
"""[6.1] failure: Expected closing '}'
|
|
|^""".stripMargin
run(input, expectedMessage)
}
test("missing closing brackets for objects in a multiline array") {
val input =
"""
|a = [
| { x = 3
| { x = 4 }
| { x = 5 }
|]
|
|b = 9
""".stripMargin
val expectedMessage =
"""[4.2] failure: Expected closing '}'
|
| { x = 4 }
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing brackets for objects in a nested object property") {
val input =
"""
|a {
| b { x = 5
|
| c = 9
|}
|
|d = 7
|""".stripMargin
val expectedMessage =
"""[9.1] failure: Expected closing '}'
|
|
|^""".stripMargin
run(input, expectedMessage)
}
test("missing '=' or ':' between key and value in a top level property") {
val input =
"""
|a 5
|
|b = 9
|""".stripMargin
val expectedMessage =
"""[2.4] failure: Expected separator after key ('=', '+=', ':' or '{')
|
|a 5
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing '=' or ':' between key and value in a multiline array") {
val input =
"""
|a = [
| { x 3 }
| { y = 4 }
| { z = 5 }
|]
|
|b = 9
""".stripMargin
val expectedMessage =
"""[3.8] failure: Expected separator after key ('=', '+=', ':' or '{')
|
| { x 3 }
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing '=' or ':' between key and value in a nested object property") {
val input =
"""
|a {
| b { x 5 }
|
| c = 9
|}
|
|d = 7
|""".stripMargin
val expectedMessage =
"""[3.11] failure: Expected separator after key ('=', '+=', ':' or '{')
|
| b { x 5 }
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing braces for substitution references in a top level property") {
val input =
"""
|a = ${foo.bar
|
|b = 9
|""".stripMargin
val expectedMessage =
"""[2.14] failure: Invalid key: Illegal character in unquoted string, expected delimiter is '}'
|
|a = ${foo.bar
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing braces for substitution references in a multiline array") {
val input =
"""
|a = [
| ${foo.bar
| 4
| 5
|]
|
|b = 9
""".stripMargin
val expectedMessage =
"""[3.11] failure: Invalid key: Illegal character in unquoted string, expected delimiter is '}'
|
| ${foo.bar
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing braces for substitution references in a nested object property") {
val input =
"""
|a {
| b = ${foo.bar
|
| c = 9
|}
|
|d = 7
|""".stripMargin
val expectedMessage =
"""[3.16] failure: Invalid key: Illegal character in unquoted string, expected delimiter is '}'
|
| b = ${foo.bar
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing triple quotes in a top level object") {
val input =
"""
|a = +++foo bar
| baz baz
|
|b = 9""".stripMargin.replace("+", "\"")
val expectedMessage =
"""[5.6] failure: Expected closing triple quote
|
|b = 9
| ^""".stripMargin
run(input, expectedMessage)
}
test("missing closing triple quotes in a nested object") {
val input =
"""
|a = {
| aa = +++foo bar
| baz baz
|}
|
|b = 9""".stripMargin.replace("+", "\"")
val expectedMessage =
"""[7.6] failure: Expected closing triple quote
|
|b = 9
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid include syntax - missing closing quotes") {
val input =
"""
|include "foo.conf
|
|b = 9""".stripMargin
val expectedMessage =
"""[2.18] failure: Expected closing '"'
|
|include "foo.conf
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid include syntax - missing closing quotes (file syntax)") {
val input =
"""
|include file("foo.conf)
|
|b = 9""".stripMargin
val expectedMessage =
"""[2.24] failure: Expected closing '"'
|
|include file("foo.conf)
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid include syntax - missing closing parenthesis (file syntax)") {
val input =
"""
|include file("foo.conf"
|
|b = 9""".stripMargin
val expectedMessage =
"""[2.24] failure: Expected closing ')'
|
|include file("foo.conf"
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid include syntax - missing closing parenthesis (required/file syntax)") {
val input =
"""
|include required(file("foo.conf")
|
|b = 9""".stripMargin
val expectedMessage =
"""[2.34] failure: Expected closing ')'
|
|include required(file("foo.conf")
| ^""".stripMargin
run(input, expectedMessage)
}
test("invalid include syntax - missing quotes") {
val input =
"""
|include file(foo.conf)
|
|b = 9""".stripMargin
val expectedMessage =
"""[2.14] failure: Expected quoted string
|
|include file(foo.conf)
| ^""".stripMargin
run(input, expectedMessage)
}
}
|
planet42/Laika
|
core/shared/src/test/scala/laika/parse/hocon/HoconErrorSpec.scala
|
Scala
|
apache-2.0
| 13,852
|
package com.kpbochenek.mieszkania
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import com.typesafe.scalalogging.LazyLogging
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Directives._
/** Created by kpbochenek on 8/4/16. */
object MieszkaniaApp extends LazyLogging {
def main(args: Array[String]): Unit = {
logger.info("Wynajem mieszkan START")
val host: String = args(0)
logger.info("Binduje sie na hoscie {}", host)
implicit val system = ActorSystem("mieszkania")
implicit val materializer = ActorMaterializer()
implicit val executionContext = system.dispatcher
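// Single route: GET /hello returns a static HTML greeting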
val route =
path("hello") {
get {
complete(HttpEntity(ContentTypes.`text/html(UTF-8)`, "<h1>Say hello to akka-http</h1>"))
}
}
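// Start the HTTP server on the given host, port 8080; bindAndHandle returns a Future[Http.ServerBinding]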
val bindingFuture = Http().bindAndHandle(route, host, 8080)
}
}
|
kpbochenek/mieszkania
|
src/main/scala/com/kpbochenek/mieszkania/MieszkaniaApp.scala
|
Scala
|
apache-2.0
| 912
|
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.calculations
import org.joda.time.{Days, LocalDate}
import uk.gov.hmrc.ct.computations.HmrcAccountingPeriod
object AccountingPeriodHelper {
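// Inclusive count of days in the whole accounting period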
def daysInAccountingPeriod(accountingPeriod: HmrcAccountingPeriod) = daysBetween(accountingPeriod.cp1.value, accountingPeriod.cp2.value)
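// Number of accounting-period days falling within the given UK financial year (period clamped to that year's bounds)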
def accountingPeriodDaysInFinancialYear(year: Int, accountingPeriod: HmrcAccountingPeriod): BigDecimal = {
val (fyStartDate, fyEndDate) = financialYearStartingIn(year)
val start = if (accountingPeriod.cp1.value.isBefore(fyStartDate)) fyStartDate else accountingPeriod.cp1.value
val end = if (accountingPeriod.cp2.value.isAfter(fyEndDate)) fyEndDate else accountingPeriod.cp2.value
BigDecimal(daysBetween(start, end))
}
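// True when the period ends in a later financial year than the one it starts in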
def accountingPeriodSpansTwoFinancialYears(accountingPeriod: HmrcAccountingPeriod): Boolean = {
fallsInFinancialYear(accountingPeriod.cp2.value) > fallsInFinancialYear(accountingPeriod.cp1.value)
}
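// A UK financial year runs from 1 April of the given year to 31 March of the next,
// e.g. financialYearStartingIn(2016) == (2016-04-01, 2017-03-31)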
def financialYearStartingIn(year: Int): (LocalDate, LocalDate) = (new LocalDate(year, 4, 1), new LocalDate(year + 1, 3, 31))
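// Dates in January to March belong to the financial year that began the previous calendar year,
// e.g. fallsInFinancialYear(new LocalDate(2017, 2, 1)) == 2016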
def fallsInFinancialYear(date: LocalDate): Int = if (date.getMonthOfYear < 4) date.getYear - 1 else date.getYear
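// Inclusive day count: both endpoints are counted,
// e.g. daysBetween(new LocalDate(2016, 4, 1), new LocalDate(2017, 3, 31)) == 365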
def daysBetween(start: LocalDate, end: LocalDate): Int = Days.daysBetween(start, end).getDays + 1
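// Rejects periods that start after they end, run longer than one calendar year,
// or start before the supported date range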
def validateAccountingPeriod(accountingPeriod: HmrcAccountingPeriod) = {
if (accountingPeriod.cp1.value.isAfter(accountingPeriod.cp2.value)) {
throw new InvalidAccountingPeriodException("Accounting Period start date must be before the end date")
}
if (daysBetween(accountingPeriod.cp1.value, accountingPeriod.cp2.value) > maximumNumberOfDaysInAccountingPeriod(accountingPeriod)) {
throw new InvalidAccountingPeriodException("Accounting Period must not be longer than one calendar year")
}
if (accountingPeriod.cp1.value.isBefore(new LocalDate(2006, 10, 2))) {
throw new InvalidAccountingPeriodException("Accounting Period must not be before 1st October 2006")
}
}
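// Number of days in one calendar year measured from the period start date (365 or 366)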
private def maximumNumberOfDaysInAccountingPeriod(accountingPeriod: HmrcAccountingPeriod): BigDecimal = {
val startDate = accountingPeriod.cp1.value
val endDate = startDate.withYear(startDate.getYear + 1)
daysBetween(startDate, endDate) - 1
}
}
class InvalidAccountingPeriodException(message: String) extends Exception(message)
|
ahudspith-equalexperts/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600/calculations/AccountingPeriodHelper.scala
|
Scala
|
apache-2.0
| 2,975
|