code: string (length 5 to 1M)
repo_name: string (length 5 to 109)
path: string (length 6 to 208)
language: string (1 distinct value)
license: string (15 distinct values)
size: int64 (5 to 1M)
/**
  * Created by YuanZhaokang on 2016/3/27 0027.
  */
import java.io.File
import java.net.URI
import java.util
import javax.imageio.ImageIO

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.{SparkConf, SparkContext}

object Draw {
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("Draw").setMaster(Path.localSparkMaster)
    val sc = new SparkContext(conf)
    val core: Int = 16
    val zIndex: Int = 10
    val lngLat = sc.textFile(Path.localHdfsIp + "/p*")
    lngLat.persist()
    Range(10, 15).foreach { k =>
      val index: Int = k // tile zoom level
      val multi = index - zIndex
      val startX: Int = 851 * Math.pow(2, multi).toInt
      val startY: Int = 418 * Math.pow(2, multi).toInt
      val endX: Int = 854 * Math.pow(2, multi).toInt + multi
      val endY: Int = 421 * Math.pow(2, multi).toInt + multi
      // val lngLat = sc.textFile(Path.localHdfsIp + "/p*")
      val pointsInfo = lngLat.map { line =>
        val coordinates = line.split(",")
        val lng = coordinates(0).toDouble
        val lat = coordinates(1).toDouble
        val tile = getTileNumber(lng, lat, index)
        val x: Int = tile._1
        val y: Int = tile._2
        val z: Int = tile._3
        val pixel = getPixelCoordinate(x, y, z, lng, lat)
        pixel
      }
      val p = Array.ofDim[util.ArrayList[Point]](endX - startX + 1, endY - startY + 1)
      for (i <- 0 until endX - startX + 1) {
        for (j <- 0 until endY - startY + 1) {
          p(i)(j) = new util.ArrayList[Point]()
          val points = pointsInfo.filter(p => p._1 == startX + i && p._2 == startY + j).map(p => (p._4, p._5))
          points.collect().map(t => p(i)(j).add(new Point(t._1.toInt, t._2.toInt)))
        }
      }
      sc.parallelize(startX to endX).foreach(column =>
        for (row <- startY to endY) {
          val render = new Heatmap()
          val image = render.render(256, 256, 10, 120, p(column - startX)(row - startY))
          val conf = new Configuration()
          val path = new Path(Path.localHdfsIp + index + "/" + column + "/")
          val fs = FileSystem.get(URI.create(path.toString()), conf)
          if (!fs.exists(path)) fs.mkdirs(path)
          ImageUtils.writeImage(image, path.toString() + "/" + row + ".png")
          // ImageIO.write(image, "png", new File("/home/wind/yuanzk/lnglat/" + row + ".png"))
        }
      )
    }
    sc.stop()
  }

  def getTileNumber(lng: Double, lat: Double, z: Int): Tuple3[Int, Int, Int] = {
    val x: Double = Math.pow(2, z - 1) * (lng / 180 + 1)
    val y: Double = Math.pow(2, z - 1) *
      (1 - (Math.log(Math.tan(Math.PI * lat / 180) + Math.sqrt(1 + Math.pow(Math.tan(Math.PI * lat / 180), 2)))) / Math.PI)
    (x.toInt, y.toInt, z.toInt)
  }

  def getPixelCoordinate(x: Int, y: Int, z: Int, lng: Double, lat: Double): Tuple5[Int, Int, Int, Double, Double] = {
    val m = ((lng / 180 + 1) / Math.pow(2, 1 - z) - x) * 256
    val n = ((1 - Math.log(Math.tan((lat + 90) * Math.PI / 360)) / Math.PI) / Math.pow(2, 1 - z) - y) * 256
    (x, y, z, m, n)
  }
}
yuanzhaokang/ParallelizeHeatmap
src/Draw.scala
Scala
gpl-2.0
3,271
import io.circe.generic.semiauto._
import shapeless._

case class 苹果(苹果名称: String, 苹果大小: Int)
case class 桃(桃名称: String, 桃大小: Int)
case class 鸡(鸡名称: String, 鸡大小: Int, 神坛: Option[神坛])
case class 鹅(鹅名称: String, 鹅大小: Int)
case class 走地鸡(走地鸡名称: String, 走地鸡大小: Int)
case class 蛋糕(蛋糕名称: String, 蛋糕大小: Int)
case class 神坛(苹果: 苹果, 桃: 桃, 鸡: 鸡, 鹅: 鹅, 走地鸡: 走地鸡, 蛋糕: 蛋糕)

trait 水果 {
  type Init[T] = EncoderInit[T, 水果]
  type Init1[T] = DecoderInit[T, 水果]
  implicit def implicit1: Init[苹果] = EncoderInit(deriveEncoder)
  implicit def implicit2: Init1[苹果] = DecoderInit(deriveDecoder)
  implicit def implicit3: Init[桃] = EncoderInit(deriveEncoder)
  implicit def implicit4: Init1[桃] = DecoderInit(deriveDecoder)
}
object 水果 extends 水果

trait 禽类 {
  object implicitI extends ImportCompanion[神坛包裹类 :: HNil]
  import implicitI._
  type Init[T] = EncoderInit[T, 禽类]
  type Init1[T] = DecoderInit[T, 禽类]
  implicit def implicit1: Init[鸡] = EncoderInit(deriveEncoder)
  implicit def implicit2: Init1[鸡] = DecoderInit(deriveDecoder)
  implicit def implicit3: Init[鹅] = EncoderInit(deriveEncoder)
  implicit def implicit4: Init1[鹅] = DecoderInit(deriveDecoder)
  implicit def implicit5: Init[走地鸡] = EncoderInit(deriveEncoder)
  implicit def implicit6: Init1[走地鸡] = DecoderInit(deriveDecoder)
}
object 禽类 extends 禽类

trait 蛋糕类 {
  type Init[T] = EncoderInit[T, 蛋糕类]
  type Init1[T] = DecoderInit[T, 蛋糕类]
  implicit def implicit1: Init[蛋糕] = EncoderInit(deriveEncoder)
  implicit def implicit2: Init1[蛋糕] = DecoderInit(deriveDecoder)
}
object 蛋糕类 extends 蛋糕类

trait 神坛包裹类 {
  object implicitI extends ImportCompanion[水果 :: 禽类 :: 蛋糕类 :: HNil]
  import implicitI._
  type Init[T] = EncoderInit[T, 神坛包裹类]
  type Init1[T] = DecoderInit[T, 神坛包裹类]
  implicit def implicit1: Init[神坛] = EncoderInit(deriveEncoder)
  implicit def implicit2: Init1[神坛] = DecoderInit(deriveDecoder)
}
object 神坛包裹类 extends 神坛包裹类
djx314/ubw
raw03-祭祀日常小百科/src/main/scala/Prepare.scala
Scala
bsd-3-clause
2,248
package is
package solidninja
package scalatest

import cats.effect._

/**
  * Functions that help testing purely functional code in ScalaTest
  */
trait Fs2Spec {
  def io(t: IO[Unit]) = t.unsafeRunSync()
}
solidninja/openshift-scala-api
src/test/scala/is/solidninja/scalatest/Fs2Spec.scala
Scala
mit
210
package im.tox.antox.tox

/**
  * Trait for Tox interval times
  */
trait Intervals {

  /**
    * Returns how many milliseconds should be used as an interval between tox iterations
    */
  def interval: Int
}

/**
  * An enumeration to store the different possible levels the app can be 'working' at.
  * Enumeration makes it extensible
  */
object IntervalLevels extends Enumeration {
  type IntervalLevels = Value
  val WORKING = Value(50)
  val AWAKE = Value(1000)
}
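A minimal usage sketch (not part of Intervals.scala above; the ToxLoopIntervals name and the working flag are illustrative assumptions): an implementation that reads its interval from IntervalLevels, whose enumeration ids store the millisecond counts.

// Hypothetical illustration only: ToxLoopIntervals and `working` are not in the file above.
class ToxLoopIntervals(working: Boolean) extends Intervals {
  // A busy app iterates every 50 ms, an idle one every 1000 ms.
  def interval: Int =
    if (working) IntervalLevels.WORKING.id else IntervalLevels.AWAKE.id
}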
Ansa89/Antox
app/src/main/scala/im/tox/antox/tox/Intervals.scala
Scala
gpl-3.0
464
package se.ramn.bottfarmen.example.wren

import collection.JavaConverters._
import collection.immutable.Seq

import se.ramn.bottfarmen.api.BotCommander
import se.ramn.bottfarmen.api.GameState
import se.ramn.bottfarmen.api
import se.ramn.bottfarmen.api.Command
import se.ramn.bottfarmen.api.Move
import se.ramn.bottfarmen.example.BaseBot
import se.ramn.bottfarmen.example.BaseCommander

class WrenBot(var underlying: api.Bot) extends BaseBot {
  case class Pos(x: Int, y: Int)

  var gameState: GameState = null
  var myPos: Pos = null

  def enemyInRange: Boolean = {
    println(enemiesInSight)
    true
  }

  def possibleMoves = Map(
    (Pos(myPos.x + 1, myPos.y), 'e'),
    (Pos(myPos.x - 1, myPos.y), 'w'),
    (Pos(myPos.x, myPos.y - 1), 'n'),
    (Pos(myPos.x, myPos.y + 1), 's')
  )

  def moveIsValid(newPos: Pos) = {
    if (gameState.terrain.split("\\n")(newPos.y)(newPos.x) != '~') true
    else false
  }

  def validMoves = possibleMoves.filter(p => moveIsValid(p._1)).map(_._2)

  def isAlive = underlying.hitpoints > 0

  def nextRandomDir = validMoves.toSeq(util.Random.nextInt(validMoves.toSeq.length))

  def update(gameState: GameState) = {
    myPos = Pos(col, row)
    this.gameState = gameState
    enemyInRange
    println(validMoves.toString())
    Move(id, nextRandomDir)
  }
}

class WrenCommander extends BaseCommander[WrenBot] {
  val name = "WrenCommander"

  override def makeBot(serverSideBot: api.Bot) = new WrenBot(serverSideBot)

  override def selectCommands(gameState: GameState): Seq[Command] = {
    val livingBots = bots.filter(_.isAlive)
    livingBots.toList.map { bot =>
      bot.update(gameState)
    }
  }
}
ramn/bottfarmen
common/src/main/scala/example/wren/WrenBot.scala
Scala
gpl-3.0
1,658
/* * Copyright 2016 MongoDB, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mongodb.scala.gridfs import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File, InputStream} import javax.xml.bind.DatatypeConverter import scala.collection.JavaConverters._ import scala.collection.mutable.ListBuffer import scala.io.Source import scala.util.Try import org.bson.{BsonArray, BsonBinary, BsonInt32} import org.mongodb.scala._ import org.mongodb.scala.bson.collection.mutable import org.mongodb.scala.bson.{BsonDocument, BsonInt64, BsonObjectId, BsonString} import org.mongodb.scala.gridfs.helpers.AsyncStreamHelper import org.scalatest.Inspectors.forEvery class GridFSSpec extends RequiresMongoDBISpec { private val filesCollectionName = "fs.files" private val chunksCollectionName = "fs.chunks" lazy val files = new File(getClass.getResource("/gridfs-tests").toURI).listFiles.filter(_.getName.endsWith(".json")) var gridFSBucket: Option[GridFSBucket] = None var filesCollection: Option[MongoCollection[Document]] = None var chunksCollection: Option[MongoCollection[Document]] = None forEvery (files) { (file: File) => s"Running ${file.getName} tests" should "pass all scenarios" in withDatabase(databaseName) { database => gridFSBucket = Some(GridFSBucket(database)) filesCollection = Some(database.getCollection(filesCollectionName)) chunksCollection = Some(database.getCollection(chunksCollectionName)) val definition = BsonDocument(Source.fromFile(file).getLines.mkString) val data = definition.getDocument("data") val tests = definition.getArray("tests").asScala.map(_.asDocument()) forEvery(tests) { (test: BsonDocument) => info(test.getString("description").getValue) val arrange: BsonDocument = test.getDocument("arrange", BsonDocument()) val action: BsonDocument = test.getDocument("act", BsonDocument()) val assertion: BsonDocument = test.getDocument("assert", BsonDocument()) arrangeGridFS(data, arrange) actionGridFS(action, assertion) } } } // scalastyle:off cyclomatic.complexity private def arrangeGridFS(data: BsonDocument, arrange: BsonDocument): Unit = { gridFSBucket.map(_.drop()).get.futureValue val filesDocuments: List[Document] = processFiles(data.getArray("files", new BsonArray)) val chunksDocuments: List[Document] = processChunks(data.getArray("chunks", new BsonArray)) if (filesDocuments.nonEmpty) filesCollection.map(_.insertMany(filesDocuments)).get.futureValue if (chunksDocuments.nonEmpty) chunksCollection.map(_.insertMany(chunksDocuments)).get.futureValue for (fileToArrange <- arrange.getArray("data", new BsonArray).asScala) { val document = fileToArrange.asDocument if (document.containsKey("delete") && document.containsKey("deletes")) { for (toDelete <- document.getArray("deletes").asScala) { val collection = document.getString("delete") match { case isFilesCollection(_) => filesCollection.get case isChunksCollection(_) => chunksCollection.get case x => throw new IllegalArgumentException(s"Unknown collection to delete: $x") } val query = toDelete.asDocument.getDocument("q") val limit: Int = 
toDelete.asDocument.getInt32("limit").getValue limit match { case 1 => collection.deleteOne(query).futureValue case _ => collection.deleteMany(query).futureValue } } } else if (document.containsKey("insert") && document.containsKey("documents")) { document.getString("insert") match { case isFilesCollection(_) => filesCollection.map(_.insertMany(processFiles(document.getArray("documents")))).get.futureValue case isChunksCollection(_) => chunksCollection.map(_.insertMany(processChunks(document.getArray("documents")))).get.futureValue case x => throw new IllegalArgumentException(s"Unknown collection to insert data into: $x") } } else if (document.containsKey("update") && document.containsKey("updates")) { val collection = document.getString("update") match { case isFilesCollection(_) => filesCollection.get case isChunksCollection(_) => chunksCollection.get case x => throw new IllegalArgumentException(s"Unknown collection to update: $x") } for (rawUpdate <- document.getArray("updates").asScala) { val query: Document = rawUpdate.asDocument.getDocument("q") val update: mutable.Document = mutable.Document(rawUpdate.asDocument.getDocument("u")) update.put("$set", parseHexDocument(update.get[BsonDocument]("$set").get)) collection.updateMany(query, update).head().futureValue } } else { throw new IllegalArgumentException("Unsupported arrange: " + document) } } } // scalastyle:on cyclomatic.complexity private def actionGridFS(action: BsonDocument, assertion: BsonDocument) { if (!action.isEmpty) { action.getString("operation").getValue match { case "delete" => doDelete(action.getDocument("arguments"), assertion) case "download" => doDownload(action.getDocument("arguments"), assertion) case "download_by_name" => doDownloadByName(action.getDocument("arguments"), assertion) case "upload" => doUpload(action.getDocument("arguments"), assertion) case x => throw new IllegalArgumentException(s"Unknown operation: $x") } } } private def doDelete(arguments: BsonDocument, assertion: BsonDocument) { val result = Try(gridFSBucket.map(_.delete(arguments.getObjectId("id").getValue)).get.futureValue) assertion.containsKey("error") match { case true => result should be a 'failure case false => result should be a 'success for (rawDataItem <- assertion.getArray("data").asScala) { val dataItem: BsonDocument = rawDataItem.asDocument for (deletedItem <- dataItem.getArray("deletes", new BsonArray).asScala) { val delete: String = dataItem.getString("delete", new BsonString("none")).getValue val id: BsonObjectId = delete match { case "expected.files" => deletedItem.asDocument.getDocument("q").getObjectId("_id") case "expected.chunks" => deletedItem.asDocument.getDocument("q").getObjectId("files_id") } val filesCount: Long = getFilesCount(new BsonDocument("_id", id)) val chunksCount: Long = getChunksCount(new BsonDocument("files_id", id)) filesCount should equal(0) chunksCount should equal(0) } } } } private def doDownload(arguments: BsonDocument, assertion: BsonDocument): Unit = { val outputStream: ByteArrayOutputStream = new ByteArrayOutputStream val result = Try(gridFSBucket.map(_.downloadToStream(arguments.getObjectId("id").getValue, AsyncStreamHelper.toAsyncOutputStream(outputStream)).head()).get.futureValue) outputStream.close() assertion.containsKey("error") match { case true => result should be a 'failure case false => result should be a 'success DatatypeConverter.printHexBinary(outputStream.toByteArray).toLowerCase should equal(assertion.getDocument("result").getString("$hex").getValue) } } private def 
doDownloadByName(arguments: BsonDocument, assertion: BsonDocument) { val outputStream: ByteArrayOutputStream = new ByteArrayOutputStream val options: GridFSDownloadOptions = new GridFSDownloadOptions() Option(arguments.get("options")).map(opts => options.revision(opts.asDocument().getInt32("revision").getValue)) val result = Try(gridFSBucket.map(_.downloadToStream(arguments.getString("filename").getValue, AsyncStreamHelper.toAsyncOutputStream(outputStream), options).head()).get.futureValue) outputStream.close() assertion.containsKey("error") match { case true => result should be a 'failure case false => result should be a 'success DatatypeConverter.printHexBinary(outputStream.toByteArray).toLowerCase should equal(assertion.getDocument("result").getString("$hex").getValue) } } //scalastyle:off method.length private def doUpload(rawArguments: BsonDocument, assertion: BsonDocument) { val arguments: BsonDocument = parseHexDocument(rawArguments, "source") val filename: String = arguments.getString("filename").getValue val inputStream: InputStream = new ByteArrayInputStream(arguments.getBinary("source").getData) val rawOptions: Document = arguments.getDocument("options", new BsonDocument()) val options: GridFSUploadOptions = new GridFSUploadOptions() rawOptions.get[BsonInt32]("chunkSizeBytes").map(chunkSize => options.chunkSizeBytes(chunkSize.getValue)) rawOptions.get[BsonDocument]("metadata").map(doc => options.metadata(doc)) val result = Try(gridFSBucket.map(_.uploadFromStream(filename, AsyncStreamHelper.toAsyncInputStream(inputStream), options).head()).get.futureValue) assertion.containsKey("error") match { case true => result should be a 'failure /* // We don't need to read anything more so don't see the extra chunk if (!assertion.getString("error").getValue == "ExtraChunk") assertNotNull("Should have thrown an exception", error) */ case false => result should be a 'success val objectId = result.get for (rawDataItem <- assertion.getArray("data", new BsonArray).asScala) { val dataItem: BsonDocument = rawDataItem.asDocument val insert: String = dataItem.getString("insert", new BsonString("none")).getValue insert match { case "expected.files" => val documents: List[Document] = processFiles(dataItem.getArray("documents", new BsonArray)) getFilesCount(new BsonDocument) should equal(documents.size) val actual: Document = filesCollection.map(_.find().first().head()).get.futureValue for (expected <- documents) { expected.get("length") should equal(actual.get("length")) expected.get("chunkSize") should equal(actual.get("chunkSize")) expected.get("md5") should equal(actual.get("md5")) expected.get("filename") should equal(actual.get("filename")) if (expected.contains("metadata")) expected.get("metadata") should equal(actual.get("metadata")) } case "expected.chunks" => val documents: List[Document] = processChunks(dataItem.getArray("documents", new BsonArray)) getChunksCount(new BsonDocument) should equal(documents.size) val actualDocuments: Seq[Document] = chunksCollection.map(_.find()).get.futureValue for ((expected, actual) <- documents zip actualDocuments) { new BsonObjectId(objectId) should equal(actual.get[BsonObjectId]("files_id").get) expected.get("n") should equal(actual.get("n")) expected.get("data") should equal(actual.get("data")) } } } } } //scalastyle:on method.length private def getChunksCount(filter: BsonDocument): Long = chunksCollection.map(col => col.count(filter).head()).get.futureValue private def getFilesCount(filter: BsonDocument): Long = filesCollection.map(col => 
col.count(filter).head()).get.futureValue private def processFiles(bsonArray: BsonArray): List[Document] = { val documents = ListBuffer[Document]() for (rawDocument <- bsonArray.getValues.asScala) { if (rawDocument.isDocument) { val document: BsonDocument = rawDocument.asDocument if (document.get("length").isInt32) document.put("length", BsonInt64(document.getInt32("length").getValue)) if (document.containsKey("metadata") && document.getDocument("metadata").isEmpty) document.remove("metadata") if (document.containsKey("aliases") && document.getArray("aliases").getValues.size == 0) document.remove("aliases") if (document.containsKey("contentType") && document.getString("contentType").getValue.length == 0) document.remove("contentType") documents += document } } documents.toList } private def processChunks(bsonArray: BsonArray): List[Document] = { val documents = ListBuffer[Document]() for (rawDocument <- bsonArray.getValues.asScala) { if (rawDocument.isDocument) documents += parseHexDocument(rawDocument.asDocument) } documents.toList } private def parseHexDocument(document: BsonDocument): BsonDocument = parseHexDocument(document, "data") private def parseHexDocument(document: BsonDocument, hexDocument: String): BsonDocument = { if (document.contains(hexDocument) && document.get(hexDocument).isDocument) { val bytes: Array[Byte] = DatatypeConverter.parseHexBinary(document.getDocument(hexDocument).getString("$hex").getValue) document.put(hexDocument, new BsonBinary(bytes)) } document } private object isFilesCollection { def unapply(name: BsonString): Option[Boolean] = if (name.getValue == filesCollectionName) Some(true) else None } private object isChunksCollection { def unapply(name: BsonString): Option[Boolean] = if (name.getValue == chunksCollectionName) Some(true) else None } }
jCalamari/mongo-scala-driver
driver/src/it/scala/org/mongodb/scala/gridfs/GridFSSpec.scala
Scala
apache-2.0
14,029
package pfennig

import simulacrum._
import scala.language.{implicitConversions, higherKinds}

@typeclass trait Sampleable[F[_]] {
  @op("sample") def sample[A](distribution: F[A])(randomness: Randomness): A
}
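A brief illustrative sketch, not found in the pfennig sources: a trivial Sampleable instance for an assumed constant-distribution type Always, which ignores the supplied Randomness (the repository's own randomness type referenced by the trait).

// Hypothetical illustration: `Always` and this instance are not defined in the repository.
case class Always[A](value: A)

implicit val alwaysSampleable: Sampleable[Always] = new Sampleable[Always] {
  // A constant "distribution" ignores the randomness source and returns its fixed value.
  def sample[A](distribution: Always[A])(randomness: Randomness): A =
    distribution.value
}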
noelwelsh/pfennig
src/main/scala/pfennig/Sampleable.scala
Scala
apache-2.0
209
package org.hammerlab.guacamole.filters

import org.apache.commons.math3.util.ArithmeticUtils.binomialCoefficientLog

object FishersExactTest {

  /** Fisher's exact test, returned as a probability. */
  def apply(totalA: Int, totalB: Int, conditionA: Int, conditionB: Int): Double =
    math.exp(asLog(totalA, totalB, conditionA, conditionB))

  /** Fisher's exact test, returned as -1 * log base 10 probability (i.e. a positive number). */
  def asLog10(totalA: Int, totalB: Int, conditionA: Int, conditionB: Int): Double =
    asLog(totalA, totalB, conditionA, conditionB) / Math.log(10)

  /** Fisher's exact test, returned as -1 * natural-log probability (i.e. a positive number). */
  def asLog(totalA: Int, totalB: Int, conditionA: Int, conditionB: Int): Double =
    binomialCoefficientLog(totalA, conditionA) +
      binomialCoefficientLog(totalB, conditionB) -
      binomialCoefficientLog(totalA + totalB, conditionA + conditionB)
}
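A short usage sketch with made-up counts (not from the guacamole sources): comparing how often a condition holds in two groups.

// Hypothetical numbers for illustration: 10 of 25 items satisfy the condition in group A, 3 of 30 in group B.
val p = FishersExactTest(totalA = 25, totalB = 30, conditionA = 10, conditionB = 3)
val logScale = FishersExactTest.asLog10(25, 30, 10, 3)
println(s"probability = $p, log10 scale = $logScale")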
hammerlab/guacamole
src/main/scala/org/hammerlab/guacamole/filters/FishersExactTest.scala
Scala
apache-2.0
944
package org.openurp.edu.eams.core.web.action import java.io.File import java.io.FileInputStream import java.io.IOException import java.io.InputStream import java.text.SimpleDateFormat import java.util.Calendar import java.util.Date import java.util.TreeMap import javax.servlet.http.HttpServletResponse import org.apache.commons.lang3.time.DateUtils import org.apache.poi.hssf.usermodel.HSSFWorkbook import org.apache.struts2.ServletActionContext import org.beangle.commons.collection.Collections import org.beangle.data.jpa.dao.OqlBuilder import org.beangle.data.model.Entity import org.beangle.commons.entity.metadata.Model import org.beangle.commons.lang.Arrays import org.beangle.commons.lang.Strings import org.beangle.commons.text.i18n.TextResource import org.beangle.commons.transfer.TransferListener import org.beangle.commons.transfer.TransferResult import org.beangle.commons.transfer.excel.ExcelItemReader import org.beangle.commons.transfer.excel.ExcelTemplateWriter import org.beangle.commons.transfer.exporter.Context import org.beangle.commons.transfer.exporter.DefaultPropertyExtractor import org.beangle.commons.transfer.exporter.Exporter import org.beangle.commons.transfer.exporter.PropertyExtractor import org.beangle.commons.transfer.exporter.TemplateExporter import org.beangle.commons.transfer.exporter.TemplateWriter import org.beangle.commons.transfer.importer.EntityImporter import org.beangle.commons.transfer.importer.MultiEntityImporter import org.beangle.commons.transfer.importer.listener.ImporterForeignerListener import org.beangle.commons.transfer.io.TransferFormat import org.beangle.commons.web.util.RequestUtils import org.beangle.ems.dictionary.service.CodeFixture import org.beangle.struts2.convention.route.Action import org.openurp.edu.base.Adminclass import org.openurp.edu.base.Direction import org.openurp.edu.base.Major import org.openurp.edu.base.Project import org.openurp.edu.base.Student import org.openurp.edu.base.StudentJournal import org.openurp.edu.eams.core.model.AdminclassBean import org.openurp.edu.eams.core.service.listener.AdminclassImportListener import com.google.gson.Gson import com.opensymphony.xwork2.ActionContext class AdminclassAction extends AdminclassSearchAction { var importerListeners: List[_ <: TransferListener] = Collections.newBuffer[Any] def edit(): String = { put("departments", getDeparts) put("educations", getEducations) put("stdTypes", getStdTypes) put("majors", baseInfoService.getBaseInfos(classOf[Major])) put("directions", baseInfoService.getBaseInfos(classOf[Direction])) if (getIntId("adminclass") == null) { put("adminclass", new AdminclassBean()) return forward() } put("adminclass", entityDao.get(classOf[Adminclass], getIntId("adminclass"))) forward() } def save(): String = { val adminclass = populateEntity(classOf[Adminclass], "adminclass") if (adminclass.isTransient) { adminclass.setCreatedAt(new Date()) } val cal = Calendar.getInstance cal.setTime(adminclass.getEffectiveAt) adminclass.setEffectiveAt(DateUtils.truncate(adminclass.getEffectiveAt, Calendar.DAY_OF_MONTH)) if (adminclass.getInvalidAt != null) { adminclass.setInvalidAt(DateUtils.truncate(adminclass.getInvalidAt, Calendar.DAY_OF_MONTH)) } adminclass.setUpdatedAt(new Date()) entityDao.saveOrUpdate(adminclass) redirect("search", "info.action.success") } def checkCode() { val newCode = get("newCode") val builder = OqlBuilder.from(classOf[Adminclass], "adminclass") builder.where("adminclass.code=:newCode", newCode) val adminclassList = entityDao.search(builder) if (adminclassList.size 
== 0) { getResponse.getWriter.append("0") } else { getResponse.getWriter.append("1") } getResponse.getWriter.flush() getResponse.getWriter.close() } def checkName() { val newName = get("newName") val builder = OqlBuilder.from(classOf[Adminclass], "adminclass") builder.where("adminclass.name=:newName", newName) val adminclassList = entityDao.search(builder) if (adminclassList.size == 0) { getResponse.getWriter.append("0") } else { getResponse.getWriter.append("1") } getResponse.getWriter.flush() getResponse.getWriter.close() } private def saveOrUpdate(adminClass: Adminclass): String = { if (!codeGenerator.isValidCode(adminClass.getCode)) { val code = codeGenerator.gen(new CodeFixture(adminClass)) if (codeGenerator.isValidCode(code)) { adminClass.setCode(code) } else { addMessage(getText("system.codeGen.failure")) return forward(new Action(this.getClass, "edit")) } } adminClass.setUpdatedAt(new java.sql.Date(System.currentTimeMillis())) if (!adminClass.isPersisted) { adminClass.setCreatedAt(new java.sql.Date(System.currentTimeMillis())) } onSave(adminClass) null } def batchUpdateStdCount(): String = { val adminclassIds = Strings.splitToInt(get("adminclassIds")) val adminclassList = entityDao.get(classOf[Adminclass], adminclassIds) for (adminclass <- adminclassList) { adminclass.setStdCount(adminclass.getStudents.size) entityDao.saveOrUpdate(adminclass) } redirect("search", "info.update.success") } def removeAdminclass(): String = { val adminclassList = entityDao.get(classOf[Adminclass], getIntIds("adminclass")) for (adminclass <- adminclassList if adminclass.getStudents.size > 0) { return redirect("search", "你选择了有学生的行政班,不能删除这样的行政班") } entityDao.remove(adminclassList) redirect("search", "info.delete.success") } def downloadAdminclassStdTemp(): String = { val context = new Context() context.put("format", TransferFormat.Xls) val exporter = new TemplateExporter() val response = ServletActionContext.getResponse val templateWriter = new ExcelTemplateWriter() if ("std" == get("templateType")) { templateWriter.setTemplate(getResource("template/excel/班级学生导入数据模版.xls")) } if ("adminclass" == get("templateType")) { templateWriter.setTemplate(getResource("template/excel/班级导入数据模版.xls")) } templateWriter.setOutputStream(response.getOutputStream) templateWriter.setContext(context) exporter.setWriter(templateWriter) response.setContentType("application/vnd.ms-excel;charset=GBK") var oldFileName = "" if ("std" == get("templateType")) { oldFileName = "班级学生导入数据模版.xls" } if ("adminclass" == get("templateType")) { oldFileName = "班级导入数据模版.xls" } val fileName = RequestUtils.encodeAttachName(ServletActionContext.getRequest, oldFileName) response.setHeader("Content-Disposition", "attachment;filename=" + fileName) exporter.setContext(context) exporter.transfer(new TransferResult()) null } protected override def getExportDatas(): Iterable[_] = { if ("std" == get("exportType")) { val ids = getIntIds("adminclass") val builder = OqlBuilder.from(classOf[Student], "student") builder.orderBy("student.adminclass.name").orderBy("student.code") .limit(null) if (Arrays.isEmpty(ids)) { val classQuery = OqlBuilder.from(classOf[Adminclass], "adminclass") populateConditions(classQuery) val classes = entityDao.search(classQuery) if (Collections.isEmpty(classes)) { return Collections.emptyList() } else { builder.where("student.adminclass in (:adminclasses)", classes) } } else { builder.where("student.adminclass.id in (:adminclasses)", ids) } entityDao.search(builder) } else { val ids = Strings.splitToInt(get("adminclassIds")) if 
(Arrays.isEmpty(ids)) { entityDao.search(getQueryBuilder.limit(null)) } else { entityDao.get(classOf[Adminclass], ids) } } } protected def getPropertyExtractor(): PropertyExtractor = { if ("std" == get("exportType")) { val adminclassPropertyExtractor = new AdminclassPropertyExtractor(getTextResource) val query = OqlBuilder.from(classOf[StudentJournal], "studentJournal") .where("beginOn <= :now and :now <= endOn", new Date()) val ids = ids("adminclass", classOf[Long]) val builder = OqlBuilder.from(classOf[Student], "student") builder.orderBy("student.adminclass.name").orderBy("student.code") .limit(null) if (Arrays.isEmpty(ids)) { val classQuery = getQueryBuilder.limit(null).asInstanceOf[OqlBuilder[Adminclass]] populateConditions(classQuery) val classes = entityDao.search(classQuery) if (Collections.isEmpty(classes)) { query.where("1=2") } else { query.where("studentJournal.std.adminclass in (:adminclasses)", classes) } } else { query.where("studentJournal.std.adminclass.id in (:adminclasses)", getIntIds("adminclass")) } val journals = entityDao.search(query) adminclassPropertyExtractor.setJournals(journals) adminclassPropertyExtractor } else { super.getPropertyExtractor } } protected def onSave(entity: Entity) { adminclassService.saveOrUpdate(entity.asInstanceOf[Adminclass]) } def genderStatistic(): String = { val id = get("ids").split(",") val deptMap = new TreeMap[String, Map[_,_]]() var majorMap: Map[String, Map[_,_]] = null var fieldMap: Map[String, ArrayList[Adminclass]] = null var list: ArrayList[Adminclass] = null val mwnum = new HashMap() val gender = Array.ofDim[Integer](id.length, 2) val major_field = Model.newInstance(classOf[Direction]).asInstanceOf[Direction] major_field.setCode("-1") major_field.setName(" ") for (i <- 0 until id.length) { val idl = java.lang.Integer.parseInt(id(i)) val ac = entityDao.get(classOf[Adminclass], idl).asInstanceOf[Adminclass] val s = ac.getStudents gender(i)(0) = 0 gender(i)(1) = 0 for (obj <- s) { val student = obj.asInstanceOf[Student] val journals = student.getJournals val date = new Date() var isInSchool = false var iterator = journals.iterator() while (iterator.hasNext) { val journal = iterator.next() if (date.before(journal.getEndOn) && date.after(journal.getBeginOn)) { isInSchool = journal.isInschool } } if (student.getGender.getName == "男" && isInSchool) { gender(i)(0) += 1 } else if (student.getGender.getName == "女" && isInSchool) { gender(i)(1) += 1 } } mwnum.put(ac.getCode, gender(i)) if (ac.department != null && ac.major != null) { val departmentcode = ac.department.getCode val majorcode = ac.major.getCode var majorfield = ac.direction if (deptMap.get(departmentcode) == null) { majorMap = new TreeMap[String, Map[_,_]]() fieldMap = new TreeMap[String, ArrayList[Adminclass]]() list = new ArrayList[Adminclass]() list.add(ac) if (majorfield == null) { fieldMap.put(major_field.getCode, list) } else { fieldMap.put(majorfield.getCode, list) } majorMap.put(majorcode, fieldMap) deptMap.put(departmentcode, majorMap) } else { majorMap = deptMap.get(departmentcode).asInstanceOf[Map[_,_]] if (majorMap.get(majorcode) == null) { fieldMap = new TreeMap[String, ArrayList[Adminclass]]() list = new ArrayList[Adminclass]() list.add(ac) if (majorfield == null) { fieldMap.put(major_field.getCode, list) } else { fieldMap.put(majorfield.getCode, list) } majorMap.put(majorcode, fieldMap) deptMap.put(departmentcode, majorMap) } else { fieldMap = majorMap.get(majorcode) if (majorfield == null) { majorfield = major_field } else { } if 
(fieldMap.get(majorfield.getCode) == null) { list = new ArrayList[Adminclass]() list.add(ac) fieldMap.put(majorfield.getCode, list) majorMap.put(majorcode, fieldMap) deptMap.put(departmentcode, majorMap) } else { list = fieldMap.get(majorfield.getCode) list.add(ac) fieldMap.put(majorfield.getCode, list) majorMap.put(majorcode, fieldMap) deptMap.put(departmentcode, majorMap) } } } } } val xueyuan = deptMap.keySet var sum = 0 var sum2 = 0 val renshu = new HashMap() val majornum = new HashMap() val iter = xueyuan.iterator() while (iter.hasNext) { val s = iter.next().asInstanceOf[String] val zyMap = deptMap.get(s) val zhuanye = zyMap.keySet val iter2 = zhuanye.iterator() while (iter2.hasNext) { val s2 = iter2.next().asInstanceOf[String] val zyfxMap = zyMap.get(s2).asInstanceOf[Map[_,_]] val zyfx = zyfxMap.keySet val iter3 = zyfx.iterator() while (iter3.hasNext) { val s3 = iter3.next().asInstanceOf[String] val l = zyfxMap.get(s3).asInstanceOf[List[_]] sum = sum + l.size sum2 = sum2 + l.size } majornum.put(s2, sum2) sum2 = 0 } renshu.put(s, sum) sum = 0 } put("deptmap", deptMap) put("majornum", majornum) put("renshu", renshu) put("mwnum", mwnum) forward() } protected def buildEntityImporter(): EntityImporter = { val upload = "importFile" try { val files = ActionContext.getContext.getParameters.get(upload).asInstanceOf[Array[File]] if (files == null || files.length < 1) { logger.error("cannot get {} file.", upload) } val fileName = get(upload + "FileName") val is = new FileInputStream(files(0)) if (fileName.endsWith(".xls")) { val wb = new HSSFWorkbook(is) if (wb.getNumberOfSheets < 1 || wb.getSheetAt(0).getLastRowNum == 0) { return null } val importer = new MultiEntityImporter() importer.setReader(new ExcelItemReader(wb, 1)) put("importer", importer) importer } else { throw new RuntimeException("donot support other format except excel") } } catch { case e: Exception => { logger.error("error", e) null } } } protected def configImporter(importer: EntityImporter) { val mimporter = importer.asInstanceOf[MultiEntityImporter] mimporter.addForeignedKeys("name") mimporter.addForeignedKeys("code") val template = get("templateType") if ("std" == template) { mimporter.addEntity("student", classOf[Student]) } if ("adminclass" == template) { mimporter.addEntity("adminclass", classOf[Adminclass]) } val l = new ImporterForeignerListener(entityDao) l.addForeigerKey("name") l.addForeigerKey("code") val project = entityDao.get(classOf[Project], getSession.get("projectId").asInstanceOf[java.lang.Integer]).asInstanceOf[Project] importer.addListener(l).addListener(new AdminclassImportListener(entityDao, template, project)) } def setClassStudentForm(): String = { val adminclassId = getInt("adminclassId") val adminclass = entityDao.get(classOf[Adminclass], adminclassId) put("adminclass", adminclass) val builder = OqlBuilder.from(classOf[Student], "student") builder.where("student.adminclass=:adminClass", adminclass) builder.orderBy("code") put("students", entityDao.search(builder)) forward() } def addClassStudentList(): String = { val builder = OqlBuilder.from(classOf[Student], "std") val student = populate(classOf[Student], "student") val adminclassId = getInt("adminclassId") if (getBoolean("frist") != null && getBoolean("frist")) { val adminclass = entityDao.get(classOf[Adminclass], adminclassId) student.setGrade(adminclass.grade) student.setDepartment(adminclass.department) student.setMajor(adminclass.major) student.setAdminclass(adminclass) builder.where("std.grade = :grade", adminclass.grade) 
builder.where("std.department.name = :department", adminclass.department.getName) builder.where("std.major.name = :major", adminclass.major.getName) } else { if (student.getCode != null) builder.where("std.code like :code", "%" + student.getCode + "%") if (student.getName != null) builder.where("std.name like :name", "%" + student.getName + "%") if (student.grade != null) builder.where("std.grade like :grade", "%" + student.grade + "%") if (student.department != null && student.department.getName != null) builder.where("std.department.name like :department", "%" + student.department.getName + "%") if (student.major != null && student.major.getName != null) builder.where("std.major.name like :major", "%" + student.major.getName + "%") if (student.getAdminclass != null && student.getAdminclass.getName != null) builder.where("std.adminclass.name like :adminclass", "%" + student.getAdminclass.getName + "%") } builder.where("std.adminclass is null or std.adminclass.id <> :adminClass", getInt("adminclassId")) builder.where("std.department in (:departments)", getDeparts) builder.where("std.education in (:educations)", getEducations) builder.orderBy("std.code desc") builder.limit(getPageLimit) val search = entityDao.search(builder) put("students", search) put("adminclassId", adminclassId) put("student", student) put("stdCodes", get("stdCodes")) forward() } def addClassStudent(): String = { var codes = get("stdCodes") if (Strings.isNotEmpty(codes)) { codes = codes.replaceAll("[\\\\s;,;]", ",").replaceAll(",,", ",") val projectId = getInt("student.project.id") val studentList = new ArrayList[Student]() val notAddCodes = new ArrayList[String]() val codeArr = Strings.split(codes) for (code <- codeArr) { val t = entityDao.get(classOf[Student], "code", code) var b = false if (Collections.isNotEmpty(t)) { val std = t.get(0) if (std.getProject.id == projectId && !studentList.contains(std)) { studentList.add(std) b = true } } else { val t1 = entityDao.search(OqlBuilder.from(classOf[Student], "c").where("c.name like :name", "%" + code.trim() + "%")) for (std <- t1 if std.getProject.id == projectId && !studentList.contains(std)) { studentList.add(std) b = true } } if (!b) { notAddCodes.add(code) } } put("studentList", studentList) put("notAddCodes", notAddCodes) } forward() } def saveClassStudent(): String = { val adminclassId = getInt("adminclassId") val adminclasses = new ArrayList[Adminclass]() val adminclassCur = entityDao.get(classOf[Adminclass], adminclassId) adminclasses.add(adminclassCur) val removeIds = get("studentRemoveIds") if (Strings.isNotEmpty(removeIds)) { val studentRemoveIds = Strings.split(removeIds) val stdIds = new ArrayList[Long]() for (studentRemoveId <- studentRemoveIds) { try { stdIds.add(java.lang.Long.parseLong(studentRemoveId)) } catch { case ex: Exception => } } val students = entityDao.get(classOf[Student], stdIds) for (student <- students) { val adminclass = student.getAdminclass if (adminclass == null || adminclass.id != adminclassId) { //continue } student.setAdminclass(null) entityDao.saveOrUpdate(student) entityDao.refresh(adminclass) adminclass.setStdCount(adminclass.getStudents.size) entityDao.saveOrUpdate(adminclass) } } val stdIds = get("studentIds") if (Strings.isNotEmpty(stdIds)) { val studentIds = Strings.split(stdIds) val students = entityDao.get(classOf[Student], Strings.transformToLong(studentIds)) val msg = new StringBuffer() var errorNum = 0 for (student <- students) { if (student.major != adminclassCur.major || student.direction != adminclassCur.direction) { 
msg.append("\\n失败:").append(student.getCode).append(" ") .append(student.getName) errorNum += 1 } else { val oriAdminclass = student.getAdminclass student.setAdminclass(adminclassCur) entityDao.saveOrUpdate(student) if (oriAdminclass != null) { entityDao.refresh(oriAdminclass) oriAdminclass.setStdCount(oriAdminclass.getStudents.size) entityDao.saveOrUpdate(oriAdminclass) } } } if (errorNum > 0) { getFlash.put("message", ("\\n不符合要求的学生 " + errorNum + ";分别是:") + msg) } } entityDao.refresh(adminclassCur) adminclassCur.setStdCount(adminclassCur.getStudents.size) entityDao.saveOrUpdate(adminclassCur) redirect(new Action(classOf[AdminclassAction], "setClassStudentForm", "&adminclassId=" + adminclassId), "info.save.success") } def getMajorDuration(): String = { val majorId = getInt("majorId") val start = getDate("start") val response = getResponse if (majorId == null || start == null) { response.setContentType("text/plain;charset=UTF-8") response.getWriter.write("") response.getWriter.close() return null } val major = entityDao.get(classOf[Major], majorId) val duration = major.getDuration if (major == null || duration == null) { response.setContentType("text/plain;charset=UTF-8") response.getWriter.write("") response.getWriter.close() return null } val mnum = (duration.floatValue() * 12).toInt val c = Calendar.getInstance c.setTime(start) c.add(Calendar.MONTH, mnum) val end = c.getTime val sdf = new SimpleDateFormat("yyyy-MM-dd") val result = Collections.newMap[Any] result.put("invalidOn", sdf.format(end)) result.put("duration", duration) response.setContentType("text/plain;charset=UTF-8") response.getWriter.write(new Gson().toJson(result)) response.getWriter.close() null } } class AdminclassPropertyExtractor(resource: TextResource) extends DefaultPropertyExtractor(resource) { var textResource: TextResource = _ var journals: List[StudentJournal] = _ def getPropertyValue(target: AnyRef, property: String): AnyRef = { val student = target.asInstanceOf[Student] if (property == "studentJournal.status") { val journal = searchJournal(student) if (journal != null) { return journal.getStatus.getName } "" } else { super.getPropertyValue(target, property) } } private def searchJournal(std: Student): StudentJournal = { journals.find(_.getStd.id == std.id).getOrElse(null) } }
openurp/edu-eams-webapp
web/src/main/scala/org/openurp/edu/eams/core/web/action/AdminclassAction.scala
Scala
gpl-3.0
23,443
package synahive.restapi.models.db

import synahive.restapi.models.TokenEntity
import synahive.restapi.utils.DatabaseConfig

trait TokenEntityTable extends UserEntityTable with DatabaseConfig {

  import driver.api._

  class Tokens(tag: Tag) extends Table[TokenEntity](tag, "tokens") {
    def id = column[Option[Long]]("id", O.PrimaryKey, O.AutoInc)
    def userId = column[Option[Long]]("user_id")
    def token = column[String]("token")

    def userFk = foreignKey("USER_FK", userId, users)(_.id,
      onUpdate = ForeignKeyAction.Restrict, onDelete = ForeignKeyAction.Cascade)

    def * = (id, userId, token) <> ((TokenEntity.apply _).tupled, TokenEntity.unapply)
  }

  protected val tokens = TableQuery[Tokens]
}
synahive/synahive-server
src/main/scala/synahive/restapi/models/db/TokenEntityTable.scala
Scala
mit
717
package com.lucidchart.open.nark.controllers

import com.lucidchart.open.nark.request.{AppFlash, AppAction, AuthAction, DashboardAction}
import com.lucidchart.open.nark.views
import com.lucidchart.open.nark.models.{GraphModel, DashboardModel}
import com.lucidchart.open.nark.models.records.GraphType
import com.lucidchart.open.nark.models.records.GraphAxisLabel
import java.util.UUID
import play.api.data.Form
import play.api.data.Forms._
import com.lucidchart.open.nark.models.records.Graph
import play.api.data.validation.Constraints

class GraphsController extends AppController {
	private case class EditGraphSubmission(name: String, graphType: GraphType.Value, axisLabel: GraphAxisLabel.Value)

	private val editGraphForm = Form(
		mapping(
			"name" -> text.verifying(Constraints.minLength(1)),
			"type" -> number.verifying("Invalid graph type", x => GraphType.values.map(_.id).contains(x)).transform[GraphType.Value](GraphType(_), _.id),
			"axis" -> number.verifying("Invalid axis label", x => GraphAxisLabel.values.map(_.id).contains(x)).transform[GraphAxisLabel.Value](GraphAxisLabel(_), _.id)
		)(EditGraphSubmission.apply)(EditGraphSubmission.unapply)
	)

	/**
	 * Add a graph to a dashboard
	 */
	def add(dashboardId: UUID) = AuthAction.authenticatedUser { implicit user =>
		DashboardAction.dashboardManagementAccess(dashboardId, user.id) { dashboard =>
			AppAction { implicit request =>
				val form = editGraphForm.fill(EditGraphSubmission("", GraphType.normal, GraphAxisLabel.auto))
				Ok(views.html.graphs.add(form, dashboard))
			}
		}
	}

	/**
	 * Submit the add form
	 */
	def addSubmit(dashboardId: UUID) = AuthAction.authenticatedUser { implicit user =>
		DashboardAction.dashboardManagementAccess(dashboardId, user.id) { dashboard =>
			AppAction { implicit request =>
				editGraphForm.bindFromRequest().fold(
					formWithErrors => {
						Ok(views.html.graphs.add(formWithErrors, dashboard))
					},
					data => {
						val oldGraphs = GraphModel.findGraphsByDashboardId(dashboard.id)
						val sort = if (oldGraphs.isEmpty) 0 else (oldGraphs.maxBy(_.sort).sort + 1)
						val graph = new Graph(data.name, dashboard.id, sort, data.graphType, data.axisLabel)
						GraphModel.createGraph(graph)
						Redirect(routes.TargetsController.add(graph.id)).flashing(AppFlash.success("Graph was added successfully."))
					}
				)
			}
		}
	}

	/**
	 * Edit a graph
	 */
	def edit(graphId: UUID) = AuthAction.authenticatedUser { implicit user =>
		DashboardAction.graphManagementAccess(graphId, user.id) { (dashboard, graph) =>
			AppAction { implicit request =>
				val form = editGraphForm.fill(EditGraphSubmission(graph.name, graph.typeGraph, graph.axisLabel))
				Ok(views.html.graphs.edit(form, graph))
			}
		}
	}

	/**
	 * Submit the edit form
	 */
	def editSubmit(graphId: UUID) = AuthAction.authenticatedUser { implicit user =>
		DashboardAction.graphManagementAccess(graphId, user.id) { (dashboard, graph) =>
			AppAction { implicit request =>
				editGraphForm.bindFromRequest().fold(
					formWithErrors => {
						Ok(views.html.graphs.edit(formWithErrors, graph))
					},
					data => {
						GraphModel.editGraph(graph.copy(name = data.name, typeGraph = data.graphType, axisLabel = data.axisLabel))
						Redirect(routes.DashboardsController.manageGraphsAndTargets(dashboard.id)).flashing(AppFlash.success("Graph was updated successfully."))
					}
				)
			}
		}
	}

	/**
	 * Activate a graph
	 */
	def activate(graphId: UUID) = AuthAction.authenticatedUser { implicit user =>
		DashboardAction.graphManagementAccess(graphId, user.id, allowDeletedGraph = true) { (dashboard, graph) =>
			AppAction { implicit request =>
				GraphModel.editGraph(graph.copy(deleted = false))
				Redirect(routes.DashboardsController.manageGraphsAndTargets(dashboard.id)).flashing(AppFlash.success("Graph was activated successfully."))
			}
		}
	}

	/**
	 * Deactivate a graph
	 */
	def deactivate(graphId: UUID) = AuthAction.authenticatedUser { implicit user =>
		DashboardAction.graphManagementAccess(graphId, user.id) { (dashboard, graph) =>
			AppAction { implicit request =>
				GraphModel.editGraph(graph.copy(deleted = true))
				Redirect(routes.DashboardsController.manageGraphsAndTargets(dashboard.id)).flashing(AppFlash.success("Graph was deactivated successfully."))
			}
		}
	}
}

object GraphsController extends GraphsController
lucidsoftware/nark
app/com/lucidchart/open/nark/controllers/GraphsController.scala
Scala
apache-2.0
4,358
package com.stefansavev.randomprojections.implementation import com.stefansavev.randomprojections.datarepr.sparse.SparseVector object OptimizedHadamard { def multiplyIntoHelper(from: Int, input: Array[Double], output: Array[Double]): Unit = { var a = input(from) var b = input(from + 1) output(from) = a + b output(from + 1) = a - b a = input(from + 2) b = input(from + 3) output(from + 2) = a + b output(from + 3) = a - b a = output(from) b = output(from + 2) output(from) = a + b output(from + 2) = a - b a = output(from + 1) b = output(from + 3) output(from + 1) = a + b output(from + 3) = a - b } def mix2(j1: Int, j2: Int, output: Array[Double]): Unit = { val a = output(j1) val b = output(j2) output(j1) = a + b output(j2) = a - b } def mix(output: Array[Double]): Unit = { val a1 = output(0) val a2 = output(1) val a3 = output(2) val a4 = output(3) val a5 = output(4) val a6 = output(5) val a7 = output(6) val a8 = output(7) output(0) = a1 + a5 output(4) = a1 - a5 output(1) = a2 + a6 output(5) = a2 - a6 output(2) = a3 + a7 output(6) = a3 - a7 output(3) = a4 + a8 output(7) = a4 - a8 } def multiplyInto(dim: Int, input: Array[Double], output: Array[Double]): Unit = { multiplyIntoHelper(0, input, output) multiplyIntoHelper(4, input, output) mix(output) } } object HadamardUtils { //assume k is a power of 2 //TODO: make it work without k being a power of 2 val eps = 0.0001 def recurse(from: Int, to: Int, input: Array[Double], output: Array[Double]): Unit = { if (to - from == 1) { output(from) = input(from) } else if (to - from == 2) { val a = input(from) val b = input(from + 1) output(from) = a + b output(from + 1) = a - b } else if (to - from == 4) { var a = input(from) var b = input(from + 1) output(from) = a + b output(from + 1) = a - b a = input(from + 2) b = input(from + 3) output(from + 2) = a + b output(from + 3) = a - b a = output(from) b = output(from + 2) output(from) = a + b output(from + 2) = a - b a = output(from + 1) b = output(from + 3) output(from + 1) = a + b output(from + 3) = a - b } else { val mid = from + (to - from) / 2 recurse(from, mid, input, output) recurse(mid, to, input, output) var j1 = from var j2 = mid while (j1 < mid) { val a = output(j1) val b = output(j2) output(j1) = a + b output(j2) = a - b j1 += 1 j2 += 1 } } } object MaxAbsValue { //in some cases we put normalized data, but not in query time val V: Double = 0.0001 //need to add test cases because for large V this method does not work //0.01 } def argAbsValueMax(dim: Int, values: Array[Double]): Int = { //sometimes the values here come normalized and sometimes not, need to fix that var i = 0 var maxAbsValue = MaxAbsValue.V // 0.001 //try channging this after var sign = 1 var maxIdx = 2 * dim //last cell is reserved for empty while (i < dim) { //forcing split into two val v = values(i) if (v > maxAbsValue) { maxAbsValue = v sign = 1 maxIdx = i } else if (-v > maxAbsValue) { maxAbsValue = -v sign = -1 maxIdx = i } i += 1 } if (maxIdx != 2 * dim) { if (sign > 0) maxIdx else maxIdx + dim } else { maxIdx } } def constrainedArgAbsValueMax(dim: Int, values: Array[Double], availableData: Array[RandomTree]): Int = { //sometimes the values here come normalized and sometimes not, need to fix that var i = 0 var maxAbsValue = MaxAbsValue.V var sign = 1 var maxIdx = 2 * dim //last cell is reserved for empty while (i < dim) { //forcing split into two val v = values(i) if (v > maxAbsValue && availableData(i) != null) { maxAbsValue = v sign = 1 maxIdx = i } else if (-v > maxAbsValue && availableData(i + dim) != null) { maxAbsValue = 
-v sign = -1 maxIdx = i } i += 1 } if (maxIdx != 2 * dim) { if (sign > 0) maxIdx else maxIdx + dim } else { maxIdx } } def getAbsValue(dim: Int, values: Array[Double], prevBucketIndex: Int): Double = { if (prevBucketIndex >= 2 * dim) { 0.0 } else { val prevAbsMax = if (prevBucketIndex < dim) values(prevBucketIndex) else -values(prevBucketIndex - dim) prevAbsMax } } def nextArgAbsMax(dim: Int, values: Array[Double], prevBucketIndex: Int): Int = { val prevAbsMax = getAbsValue(dim, values, prevBucketIndex) //if (prevBucketIndex < values.length) values(prevBucketIndex) else -values(prevBucketIndex - values.length) var i = 0 var maxAbsValue = MaxAbsValue.V var sign = 1 var maxIdx = 2 * dim while (i < dim) { //forcing split into two val v = values(i) if (v > maxAbsValue && v < prevAbsMax) { maxAbsValue = v sign = 1 maxIdx = i } else if (-v > maxAbsValue && -v < prevAbsMax) { maxAbsValue = -v sign = -1 maxIdx = i } i += 1 } if (maxIdx != -1) { if (sign > 0) maxIdx else maxIdx + dim } else { maxIdx } } def largestPowerOf2(k: Int): Int = { var j = 0 var i = 1 while (i <= k) { j = i i = i + i } j } def isPowerOf2(k: Int): Boolean = { largestPowerOf2(k) == k } def multiplyInto(input: Array[Double], output: Array[Double]): Unit = { val k = largestPowerOf2(input.length) var i = 0 while (i < output.length) { output(i) = 0.0 i += 1 } recurse(0, k, input, output) } def multiplyInto(dim: Int, input: Array[Double], output: Array[Double]): Unit = { val k = dim var i = 0 while (i < dim) { output(i) = 0.0 i += 1 } recurse(0, k, input, output) } def normalizeOutput(dim: Int, output: Array[Double]): Unit = { val norm = Math.sqrt(dim) var i = 0 while (i < dim) { output(i) /= norm i += 1 } } def roundUp(dim: Int): Int = { val powOf2 = largestPowerOf2(dim) val k = if (powOf2 == dim) dim else 2 * powOf2 k } def roundDown(dim: Int): Int = { largestPowerOf2(dim) } def computeHadamardFeatures(signs: SparseVector, query: Array[Double], input: Array[Double], output: Array[Double]): Unit = { val dim = signs.dim //TODO: move to function (significant overlap with code in Signatures) var j = 0 while (j < input.length) { input(j) = 0.0 j += 1 } j = 0 while (j < signs.ids.length) { val index = signs.ids(j) val b = signs.values(j) val a = query(index) input(j) = a * b j += 1 } HadamardUtils.multiplyInto(input.length, input, output) } }
stefansavev/random-projections-at-berlinbuzzwords
src/main/scala/com/stefansavev/randomprojections/implementation/HadamardUtils.scala
Scala
apache-2.0
7,115
package org.powlab.jeye.decode.graph

import org.powlab.jeye.decode.processor.comparison.ComparisonInformator
import org.powlab.jeye.decode.graph.OpcodeDetails._
import org.powlab.jeye.decode.sids.Sid
import org.powlab.jeye.decode.processor.control.GotoInstructionInformator._
import org.powlab.jeye.decode.processor.control.SwitchInstructionInformator.isSwitchNode
import org.powlab.jeye.decode.processor.custom.CustomInformator.isSwitchChild

/**
 * Enumeration of the possible representations of the goto instruction.
 */
object GotoTypes {

  class GotoType(name: String) {
    override def toString = name
  }

  val GT_BREAK = new GotoType("break")
  val GT_BREAK_TO_LABEL = new GotoType("break to label")
  val GT_CONTINUE = new GotoType("continue")
  val GT_CONTINUE_TO_LABEL = new GotoType("continue to label")
  val GT_JUMP = new GotoType("goto")
  // TODO here: for a switch over a string, control can jump from the body of one case into another via a goto
  // TODO here: implementation postponed
  val GT_CROSS_JUMP = new GotoType("cross_jump")

  /**
   * A goto instruction can be rendered as break, continue, or be an insignificant jump.
   */
  def detect(gotoNode: OpcodeNode, tree: OpcodeTree, strict: Boolean = true): GotoType = {
    // If this is not a goto instruction, its type is undefined
    if (!isGotoNode(gotoNode)) {
      return null
    }
    // If the sid is missing, this is an insignificant goto instruction that is part of some group
    if (tree.sid(gotoNode) == null) {
      return GT_JUMP
    }
    val selector = tree.selector
    val gsid = tree.sido(gotoNode)
    // If the goto node is not nested in any structure, it is a plain jump
    if (!gsid.hasParentSid) {
      return GT_JUMP
    }
    val gotoNextNode = tree.next(gotoNode)
    // If this is a case/default, the real target node is the next one
    val targetNode = if (isSwitchChild(gotoNextNode)) {
      tree.next(gotoNextNode)
    } else {
      gotoNextNode
    }
    // If the goto points at the last insignificant node, it is either a jump or a break
    val psid = gsid.parentSid
    var tsid = psid
    var gotoType = GT_JUMP
    var lastFlag = strict
    while (tsid != null && selector.contains(tsid.sid)) {
      val parentNode = selector.current(tsid.sid)
      // If the owner of the goto instruction is a switch, the transfer is always a break (under certain conditions, of course)
      if (isSwitchNode(parentNode) && gotoType == GT_JUMP) {
        if (targetNode == null) {
          return GT_BREAK
        }
        val targetSid = tree.sid(targetNode)
        if (!tsid.isParentFor(targetSid)) {
          gotoType = GT_BREAK
        } else if (isSwitchChild(targetNode)) {
          return GT_BREAK
        }
      }
      val parentDetails = tree.details(parentNode)
      // Flag that the element under inspection is always the last one; used to detect a natural fall-through
      if (lastFlag && selector.contains(tsid.nextId) && !isSameById(parentNode, targetNode)) {
        lastFlag = false
      }
      // If the owner of the goto instruction is a loop
      if (isCycleDetails(parentDetails)) {
        if (targetNode == null) {
          return GT_BREAK
        }
        // If the jump stays inside the loop, it is a plain jump
        val targetSid = tree.sid(targetNode)
        if (tsid.isParentFor(targetSid) && gotoType == GT_JUMP) {
          return GT_JUMP
        }
        // If the jump targets the loop itself, it is a continue
        if (isSameById(parentNode, tree.owner(targetNode))) {
          if (gotoType == GT_JUMP) {
            // For a natural fall-through return jump, otherwise continue
            if (lastFlag) {
              return GT_JUMP
            }
            return GT_CONTINUE
          }
          return GT_CONTINUE_TO_LABEL
        }
        // If the jump leaves the loop, it is a break
        if (gotoType == GT_JUMP) {
          gotoType = GT_BREAK
          if (tsid.nextId == targetSid) {
            return gotoType
          }
        } else {
          return GT_BREAK_TO_LABEL
        }
      }
      tsid = tsid.parentSid
    }
    gotoType
  }

  /**
   * The main task of this method is to determine identity by id:
   * 25 is identical to 25,
   * 25#2 is identical to 25#1.
   */
  private def isSameById(node1: OpcodeNode, node2: OpcodeNode): Boolean = {
    if (node1 == null) {
      return false
    }
    if (node2 == null) {
      return false
    }
    if (node1.id == node2.id) {
      return true
    }
    val sharp1 = node1.id.indexOf('#')
    val sharp2 = node2.id.indexOf('#')
    return sharp1 != -1 && sharp1 == sharp2 && node1.id.substring(0, sharp1) == node2.id.substring(0, sharp2)
  }
}
powlab/jeye
src/main/scala/org/powlab/jeye/decode/graph/GotoTypes.scala
Scala
apache-2.0
5,506
package nars.gui

import java.awt._
import java.awt.event._

import nars.entity.Concept
import nars.storage.Memory
//remove if not needed
import scala.collection.JavaConversions._

/**
 * Window accept a Term, then display the content of the corresponding Concept
 */
class TermWindow(var memory: Memory) extends NarsFrame("Term Window") with ActionListener {

  /** Display label */
  private var termLabel: Label = new Label("Term:", Label.RIGHT)

  /** Input field for term name */
  private var termField: TextField = new TextField("")

  /** Control buttons */
  private var playButton: Button = new Button("Show")

  private var hideButton: Button = new Button("Hide")

  // super("Term Window")
  setBackground(NarsFrame.SINGLE_WINDOW_COLOR)

  val gridbag = new GridBagLayout()

  val c = new GridBagConstraints()

  setLayout(gridbag)
  c.ipadx = 3
  c.ipady = 3
  c.insets = new Insets(5, 5, 5, 5)
  c.fill = GridBagConstraints.BOTH
  c.gridwidth = 1
  c.weightx = 0.0
  c.weighty = 0.0
  termLabel.setBackground(NarsFrame.SINGLE_WINDOW_COLOR)
  gridbag.setConstraints(termLabel, c)
  add(termLabel)
  c.weightx = 1.0
  gridbag.setConstraints(termField, c)
  add(termField)
  c.weightx = 0.0
  playButton.addActionListener(this)
  gridbag.setConstraints(playButton, c)
  add(playButton)
  hideButton.addActionListener(this)
  gridbag.setConstraints(hideButton, c)
  add(hideButton)
  setBounds(400, 0, 400, 100)

  /**
   * Handling button click
   * @param e The ActionEvent
   */
  def actionPerformed(e: ActionEvent) {
    val b = e.getSource.asInstanceOf[Button]
    if (b == playButton) {
      val concept = memory.nameToConcept(termField.getText.trim())
      if (concept != null) {
        concept.startPlay(true)
      }
    } else if (b == hideButton) {
      close()
    }
  }

  private def close() {
    setVisible(false)
  }

  override def windowClosing(arg0: WindowEvent) {
    close()
  }
}
automenta/opennars
nars_scala/src/main/scala/nars/gui/TermWindow.scala
Scala
gpl-2.0
1,956
package com.github.cuzfrog.tool.bmt

import java.io.File
import java.nio.charset.Charset

import com.typesafe.config.Config
import com.github.cuzfrog.maila.Mail

/**
 * This class takes care of reading and parsing the file that contains mails information.
 *
 * Created by cuz on 2016-08-09.
 */
private[bmt] class FileMails(config: Config, mailsPath: String) {
  private val encoding: String = config.getString("encoding").toLowerCase match {
    case "default" => Charset.defaultCharset.displayName
    case c => c
  }
  private val toHead = config.getString("head.to")
  private val toSubject = config.getString("head.subject")
  private val toText = config.getString("head.text")
  private val delimiter = config.getString("delimiter")

  private val bufferedSource = io.Source.fromFile(new File(mailsPath))(encoding)
  private val allRaw = try {
    bufferedSource.getLines.toList
  } finally {
    bufferedSource.close
  }

  private val RegexString = """"(.*)"""".r
  //(?=([^"]*"[^"]*")*[^"]*$) does not escape quote"
  private val all = allRaw.map(_.split(s"""$delimiter(?=(([^"]|(\\\\"))*"([^"]|(\\\\"))*")*([^"]|(\\\\"))*$$)""", -1).map {
    case RegexString(s) => s
    case os => os.trim
  })
  private val heads = all.head
  //println(s"headers: ${heads.mkString("|")}")
  private val mailList = all.tail.map { m => (heads zip m).toMap }

  val mails = mailList.map { m =>
    Mail(List(m(toHead)), m(toSubject), StringContext.treatEscapes(m(toText)))
  }
}
cuzfrog/maila
bmt/src/main/scala/com/github/cuzfrog/tool/bmt/FileMails.scala
Scala
apache-2.0
1,466
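The FileMails class above reads its column mapping from a Typesafe config and parses a delimited text file into Mail objects. Below is a minimal sketch of how it could be exercised; the config values and the temporary CSV content are illustrative assumptions (not taken from the bmt repository), and the example lives in the same package because the class is private[bmt].

package com.github.cuzfrog.tool.bmt

import java.nio.file.Files

import com.typesafe.config.ConfigFactory

object FileMailsExample extends App {
  // Hypothetical config: the keys mirror exactly what FileMails reads in its constructor.
  val config = ConfigFactory.parseString(
    """encoding = default
      |delimiter = ","
      |head.to = "to"
      |head.subject = "subject"
      |head.text = "text"
      |""".stripMargin)

  // Hypothetical mails file: a header row followed by a single data row.
  val tmp = Files.createTempFile("mails", ".csv")
  Files.write(tmp, "to,subject,text\nalice@example.com,Greetings,\"Hello Alice\"".getBytes("UTF-8"))

  // The constructor reads and parses the file eagerly; `mails` holds the resulting Mail list.
  val fileMails = new FileMails(config, tmp.toString)
  fileMails.mails.foreach(println)
}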
// Copyright (C) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See LICENSE in project root for information. package com.microsoft.ml.spark import org.apache.spark.ml.feature.VectorAssembler import org.apache.spark.ml.linalg.DenseVector class EnsembleByKeySuite extends TestBase { test("Should work on Dataframes doubles or vectors") { val scoreDF = session.createDataFrame(Seq( (0, "foo", 1.0, .1), (1, "bar", 4.0, -2.0), (1, "bar", 0.0, -3.0))) .toDF("label1", "label2", "score1", "score2") val va = new VectorAssembler().setInputCols(Array("score1", "score2")).setOutputCol("v1") val scoreDF2 = va.transform(scoreDF) val t = new EnsembleByKey().setKey("label1").setCol("score1") val df1 = t.transform(scoreDF2) df1.printSchema() assert(df1.collect().map(r => (r.getInt(0), r.getDouble(1))).toSet === Set((1, 2.0), (0, 1.0))) val t2 = new EnsembleByKey().setKeys("label1", "label2").setCols("score1", "score2", "v1") val df2 = t2.transform(scoreDF2) val res2 = df2.select("mean(score1)", "mean(v1)").collect().map(r => (r.getDouble(0), r.getAs[DenseVector](1))) val true2 = Set( (2.0, new DenseVector(Array(2.0, -2.5))), (1.0, new DenseVector(Array(1.0, 0.1)))) assert(res2.toSet === true2) } test("should support collapsing or not") { val scoreDF = session.createDataFrame( Seq((0, "foo", 1.0, .1), (1, "bar", 4.0, -2.0), (1, "bar", 0.0, -3.0))) .toDF("label1", "label2", "score1", "score2") val va = new VectorAssembler().setInputCols(Array("score1", "score2")).setOutputCol("v1") val scoreDF2 = va.transform(scoreDF) val t = new EnsembleByKey().setKey("label1").setCol("score1").setCollapseGroup(false) val df1 = t.transform(scoreDF2) assert(df1.collect().map(r => (r.getInt(0), r.getDouble(5))).toSet === Set((1, 2.0), (0, 1.0))) assert(df1.count() == scoreDF.count()) df1.show() } test("should overwrite a column if instructed") { val scoreDF = session.createDataFrame( Seq((0, "foo", 1.0, .1), (1, "bar", 4.0, -2.0), (1, "bar", 0.0, -3.0))) .toDF("label1", "label2", "score1", "score2") val va = new VectorAssembler().setInputCols(Array("score1", "score2")).setOutputCol("v1") val scoreDF2 = va.transform(scoreDF) val t = new EnsembleByKey().setKey("label1").setCol("score1").setColName("score1").setCollapseGroup(false) val df1 = t.transform(scoreDF2) assert(scoreDF2.columns.toSet === df1.columns.toSet) } }
rastala/mmlspark
src/ensemble/src/test/scala/EnsembleByKeySuite.scala
Scala
mit
2,595
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.runtime.batch.sql.join import org.apache.flink.api.common.typeinfo.BasicTypeInfo._ import org.apache.flink.api.java.typeutils.RowTypeInfo import org.apache.flink.table.api.Types import org.apache.flink.table.planner.runtime.utils.{BatchTableEnvUtil, BatchTestBase, InMemoryLookupableTableSource} import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.{Before, Test} import java.lang.Boolean import java.util import scala.collection.JavaConversions._ @RunWith(classOf[Parameterized]) class LookupJoinITCase(isAsyncMode: Boolean) extends BatchTestBase { val data = List( BatchTestBase.row(1L, 12L, "Julian"), BatchTestBase.row(2L, 15L, "Hello"), BatchTestBase.row(3L, 15L, "Fabian"), BatchTestBase.row(8L, 11L, "Hello world"), BatchTestBase.row(9L, 12L, "Hello world!")) val dataWithNull = List( BatchTestBase.row(null, 15L, "Hello"), BatchTestBase.row(3L, 15L, "Fabian"), BatchTestBase.row(null, 11L, "Hello world"), BatchTestBase.row(9L, 12L, "Hello world!")) val typeInfo = new RowTypeInfo(LONG_TYPE_INFO, LONG_TYPE_INFO, STRING_TYPE_INFO) val userData = List( (11, 1L, "Julian"), (22, 2L, "Jark"), (33, 3L, "Fabian")) val userTableSource = InMemoryLookupableTableSource.builder() .data(userData) .field("age", Types.INT) .field("id", Types.LONG) .field("name", Types.STRING) .build() val userAsyncTableSource = InMemoryLookupableTableSource.builder() .data(userData) .field("age", Types.INT) .field("id", Types.LONG) .field("name", Types.STRING) .enableAsync() .build() val userDataWithNull = List( (11, 1L, "Julian"), (22, null, "Hello"), (33, 3L, "Fabian"), (44, null, "Hello world")) val userWithNullDataTableSource = InMemoryLookupableTableSource.builder() .data(userDataWithNull) .field("age", Types.INT) .field("id", Types.LONG) .field("name", Types.STRING) .build() val userAsyncWithNullDataTableSource = InMemoryLookupableTableSource.builder() .data(userDataWithNull) .field("age", Types.INT) .field("id", Types.LONG) .field("name", Types.STRING) .enableAsync() .build() var userTable: String = _ var userTableWithNull: String = _ @Before override def before() { super.before() BatchTableEnvUtil.registerCollection(tEnv, "T0", data, typeInfo, "id, len, content") val myTable = tEnv.sqlQuery("SELECT *, PROCTIME() as proctime FROM T0") tEnv.registerTable("T", myTable) BatchTableEnvUtil.registerCollection( tEnv, "T1", dataWithNull, typeInfo, "id, len, content") val myTable1 = tEnv.sqlQuery("SELECT *, PROCTIME() as proctime FROM T1") tEnv.registerTable("nullableT", myTable1) tEnv.registerTableSource("userTable", userTableSource) tEnv.registerTableSource("userAsyncTable", userAsyncTableSource) userTable = if (isAsyncMode) "userAsyncTable" else "userTable" 
tEnv.registerTableSource("userWithNullDataTable", userWithNullDataTableSource) tEnv.registerTableSource("userWithNullDataAsyncTable", userAsyncWithNullDataTableSource) userTableWithNull = if (isAsyncMode) "userWithNullDataAsyncTable" else "userWithNullDataTable" // TODO: enable object reuse until [FLINK-12351] is fixed. env.getConfig.disableObjectReuse() } @Test def testLeftJoinTemporalTableWithLocalPredicate(): Unit = { val sql = s"SELECT T.id, T.len, T.content, D.name, D.age FROM T LEFT JOIN $userTable " + "for system_time as of T.proctime AS D ON T.id = D.id " + "AND T.len > 1 AND D.age > 20 AND D.name = 'Fabian' " + "WHERE T.id > 1" val expected = Seq( BatchTestBase.row(2, 15, "Hello", null, null), BatchTestBase.row(3, 15, "Fabian", "Fabian", 33), BatchTestBase.row(8, 11, "Hello world", null, null), BatchTestBase.row(9, 12, "Hello world!", null, null)) checkResult(sql, expected, false) } @Test def testJoinTemporalTable(): Unit = { val sql = s"SELECT T.id, T.len, T.content, D.name FROM T JOIN $userTable " + "for system_time as of T.proctime AS D ON T.id = D.id" val expected = Seq( BatchTestBase.row(1, 12, "Julian", "Julian"), BatchTestBase.row(2, 15, "Hello", "Jark"), BatchTestBase.row(3, 15, "Fabian", "Fabian")) checkResult(sql, expected, false) } @Test def testJoinTemporalTableWithPushDown(): Unit = { val sql = s"SELECT T.id, T.len, T.content, D.name FROM T JOIN $userTable " + "for system_time as of T.proctime AS D ON T.id = D.id AND D.age > 20" val expected = Seq( BatchTestBase.row(2, 15, "Hello", "Jark"), BatchTestBase.row(3, 15, "Fabian", "Fabian")) checkResult(sql, expected, false) } @Test def testJoinTemporalTableWithNonEqualFilter(): Unit = { val sql = s"SELECT T.id, T.len, T.content, D.name, D.age FROM T JOIN $userTable " + "for system_time as of T.proctime AS D ON T.id = D.id WHERE T.len <= D.age" val expected = Seq( BatchTestBase.row(2, 15, "Hello", "Jark", 22), BatchTestBase.row(3, 15, "Fabian", "Fabian", 33)) checkResult(sql, expected, false) } @Test def testJoinTemporalTableOnMultiFields(): Unit = { val sql = s"SELECT T.id, T.len, D.name FROM T JOIN $userTable " + "for system_time as of T.proctime AS D ON T.id = D.id AND T.content = D.name" val expected = Seq( BatchTestBase.row(1, 12, "Julian"), BatchTestBase.row(3, 15, "Fabian")) checkResult(sql, expected, false) } @Test def testJoinTemporalTableOnMultiFieldsWithUdf(): Unit = { val sql = s"SELECT T.id, T.len, D.name FROM T JOIN $userTable " + "for system_time as of T.proctime AS D ON mod(T.id, 4) = D.id AND T.content = D.name" val expected = Seq( BatchTestBase.row(1, 12, "Julian"), BatchTestBase.row(3, 15, "Fabian")) checkResult(sql, expected, false) } @Test def testJoinTemporalTableOnMultiKeyFields(): Unit = { val sql = s"SELECT T.id, T.len, D.name FROM T JOIN $userTable " + "for system_time as of T.proctime AS D ON T.content = D.name AND T.id = D.id" val expected = Seq( BatchTestBase.row(1, 12, "Julian"), BatchTestBase.row(3, 15, "Fabian")) checkResult(sql, expected, false) } @Test def testLeftJoinTemporalTable(): Unit = { val sql = s"SELECT T.id, T.len, D.name, D.age FROM T LEFT JOIN $userTable " + "for system_time as of T.proctime AS D ON T.id = D.id" val expected = Seq( BatchTestBase.row(1, 12, "Julian", 11), BatchTestBase.row(2, 15, "Jark", 22), BatchTestBase.row(3, 15, "Fabian", 33), BatchTestBase.row(8, 11, null, null), BatchTestBase.row(9, 12, null, null)) checkResult(sql, expected, false) } @Test def testJoinTemporalTableOnMultiKeyFieldsWithNullData(): Unit = { val sql = s"SELECT T.id, T.len, D.name FROM 
nullableT T JOIN $userTableWithNull " + "for system_time as of T.proctime AS D ON T.content = D.name AND T.id = D.id" val expected = Seq( BatchTestBase.row(3,15,"Fabian")) checkResult(sql, expected, false) } @Test def testLeftJoinTemporalTableOnMultiKeyFieldsWithNullData(): Unit = { val sql = s"SELECT D.id, T.len, D.name FROM nullableT T LEFT JOIN $userTableWithNull " + "for system_time as of T.proctime AS D ON T.content = D.name AND T.id = D.id" val expected = Seq( BatchTestBase.row(null,15,null), BatchTestBase.row(3,15,"Fabian"), BatchTestBase.row(null,11,null), BatchTestBase.row(null,12,null)) checkResult(sql, expected, false) } @Test def testJoinTemporalTableOnNullConstantKey(): Unit = { val sql = s"SELECT T.id, T.len, T.content FROM T JOIN $userTable " + "for system_time as of T.proctime AS D ON D.id = null" val expected = Seq() checkResult(sql, expected, false) } @Test def testJoinTemporalTableOnMultiKeyFieldsWithNullConstantKey(): Unit = { val sql = s"SELECT T.id, T.len, D.name FROM T JOIN $userTable " + "for system_time as of T.proctime AS D ON T.content = D.name AND null = D.id" val expected = Seq() checkResult(sql, expected, false) } } object LookupJoinITCase { @Parameterized.Parameters(name = "isAsyncMode = {0}") def parameters(): util.Collection[Array[java.lang.Object]] = { Seq[Array[AnyRef]]( Array(Boolean.TRUE), Array(Boolean.FALSE) ) } }
bowenli86/flink
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/join/LookupJoinITCase.scala
Scala
apache-2.0
9,243
/** * Copyright (C) 2013 Orbeon, Inc. * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU Lesser General Public License as published by the Free Software Foundation; either version * 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html */ package org.orbeon.oxf.externalcontext import org.orbeon.oxf.pipeline.api.ExternalContext.Request import collection.JavaConverters._ object ExternalContextOps { implicit class RequestOps(request: Request) { // NOTE: Ideally would return immutable.Map def parameters: collection.Map[String, Array[AnyRef]] = request.getParameterMap.asScala def getFirstParamAsString(name: String) = Option(request.getParameterMap.get(name)) flatMap (_ collectFirst { case s: String ⇒ s }) def getFirstHeader(name: String) = Option(request.getHeaderValuesMap.get(name)) flatMap (_.lift(0)) } }
wesley1001/orbeon-forms
src/main/scala/org/orbeon/oxf/externalcontext/ExternalContextOps.scala
Scala
lgpl-2.1
1,267
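The ExternalContextOps object above enriches Orbeon's Request with a few convenience accessors via an implicit class. The sketch below shows the call site; it assumes a Request instance is already available from the container, and the parameter and header names are illustrative only.

import org.orbeon.oxf.externalcontext.ExternalContextOps._
import org.orbeon.oxf.pipeline.api.ExternalContext.Request

object RequestOpsExample {
  // The implicit RequestOps wrapper adds these helpers on any Request in scope.
  def describe(request: Request): String = {
    // First String value of a request parameter, if any ("document-id" is a made-up name).
    val documentId = request.getFirstParamAsString("document-id") getOrElse "unknown"
    // First value of an HTTP header, if present.
    val userAgent = request.getFirstHeader("user-agent") getOrElse "unknown"
    s"document-id=$documentId, user-agent=$userAgent"
  }
}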
package clide.nlp.assistant import java.util.Collections import clide.assistants.{AssistantControl, AssistantBehavior, Cursor} import clide.collaboration.{Annotations, Operation} import clide.models.{OpenedFile, SessionInfo} /** * If you override any existing methods do not forget to call their * super-class methods. */ trait ReconcilingAssistantBehavior[State,Chunk] extends AssistantBehavior with ReconcilerStateTracker[State,Chunk] { //type AnnotationRequest = (OpenedFile, State, Chunk, Int) /** * Return the assistant control that is passed to your AssistantServer. * Use `control` to access it later. */ protected def assistantControl: AssistantControl lazy val control: ReconcilingAssistantControl = new ReconcilingAssistantControl(assistantControl) /** * Return a IReconciler instance that defines the reconciling behavior of the * assistant */ def reconciler: IReconciler[State,Chunk] def reconcileChunkAtPoint(session: SessionInfo, file: OpenedFile, point: Int) = for { (newState, Some(chunk)) <- chunkAtPoint(file, point) } yield { val v = (file, newState, chunk, point) rememberLastAnnotationPoint(session, file, point) enqueueAnnotationRequest(v) } /** * Implementing classes should pass the given request their own annotation * processing pipeline. This method should return as fast as possible, so defer * processing the request for later. * * If desired, an annotation request can be safely ignored. */ def enqueueAnnotationRequest(request: AnnotationRequest) /** * @return the annotation streams the assistant can offer */ def streams: Map[String,String] def offerStreams(file: OpenedFile): Unit = streams.toSeq.sortBy(_._1).foreach { case (k, v) => control.offerAnnotations(file, k, Some(v)) } def fileOpened(file: OpenedFile): Unit = { track(file) offerStreams(file) } def fileActivated(file: OpenedFile): Unit = offerStreams(file) def fileClosed(file: OpenedFile): Unit = untrack(file) /** * called when a file in the assistants scope has been edited. * @param file the state of the file **after** the edit occured. * @param delta the operation that has been performed */ def fileChanged(file: OpenedFile, delta: Operation, cursors: Seq[Cursor]): Unit = { control.workOnFile(file) update(file, delta) for(cursor <- cursors) reconcileChunkAtPoint(cursor.owner, cursor.file, cursor.anchor) } /** * called when some active collaborator moved the cursor in some file that * belongs to the assistants scope. */ def cursorMoved(cursor: Cursor): Unit = { control.workOnFile(cursor.file) reconcileChunkAtPoint(cursor.owner, cursor.file, cursor.anchor) } /** * at least one client is interested in seeing the specified annotation stream */ def annotationsRequested(file: OpenedFile, name: String): Unit = { control.annotate(file, name, new Annotations()) control.subscribe(file, name) // Reannotate last chunk to make the annotation visible reannotateLastPoint(file) } /** * all clients dropped their interest in seeing the specified annotation stream */ def annotationsDisregarded(file: OpenedFile, name: String): Unit = { control.unsubscribe(file, name) // Reannotate last chunk to hide the annotation for all collaborators reannotateLastPoint(file) } def reannotateLastPoint(file: OpenedFile): Unit = lastAnnotationPoint match { case Some(last@(f, _, _, _)) if f.info.id == file.info.id => control.workOnFile(file) enqueueAnnotationRequest(last) case _ => Unit } }
t6/clide-nlp
src-scala/clide/nlp/assistant/ReconcilingAssistantBehavior.scala
Scala
lgpl-3.0
3,837
package org.coursera.courier.data import javax.annotation.Generated import com.linkedin.data.ByteString import com.linkedin.data.DataList import com.linkedin.data.DataMap import com.linkedin.data.schema.ArrayDataSchema import com.linkedin.data.schema.DataSchema import com.linkedin.data.template.DataTemplateUtil import com.linkedin.data.template.DataTemplate import org.coursera.courier.companions.ArrayCompanion import org.coursera.courier.templates.DataTemplates import org.coursera.courier.templates.DataTemplates.DataConversion import org.coursera.courier.templates.ScalaArrayTemplate import scala.collection.GenTraversable import scala.collection.JavaConverters._ import scala.collection.generic.CanBuildFrom import scala.collection.mutable import com.linkedin.data.template.Custom @Generated(value = Array("StringArray"), comments = "Courier Data Template.", date = "Fri Aug 14 14:51:38 PDT 2015") final class StringArray(private val dataList: DataList) extends IndexedSeq[String] with Product with GenTraversable[String] with DataTemplate[DataList] with ScalaArrayTemplate { override def length: Int = dataList.size() private[this] lazy val list = dataList.asScala.map(coerceInput) private[this] def coerceInput(any: AnyRef): String = { DataTemplateUtil.coerceOutput(any, classOf[java.lang.String]) } override def apply(idx: Int): String = list(idx) override def productElement(n: Int): Any = dataList.get(n) override def productArity: Int = dataList.size() override def schema(): DataSchema = StringArray.SCHEMA override def data(): DataList = dataList override def copy(): DataTemplate[DataList] = this override def copy(dataList: DataList, conversion: DataConversion): ScalaArrayTemplate = StringArray.build(dataList, conversion) } object StringArray extends ArrayCompanion[StringArray] { val SCHEMA = DataTemplateUtil.parseSchema("""{"type":"array","items":"string"}""").asInstanceOf[ArrayDataSchema] val empty = StringArray() def apply(elems: String*): StringArray = { new StringArray(new DataList(elems.map(coerceOutput).toList.asJava)) } def apply(collection: Traversable[String]): StringArray = { new StringArray(new DataList(collection.map(coerceOutput).toList.asJava)) } def build(dataList: DataList, conversion: DataConversion): StringArray = { new StringArray(DataTemplates.makeImmutable(dataList, conversion)) } def newBuilder = new DataBuilder() implicit val canBuildFrom = new CanBuildFrom[StringArray, String, StringArray] { def apply(from: StringArray) = new DataBuilder(from) def apply() = newBuilder } class DataBuilder(initial: StringArray) extends mutable.Builder[String, StringArray] { def this() = this(new StringArray(new DataList())) val elems = new DataList(initial.data()) def +=(x: String): this.type = { elems.add(coerceOutput(x)) this } def clear() = { elems.clear() } def result() = { elems.makeReadOnly() new StringArray(elems) } } private def coerceOutput(value: String): AnyRef = { DataTemplateUtil.coerceInput(value, classOf[java.lang.String], classOf[java.lang.String]) } implicit def wrap(traversable: Traversable[String]): StringArray = { StringArray(traversable) } }
coursera/courier
scala/runtime/src/main/scala/org/coursera/courier/data/StringArray.scala
Scala
apache-2.0
3,309
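StringArray above behaves like an ordinary immutable IndexedSeq[String] while keeping its data in an underlying DataList, and its companion provides implicits to convert plain Scala collections back and forth. A small illustrative sketch, using only the API defined in the file:

import org.coursera.courier.data.StringArray

object StringArrayExample extends App {
  // Varargs construction stores the elements in an underlying DataList.
  val names = StringArray("alice", "bob", "carol")
  println(names.length) // 3
  println(names(1))     // bob

  // Results of collection operations can be rebuilt explicitly from any Traversable[String].
  val upper = StringArray(names.map(_.toUpperCase))
  println(upper.mkString(",")) // ALICE,BOB,CAROL

  // Any Traversable[String] can also be wrapped implicitly via the companion's `wrap`.
  val wrapped: StringArray = Seq("x", "y")
  println(wrapped.data()) // the backing DataList
}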
// scalac: -Ycheck-all-patmat object Test{ def foo(a: Int) = a match { case 5 => "Five!" case 42 => "The answer." } def bar(a: (Int, Int)) = a match { case (5, 5) => "Two fives!" case (42, 21) => "The answer and a half." } def baz(a: (Boolean, Boolean)) = a match { case (true, false) => "tf" case (false, true) => "ft" } }
dotty-staging/dotty
tests/patmat/t4526.scala
Scala
apache-2.0
366
// Copyright (c) 2016 Marco Marini, marco.marini@mmarini.org // // Licensed under the MIT License (MIT); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://opensource.org/licenses/MIT // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. package org.mmarini.actd import breeze.linalg.DenseVector /** * A learning agent that replies to stimulus with actions and learns by receiving rewards * * [[TDAgent]] extends Agent */ trait Agent { /** Returns the action to be taken in a state */ def action(status: Status): Int /** Returns a new agent that learns by the feedback */ def train(feedback: Feedback): (Agent, Double) }
m-marini/actd
src/main/scala/org/mmarini/actd/Agent.scala
Scala
mit
1,741
package io.udash.bootstrap.button import io.udash._ import io.udash.bootstrap._ import io.udash.bootstrap.utils.BootstrapStyles import io.udash.testing.UdashCoreFrontendTest import io.udash.wrappers.jquery._ class UdashButtonGroupTest extends UdashCoreFrontendTest { "UdashButtonGroup component" should { "render static buttons group" in { val buttons = UdashButtonGroup()( UdashButton()("Button 1").render, UdashButton()("Button 2").render, UdashButton()("Button 3").render ) val el = buttons.render el.childElementCount should be(3) el.textContent should be("Button 1Button 2Button 3") } "render reactive buttons group" in { val labels = SeqProperty(Seq("Button 1", "Button 2", "Button 3")) val buttons = UdashButtonGroup.reactive(labels) { (label, nested) => val btn = UdashButton()(n => n(bind(label))) nested(btn) btn.render } val el = buttons.render el.childElementCount should be(3) el.textContent should be("Button 1Button 2Button 3") labels.append("Button 4") el.childElementCount should be(4) el.textContent should be("Button 1Button 2Button 3Button 4") buttons.kill() ensureNoListeners(labels) } "render checkbox buttons group" in { val buttons = SeqProperty("Button 1", "Button 2", "Button 3") val selected = SeqProperty.blank[String] val group = UdashButtonGroup.checkboxes(selected, buttons)() val el = group.render val children: JQuery = jQ(el).children() el.childElementCount should be(3) el.textContent should be("Button 1Button 2Button 3") children.at(0).hasClass(BootstrapStyles.active.className) should be(false) children.at(1).hasClass(BootstrapStyles.active.className) should be(false) children.at(2).hasClass(BootstrapStyles.active.className) should be(false) selected.append("Button 1") selected.append("Button 2") el.childElementCount should be(3) el.textContent should be("Button 1Button 2Button 3") children.at(0).hasClass(BootstrapStyles.active.className) should be(true) children.at(1).hasClass(BootstrapStyles.active.className) should be(true) children.at(2).hasClass(BootstrapStyles.active.className) should be(false) buttons.append("Button 4") el.childElementCount should be(4) el.textContent should be("Button 1Button 2Button 3Button 4") val children4: JQuery = jQ(el).children() children4.at(0).hasClass(BootstrapStyles.active.className) should be(true) children4.at(1).hasClass(BootstrapStyles.active.className) should be(true) children4.at(2).hasClass(BootstrapStyles.active.className) should be(false) children4.at(3).hasClass(BootstrapStyles.active.className) should be(false) selected.append("Button 4") el.childElementCount should be(4) el.textContent should be("Button 1Button 2Button 3Button 4") children4.at(0).hasClass(BootstrapStyles.active.className) should be(true) children4.at(1).hasClass(BootstrapStyles.active.className) should be(true) children4.at(2).hasClass(BootstrapStyles.active.className) should be(false) children4.at(3).hasClass(BootstrapStyles.active.className) should be(true) group.kill() ensureNoListeners(buttons) ensureNoListeners(selected) } "render radio buttons group" in { val buttons = SeqProperty("Button 1", "Button 2", "Button 3") val selected = Property("") val group = UdashButtonGroup.radio(selected, buttons)() val el = group.render val children: JQuery = jQ(el).children() el.childElementCount should be(3) el.textContent should be("Button 1Button 2Button 3") children.at(0).hasClass(BootstrapStyles.active.className) should be(false) children.at(1).hasClass(BootstrapStyles.active.className) should be(false) children.at(2).hasClass(BootstrapStyles.active.className) should 
be(false) selected.set("Button 1") el.childElementCount should be(3) el.textContent should be("Button 1Button 2Button 3") children.at(0).hasClass(BootstrapStyles.active.className) should be(true) children.at(1).hasClass(BootstrapStyles.active.className) should be(false) children.at(2).hasClass(BootstrapStyles.active.className) should be(false) buttons.append("Button 4") el.childElementCount should be(4) el.textContent should be("Button 1Button 2Button 3Button 4") val children4: JQuery = jQ(el).children() children4.at(0).hasClass(BootstrapStyles.active.className) should be(true) children4.at(1).hasClass(BootstrapStyles.active.className) should be(false) children4.at(2).hasClass(BootstrapStyles.active.className) should be(false) children4.at(3).hasClass(BootstrapStyles.active.className) should be(false) selected.set("Button 4") el.childElementCount should be(4) el.textContent should be("Button 1Button 2Button 3Button 4") children4.at(0).hasClass(BootstrapStyles.active.className) should be(false) children4.at(1).hasClass(BootstrapStyles.active.className) should be(false) children4.at(2).hasClass(BootstrapStyles.active.className) should be(false) children4.at(3).hasClass(BootstrapStyles.active.className) should be(true) group.kill() ensureNoListeners(buttons) selected.listenersCount() should be(0) } } }
UdashFramework/udash-core
bootstrap4/.js/src/test/scala/io/udash/bootstrap/button/UdashButtonGroupTest.scala
Scala
apache-2.0
5,491
//: ---------------------------------------------------------------------------- //: Copyright (C) 2015 Verizon. All Rights Reserved. //: //: Licensed under the Apache License, Version 2.0 (the "License"); //: you may not use this file except in compliance with the License. //: You may obtain a copy of the License at //: //: http://www.apache.org/licenses/LICENSE-2.0 //: //: Unless required by applicable law or agreed to in writing, software //: distributed under the License is distributed on an "AS IS" BASIS, //: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //: See the License for the specific language governing permissions and //: limitations under the License. //: //: ---------------------------------------------------------------------------- package funnel package nginx case class Stats( connections: Long = 0, accepts: Long = 0, handled: Long = 0, requests: Long = 0, reading: Long = 0, writing: Long = 0, waiting: Long = 0 )
neigor/funnel
nginx/src/main/scala/Stats.scala
Scala
apache-2.0
1,009
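Stats above is a plain immutable holder whose fields all default to zero, so callers typically set only what a given nginx status probe reported. A tiny illustrative sketch (the numbers are invented):

import funnel.nginx.Stats

object StatsExample extends App {
  // Fields not mentioned keep their 0 defaults.
  val snapshot = Stats(connections = 3, accepts = 120, handled = 120, requests = 354)
  // copy(...) yields an updated immutable value.
  val next = snapshot.copy(requests = snapshot.requests + 1)
  println(snapshot)
  println(next.requests) // 355
}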
package org.hokiegeek2.storm.extensions.io.client.config class ClientConfig(host : String,port : Int) { def getHost : String = { host } def getPort : Int = { port } }
hokiegeek2/storm-extensions
src/main/scala/org/hokiegeek2/storm/extensions/io/client/config/ClientConfig.scala
Scala
apache-2.0
190
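ClientConfig above simply carries a host/port pair and echoes it back through its accessors. A one-line sketch of its use (the endpoint values are made up):

import org.hokiegeek2.storm.extensions.io.client.config.ClientConfig

object ClientConfigExample extends App {
  val config = new ClientConfig("localhost", 6667) // hypothetical endpoint
  println(s"${config.getHost}:${config.getPort}")  // localhost:6667
}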
package mesosphere.marathon.state import com.codahale.metrics.MetricRegistry import mesosphere.marathon.metrics.Metrics import mesosphere.marathon.state.PathId._ import mesosphere.marathon.{ MarathonConf, MarathonSpec, StoreCommandFailedException } import mesosphere.util.ThreadPoolContext import mesosphere.util.state.memory.InMemoryStore import mesosphere.util.state.{ PersistentEntity, PersistentStore } import org.mockito.Matchers._ import org.mockito.Mockito._ import org.rogach.scallop.ScallopConf import org.scalatest.Matchers import org.scalatest.concurrent.ScalaFutures._ import scala.collection.immutable.Seq import scala.concurrent._ import scala.concurrent.duration._ import scala.language.postfixOps class MarathonStoreTest extends MarathonSpec with Matchers { var metrics: Metrics = _ before { metrics = new Metrics(new MetricRegistry) } test("Fetch") { val state = mock[PersistentStore] val variable = mock[PersistentEntity] val now = Timestamp.now() val appDef = AppDefinition(id = "testApp".toPath, args = Some(Seq("arg")), versionInfo = AppDefinition.VersionInfo.forNewConfig(now)) val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() when(variable.bytes).thenReturn(appDef.toProtoByteArray) when(state.load("app:testApp")).thenReturn(Future.successful(Some(variable))) val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") val res = store.fetch("testApp") verify(state).load("app:testApp") assert(Some(appDef) == Await.result(res, 5.seconds), "Should return the expected AppDef") } test("FetchFail") { val state = mock[PersistentStore] when(state.load("app:testApp")).thenReturn(Future.failed(new StoreCommandFailedException("failed"))) val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") val res = store.fetch("testApp") verify(state).load("app:testApp") intercept[StoreCommandFailedException] { Await.result(res, 5.seconds) } } test("Modify") { val state = mock[PersistentStore] val variable = mock[PersistentEntity] val now = Timestamp.now() val appDef = AppDefinition(id = "testApp".toPath, args = Some(Seq("arg")), versionInfo = AppDefinition.VersionInfo.forNewConfig(now)) val newAppDef = appDef.copy(id = "newTestApp".toPath) val newVariable = mock[PersistentEntity] val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() when(newVariable.bytes).thenReturn(newAppDef.toProtoByteArray) when(variable.bytes).thenReturn(appDef.toProtoByteArray) when(variable.withNewContent(any())).thenReturn(newVariable) when(state.load("app:testApp")).thenReturn(Future.successful(Some(variable))) when(state.update(newVariable)).thenReturn(Future.successful(newVariable)) val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") val res = store.modify("testApp") { _ => newAppDef } assert(newAppDef == Await.result(res, 5.seconds), "Should return the new AppDef") verify(state).load("app:testApp") verify(state).update(newVariable) } test("ModifyFail") { val state = mock[PersistentStore] val variable = mock[PersistentEntity] val appDef = AppDefinition(id = "testApp".toPath, args = Some(Seq("arg"))) val newAppDef = appDef.copy(id = "newTestApp".toPath) val newVariable = mock[PersistentEntity] val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() when(newVariable.bytes).thenReturn(newAppDef.toProtoByteArray) 
when(variable.bytes).thenReturn(appDef.toProtoByteArray) when(variable.withNewContent(any())).thenReturn(newVariable) when(state.load("app:testApp")).thenReturn(Future.successful(Some(variable))) when(state.update(newVariable)).thenReturn(Future.failed(new StoreCommandFailedException("failed"))) val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") val res = store.modify("testApp") { _ => newAppDef } intercept[StoreCommandFailedException] { Await.result(res, 5.seconds) } } test("Expunge") { val state = mock[PersistentStore] val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() when(state.delete("app:testApp")).thenReturn(Future.successful(true)) val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") val res = store.expunge("testApp") Await.ready(res, 5.seconds) verify(state).delete("app:testApp") } test("ExpungeFail") { val state = mock[PersistentStore] val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() when(state.delete("app:testApp")).thenReturn(Future.failed(new StoreCommandFailedException("failed"))) val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") val res = store.expunge("testApp") intercept[StoreCommandFailedException] { Await.result(res, 5.seconds) } } test("Names") { val state = new InMemoryStore val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() def populate(key: String, value: Array[Byte]) = { state.load(key).futureValue match { case Some(ent) => state.update(ent.withNewContent(value)).futureValue case None => state.create(key, value).futureValue } } populate("app:foo", Array()) populate("app:bar", Array()) populate("no_match", Array()) val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") val res = store.names() assert(Set("foo", "bar") == Await.result(res, 5.seconds).toSet, "Should return all application keys") } test("NamesFail") { val state = mock[PersistentStore] val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() when(state.allIds()).thenReturn(Future.failed(new StoreCommandFailedException("failed"))) val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") val res = store.names() whenReady(res.failed) { _ shouldBe a[StoreCommandFailedException] } } test("ConcurrentModifications") { val state = new InMemoryStore val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") store.store("foo", AppDefinition(id = "foo".toPath, instances = 0)).futureValue def plusOne() = { store.modify("foo") { f => val appDef = f() appDef.copy(instances = appDef.instances + 1) } } val results = for (_ <- 0 until 1000) yield plusOne() implicit val ec = ThreadPoolContext.context val res = Future.sequence(results) Await.ready(res, 5.seconds) assert(1000 == Await.result(store.fetch("foo"), 5.seconds).map(_.instances) .getOrElse(0), "Instances of 'foo' should be set to 1000") } // regression test for #1481 ignore("names() correctly uses timeouts") { val state = new InMemoryStore() { override def allIds(): Future[scala.Seq[ID]] = Future { synchronized { blocking(wait()) } Seq.empty } } val config = new ScallopConf(Seq("--master", "foo", "--marathon_store_timeout", "1")) with MarathonConf config.afterInit() val store = new 
MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") noException should be thrownBy { Await.result(store.names(), 1.second) } } // regression test for #1507 test("state.names() throwing exception is treated as empty iterator (ExecutionException without cause)") { val state = new InMemoryStore() { override def allIds(): Future[scala.Seq[ID]] = super.allIds() } val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") noException should be thrownBy { Await.result(store.names(), 1.second) } } class MyWeirdExecutionException extends ExecutionException("weird without cause") // regression test for #1507 test("state.names() throwing exception is treated as empty iterator (ExecutionException with itself as cause)") { val state = new InMemoryStore() { override def allIds(): Future[scala.Seq[ID]] = super.allIds() } val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") noException should be thrownBy { Await.result(store.names(), 1.second) } } test("state.names() throwing exception is treated as empty iterator (direct)") { val state = new InMemoryStore() { override def allIds(): Future[scala.Seq[ID]] = super.allIds() } val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") noException should be thrownBy { Await.result(store.names(), 1.second) } } test("state.names() throwing exception is treated as empty iterator (RuntimeException in ExecutionException)") { val state = new InMemoryStore() { override def allIds(): Future[scala.Seq[ID]] = super.allIds() } val config = new ScallopConf(Seq("--master", "foo")) with MarathonConf config.afterInit() val store = new MarathonStore[AppDefinition](state, metrics, () => AppDefinition(), "app:") noException should be thrownBy { Await.result(store.names(), 1.second) } } def registry: MetricRegistry = new MetricRegistry }
okuryu/marathon
src/test/scala/mesosphere/marathon/state/MarathonStoreTest.scala
Scala
apache-2.0
10,146
package com.programmaticallyspeaking.ncd.chrome.domains import com.programmaticallyspeaking.ncd.host.ObjectId import com.programmaticallyspeaking.ncd.testing.UnitTest class ScriptEvaluateSupportTest extends UnitTest { "wrapInFunction" - { "doesn't wrap if there are no call arguments" in { val functionDecl = "function () {}" val args = Seq.empty[Runtime.CallArgument] val wrapper = ScriptEvaluateSupport.wrapInFunction(functionDecl, args) wrapper should be ((functionDecl, Seq.empty)) } "doesn't wrap if there are only object arguments" in { val functionDecl = "function (arg) {}" val args = Seq(Runtime.CallArgument(None, None, Some(ObjectId("x").toString))) val wrapper = ScriptEvaluateSupport.wrapInFunction(functionDecl, args) wrapper should be ((functionDecl, Seq(ObjectId("x")))) } "wraps if there are only value arguments" in { val functionDecl = "function (arg) {}" val args = Seq(Runtime.CallArgument(Some("test"), None, None)) val wrapper = ScriptEvaluateSupport.wrapInFunction(functionDecl, args) val wf = """function() { | var argsInOrder=["test"]; | var f=(function (arg) {}); | return f.apply(this,argsInOrder); |} """.stripMargin.trim wrapper should be ((wf, Seq.empty)) } "wraps if there are only unserializable arguments" in { val functionDecl = "function (arg) {}" val args = Seq(Runtime.CallArgument(None, Some("NaN"), None)) val wrapper = ScriptEvaluateSupport.wrapInFunction(functionDecl, args) val wf = """function() { | var argsInOrder=[NaN]; | var f=(function (arg) {}); | return f.apply(this,argsInOrder); |} """.stripMargin.trim wrapper should be ((wf, Seq.empty)) } "supports a mix of object + value" in { val functionDecl = "function (a1, a2) {}" val args = Seq(Runtime.CallArgument(None, None, Some(ObjectId("x").toString)), Runtime.CallArgument(Some("test"), None, None)) val wrapper = ScriptEvaluateSupport.wrapInFunction(functionDecl, args) val wf = """function(__o_0) { | var argsInOrder=[__o_0,"test"]; | var f=(function (a1, a2) {}); | return f.apply(this,argsInOrder); |} """.stripMargin.trim wrapper should be ((wf, Seq(ObjectId("x")))) } "supports undefined as call argument" in { val functionDecl = "function (arg) {}" val args = Seq(Runtime.CallArgument(None, None, None)) val wrapper = ScriptEvaluateSupport.wrapInFunction(functionDecl, args) val wf = """function() { | var argsInOrder=[void 0]; | var f=(function (arg) {}); | return f.apply(this,argsInOrder); |} """.stripMargin.trim wrapper should be ((wf, Seq.empty)) } } }
provegard/ncdbg
src/test/scala/com/programmaticallyspeaking/ncd/chrome/domains/ScriptEvaluateSupportTest.scala
Scala
bsd-3-clause
2,939
package Pacman import Chisel._ class Net(layers: List[LayerData]) extends Module { val io = new Bundle { val ready = Bool().asOutput val start = Bool().asInput val xsIn = Vec .fill(layers(0).parameters.NumberOfCores)( Bits(width = layers(0).parameters.K)) .asInput val xsOut = Vec.fill(layers.last.parameters.NumberOfCores)(Bits(width = 1)).asOutput val xsOutValid = Bool().asOutput val done = Bool().asOutput val pipeReady = Bool().asInput } val warps = layers.map(layer => { Module(new Warp(layer)) }) val gearBoxes = layers.zip(layers.drop(1)).map { case (a, b) => { val gearBoxParameters = new GearBoxParameters(a.parameters, b.parameters) Module(new GearBox(gearBoxParameters)) } } // Hook up each warp to the gear box behind it warps.zip(gearBoxes).foreach { case (warp, gearBox) => { gearBox.io.xsIn := warp.io.xOut gearBox.io.validIn := warp.io.xOutValid gearBox.io.prevDone := warp.io.done gearBox.io.prevStart := warp.io.startOut warp.io.pipeReady := gearBox.io.ready } } // Hook up each warp to the gear box in front of it gearBoxes.zip(warps.drop(1)).foreach { case (gearBox, warp) => { warp.io.start := gearBox.io.startNext warp.io.xIn := gearBox.io.xsOut gearBox.io.nextReady := warp.io.ready } } // Hook up first warp to net io io.ready := warps(0).io.ready warps(0).io.start := io.start warps(0).io.xIn := io.xsIn // Hook up last warp to net io io.xsOut := warps.last.io.xOut io.xsOutValid := warps.last.io.xOutValid io.done := warps.last.io.done warps.last.io.pipeReady := io.pipeReady }
martinhath/bnn
src/main/scala/Net.scala
Scala
mit
1,701
/** * Copyright 2014 Dropbox, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package djinni import djinni.ast._ import java.io._ import djinni.generatorTools._ import djinni.meta._ import djinni.syntax.Error import djinni.writer.IndentWriter import scala.language.implicitConversions import scala.collection.mutable package object generatorTools { case class Spec( javaOutFolder: Option[File], javaPackage: Option[String], javaIdentStyle: JavaIdentStyle, javaCppException: Option[String], javaAnnotation: Option[String], javaNullableAnnotation: Option[String], javaNonnullAnnotation: Option[String], cppOutFolder: Option[File], cppHeaderOutFolder: Option[File], cppIncludePrefix: String, cppNamespace: String, cppIdentStyle: CppIdentStyle, cppFileIdentStyle: IdentConverter, cppOptionalTemplate: String, cppOptionalHeader: String, cppEnumHashWorkaround: Boolean, jniOutFolder: Option[File], jniHeaderOutFolder: Option[File], jniIncludePrefix: String, jniIncludeCppPrefix: String, jniNamespace: String, jniClassIdentStyle: IdentConverter, jniFileIdentStyle: IdentConverter, jniBaseLibIncludePrefix: String, cppExt: String, cppHeaderExt: String, objcOutFolder: Option[File], objcppOutFolder: Option[File], objcIdentStyle: ObjcIdentStyle, objcFileIdentStyle: IdentConverter, objcppExt: String, objcHeaderExt: String, objcIncludePrefix: String, objcppIncludePrefix: String, objcppIncludeCppPrefix: String, objcppIncludeObjcPrefix: String, objcppNamespace: String, objcBaseLibIncludePrefix: String, outFileListWriter: Option[Writer], skipGeneration: Boolean, yamlOutFolder: Option[File], yamlOutFile: Option[String], yamlPrefix: String) def preComma(s: String) = { if (s.isEmpty) s else ", " + s } def q(s: String) = '"' + s + '"' def firstUpper(token: String) = token.charAt(0).toUpper + token.substring(1) type IdentConverter = String => String case class CppIdentStyle(ty: IdentConverter, enumType: IdentConverter, typeParam: IdentConverter, method: IdentConverter, field: IdentConverter, local: IdentConverter, enum: IdentConverter, const: IdentConverter) case class JavaIdentStyle(ty: IdentConverter, typeParam: IdentConverter, method: IdentConverter, field: IdentConverter, local: IdentConverter, enum: IdentConverter, const: IdentConverter) case class ObjcIdentStyle(ty: IdentConverter, typeParam: IdentConverter, method: IdentConverter, field: IdentConverter, local: IdentConverter, enum: IdentConverter, const: IdentConverter) object IdentStyle { val camelUpper = (s: String) => s.split('_').map(firstUpper).mkString val camelLower = (s: String) => { val parts = s.split('_') parts.head + parts.tail.map(firstUpper).mkString } val underLower = (s: String) => s val underUpper = (s: String) => s.split('_').map(firstUpper).mkString("_") val underCaps = (s: String) => s.toUpperCase val prefix = (prefix: String, suffix: IdentConverter) => (s: String) => prefix + suffix(s) val javaDefault = JavaIdentStyle(camelUpper, camelUpper, camelLower, camelLower, camelLower, underCaps, underCaps) val cppDefault = CppIdentStyle(camelUpper, camelUpper, 
camelUpper, underLower, underLower, underLower, underCaps, underCaps) val objcDefault = ObjcIdentStyle(camelUpper, camelUpper, camelLower, camelLower, camelLower, camelUpper, camelUpper) val styles = Map( "FooBar" -> camelUpper, "fooBar" -> camelLower, "foo_bar" -> underLower, "Foo_Bar" -> underUpper, "FOO_BAR" -> underCaps) def infer(input: String): Option[IdentConverter] = { styles.foreach((e) => { val (str, func) = e if (input endsWith str) { val diff = input.length - str.length return Some(if (diff > 0) { val before = input.substring(0, diff) prefix(before, func) } else { func }) } }) None } } final case class SkipFirst() { private var first = true def apply(f: => Unit) { if (first) { first = false } else { f } } } case class GenerateException(message: String) extends java.lang.Exception(message) def createFolder(name: String, folder: File) { folder.mkdirs() if (folder.exists) { if (!folder.isDirectory) { throw new GenerateException(s"Unable to create $name folder at ${q(folder.getPath)}, there's something in the way.") } } else { throw new GenerateException(s"Unable to create $name folder at ${q(folder.getPath)}.") } } def generate(idl: Seq[TypeDecl], spec: Spec): Option[String] = { try { if (spec.cppOutFolder.isDefined) { if (!spec.skipGeneration) { createFolder("C++", spec.cppOutFolder.get) createFolder("C++ header", spec.cppHeaderOutFolder.get) } new CppGenerator(spec).generate(idl) } if (spec.javaOutFolder.isDefined) { if (!spec.skipGeneration) { createFolder("Java", spec.javaOutFolder.get) } new JavaGenerator(spec).generate(idl) } if (spec.jniOutFolder.isDefined) { if (!spec.skipGeneration) { createFolder("JNI C++", spec.jniOutFolder.get) createFolder("JNI C++ header", spec.jniHeaderOutFolder.get) } new JNIGenerator(spec).generate(idl) } if (spec.objcOutFolder.isDefined) { if (!spec.skipGeneration) { createFolder("Objective-C", spec.objcOutFolder.get) } new ObjcGenerator(spec).generate(idl) } if (spec.objcppOutFolder.isDefined) { if (!spec.skipGeneration) { createFolder("Objective-C++", spec.objcppOutFolder.get) } new ObjcppGenerator(spec).generate(idl) } if (spec.yamlOutFolder.isDefined) { if (!spec.skipGeneration) { createFolder("YAML", spec.yamlOutFolder.get) new YamlGenerator(spec).generate(idl) } } None } catch { case GenerateException(message) => Some(message) } } sealed abstract class SymbolReference case class ImportRef(arg: String) extends SymbolReference // Already contains <> or "" in C contexts case class DeclRef(decl: String, namespace: Option[String]) extends SymbolReference } abstract class Generator(spec: Spec) { protected val writtenFiles = mutable.HashMap[String,String]() protected def createFile(folder: File, fileName: String, makeWriter: OutputStreamWriter => IndentWriter, f: IndentWriter => Unit): Unit = { if (spec.outFileListWriter.isDefined) { spec.outFileListWriter.get.write(new File(folder, fileName).getPath + "\\n") } if (spec.skipGeneration) { return } val file = new File(folder, fileName) val cp = file.getCanonicalPath writtenFiles.put(cp.toLowerCase, cp) match { case Some(existing) => if (existing == cp) { throw GenerateException("Refusing to write \\"" + file.getPath + "\\"; we already wrote a file to that path.") } else { throw GenerateException("Refusing to write \\"" + file.getPath + "\\"; we already wrote a file to a path that is the same when lower-cased: \\"" + existing + "\\".") } case _ => } val fout = new FileOutputStream(file) try { val out = new OutputStreamWriter(fout, "UTF-8") f(makeWriter(out)) out.flush() } finally { fout.close() } } 
protected def createFile(folder: File, fileName: String, f: IndentWriter => Unit): Unit = createFile(folder, fileName, out => new IndentWriter(out), f) implicit def identToString(ident: Ident): String = ident.name val idCpp = spec.cppIdentStyle val idJava = spec.javaIdentStyle val idObjc = spec.objcIdentStyle def wrapNamespace(w: IndentWriter, ns: String, f: IndentWriter => Unit) { ns match { case "" => f(w) case s => val parts = s.split("::") w.wl(parts.map("namespace "+_+" {").mkString(" ")).wl f(w) w.wl w.wl(parts.map(p => "}").mkString(" ") + s" // namespace $s") } } def wrapAnonymousNamespace(w: IndentWriter, f: IndentWriter => Unit) { w.wl("namespace { // anonymous namespace") w.wl f(w) w.wl w.wl("} // end anonymous namespace") } def writeHppFileGeneric(folder: File, namespace: String, fileIdentStyle: IdentConverter)(name: String, origin: String, includes: Iterable[String], fwds: Iterable[String], f: IndentWriter => Unit, f2: IndentWriter => Unit) { createFile(folder, fileIdentStyle(name) + "." + spec.cppHeaderExt, (w: IndentWriter) => { w.wl("// AUTOGENERATED FILE - DO NOT MODIFY!") w.wl("// This file generated by Djinni from " + origin) w.wl w.wl("#pragma once") if (includes.nonEmpty) { w.wl includes.foreach(w.wl) } w.wl wrapNamespace(w, namespace, (w: IndentWriter) => { if (fwds.nonEmpty) { fwds.foreach(w.wl) w.wl } f(w) } ) f2(w) }) } def writeCppFileGeneric(folder: File, namespace: String, fileIdentStyle: IdentConverter, includePrefix: String)(name: String, origin: String, includes: Iterable[String], f: IndentWriter => Unit) { createFile(folder, fileIdentStyle(name) + "." + spec.cppExt, (w: IndentWriter) => { w.wl("// AUTOGENERATED FILE - DO NOT MODIFY!") w.wl("// This file generated by Djinni from " + origin) w.wl val myHeader = q(includePrefix + fileIdentStyle(name) + "." 
+ spec.cppHeaderExt) w.wl(s"#include $myHeader // my header") includes.foreach(w.wl(_)) w.wl wrapNamespace(w, namespace, f) }) } def generate(idl: Seq[TypeDecl]) { for (td <- idl.collect { case itd: InternTypeDecl => itd }) td.body match { case e: Enum => assert(td.params.isEmpty) generateEnum(td.origin, td.ident, td.doc, e) case r: Record => generateRecord(td.origin, td.ident, td.doc, td.params, r) case i: Interface => generateInterface(td.origin, td.ident, td.doc, td.params, i) } } def generateEnum(origin: String, ident: Ident, doc: Doc, e: Enum) def generateRecord(origin: String, ident: Ident, doc: Doc, params: Seq[TypeParam], r: Record) def generateInterface(origin: String, ident: Ident, doc: Doc, typeParams: Seq[TypeParam], i: Interface) // -------------------------------------------------------------------------- // Render type expression def withNs(namespace: Option[String], t: String) = namespace match { case None => t case Some("") => "::" + t case Some(s) => "::" + s + "::" + t } def writeAlignedCall(w: IndentWriter, call: String, params: Seq[Field], delim: String, end: String, f: Field => String): IndentWriter = { w.w(call) val skipFirst = new SkipFirst params.foreach(p => { skipFirst { w.wl(delim); w.w(" " * call.length()) } w.w(f(p)) }) w.w(end) } def writeAlignedCall(w: IndentWriter, call: String, params: Seq[Field], end: String, f: Field => String): IndentWriter = writeAlignedCall(w, call, params, ",", end, f) def writeAlignedObjcCall(w: IndentWriter, call: String, params: Seq[Field], end: String, f: Field => (String, String)) = { w.w(call) val skipFirst = new SkipFirst params.foreach(p => { val (name, value) = f(p) skipFirst { w.wl; w.w(" " * math.max(0, call.length() - name.length)); w.w(name) } w.w(":" + value) }) w.w(end) } // -------------------------------------------------------------------------- def writeDoc(w: IndentWriter, doc: Doc) { doc.lines.length match { case 0 => case 1 => w.wl(s"/**${doc.lines.head} */") case _ => w.wl("/**") doc.lines.foreach (l => w.wl(s" *$l")) w.wl(" */") } } }
aijiekj/djinni
src/source/generator.scala
Scala
apache-2.0
13,390
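The IdentStyle helpers in the generator file above are pure string converters, which makes them easy to exercise in isolation. A small sketch of the conversions they perform; the expected outputs in the comments follow directly from the definitions above, and the identifiers are made up.

import djinni.generatorTools.IdentStyle

object IdentStyleExample extends App {
  // Each converter maps a djinni IDL identifier to a target-language spelling.
  println(IdentStyle.camelUpper("list_map_helper"))  // ListMapHelper
  println(IdentStyle.camelLower("list_map_helper"))  // listMapHelper
  println(IdentStyle.underCaps("list_map_helper"))   // LIST_MAP_HELPER

  // infer recognises a style from an example identifier and returns a converter,
  // keeping any literal prefix (the "DJ" prefix here is illustrative).
  val converter = IdentStyle.infer("DJFooBar")
  println(converter.map(_("my_record")))             // Some(DJMyRecord)
}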
package com.datastax.spark.connector.demo import com.datastax.spark.connector.cql.CassandraConnector object BasicReadWriteDemo extends DemoApp { CassandraConnector(conf).withSessionDo { session => session.execute("CREATE KEYSPACE IF NOT EXISTS test WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor': 1 }") session.execute("CREATE TABLE IF NOT EXISTS test.key_value (key INT PRIMARY KEY, value VARCHAR)") session.execute("TRUNCATE test.key_value") session.execute("INSERT INTO test.key_value(key, value) VALUES (1, 'first row')") session.execute("INSERT INTO test.key_value(key, value) VALUES (2, 'second row')") session.execute("INSERT INTO test.key_value(key, value) VALUES (3, 'third row')") } import com.datastax.spark.connector._ // Read table test.kv and print its contents: val rdd = sc.cassandraTable("test", "key_value").select("key", "value") rdd.collect().foreach(row => log.info(s"Existing Data: $row")) // Write two new rows to the test.kv table: val col = sc.parallelize(Seq((4, "fourth row"), (5, "fifth row"))) col.saveToCassandra("test", "key_value", SomeColumns("key", "value")) // Assert the two new rows were stored in test.kv table: assert(col.collect().length == 2) col.collect().foreach(row => log.info(s"New Data: $row")) log.info(s"Work completed, stopping the Spark context.") sc.stop() }
Stratio/spark-cassandra-connector
spark-cassandra-connector-demos/simple-demos/src/main/scala/com/datastax/spark/connector/demo/BasicReadWriteDemo.scala
Scala
apache-2.0
1,392
/** * Copyright (C) 2012 Klout Inc. <http://www.klout.com> */ package com.klout.akkamemcached import akka.util.ByteString import akka.actor._ import com.google.common.hash.Hashing._ import Protocol._ import ActorTypes._ import akka.actor.IO._ /** * Object sent to the IoActor indicating that a multiget request is complete. */ object Finished /** * Object sent to the IoActor indicating that the connection to the client is alive. */ object Alive /** * Objects of this class parse the output from Memcached and return * the cache hits and misses to the IoActor that manages the connection */ class Iteratees(ioActor: ActorRef) { import Constants._ /** * Skip over whitespace */ private def notWhitespace(byte: Byte): Boolean = { !whitespaceBytes.contains(byte) } val readInput = { (IO takeWhile notWhitespace) flatMap { /** * Cache hit */ case Value => processValue /** * The cached values from a multiget have been returned */ case End => IO takeUntil CRLF map { _ => ioActor ! Finished None } /** * Confirms that the IoActor's connection to Memcached is alive */ case Version => { IO takeUntil CRLF map { _ => ioActor ! Alive None } } case Error => IO takeUntil CRLF map (_ => None) case other => IO takeUntil CRLF map (_ => None) } } /** * Processes a cache hit from Memcached * Each item sent by the server looks like this: * * VALUE <key> <flags> <bytes>\\r\\n * <data block>\\r\\n * */ val processValue = { for { whitespace <- IO takeUntil Space; key <- IO takeUntil Space; id <- IO takeUntil Space; length <- IO takeUntil CRLF map (ascii(_).toInt); value <- byteArray(length); newline <- IO takeUntil CRLF } yield { val found = Found(ascii(key), value) IO Done found } } /** * This iteratee generates a byte array result from Memcached. Because a byte * array is stored sequentially in memory, this result is easier to deserialize than * a ByteString */ def byteArray(length: Int): Iteratee[Array[Byte]] = { /** * Copies bytes from the input ByteString and returns an Iteratee with the * byte array of the result and the rest of the input or an iteratee with that * needs more bytes to generate the result */ def continue(array: Array[Byte], total: Int, current: Int)(input: Input): (Iteratee[Array[Byte]], Input) = { input match { case Chunk(byteString) => val bytes = byteString.toArray val numBytesToCopy = min(total - current, bytes.size) Array.copy(bytes, 0, array, current, numBytesToCopy) val chunk = if (numBytesToCopy == bytes.size) { Chunk.empty } else { Chunk(byteString drop numBytesToCopy) } if (total == current + numBytesToCopy) { (Done(array), chunk) } else { (Cont(continue(array, total, current + numBytesToCopy)), chunk) } case EOF(cause) => throw new Exception("EOF") case _ => throw new Exception("Iteratee error while processing value from Memcached") } } /** * Allocates a byte-array for the result and creates an iteratee using continue * that will read the bytes from the input */ Cont(continue(new Array(length), length, 0)) } /** * Consumes all of the input from the Iteratee and sends the results * to the appropriate IoActor. */ val processInput = { IO repeat { readInput map { case IO.Done(found) => { ioActor ! 
found } case _ => {} } } } private def min(a: Int, b: Int) = { if (a < b) a else b } } object Constants { val whitespace = List(' ', '\\r', '\\n', '\\t') val whitespaceBytes = whitespace map (_.toByte) val Error = ByteString("ERROR") val Space = ByteString(" ") val CRLF = ByteString("\\r\\n") val CRLFString = "\\r\\n" val Value = ByteString("VALUE") val End = ByteString("END") val Version = ByteString("VERSION") } object Protocol { import Constants._ /** * Generates a human-readable ASCII representation of a ByteString */ def ascii(bytes: ByteString): String = bytes.decodeString("US-ASCII").trim /** * This trait is for a command that the MemcachedClient will send to Memcached via an IoActor */ sealed trait Command { /** * Renders a ByteString that can be directly written to the connection * to a Memcached server */ def toByteString: ByteString } /** * This command instructs Memcached to set multiple key-value pairs with a given ttl */ case class SetCommand(keyValueMap: Map[String, ByteString], ttl: Long) extends Command { /** * Creates one memcached "set" instruction for each key-value pair, and concatenates the instructions * to be sent to the memcached server. * * A set instruction looks like: * set <key> <flags> <exptime> <bytes> [noreply]\\r\\n */ override def toByteString = { val instructions = keyValueMap map { case (key, value) => if (key.isEmpty) throw new RuntimeException("An empty string is not a valid key") if (!(key intersect whitespace).isEmpty) throw new RuntimeException("Keys cannot have whitespace") /* Single set instruction */ ByteString("set " + key + " 0 " + ttl + " " + value.size + " noreply") ++ CRLF ++ value ++ CRLF } /* Concatenated instructions */ instructions.foldLeft(ByteString())(_ ++ _) } } /** * This commands instructs Memcached to delete one or more keys */ case class DeleteCommand(keys: String*) extends Command { /** * Creates on memcached "delete" instruction for each key, and concatenates the instructions * to be sent to the memcached server. * * A delete instruction looks like: * delete <key> [noreply]\\r\\n */ override def toByteString = { val instructions = keys map { /* Single delete instruction */ "delete " + _ + " noreply" + CRLFString } /* Concatenated instructions */ ByteString(instructions mkString "") } } /** * This command instructs Memcached to get the value for one or more keys */ case class GetCommand(keys: Set[String]) extends Command { /** * Creates a single Memcached multiget instruction to get all of the keys * * A get instruction looks like: * get <key>*\\r\\n */ override def toByteString = { if (keys.size > 0) ByteString("get " + (keys mkString " ")) ++ CRLF else ByteString() } } /** * This command instructs Memcached to display it's version. */ case object VersionCommand { val byteString = ByteString("version\\r\\n") } /** * Stores the result of a Memcached Get */ sealed trait GetResult { def key: String } /** * Contains a set of GetResults. This case class is necessary to compensate * for JVM type erasure */ case class GetResults(results: Set[GetResult]) /** * Cache Hit */ case class Found(key: String, value: Array[Byte]) extends GetResult /** * Cache Miss */ case class NotFound(key: String) extends GetResult /** * Represents the possible responses from the actor. */ sealed trait Response /** * Contains the GetResults for each key. */ case class GetResponse(results: List[GetResult]) extends Response }
klout/akka-memcached
src/main/scala/Protocol.scala
Scala
apache-2.0
8,624
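An illustrative snippet, not taken from the akka-memcached sources above, showing how the Command classes defined in Protocol.scala render the memcached wire format; the keys, value, and ttl are placeholders.

import akka.util.ByteString
import com.klout.akkamemcached.Protocol._

// Multiget and set commands rendered to the bytes an IoActor would write to the socket.
val getBytes = GetCommand(Set("user:1", "user:2")).toByteString
val setBytes = SetCommand(Map("user:1" -> ByteString("payload")), ttl = 3600).toByteString
println(ascii(getBytes)) // roughly: get user:1 user:2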
package org.allenai.common.indexing import org.allenai.common.testkit.UnitSpec class BarronsDocumentReaderSpec extends UnitSpec { val sentences = (0 to 20).map("sentence " + _) val sampleLines = Seq( s"5.1.1.1.1\t${sentences(0)}", s"5.1.1.1.2\t${sentences(1)}", s"5.1.1.1.3\t${sentences(2)}", s"5.1.1.2.1\t${sentences(3)}", s"5.1.1.2.2\t${sentences(4)}", s"5.1.1.2.3\t${sentences(5)}", s"5.1.1.1.1.1\t${sentences(6)}", s"5.1.1.1.2.1\t${sentences(7)}", s"5.1.1.1.2.2\t${sentences(8)}", s"5.1.1.1.2.3\t${sentences(9)}", s"5.1.1.1.3.1\t${sentences(10)}", s"5.1.1.1.3.2\t${sentences(11)}" ) "read" should "get paragraphs out" in { val readDocument = new BarronsDocumentReader(null, "UTF-8")._readLines(sampleLines) val expectedDocument = new SegmentedDocument(sampleLines.mkString("\n"), Seq( NonTerminalSegment("paragraph", Seq( TerminalSegment("sentence", sentences(0)), TerminalSegment("sentence", sentences(1)), TerminalSegment("sentence", sentences(2)) )), NonTerminalSegment("paragraph", Seq( TerminalSegment("sentence", sentences(3)), TerminalSegment("sentence", sentences(4)), TerminalSegment("sentence", sentences(5)) )), NonTerminalSegment("paragraph", Seq( TerminalSegment("sentence", sentences(6)) )), NonTerminalSegment("paragraph", Seq( TerminalSegment("sentence", sentences(7)), TerminalSegment("sentence", sentences(8)), TerminalSegment("sentence", sentences(9)) )), NonTerminalSegment("paragraph", Seq( TerminalSegment("sentence", sentences(10)), TerminalSegment("sentence", sentences(11)) )) )) readDocument should be(expectedDocument) } }
jkinkead/common
indexing/src/test/scala/org/allenai/common/indexing/BarronsDocumentReaderSpec.scala
Scala
apache-2.0
1,789
package controllers import play.api.mvc.Controller import java.util.UUID import models.user.SkimboToken import services.endpoints.Endpoints import views.html.defaultpages.badRequest import services.commands.CmdFromUser import models.command.NewToken import services.actors.UserInfosActor import play.api.libs.concurrent.Execution.Implicits.defaultContext import scala.concurrent.Future import play.api.mvc.Action import play.api.Logger import play.api.libs.iteratee.Concurrent import play.api.libs.json.JsValue import services.commands.CmdToUser import services.actors.PingActor import play.api.libs.Comet import play.api.libs.EventSource import services.auth.ProviderDispatcher import play.api.mvc.Call import play.api.mvc.Cookie object Mobile extends Controller { def end() = Action { implicit request => val token = session.get("id").getOrElse("Error in id !") Ok(views.html.mobileEndAuthentication(token)).withCookies( Cookie("tokenSkimbo", token, httpOnly=false) ) } def authenticate(providerName: String) = Action { implicit request => ProviderDispatcher(providerName).map(provider => provider.auth(routes.Mobile.end).withCookies(Cookie("isMobile", "true"))) .getOrElse(BadRequest) } def connect(idUser: String) = Action { Logger("Mobile").info("MOBILE ==> " + idUser) val (out, channelClient) = Concurrent.broadcast[JsValue] CmdToUser.userConnected(idUser, channelClient).map { preferedChannel => UserInfosActor.create(idUser) PingActor.create(idUser, preferedChannel) services.dao.UserDao.updateLastUse(idUser) } Ok.stream(out &> EventSource()).as(play.api.http.ContentTypes.EVENT_STREAM) } def command(idUser: String) = Action { implicit request => import play.api.libs.concurrent.Execution.Implicits.defaultContext request.body.asJson.map(js => { println("####################") println(js) println("####################") CmdFromUser.interpret(idUser, js) }) PingActor.ping(idUser) Ok("ok") } }
Froggies/Skimbo
app/controllers/Mobile.scala
Scala
agpl-3.0
2,057
package scalacookbook.chapter03 import java.io.{IOException, FileNotFoundException} /** * Created by liguodong on 2016/6/28. */ object MatchOneOrMoreException extends App{ val s = "Foo" //Catch a single exception try { val i = s.toInt } catch { case e: Exception => e.printStackTrace } def openAndReadAFile(filename:String): Unit ={ println("this is a test method...") } //Catch multiple exceptions try { openAndReadAFile("test") } catch { case e: FileNotFoundException => println("Couldn't find that file.") case e: IOException => println("Had an IOException trying to read that file") } //Discussion //Catch all exceptions when you don't care about the specific type. try { openAndReadAFile("foo") } catch { case t: Throwable => t.printStackTrace() } //Catch all exceptions and ignore them. //You can also catch them all and ignore them like this: try { val i = s.toInt } catch { case _: Throwable => println("exception ignored") } //As with Java, you can throw an exception from a catch clause, but because Scala doesn’t //have checked exceptions, you don’t need to specify that a method throws the exception. //This is demonstrated in the following example, where the method isn’t annotated in any way // nothing required here def toInt(s: String): Option[Int] = try { Some(s.toInt) } catch { case e: Exception => throw e } //If you prefer to declare the exceptions that your method throws, // or you need to interact with Java, // add the @throws annotation to your method definition @throws(classOf[NumberFormatException]) def toInt2(s: String): Option[Int] = try { Some(s.toInt) } catch { case e: NumberFormatException => throw e } }
liguodongIOT/java-scala-mix-sbt
src/main/scala/scalacookbook/chapter03/MatchOneOrMoreException.scala
Scala
apache-2.0
1,759
/* * Copyright (c) 2017 Andrzej Jozwik * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package pl.jozwik.smtp.util object Constants { val OPEN_BRACKET = '<' val CLOSE_BRACKET = '>' val OPEN_BRACKET_STRING = '<'.toString val CLOSE_BRACKET_STRING = '>'.toString val SPACE = ' ' val delimiter = "\\n" val crLf = s"\\r$delimiter" val Subject = "Subject" val START_MAIL_INPUT = 354 val CLOSING_TERMINATION_CHANNEL = 221 val SERVICE_READY = 220 val REQUEST_COMPLETE = 250 val CANNOT_VRFY = 252 val SERVICE_NOT_AVAILABLE = 421 val REQUEST_ACTION_ABORTED = 451 val INSUFFICIENT_SYSTEM_STORAGE = 452 val TLS_NOT_SUPPORTED = 454 val COMMAND_NOT_IMPLEMENTED = 500 val SYNTAX_ERROR = 501 val BAD_SEQUENCE_OF_COMMANDS = 503 val USER_UNKNOWN = 550 val SIZE_EXCEEDS_MAXIMUM = 552 val REQUEST_ACTION_NOT_ALLOWED = 553 val TRANSACTION_FAILED = 554 val PARAMETER_UNRECOGNIZED = 555 val NEED_HELLO = true val HELO = "HELO" val EHLO = "EHLO" val DATA = "DATA" val MAIL = "MAIL" val FROM = "FROM" val QUIT = "QUIT" val RCPT = "RCPT" val STARTTLS = "STARTTLS" val TO = "TO" val RSET = "RSET" val NOOP = "NOOP" val VRFY = "VRFY" val NOOP_OK = s"$REQUEST_COMPLETE 2.0.0 OK" val RESET_OK = s"$REQUEST_COMPLETE 2.0.0 Reset state" val SMTP_OK = s"$REQUEST_COMPLETE OK" val MAIL_FROM = s"$MAIL $FROM" val RCPT_TO = s"$RCPT $TO" val OK_8_BIT = s"$REQUEST_COMPLETE-8BITMIME" val OK_SIZE = s"$REQUEST_COMPLETE-SIZE" val OK_PIPELINE = s"$SMTP_OK PIPELINE" val END_DATA = "." val FOUR = HELO.length val maximumFrameLength = 1024 }
ajozwik/akka-smtp-server
smtp-util/src/main/scala/pl/jozwik/smtp/util/Constants.scala
Scala
mit
2,629
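For illustration only (not part of the smtp-util sources above), a few protocol strings assembled from those constants; the host name and address are placeholders.

import pl.jozwik.smtp.util.Constants._

val address  = "sender@example.com"                      // placeholder
val greeting = s"$SERVICE_READY smtp.example.com ESMTP"  // "220 smtp.example.com ESMTP"
val mailFrom = s"$MAIL_FROM:$OPEN_BRACKET_STRING$address$CLOSE_BRACKET_STRING" // "MAIL FROM:<sender@example.com>"
val accepted = SMTP_OK                                   // "250 OK"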
/* * TreeModel.scala * (TreeTable) * * Copyright (c) 2013-2020 Hanns Holger Rutz. All rights reserved. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * * * For further information, please contact Hanns Holger Rutz at * contact@sciss.de */ package de.sciss.treetable import scala.swing.Publisher //object TreeModel { // def wrap[A](peer: jtree.TreeModel): TreeModel[A] = { // val _peer = peer // new TreeModel[A] { // val peer = _peer // } // } //} trait TreeModel[A] extends Publisher { def root: A def getChildCount(parent: A): Int def getChild(parent: A, index: Int): A def isLeaf(node: A): Boolean // val peer: jtree.TreeModel def valueForPathChanged(path: TreeTable.Path[A], newValue: A): Unit def getIndexOfChild(parent: A, child: A): Int // final case class NodesChanged(parentPath: TreeTable.Path[A], children: (Int, A)*) extends Event }
Sciss/TreeTable
scala/src/main/scala/de/sciss/treetable/TreeModel.scala
Scala
lgpl-3.0
1,590
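A minimal sketch of how the TreeModel trait above might be implemented for an immutable case-class tree; the Node type and the class name are invented for the example, and valueForPathChanged is a no-op because this tree is not editable.

import de.sciss.treetable.{TreeModel, TreeTable}

case class Node(name: String, children: Vector[Node] = Vector.empty)

class NodeTreeModel(val root: Node) extends TreeModel[Node] {
  def getChildCount(parent: Node): Int = parent.children.size
  def getChild(parent: Node, index: Int): Node = parent.children(index)
  def isLeaf(node: Node): Boolean = node.children.isEmpty
  def getIndexOfChild(parent: Node, child: Node): Int = parent.children.indexOf(child)
  def valueForPathChanged(path: TreeTable.Path[Node], newValue: Node): Unit = () // immutable tree: ignore edits
}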
/* * Copyright 2010-2011 Vilius Normantas <code@norma.lt> * * This file is part of Crossbow library. * * Crossbow is free software: you can redistribute it and/or modify it under the terms of the GNU * General Public License as published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * Crossbow is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License along with Crossbow. If not, * see <http://www.gnu.org/licenses/>. */ package lt.norma.crossbow.indicators /** Stores first known value of the target indicator. */ class FirstValue[Value](target: Indicator[Value]) extends FunctionalIndicator[Value] { def name = "First value of " + target.name val firstValue = new Variable[Value] def dependencies = Set(firstValue, target) def calculate = { (firstValue(), target()) match { case (None, Some(t)) => firstValue.set(target()) case _ => } firstValue() } }
ViliusN/Crossbow
crossbow-core/src/lt/norma/crossbow/indicators/FirstValue.scala
Scala
gpl-3.0
1,211
trait ConstantOps { def exprs = ( 1 << 2L : Int, // was: error: type mismatch; found : Long(4L) 64 >> 2L : Int, // was: error: type mismatch; found : Long(4L) 64 >>> 2L : Int, // was: error: type mismatch; found : Long(4L) 'a' << 2L : Int, 'a' >> 2L : Int, 'a'>>> 2L : Int ) }
loskutov/intellij-scala
testdata/scalacTests/pos/t8462.scala
Scala
apache-2.0
318
package org.http4s package bench import java.util.concurrent.TimeUnit import org.openjdk.jmh.annotations._ @BenchmarkMode(Array(Mode.AverageTime)) @OutputTimeUnit(TimeUnit.MICROSECONDS) @State(Scope.Benchmark) class HttpHeadersBench { @Benchmark def apply(in: HeadersInput) = Headers(in.headerSeq.toList) @Benchmark def add(in: HeadersInput) = { var target: Headers = Headers.empty for (header <- in.headers) { target = target.put(header) } } @Benchmark def replace(in: HeadersInput) = in.headers.put(in.replacement) } @State(Scope.Thread) class HeadersInput { @Param(Array("2", "4", "8", "16", "32", "64")) var size: Int = _ var headerSeq: Seq[Header] = _ var headers: Headers = _ var replacement: Header = _ @Setup def setup(): Unit = { headerSeq = (0 until size).map { i => Header(s"X-Headers-Benchmark-$i", i.toString) } headers = Headers(headerSeq.toList) replacement = Header(s"X-Headers-Benchmark-${headers.size / 2}", "replacement") } }
ChristopherDavenport/http4s
bench/src/main/scala/org/http4s/bench/HttpHeadersBench.scala
Scala
apache-2.0
1,030
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.aggregate import java.util.concurrent.TimeUnit._ import scala.collection.mutable import org.apache.spark.TaskContext import org.apache.spark.memory.{SparkOutOfMemoryError, TaskMemoryManager} import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.errors._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.BindReferences.bindReferences import org.apache.spark.sql.catalyst.expressions.aggregate._ import org.apache.spark.sql.catalyst.expressions.codegen._ import org.apache.spark.sql.catalyst.expressions.codegen.Block._ import org.apache.spark.sql.catalyst.util.DateTimeConstants.NANOS_PER_MILLIS import org.apache.spark.sql.catalyst.util.truncatedString import org.apache.spark.sql.execution._ import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics} import org.apache.spark.sql.execution.vectorized.MutableColumnarRow import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types.{CalendarIntervalType, DecimalType, StringType, StructType} import org.apache.spark.unsafe.KVIterator import org.apache.spark.util.Utils /** * Hash-based aggregate operator that can also fallback to sorting when data exceeds memory size. */ case class HashAggregateExec( requiredChildDistributionExpressions: Option[Seq[Expression]], groupingExpressions: Seq[NamedExpression], aggregateExpressions: Seq[AggregateExpression], aggregateAttributes: Seq[Attribute], initialInputBufferOffset: Int, resultExpressions: Seq[NamedExpression], child: SparkPlan) extends BaseAggregateExec with BlockingOperatorWithCodegen { require(HashAggregateExec.supportsAggregate(aggregateBufferAttributes)) override lazy val allAttributes: AttributeSeq = child.output ++ aggregateBufferAttributes ++ aggregateAttributes ++ aggregateExpressions.flatMap(_.aggregateFunction.inputAggBufferAttributes) override lazy val metrics = Map( "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"), "peakMemory" -> SQLMetrics.createSizeMetric(sparkContext, "peak memory"), "spillSize" -> SQLMetrics.createSizeMetric(sparkContext, "spill size"), "aggTime" -> SQLMetrics.createTimingMetric(sparkContext, "time in aggregation build"), "avgHashProbe" -> SQLMetrics.createAverageMetric(sparkContext, "avg hash probe bucket list iters")) // This is for testing. We force TungstenAggregationIterator to fall back to the unsafe row hash // map and/or the sort-based aggregation once it has processed a given number of input rows. 
private val testFallbackStartsAt: Option[(Int, Int)] = { sqlContext.getConf("spark.sql.TungstenAggregate.testFallbackStartsAt", null) match { case null | "" => None case fallbackStartsAt => val splits = fallbackStartsAt.split(",").map(_.trim) Some((splits.head.toInt, splits.last.toInt)) } } protected override def doExecute(): RDD[InternalRow] = attachTree(this, "execute") { val numOutputRows = longMetric("numOutputRows") val peakMemory = longMetric("peakMemory") val spillSize = longMetric("spillSize") val avgHashProbe = longMetric("avgHashProbe") val aggTime = longMetric("aggTime") child.execute().mapPartitionsWithIndex { (partIndex, iter) => val beforeAgg = System.nanoTime() val hasInput = iter.hasNext val res = if (!hasInput && groupingExpressions.nonEmpty) { // This is a grouped aggregate and the input iterator is empty, // so return an empty iterator. Iterator.empty } else { val aggregationIterator = new TungstenAggregationIterator( partIndex, groupingExpressions, aggregateExpressions, aggregateAttributes, initialInputBufferOffset, resultExpressions, (expressions, inputSchema) => MutableProjection.create(expressions, inputSchema), inputAttributes, iter, testFallbackStartsAt, numOutputRows, peakMemory, spillSize, avgHashProbe) if (!hasInput && groupingExpressions.isEmpty) { numOutputRows += 1 Iterator.single[UnsafeRow](aggregationIterator.outputForEmptyGroupingKeyWithoutInput()) } else { aggregationIterator } } aggTime += NANOSECONDS.toMillis(System.nanoTime() - beforeAgg) res } } // all the mode of aggregate expressions private val modes = aggregateExpressions.map(_.mode).distinct override def usedInputs: AttributeSet = inputSet override def supportCodegen: Boolean = { // ImperativeAggregate and filter predicate are not supported right now // TODO: SPARK-30027 Support codegen for filter exprs in HashAggregateExec !(aggregateExpressions.exists(_.aggregateFunction.isInstanceOf[ImperativeAggregate]) || aggregateExpressions.exists(_.filter.isDefined)) } override def inputRDDs(): Seq[RDD[InternalRow]] = { child.asInstanceOf[CodegenSupport].inputRDDs() } protected override def doProduce(ctx: CodegenContext): String = { if (groupingExpressions.isEmpty) { doProduceWithoutKeys(ctx) } else { doProduceWithKeys(ctx) } } override def doConsume(ctx: CodegenContext, input: Seq[ExprCode], row: ExprCode): String = { if (groupingExpressions.isEmpty) { doConsumeWithoutKeys(ctx, input) } else { doConsumeWithKeys(ctx, input) } } // The variables are used as aggregation buffers and each aggregate function has one or more // ExprCode to initialize its buffer slots. Only used for aggregation without keys. private var bufVars: Seq[Seq[ExprCode]] = _ private def doProduceWithoutKeys(ctx: CodegenContext): String = { val initAgg = ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "initAgg") // The generated function doesn't have input row in the code context. 
ctx.INPUT_ROW = null // generate variables for aggregation buffer val functions = aggregateExpressions.map(_.aggregateFunction.asInstanceOf[DeclarativeAggregate]) val initExpr = functions.map(f => f.initialValues) bufVars = initExpr.map { exprs => exprs.map { e => val isNull = ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "bufIsNull") val value = ctx.addMutableState(CodeGenerator.javaType(e.dataType), "bufValue") // The initial expression should not access any column val ev = e.genCode(ctx) val initVars = code""" |$isNull = ${ev.isNull}; |$value = ${ev.value}; """.stripMargin ExprCode( ev.code + initVars, JavaCode.isNullGlobal(isNull), JavaCode.global(value, e.dataType)) } } val flatBufVars = bufVars.flatten val initBufVar = evaluateVariables(flatBufVars) // generate variables for output val (resultVars, genResult) = if (modes.contains(Final) || modes.contains(Complete)) { // evaluate aggregate results ctx.currentVars = flatBufVars val aggResults = bindReferences( functions.map(_.evaluateExpression), aggregateBufferAttributes).map(_.genCode(ctx)) val evaluateAggResults = evaluateVariables(aggResults) // evaluate result expressions ctx.currentVars = aggResults val resultVars = bindReferences(resultExpressions, aggregateAttributes).map(_.genCode(ctx)) (resultVars, s""" |$evaluateAggResults |${evaluateVariables(resultVars)} """.stripMargin) } else if (modes.contains(Partial) || modes.contains(PartialMerge)) { // output the aggregate buffer directly (flatBufVars, "") } else { // no aggregate function, the result should be literals val resultVars = resultExpressions.map(_.genCode(ctx)) (resultVars, evaluateVariables(resultVars)) } val doAgg = ctx.freshName("doAggregateWithoutKey") val doAggFuncName = ctx.addNewFunction(doAgg, s""" |private void $doAgg() throws java.io.IOException { | // initialize aggregation buffer | $initBufVar | | ${child.asInstanceOf[CodegenSupport].produce(ctx, this)} |} """.stripMargin) val numOutput = metricTerm(ctx, "numOutputRows") val aggTime = metricTerm(ctx, "aggTime") val beforeAgg = ctx.freshName("beforeAgg") s""" |while (!$initAgg) { | $initAgg = true; | long $beforeAgg = System.nanoTime(); | $doAggFuncName(); | $aggTime.add((System.nanoTime() - $beforeAgg) / $NANOS_PER_MILLIS); | | // output the result | ${genResult.trim} | | $numOutput.add(1); | ${consume(ctx, resultVars).trim} |} """.stripMargin } // Splits aggregate code into small functions because the most of JVM implementations // can not compile too long functions. Returns None if we are not able to split the given code. // // Note: The difference from `CodeGenerator.splitExpressions` is that we define an individual // function for each aggregation function (e.g., SUM and AVG). For example, in a query // `SELECT SUM(a), AVG(a) FROM VALUES(1) t(a)`, we define two functions // for `SUM(a)` and `AVG(a)`. private def splitAggregateExpressions( ctx: CodegenContext, aggNames: Seq[String], aggBufferUpdatingExprs: Seq[Seq[Expression]], aggCodeBlocks: Seq[Block], subExprs: Map[Expression, SubExprEliminationState]): Option[String] = { val exprValsInSubExprs = subExprs.flatMap { case (_, s) => s.value :: s.isNull :: Nil } if (exprValsInSubExprs.exists(_.isInstanceOf[SimpleExprValue])) { // `SimpleExprValue`s cannot be used as an input variable for split functions, so // we give up splitting functions if it exists in `subExprs`. 
None } else { val inputVars = aggBufferUpdatingExprs.map { aggExprsForOneFunc => val inputVarsForOneFunc = aggExprsForOneFunc.map( CodeGenerator.getLocalInputVariableValues(ctx, _, subExprs)).reduce(_ ++ _).toSeq val paramLength = CodeGenerator.calculateParamLengthFromExprValues(inputVarsForOneFunc) // Checks if a parameter length for the `aggExprsForOneFunc` does not go over the JVM limit if (CodeGenerator.isValidParamLength(paramLength)) { Some(inputVarsForOneFunc) } else { None } } // Checks if all the aggregate code can be split into pieces. // If the parameter length of at lease one `aggExprsForOneFunc` goes over the limit, // we totally give up splitting aggregate code. if (inputVars.forall(_.isDefined)) { val splitCodes = inputVars.flatten.zipWithIndex.map { case (args, i) => val doAggFunc = ctx.freshName(s"doAggregate_${aggNames(i)}") val argList = args.map { v => s"${CodeGenerator.typeName(v.javaType)} ${v.variableName}" }.mkString(", ") val doAggFuncName = ctx.addNewFunction(doAggFunc, s""" |private void $doAggFunc($argList) throws java.io.IOException { | ${aggCodeBlocks(i)} |} """.stripMargin) val inputVariables = args.map(_.variableName).mkString(", ") s"$doAggFuncName($inputVariables);" } Some(splitCodes.mkString("\n").trim) } else { val errMsg = "Failed to split aggregate code into small functions because the parameter " + "length of at least one split function went over the JVM limit: " + CodeGenerator.MAX_JVM_METHOD_PARAMS_LENGTH if (Utils.isTesting) { throw new IllegalStateException(errMsg) } else { logInfo(errMsg) None } } } } private def doConsumeWithoutKeys(ctx: CodegenContext, input: Seq[ExprCode]): String = { // only have DeclarativeAggregate val functions = aggregateExpressions.map(_.aggregateFunction.asInstanceOf[DeclarativeAggregate]) val inputAttrs = functions.flatMap(_.aggBufferAttributes) ++ inputAttributes // To individually generate code for each aggregate function, an element in `updateExprs` holds // all the expressions for the buffer of an aggregation function. val updateExprs = aggregateExpressions.map { e => e.mode match { case Partial | Complete => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].updateExpressions case PartialMerge | Final => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].mergeExpressions } } ctx.currentVars = bufVars.flatten ++ input val boundUpdateExprs = updateExprs.map { updateExprsForOneFunc => bindReferences(updateExprsForOneFunc, inputAttrs) } val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExprs.flatten) val effectiveCodes = subExprs.codes.mkString("\n") val bufferEvals = boundUpdateExprs.map { boundUpdateExprsForOneFunc => ctx.withSubExprEliminationExprs(subExprs.states) { boundUpdateExprsForOneFunc.map(_.genCode(ctx)) } } val aggNames = functions.map(_.prettyName) val aggCodeBlocks = bufferEvals.zipWithIndex.map { case (bufferEvalsForOneFunc, i) => val bufVarsForOneFunc = bufVars(i) // All the update code for aggregation buffers should be placed in the end // of each aggregation function code. 
val updates = bufferEvalsForOneFunc.zip(bufVarsForOneFunc).map { case (ev, bufVar) => s""" |${bufVar.isNull} = ${ev.isNull}; |${bufVar.value} = ${ev.value}; """.stripMargin } code""" |${ctx.registerComment(s"do aggregate for ${aggNames(i)}")} |${ctx.registerComment("evaluate aggregate function")} |${evaluateVariables(bufferEvalsForOneFunc)} |${ctx.registerComment("update aggregation buffers")} |${updates.mkString("\n").trim} """.stripMargin } val codeToEvalAggFunc = if (conf.codegenSplitAggregateFunc && aggCodeBlocks.map(_.length).sum > conf.methodSplitThreshold) { val maybeSplitCode = splitAggregateExpressions( ctx, aggNames, boundUpdateExprs, aggCodeBlocks, subExprs.states) maybeSplitCode.getOrElse { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } } else { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } s""" |// do aggregate |// common sub-expressions |$effectiveCodes |// evaluate aggregate functions and update aggregation buffers |$codeToEvalAggFunc """.stripMargin } private val groupingAttributes = groupingExpressions.map(_.toAttribute) private val groupingKeySchema = StructType.fromAttributes(groupingAttributes) private val declFunctions = aggregateExpressions.map(_.aggregateFunction) .filter(_.isInstanceOf[DeclarativeAggregate]) .map(_.asInstanceOf[DeclarativeAggregate]) private val bufferSchema = StructType.fromAttributes(aggregateBufferAttributes) // The name for Fast HashMap private var fastHashMapTerm: String = _ private var isFastHashMapEnabled: Boolean = false // whether a vectorized hashmap is used instead // we have decided to always use the row-based hashmap, // but the vectorized hashmap can still be switched on for testing and benchmarking purposes. private var isVectorizedHashMapEnabled: Boolean = false // The name for UnsafeRow HashMap private var hashMapTerm: String = _ private var sorterTerm: String = _ /** * This is called by generated Java class, should be public. */ def createHashMap(): UnsafeFixedWidthAggregationMap = { // create initialized aggregate buffer val initExpr = declFunctions.flatMap(f => f.initialValues) val initialBuffer = UnsafeProjection.create(initExpr)(EmptyRow) // create hashMap new UnsafeFixedWidthAggregationMap( initialBuffer, bufferSchema, groupingKeySchema, TaskContext.get(), 1024 * 16, // initial capacity TaskContext.get().taskMemoryManager().pageSizeBytes ) } def getTaskMemoryManager(): TaskMemoryManager = { TaskContext.get().taskMemoryManager() } def getEmptyAggregationBuffer(): InternalRow = { val initExpr = declFunctions.flatMap(f => f.initialValues) val initialBuffer = UnsafeProjection.create(initExpr)(EmptyRow) initialBuffer } /** * This is called by generated Java class, should be public. */ def createUnsafeJoiner(): UnsafeRowJoiner = { GenerateUnsafeRowJoiner.create(groupingKeySchema, bufferSchema) } /** * Called by generated Java class to finish the aggregate and return a KVIterator. 
*/ def finishAggregate( hashMap: UnsafeFixedWidthAggregationMap, sorter: UnsafeKVExternalSorter, peakMemory: SQLMetric, spillSize: SQLMetric, avgHashProbe: SQLMetric): KVIterator[UnsafeRow, UnsafeRow] = { // update peak execution memory val mapMemory = hashMap.getPeakMemoryUsedBytes val sorterMemory = Option(sorter).map(_.getPeakMemoryUsedBytes).getOrElse(0L) val maxMemory = Math.max(mapMemory, sorterMemory) val metrics = TaskContext.get().taskMetrics() peakMemory.add(maxMemory) metrics.incPeakExecutionMemory(maxMemory) // Update average hashmap probe avgHashProbe.set(hashMap.getAvgHashProbeBucketListIterations) if (sorter == null) { // not spilled return hashMap.iterator() } // merge the final hashMap into sorter sorter.merge(hashMap.destructAndCreateExternalSorter()) hashMap.free() val sortedIter = sorter.sortedIterator() // Create a KVIterator based on the sorted iterator. new KVIterator[UnsafeRow, UnsafeRow] { // Create a MutableProjection to merge the rows of same key together val mergeExpr = declFunctions.flatMap(_.mergeExpressions) val mergeProjection = MutableProjection.create( mergeExpr, aggregateBufferAttributes ++ declFunctions.flatMap(_.inputAggBufferAttributes)) val joinedRow = new JoinedRow() var currentKey: UnsafeRow = null var currentRow: UnsafeRow = null var nextKey: UnsafeRow = if (sortedIter.next()) { sortedIter.getKey } else { null } override def next(): Boolean = { if (nextKey != null) { currentKey = nextKey.copy() currentRow = sortedIter.getValue.copy() nextKey = null // use the first row as aggregate buffer mergeProjection.target(currentRow) // merge the following rows with same key together var findNextGroup = false while (!findNextGroup && sortedIter.next()) { val key = sortedIter.getKey if (currentKey.equals(key)) { mergeProjection(joinedRow(currentRow, sortedIter.getValue)) } else { // We find a new group. findNextGroup = true nextKey = key } } true } else { spillSize.add(sorter.getSpillSize) false } } override def getKey: UnsafeRow = currentKey override def getValue: UnsafeRow = currentRow override def close(): Unit = { sortedIter.close() } } } /** * Generate the code for output. * @return function name for the result code. 
*/ private def generateResultFunction(ctx: CodegenContext): String = { val funcName = ctx.freshName("doAggregateWithKeysOutput") val keyTerm = ctx.freshName("keyTerm") val bufferTerm = ctx.freshName("bufferTerm") val numOutput = metricTerm(ctx, "numOutputRows") val body = if (modes.contains(Final) || modes.contains(Complete)) { // generate output using resultExpressions ctx.currentVars = null ctx.INPUT_ROW = keyTerm val keyVars = groupingExpressions.zipWithIndex.map { case (e, i) => BoundReference(i, e.dataType, e.nullable).genCode(ctx) } val evaluateKeyVars = evaluateVariables(keyVars) ctx.INPUT_ROW = bufferTerm val bufferVars = aggregateBufferAttributes.zipWithIndex.map { case (e, i) => BoundReference(i, e.dataType, e.nullable).genCode(ctx) } val evaluateBufferVars = evaluateVariables(bufferVars) // evaluate the aggregation result ctx.currentVars = bufferVars val aggResults = bindReferences( declFunctions.map(_.evaluateExpression), aggregateBufferAttributes).map(_.genCode(ctx)) val evaluateAggResults = evaluateVariables(aggResults) // generate the final result ctx.currentVars = keyVars ++ aggResults val inputAttrs = groupingAttributes ++ aggregateAttributes val resultVars = bindReferences[Expression]( resultExpressions, inputAttrs).map(_.genCode(ctx)) val evaluateNondeterministicResults = evaluateNondeterministicVariables(output, resultVars, resultExpressions) s""" |$evaluateKeyVars |$evaluateBufferVars |$evaluateAggResults |$evaluateNondeterministicResults |${consume(ctx, resultVars)} """.stripMargin } else if (modes.contains(Partial) || modes.contains(PartialMerge)) { // resultExpressions are Attributes of groupingExpressions and aggregateBufferAttributes. assert(resultExpressions.forall(_.isInstanceOf[Attribute])) assert(resultExpressions.length == groupingExpressions.length + aggregateBufferAttributes.length) ctx.currentVars = null ctx.INPUT_ROW = keyTerm val keyVars = groupingExpressions.zipWithIndex.map { case (e, i) => BoundReference(i, e.dataType, e.nullable).genCode(ctx) } val evaluateKeyVars = evaluateVariables(keyVars) ctx.INPUT_ROW = bufferTerm val resultBufferVars = aggregateBufferAttributes.zipWithIndex.map { case (e, i) => BoundReference(i, e.dataType, e.nullable).genCode(ctx) } val evaluateResultBufferVars = evaluateVariables(resultBufferVars) ctx.currentVars = keyVars ++ resultBufferVars val inputAttrs = resultExpressions.map(_.toAttribute) val resultVars = bindReferences[Expression]( resultExpressions, inputAttrs).map(_.genCode(ctx)) s""" |$evaluateKeyVars |$evaluateResultBufferVars |${consume(ctx, resultVars)} """.stripMargin } else { // generate result based on grouping key ctx.INPUT_ROW = keyTerm ctx.currentVars = null val resultVars = bindReferences[Expression]( resultExpressions, groupingAttributes).map(_.genCode(ctx)) val evaluateNondeterministicResults = evaluateNondeterministicVariables(output, resultVars, resultExpressions) s""" |$evaluateNondeterministicResults |${consume(ctx, resultVars)} """.stripMargin } ctx.addNewFunction(funcName, s""" |private void $funcName(UnsafeRow $keyTerm, UnsafeRow $bufferTerm) | throws java.io.IOException { | $numOutput.add(1); | $body |} """.stripMargin) } /** * A required check for any fast hash map implementation (basically the common requirements * for row-based and vectorized). * Currently fast hash map is supported for primitive data types during partial aggregation. * This list of supported use-cases should be expanded over time. 
*/ private def checkIfFastHashMapSupported(ctx: CodegenContext): Boolean = { val isSupported = (groupingKeySchema ++ bufferSchema).forall(f => CodeGenerator.isPrimitiveType(f.dataType) || f.dataType.isInstanceOf[DecimalType] || f.dataType.isInstanceOf[StringType] || f.dataType.isInstanceOf[CalendarIntervalType]) && bufferSchema.nonEmpty && modes.forall(mode => mode == Partial || mode == PartialMerge) // For vectorized hash map, We do not support byte array based decimal type for aggregate values // as ColumnVector.putDecimal for high-precision decimals doesn't currently support in-place // updates. Due to this, appending the byte array in the vectorized hash map can turn out to be // quite inefficient and can potentially OOM the executor. // For row-based hash map, while decimal update is supported in UnsafeRow, we will just act // conservative here, due to lack of testing and benchmarking. val isNotByteArrayDecimalType = bufferSchema.map(_.dataType).filter(_.isInstanceOf[DecimalType]) .forall(!DecimalType.isByteArrayDecimalType(_)) isSupported && isNotByteArrayDecimalType } private def enableTwoLevelHashMap(ctx: CodegenContext): Unit = { if (!checkIfFastHashMapSupported(ctx)) { if (modes.forall(mode => mode == Partial || mode == PartialMerge) && !Utils.isTesting) { logInfo(s"${SQLConf.ENABLE_TWOLEVEL_AGG_MAP.key} is set to true, but" + " current version of codegened fast hashmap does not support this aggregate.") } } else { isFastHashMapEnabled = true // This is for testing/benchmarking only. // We enforce to first level to be a vectorized hashmap, instead of the default row-based one. isVectorizedHashMapEnabled = sqlContext.conf.enableVectorizedHashMap } } private def doProduceWithKeys(ctx: CodegenContext): String = { val initAgg = ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "initAgg") if (sqlContext.conf.enableTwoLevelAggMap) { enableTwoLevelHashMap(ctx) } else if (sqlContext.conf.enableVectorizedHashMap) { logWarning("Two level hashmap is disabled but vectorized hashmap is enabled.") } val bitMaxCapacity = sqlContext.conf.fastHashAggregateRowMaxCapacityBit val thisPlan = ctx.addReferenceObj("plan", this) // Create a name for the iterator from the fast hash map, and the code to create fast hash map. val (iterTermForFastHashMap, createFastHashMap) = if (isFastHashMapEnabled) { // Generates the fast hash map class and creates the fast hash map term. 
val fastHashMapClassName = ctx.freshName("FastHashMap") if (isVectorizedHashMapEnabled) { val generatedMap = new VectorizedHashMapGenerator(ctx, aggregateExpressions, fastHashMapClassName, groupingKeySchema, bufferSchema, bitMaxCapacity).generate() ctx.addInnerClass(generatedMap) // Inline mutable state since not many aggregation operations in a task fastHashMapTerm = ctx.addMutableState( fastHashMapClassName, "vectorizedFastHashMap", forceInline = true) val iter = ctx.addMutableState( "java.util.Iterator<InternalRow>", "vectorizedFastHashMapIter", forceInline = true) val create = s"$fastHashMapTerm = new $fastHashMapClassName();" (iter, create) } else { val generatedMap = new RowBasedHashMapGenerator(ctx, aggregateExpressions, fastHashMapClassName, groupingKeySchema, bufferSchema, bitMaxCapacity).generate() ctx.addInnerClass(generatedMap) // Inline mutable state since not many aggregation operations in a task fastHashMapTerm = ctx.addMutableState( fastHashMapClassName, "fastHashMap", forceInline = true) val iter = ctx.addMutableState( "org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow>", "fastHashMapIter", forceInline = true) val create = s"$fastHashMapTerm = new $fastHashMapClassName(" + s"$thisPlan.getTaskMemoryManager(), $thisPlan.getEmptyAggregationBuffer());" (iter, create) } } else ("", "") // Create a name for the iterator from the regular hash map. // Inline mutable state since not many aggregation operations in a task val iterTerm = ctx.addMutableState(classOf[KVIterator[UnsafeRow, UnsafeRow]].getName, "mapIter", forceInline = true) // create hashMap val hashMapClassName = classOf[UnsafeFixedWidthAggregationMap].getName hashMapTerm = ctx.addMutableState(hashMapClassName, "hashMap", forceInline = true) sorterTerm = ctx.addMutableState(classOf[UnsafeKVExternalSorter].getName, "sorter", forceInline = true) val doAgg = ctx.freshName("doAggregateWithKeys") val peakMemory = metricTerm(ctx, "peakMemory") val spillSize = metricTerm(ctx, "spillSize") val avgHashProbe = metricTerm(ctx, "avgHashProbe") val finishRegularHashMap = s"$iterTerm = $thisPlan.finishAggregate(" + s"$hashMapTerm, $sorterTerm, $peakMemory, $spillSize, $avgHashProbe);" val finishHashMap = if (isFastHashMapEnabled) { s""" |$iterTermForFastHashMap = $fastHashMapTerm.rowIterator(); |$finishRegularHashMap """.stripMargin } else { finishRegularHashMap } val doAggFuncName = ctx.addNewFunction(doAgg, s""" |private void $doAgg() throws java.io.IOException { | ${child.asInstanceOf[CodegenSupport].produce(ctx, this)} | $finishHashMap |} """.stripMargin) // generate code for output val keyTerm = ctx.freshName("aggKey") val bufferTerm = ctx.freshName("aggBuffer") val outputFunc = generateResultFunction(ctx) def outputFromFastHashMap: String = { if (isFastHashMapEnabled) { if (isVectorizedHashMapEnabled) { outputFromVectorizedMap } else { outputFromRowBasedMap } } else "" } def outputFromRowBasedMap: String = { s""" |while ($iterTermForFastHashMap.next()) { | UnsafeRow $keyTerm = (UnsafeRow) $iterTermForFastHashMap.getKey(); | UnsafeRow $bufferTerm = (UnsafeRow) $iterTermForFastHashMap.getValue(); | $outputFunc($keyTerm, $bufferTerm); | | if (shouldStop()) return; |} |$fastHashMapTerm.close(); """.stripMargin } // Iterate over the aggregate rows and convert them from InternalRow to UnsafeRow def outputFromVectorizedMap: String = { val row = ctx.freshName("fastHashMapRow") ctx.currentVars = null ctx.INPUT_ROW = row val generateKeyRow = GenerateUnsafeProjection.createCode(ctx, groupingKeySchema.toAttributes.zipWithIndex .map { 
case (attr, i) => BoundReference(i, attr.dataType, attr.nullable) } ) val generateBufferRow = GenerateUnsafeProjection.createCode(ctx, bufferSchema.toAttributes.zipWithIndex.map { case (attr, i) => BoundReference(groupingKeySchema.length + i, attr.dataType, attr.nullable) }) s""" |while ($iterTermForFastHashMap.hasNext()) { | InternalRow $row = (InternalRow) $iterTermForFastHashMap.next(); | ${generateKeyRow.code} | ${generateBufferRow.code} | $outputFunc(${generateKeyRow.value}, ${generateBufferRow.value}); | | if (shouldStop()) return; |} | |$fastHashMapTerm.close(); """.stripMargin } def outputFromRegularHashMap: String = { s""" |while ($limitNotReachedCond $iterTerm.next()) { | UnsafeRow $keyTerm = (UnsafeRow) $iterTerm.getKey(); | UnsafeRow $bufferTerm = (UnsafeRow) $iterTerm.getValue(); | $outputFunc($keyTerm, $bufferTerm); | if (shouldStop()) return; |} |$iterTerm.close(); |if ($sorterTerm == null) { | $hashMapTerm.free(); |} """.stripMargin } val aggTime = metricTerm(ctx, "aggTime") val beforeAgg = ctx.freshName("beforeAgg") s""" |if (!$initAgg) { | $initAgg = true; | $createFastHashMap | $hashMapTerm = $thisPlan.createHashMap(); | long $beforeAgg = System.nanoTime(); | $doAggFuncName(); | $aggTime.add((System.nanoTime() - $beforeAgg) / $NANOS_PER_MILLIS); |} |// output the result |$outputFromFastHashMap |$outputFromRegularHashMap """.stripMargin } private def doConsumeWithKeys(ctx: CodegenContext, input: Seq[ExprCode]): String = { // create grouping key val unsafeRowKeyCode = GenerateUnsafeProjection.createCode( ctx, bindReferences[Expression](groupingExpressions, child.output)) val fastRowKeys = ctx.generateExpressions( bindReferences[Expression](groupingExpressions, child.output)) val unsafeRowKeys = unsafeRowKeyCode.value val unsafeRowKeyHash = ctx.freshName("unsafeRowKeyHash") val unsafeRowBuffer = ctx.freshName("unsafeRowAggBuffer") val fastRowBuffer = ctx.freshName("fastAggBuffer") // To individually generate code for each aggregate function, an element in `updateExprs` holds // all the expressions for the buffer of an aggregation function. val updateExprs = aggregateExpressions.map { e => // only have DeclarativeAggregate e.mode match { case Partial | Complete => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].updateExpressions case PartialMerge | Final => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].mergeExpressions } } val (checkFallbackForGeneratedHashMap, checkFallbackForBytesToBytesMap, resetCounter, incCounter) = if (testFallbackStartsAt.isDefined) { val countTerm = ctx.addMutableState(CodeGenerator.JAVA_INT, "fallbackCounter") (s"$countTerm < ${testFallbackStartsAt.get._1}", s"$countTerm < ${testFallbackStartsAt.get._2}", s"$countTerm = 0;", s"$countTerm += 1;") } else { ("true", "true", "", "") } val oomeClassName = classOf[SparkOutOfMemoryError].getName val findOrInsertRegularHashMap: String = s""" |// generate grouping key |${unsafeRowKeyCode.code} |int $unsafeRowKeyHash = ${unsafeRowKeyCode.value}.hashCode(); |if ($checkFallbackForBytesToBytesMap) { | // try to get the buffer from hash map | $unsafeRowBuffer = | $hashMapTerm.getAggregationBufferFromUnsafeRow($unsafeRowKeys, $unsafeRowKeyHash); |} |// Can't allocate buffer from the hash map. Spill the map and fallback to sort-based |// aggregation after processing all input rows. 
|if ($unsafeRowBuffer == null) { | if ($sorterTerm == null) { | $sorterTerm = $hashMapTerm.destructAndCreateExternalSorter(); | } else { | $sorterTerm.merge($hashMapTerm.destructAndCreateExternalSorter()); | } | $resetCounter | // the hash map had be spilled, it should have enough memory now, | // try to allocate buffer again. | $unsafeRowBuffer = $hashMapTerm.getAggregationBufferFromUnsafeRow( | $unsafeRowKeys, $unsafeRowKeyHash); | if ($unsafeRowBuffer == null) { | // failed to allocate the first page | throw new $oomeClassName("No enough memory for aggregation"); | } |} """.stripMargin val findOrInsertHashMap: String = { if (isFastHashMapEnabled) { // If fast hash map is on, we first generate code to probe and update the fast hash map. // If the probe is successful the corresponding fast row buffer will hold the mutable row. s""" |if ($checkFallbackForGeneratedHashMap) { | ${fastRowKeys.map(_.code).mkString("\n")} | if (${fastRowKeys.map("!" + _.isNull).mkString(" && ")}) { | $fastRowBuffer = $fastHashMapTerm.findOrInsert( | ${fastRowKeys.map(_.value).mkString(", ")}); | } |} |// Cannot find the key in fast hash map, try regular hash map. |if ($fastRowBuffer == null) { | $findOrInsertRegularHashMap |} """.stripMargin } else { findOrInsertRegularHashMap } } val inputAttr = aggregateBufferAttributes ++ inputAttributes // Here we set `currentVars(0)` to `currentVars(numBufferSlots)` to null, so that when // generating code for buffer columns, we use `INPUT_ROW`(will be the buffer row), while // generating input columns, we use `currentVars`. ctx.currentVars = new Array[ExprCode](aggregateBufferAttributes.length) ++ input val aggNames = aggregateExpressions.map(_.aggregateFunction.prettyName) // Computes start offsets for each aggregation function code // in the underlying buffer row. val bufferStartOffsets = { val offsets = mutable.ArrayBuffer[Int]() var curOffset = 0 updateExprs.foreach { exprsForOneFunc => offsets += curOffset curOffset += exprsForOneFunc.length } offsets.toArray } val updateRowInRegularHashMap: String = { ctx.INPUT_ROW = unsafeRowBuffer val boundUpdateExprs = updateExprs.map { updateExprsForOneFunc => bindReferences(updateExprsForOneFunc, inputAttr) } val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExprs.flatten) val effectiveCodes = subExprs.codes.mkString("\n") val unsafeRowBufferEvals = boundUpdateExprs.map { boundUpdateExprsForOneFunc => ctx.withSubExprEliminationExprs(subExprs.states) { boundUpdateExprsForOneFunc.map(_.genCode(ctx)) } } val aggCodeBlocks = updateExprs.indices.map { i => val rowBufferEvalsForOneFunc = unsafeRowBufferEvals(i) val boundUpdateExprsForOneFunc = boundUpdateExprs(i) val bufferOffset = bufferStartOffsets(i) // All the update code for aggregation buffers should be placed in the end // of each aggregation function code. 
val updateRowBuffers = rowBufferEvalsForOneFunc.zipWithIndex.map { case (ev, j) => val updateExpr = boundUpdateExprsForOneFunc(j) val dt = updateExpr.dataType val nullable = updateExpr.nullable CodeGenerator.updateColumn(unsafeRowBuffer, dt, bufferOffset + j, ev, nullable) } code""" |${ctx.registerComment(s"evaluate aggregate function for ${aggNames(i)}")} |${evaluateVariables(rowBufferEvalsForOneFunc)} |${ctx.registerComment("update unsafe row buffer")} |${updateRowBuffers.mkString("\n").trim} """.stripMargin } val codeToEvalAggFunc = if (conf.codegenSplitAggregateFunc && aggCodeBlocks.map(_.length).sum > conf.methodSplitThreshold) { val maybeSplitCode = splitAggregateExpressions( ctx, aggNames, boundUpdateExprs, aggCodeBlocks, subExprs.states) maybeSplitCode.getOrElse { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } } else { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } s""" |// common sub-expressions |$effectiveCodes |// evaluate aggregate functions and update aggregation buffers |$codeToEvalAggFunc """.stripMargin } val updateRowInHashMap: String = { if (isFastHashMapEnabled) { if (isVectorizedHashMapEnabled) { ctx.INPUT_ROW = fastRowBuffer val boundUpdateExprs = updateExprs.map { updateExprsForOneFunc => bindReferences(updateExprsForOneFunc, inputAttr) } val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExprs.flatten) val effectiveCodes = subExprs.codes.mkString("\n") val fastRowEvals = boundUpdateExprs.map { boundUpdateExprsForOneFunc => ctx.withSubExprEliminationExprs(subExprs.states) { boundUpdateExprsForOneFunc.map(_.genCode(ctx)) } } val aggCodeBlocks = fastRowEvals.zipWithIndex.map { case (fastRowEvalsForOneFunc, i) => val boundUpdateExprsForOneFunc = boundUpdateExprs(i) val bufferOffset = bufferStartOffsets(i) // All the update code for aggregation buffers should be placed in the end // of each aggregation function code. val updateRowBuffer = fastRowEvalsForOneFunc.zipWithIndex.map { case (ev, j) => val updateExpr = boundUpdateExprsForOneFunc(j) val dt = updateExpr.dataType val nullable = updateExpr.nullable CodeGenerator.updateColumn(fastRowBuffer, dt, bufferOffset + j, ev, nullable, isVectorized = true) } code""" |${ctx.registerComment(s"evaluate aggregate function for ${aggNames(i)}")} |${evaluateVariables(fastRowEvalsForOneFunc)} |${ctx.registerComment("update fast row")} |${updateRowBuffer.mkString("\n").trim} """.stripMargin } val codeToEvalAggFunc = if (conf.codegenSplitAggregateFunc && aggCodeBlocks.map(_.length).sum > conf.methodSplitThreshold) { val maybeSplitCode = splitAggregateExpressions( ctx, aggNames, boundUpdateExprs, aggCodeBlocks, subExprs.states) maybeSplitCode.getOrElse { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } } else { aggCodeBlocks.fold(EmptyBlock)(_ + _).code } // If vectorized fast hash map is on, we first generate code to update row // in vectorized fast hash map, if the previous loop up hit vectorized fast hash map. // Otherwise, update row in regular hash map. s""" |if ($fastRowBuffer != null) { | // common sub-expressions | $effectiveCodes | // evaluate aggregate functions and update aggregation buffers | $codeToEvalAggFunc |} else { | $updateRowInRegularHashMap |} """.stripMargin } else { // If row-based hash map is on and the previous loop up hit fast hash map, // we reuse regular hash buffer to update row of fast hash map. // Otherwise, update row in regular hash map. 
s""" |// Updates the proper row buffer |if ($fastRowBuffer != null) { | $unsafeRowBuffer = $fastRowBuffer; |} |$updateRowInRegularHashMap """.stripMargin } } else { updateRowInRegularHashMap } } val declareRowBuffer: String = if (isFastHashMapEnabled) { val fastRowType = if (isVectorizedHashMapEnabled) { classOf[MutableColumnarRow].getName } else { "UnsafeRow" } s""" |UnsafeRow $unsafeRowBuffer = null; |$fastRowType $fastRowBuffer = null; """.stripMargin } else { s"UnsafeRow $unsafeRowBuffer = null;" } // We try to do hash map based in-memory aggregation first. If there is not enough memory (the // hash map will return null for new key), we spill the hash map to disk to free memory, then // continue to do in-memory aggregation and spilling until all the rows had been processed. // Finally, sort the spilled aggregate buffers by key, and merge them together for same key. s""" |$declareRowBuffer |$findOrInsertHashMap |$incCounter |$updateRowInHashMap """.stripMargin } override def verboseString(maxFields: Int): String = toString(verbose = true, maxFields) override def simpleString(maxFields: Int): String = toString(verbose = false, maxFields) private def toString(verbose: Boolean, maxFields: Int): String = { val allAggregateExpressions = aggregateExpressions testFallbackStartsAt match { case None => val keyString = truncatedString(groupingExpressions, "[", ", ", "]", maxFields) val functionString = truncatedString(allAggregateExpressions, "[", ", ", "]", maxFields) val outputString = truncatedString(output, "[", ", ", "]", maxFields) if (verbose) { s"HashAggregate(keys=$keyString, functions=$functionString, output=$outputString)" } else { s"HashAggregate(keys=$keyString, functions=$functionString)" } case Some(fallbackStartsAt) => s"HashAggregateWithControlledFallback $groupingExpressions " + s"$allAggregateExpressions $resultExpressions fallbackStartsAt=$fallbackStartsAt" } } } object HashAggregateExec { def supportsAggregate(aggregateBufferAttributes: Seq[Attribute]): Boolean = { val aggregationBufferSchema = StructType.fromAttributes(aggregateBufferAttributes) UnsafeFixedWidthAggregationMap.supportsAggregationBufferSchema(aggregationBufferSchema) } }
rednaxelafx/apache-spark
sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala
Scala
apache-2.0
45,611
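A hedged, self-contained example (not from the Spark sources above) of a query whose physical plan goes through HashAggregateExec; the only non-obvious piece, the fallback config name, is taken from testFallbackStartsAt in the file itself, and it is a testing-only hook.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

val spark = SparkSession.builder().master("local[*]").appName("hash-agg-demo").getOrCreate()
import spark.implicits._

// Testing hook read by HashAggregateExec: force the fallback to the sort-based path early.
spark.conf.set("spark.sql.TungstenAggregate.testFallbackStartsAt", "2, 4")

val df = Seq(("a", 1), ("b", 2), ("a", 3), ("c", 4), ("b", 5)).toDF("k", "v")
val agg = df.groupBy("k").agg(sum("v"), avg("v"))
agg.explain()  // the physical plan should contain HashAggregate operators
agg.show()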
import java.io.File import java.util.Scanner import java.util.TreeMap import scala.collection.mutable.Map import scala.collection.JavaConversions.mapAsScalaMap val in = new Scanner(new File("example.txt")) val counts: Map[String, Int] = new TreeMap[String, Int] while (in.hasNext) { val key = in.next counts(key) = if (counts.contains(key)) counts(key) + 1 else 1 } for ((word, count) <- counts) println(word + ": " + count)
demiazz/scala-impatient
chapter-04/exercise-05/main.scala
Scala
unlicense
455
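As an aside on the exercise above, the same word counts can be produced without java.util collections; a sketch using only the standard Scala library (it assumes words are separated by spaces or line breaks, roughly matching Scanner's default tokenization).

import scala.io.Source

val words  = Source.fromFile("example.txt").getLines().flatMap(_.split(' ')).filter(_.nonEmpty).toSeq
val counts = words.groupBy(identity).mapValues(_.size).toSeq.sortBy(_._1)  // sorted like the TreeMap
for ((word, count) <- counts) println(word + ": " + count)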
package org.terkwood.pathfinding.core package object finders { val Sqrt2 = math.sqrt(2) }
felixt-cake/Pathfinding.scala
src/main/scala/org/terkwood/pathfinding/core/finders/package.scala
Scala
mit
93
package com.bryghts.kissnumber import java.lang.{Double => JavaDouble} trait IntegerNumberIsIntegralDef extends Integral[IntegerNumber] { def plus (x: IntegerNumber, y: IntegerNumber): IntegerNumber = x + y def minus (x: IntegerNumber, y: IntegerNumber): IntegerNumber = x - y def times (x: IntegerNumber, y: IntegerNumber): IntegerNumber = x * y def quot (x: IntegerNumber, y: IntegerNumber): IntegerNumber = IntegerNumber(x.v / y.v) def rem (x: IntegerNumber, y: IntegerNumber): IntegerNumber = x % y def negate (x: IntegerNumber): IntegerNumber = -x def fromInt (x: Int): IntegerNumber = IntegerNumber(x) def toInt (x: IntegerNumber): Int = x.toInt def toLong (x: IntegerNumber): Long = x.v def toFloat (x: IntegerNumber): Float = x.toFloat def toDouble (x: IntegerNumber): Double = x.toDouble } trait IntegerNumberOrderingDef extends Ordering[IntegerNumber] { def compare(x: IntegerNumber, y: IntegerNumber) = if (x < y) -1 else if (x == y) 0 else 1 } trait RealNumberIsConflictedDef extends Numeric[RealNumber] { def plus (x: RealNumber, y: RealNumber): RealNumber = x + y def minus (x: RealNumber, y: RealNumber): RealNumber = x - y def times (x: RealNumber, y: RealNumber): RealNumber = x * y def negate (x: RealNumber): RealNumber = -x def fromInt (x: Int): RealNumber = x def toInt (x: RealNumber): Int = x.toInt def toLong (x: RealNumber): Long = x.toLong def toFloat (x: RealNumber): Float = x.toFloat def toDouble (x: RealNumber): Double = x } trait RealNumberIsFractionalDef extends RealNumberIsConflictedDef with Fractional[RealNumber] { def div (x: RealNumber, y: RealNumber): RealNumber = x / y } trait RealNumberAsIfIntegralDef extends RealNumberIsConflictedDef with Integral[RealNumber] { def quot (x: RealNumber, y: RealNumber): RealNumber = RealNumber((BigDecimal(x.v) / BigDecimal(y.v)).doubleValue) def rem (x: RealNumber, y: RealNumber): RealNumber = RealNumber((BigDecimal(x.v) remainder BigDecimal(y.v)).doubleValue) } trait NumberIsConflictedDef extends Numeric[Number] { def plus (x: Number, y: Number): Number = x + y def minus (x: Number, y: Number): Number = x - y def times (x: Number, y: Number): Number = x * y def negate (x: Number): Number = -x def fromInt (x: Int): Number = IntegerNumber(x) def toInt (x: Number): Int = x.toInt def toLong (x: Number): Long = x.toLong def toFloat (x: Number): Float = x.toFloat def toDouble (x: Number): Double = x.toDouble } trait NumberIsFractionalDef extends NumberIsConflictedDef with Fractional[Number] { def div (x: Number, y: Number): Number = x / y } trait NumberAsIfIntegralDef extends NumberIsConflictedDef with Integral[Number] { private def bd(x: Number): BigDecimal = x match {case n: IntegerNumber => BigDecimal(n.v) case n: RealNumber => BigDecimal(n.v)} def quot (x: Number, y: Number): Number = Number((bd(x) / bd(y)).doubleValue) def rem (x: Number, y: Number): Number = Number((bd(x) remainder bd(y)).doubleValue) } trait RealNumberOrderingDef extends Ordering[RealNumber] {outer => def compare (x: RealNumber, y: RealNumber): Int = JavaDouble.compare(x.v, y.v) override def lteq (x: RealNumber, y: RealNumber): Boolean = x <= y override def gteq (x: RealNumber, y: RealNumber): Boolean = x >= y override def lt (x: RealNumber, y: RealNumber): Boolean = x < y override def gt (x: RealNumber, y: RealNumber): Boolean = x > y override def equiv (x: RealNumber, y: RealNumber): Boolean = x == y override def max (x: RealNumber, y: RealNumber): RealNumber = RealNumber(math.max(x.v, y.v)) override def min (x: RealNumber, y: RealNumber): RealNumber = 
RealNumber(math.min(x.v, y.v)) override def reverse: Ordering[RealNumber] = new RealNumberOrderingDef { override def reverse = outer override def compare (x: RealNumber, y: RealNumber): Int = outer.compare(y, x) override def lteq (x: RealNumber, y: RealNumber): Boolean = outer.lteq(y, x) override def gteq (x: RealNumber, y: RealNumber): Boolean = outer.gteq(y, x) override def lt (x: RealNumber, y: RealNumber): Boolean = outer.lt(y, x) override def gt (x: RealNumber, y: RealNumber): Boolean = outer.gt(y, x) } } trait NumberOrderingDef extends Ordering[Number] {outer => def compare (x: Number, y: Number): Int = (x, y) match { case (x: IntegerNumber, y: IntegerNumber) => IntegerNumberOrdering.compare(x, y) case (x: RealNumber, y: RealNumber) => RealNumberOrdering.compare(x, y) case (x: RealNumber, y: IntegerNumber) => BigDecimal(x.v).compare(BigDecimal(y.v)) case (x: IntegerNumber, y: RealNumber) => BigDecimal(x.v).compare(BigDecimal(y.v)) } override def max (x: Number, y: Number): Number = (x, y) match { case (x: IntegerNumber, y: IntegerNumber) => IntegerNumberOrdering.max(x, y) case (x: RealNumber, y: RealNumber) => RealNumberOrdering.max(x, y) case (x: RealNumber, y: IntegerNumber) => Number(BigDecimal(x.v).max(BigDecimal(y.v))) case (x: IntegerNumber, y: RealNumber) => Number(BigDecimal(x.v).max(BigDecimal(y.v))) } override def min (x: Number, y: Number): Number = (x, y) match { case (x: IntegerNumber, y: IntegerNumber) => IntegerNumberOrdering.min(x, y) case (x: RealNumber, y: RealNumber) => RealNumberOrdering.min(x, y) case (x: RealNumber, y: IntegerNumber) => Number(BigDecimal(x.v).min(BigDecimal(y.v))) case (x: IntegerNumber, y: RealNumber) => Number(BigDecimal(x.v).min(BigDecimal(y.v))) } override def lteq (x: Number, y: Number): Boolean = x <= y override def gteq (x: Number, y: Number): Boolean = x >= y override def lt (x: Number, y: Number): Boolean = x < y override def gt (x: Number, y: Number): Boolean = x > y override def equiv (x: Number, y: Number): Boolean = x == y override def reverse: Ordering[Number] = new NumberOrderingDef { override def reverse = outer override def compare (x: Number, y: Number): Int = outer.compare(y, x) override def lteq (x: Number, y: Number): Boolean = outer.lteq(y, x) override def gteq (x: Number, y: Number): Boolean = outer.gteq(y, x) override def lt (x: Number, y: Number): Boolean = outer.lt(y, x) override def gt (x: Number, y: Number): Boolean = outer.gt(y, x) } }
marcesquerra/KissNumber
src/main/scala/com/bryghts/kissnumber/numerics.scala
Scala
mit
6,720
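The typeclass instances above exist so that KissNumber's wrapper types plug into Scala's generic numeric machinery (Numeric, Integral, Fractional, Ordering). A minimal sketch of what that buys, using only the standard library; the commented line names a library instance only as an assumption about how the implicits are published:

def sumAll[N](xs: Seq[N])(implicit num: Numeric[N]): N =
  xs.foldLeft(num.zero)(num.plus)

// With an implicit Numeric[IntegerNumber] in scope (for example an object
// extending IntegerNumberIsIntegralDef), this would work unchanged:
// val total = sumAll(Seq(IntegerNumber(1), IntegerNumber(2), IntegerNumber(3)))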
package timezra.dropbox.core import java.text.DateFormat import java.text.SimpleDateFormat import java.util.Date import scala.annotation.implicitNotFound import scala.concurrent.Future import scala.concurrent.duration.DurationInt import akka.actor.ActorRef import akka.actor.ActorSystem import akka.io.IO import akka.pattern.ask import akka.util.Timeout import akka.util.Timeout.durationToTimeout import spray.can.Http import spray.client.pipelining.Get import spray.client.pipelining.Put import spray.client.pipelining.WithTransformerConcatenation import spray.client.pipelining.addHeader import spray.client.pipelining.sendReceive import spray.client.pipelining.unmarshal import spray.http.ContentTypeRange.apply import spray.http.HttpCharsets import spray.http.HttpData import spray.http.HttpEntity.NonEmpty import spray.http.HttpRequest import spray.http.HttpResponse import spray.http.MediaType import spray.http.MediaTypes import spray.httpx.unmarshalling.FromResponseUnmarshaller import spray.httpx.unmarshalling.Unmarshaller import spray.json.DefaultJsonProtocol import spray.json.DeserializationException import spray.json.JsArray import spray.json.JsString import spray.json.JsValue import spray.json.JsonParser import spray.json.RootJsonFormat import spray.json.RootJsonReader import spray.json.jsonReader import spray.json.pimpString import spray.util.pimpFuture import spray.http.Uri import spray.http.Uri.Query import java.util.Locale case class QuotaInfo(datastores: Int, shared: Long, quota: Long, normal: Long) case class AccountInfo(referral_link: String, display_name: String, uid: Long, country: Option[String], quota_info: QuotaInfo, email: String) object AccountInfoJsonProtocol extends DefaultJsonProtocol { implicit val quotaInfoFormat = jsonFormat4(QuotaInfo) implicit def accountInfoFormat = jsonFormat6(AccountInfo) } object JsonImplicits { implicit object DateJsonFormat extends RootJsonFormat[Date] { def write(date: Date) = { JsString(formatter format date) } def read(value: JsValue) = value match { case null ⇒ null case JsString(date) ⇒ formatter parse date case _ ⇒ throw new DeserializationException("Date Expected with format %a, %d %b %Y %H:%M:%S %z") } def formatter: DateFormat = new SimpleDateFormat("EEE, d MMM yyyy HH:mm:ss Z") } implicit object UriJsonFormat extends RootJsonFormat[Uri] { def write(uri: Uri) = JsString(uri.toString) def read(value: JsValue) = value match { case null ⇒ null case JsString(uri) ⇒ Uri(uri) case _ ⇒ throw new DeserializationException("Expected URI") } } } case class ContentMetadata(size: String, bytes: Long, path: String, is_dir: Boolean, is_deleted: Option[Boolean], rev: Option[String], hash: Option[String], thumb_exists: Boolean, icon: String, modified: Option[Date], client_mtime: Option[Date], root: String, mime_type: Option[String], revision: Option[Long], contents: Option[List[ContentMetadata]]) object ContentMetadataJsonProtocol extends DefaultJsonProtocol { import JsonImplicits._ implicit def contentMetadataFormat: RootJsonFormat[ContentMetadata] = rootFormat(lazyFormat(jsonFormat15(ContentMetadata))) } case class DeltaMetadata(entries: List[Tuple2[String, ContentMetadata]], reset: Boolean, cursor: String, has_more: Boolean) object DeltaMetadataJsonProtocol extends DefaultJsonProtocol { import ContentMetadataJsonProtocol.contentMetadataFormat implicit def deltaMetadataFormat = jsonFormat4(DeltaMetadata) } case class LongpollMetadata(changes: Boolean, backoff: Option[Int]) object LongpollMetadataJsonProtocol extends DefaultJsonProtocol { implicit def 
longpollMetadataFormat = jsonFormat2(LongpollMetadata) } case class LinkWithExpiry(url: Uri, expires: Date) object LinkWithExpiryJsonProtocol extends DefaultJsonProtocol { import spray.http.Uri import JsonImplicits._ implicit def linkWithExpiryFormat: RootJsonFormat[LinkWithExpiry] = jsonFormat2(LinkWithExpiry) } case class ReferenceWithExpiry(copy_ref: String, expires: Date) object ReferenceWithExpiryJsonProtocol extends DefaultJsonProtocol { import JsonImplicits._ implicit def referenceWithExpiryFormat: RootJsonFormat[ReferenceWithExpiry] = jsonFormat2(ReferenceWithExpiry) } case class UploadWithExpiry(upload_id: String, offset: Long, expires: Date) object UploadWithExpiryJsonProtocol extends DefaultJsonProtocol { import JsonImplicits._ implicit def uploadWithExpiryFormat: RootJsonFormat[UploadWithExpiry] = jsonFormat3(UploadWithExpiry) } case class ByteRange(start: Option[Long], end: Option[Long]) { require(start.isDefined || end.isDefined) override def toString = s"""${start.getOrElse("")}-${end.getOrElse("")}""" } object ContentTypes { import spray.http.MediaType import spray.http.MediaTypes val `text/javascript` = MediaType custom ("text", "javascript", true, true) val `image/png` = MediaType custom ("image", "png", true, true) val `image/jpeg` = MediaType custom ("image", "jpeg", true, true) MediaTypes register `text/javascript` MediaTypes register `image/png` MediaTypes register `image/jpeg` } object Format extends Enumeration { type Format = Value val jpeg = Value("jpeg") val png = Value("png") } object Size extends Enumeration { type Size = Value val xs = Value("xs") val s = Value("s") val m = Value("m") val l = Value("l") val xl = Value("xl") } object Dropbox { def apply(clientIdentifier: String, accessToken: String): Dropbox = new Dropbox(clientIdentifier, accessToken) } class Dropbox(clientIdentifier: String, accessToken: String) { import scala.util.{ Failure, Success } import akka.actor.ActorSystem import akka.io.IO import spray.can.Http import scala.concurrent.Future import scala.concurrent.duration.DurationInt import spray.client.pipelining._ import spray.httpx.unmarshalling.Unmarshaller import spray.httpx.unmarshalling.FromResponseUnmarshaller import spray.http.HttpEntity import spray.http.HttpResponse implicit lazy val system = ActorSystem("dropbox-sdk-scala") import system.dispatcher def addUserAgent = addHeader("User-Agent", s"${clientIdentifier} Dropbox-Scala-SDK/1.0") def addAuthorization = addHeader("Authorization", s"Bearer ${accessToken}") def accountInfo(conduit: ActorRef = IO(Http))(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[AccountInfo] = { import AccountInfoJsonProtocol.accountInfoFormat import SprayJsonSupport.sprayJsonUnmarshaller val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[AccountInfo] ) val q = Seq(locale map ("locale" -> _.toLanguageTag)) flatMap (f ⇒ f) pipeline { Get(Uri("https://api.dropbox.com/1/account/info") withQuery (q: _*)) } } def getFile(conduit: ActorRef = IO(Http), root: String = "auto", path: String, rev: Option[String] = None, range: Option[Seq[ByteRange]] = None)(implicit timeout: Timeout = 15 minutes, maxChunkSize: Long = 1048576): Future[Tuple2[ContentMetadata, Stream[HttpData]]] = { implicit val FileUnmarshaller = new FromResponseUnmarshaller[Tuple2[ContentMetadata, Stream[HttpData]]] { import spray.json._ import DefaultJsonProtocol._ import ContentMetadataJsonProtocol.contentMetadataFormat def apply(response: HttpResponse) = { val metadataHeader = 
response.headers.find(_.name == "x-dropbox-metadata") Right(Tuple2(metadataHeader.get.value.asJson.convertTo, response.entity.data.toChunkStream(maxChunkSize))) } } val pipeline = ( addUserAgent ~> addAuthorization ~> range.fold(identity[HttpRequest]_)(r ⇒ addHeader("Range", s"""bytes=${r mkString ("", ",", "")}""")) ~> sendReceive(conduit) ~> unmarshal[(ContentMetadata, Stream[HttpData])] ) val q = Seq(rev map ("rev" ->)) flatMap (f ⇒ f) pipeline { Get(Uri(s"https://api-content.dropbox.com/1/files/$root/$path") withQuery (q: _*)) } } import scalaz.effect.IoExceptionOr import scalaz.iteratee.EnumeratorT import scalaz.effect.{ IO ⇒ zIO } def putFile(conduit: ActorRef = IO(Http), root: String = "auto", path: String, contents: EnumeratorT[IoExceptionOr[(Array[Byte], Int)], zIO], length: Int, rev: Option[String] = None, parent_rev: Option[String] = None, overwrite: Option[Boolean] = None)(implicit timeout: Timeout = 15 minutes, locale: Option[Locale] = None): Future[ContentMetadata] = { import ContentMetadataJsonProtocol.contentMetadataFormat import SprayJsonSupport.sprayJsonUnmarshaller import MetaMarshallers._ import Arrays._ implicit def boundArray2HttpData(t: (Array[Byte], Int)): HttpData = HttpData(t._1 takeT t._2) val pipeline = ( addUserAgent ~> addAuthorization ~> addHeader("Content-Length", String valueOf (length)) ~> sendReceive(conduit) ~> unmarshal[ContentMetadata] ) val q = Seq(parent_rev map ("parent_rev" ->), overwrite map ("overwrite" -> _.toString), locale map ("locale" -> _.toLanguageTag)) flatMap (f ⇒ f) pipeline { Put(Uri(s"https://api-content.dropbox.com/1/files_put/$root/$path") withQuery (q: _*), contents) } } import java.io.File import spray.http.BodyPart def postFile(conduit: ActorRef = IO(Http), root: String = "auto", path: String, file: File, filename: Option[String] = None, parent_rev: Option[String] = None, overwrite: Option[Boolean] = None)(implicit timeout: Timeout = 15 minutes, locale: Option[Locale] = None): Future[ContentMetadata] = { import ContentMetadataJsonProtocol.contentMetadataFormat import SprayJsonSupport.sprayJsonUnmarshaller import spray.http.MultipartFormData import spray.http.HttpHeaders.`Content-Disposition` import spray.httpx.marshalling.MultipartMarshallers._ val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[ContentMetadata] ) val payload = MultipartFormData(Map( "dropbox-file" -> BodyPart( HttpEntity(HttpData(file)), `Content-Disposition`("form-data", Map("name" -> "file", "filename" -> filename.getOrElse(file getName))) :: Nil ) )) val q = Seq(parent_rev map ("parent_rev" ->), overwrite map ("overwrite" -> _.toString), locale map ("locale" -> _.toLanguageTag)) flatMap (f ⇒ f) pipeline { Post(Uri(s"https://api-content.dropbox.com/1/files/$root/$path") withQuery (q: _*), payload) } } def metadata(conduit: ActorRef = IO(Http), root: String = "auto", path: String, file_limit: Option[Int] = None, hash: Option[String] = None, list: Option[Boolean] = None, include_deleted: Option[Boolean] = None, rev: Option[String] = None)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[Either[Boolean, ContentMetadata]] = { import spray.http.StatusCodes import spray.json._ import DefaultJsonProtocol._ import ContentMetadataJsonProtocol.contentMetadataFormat implicit val NotModifiedOrResultUnmarshaller = new FromResponseUnmarshaller[Either[Boolean, ContentMetadata]] { def apply(response: HttpResponse) = response.status match { case StatusCodes.NotModified ⇒ Right(Left(true)) case 
StatusCodes.Success(_) ⇒ Right(Right(response.entity.asString.asJson.convertTo)) case _ ⇒ throw new spray.httpx.UnsuccessfulResponseException(response) } } val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[Either[Boolean, ContentMetadata]] ) val q = Seq(locale map ("locale" -> _.toLanguageTag), file_limit map ("file_limit" -> _.toString), hash map ("hash" ->), list map ("list" -> _.toString), include_deleted map ("include_deleted" -> _.toString), rev map ("rev" ->)) flatMap (f ⇒ f) pipeline { Get(Uri(s"https://api.dropbox.com/1/metadata/$root/$path") withQuery (q: _*)) } } def delta(conduit: ActorRef = IO(Http), path_prefix: Option[String] = None, cursor: Option[String] = None)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[DeltaMetadata] = { import DeltaMetadataJsonProtocol.deltaMetadataFormat import SprayJsonSupport.sprayJsonUnmarshaller import spray.http.FormData val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[DeltaMetadata] ) val payload = Seq( path_prefix map ("path_prefix" ->), cursor map ("cursor" ->), locale map ("locale" -> _.toLanguageTag) ) flatMap (f ⇒ f) pipeline { Post(Uri(s"https://api.dropbox.com/1/delta"), FormData(payload)) } } def longpoll_delta(conduit: ActorRef = IO(Http), cursor: String, timeout: Option[Int] = None)(implicit futureTimeout: Timeout = timeout getOrElse 30 seconds, locale: Option[Locale] = None): Future[LongpollMetadata] = { import LongpollMetadataJsonProtocol.longpollMetadataFormat import SprayJsonSupport.sprayPlainTextJsonUnmarshaller val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[LongpollMetadata] ) val q = Seq(Some("cursor", cursor), timeout map ("timeout" -> _.toString)) flatMap (f ⇒ f) pipeline { Get(Uri("https://api-notify.dropbox.com/1/longpoll_delta") withQuery (q: _*)) } } def revisions(conduit: ActorRef = IO(Http), root: String = "auto", path: String, rev_limit: Option[Int] = None)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[List[ContentMetadata]] = { import spray.json.CollectionFormats import ContentMetadataJsonProtocol.contentMetadataFormat import SprayJsonSupport.sprayJsonUnmarshaller import DefaultJsonProtocol._ val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[List[ContentMetadata]] ) val q = Seq(rev_limit map ("rev_limit" -> _.toString), locale map ("locale" -> _.toLanguageTag)) flatMap (f ⇒ f) pipeline { Get(Uri(s"https://api.dropbox.com/1/revisions/$root/$path") withQuery (q: _*)) } } def restore(conduit: ActorRef = IO(Http), root: String = "auto", path: String, rev: String)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[ContentMetadata] = { import ContentMetadataJsonProtocol.contentMetadataFormat import SprayJsonSupport.sprayJsonUnmarshaller import spray.http.FormData val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[ContentMetadata] ) val payload = Seq( Some("rev", rev), locale map ("locale" -> _.toLanguageTag) ) flatMap (f ⇒ f) pipeline { Post(Uri(s"https://api.dropbox.com/1/restore/$root/$path"), FormData(payload)) } } import spray.http.HttpMethod import spray.http.HttpMethods.GET def search(conduit: ActorRef = IO(Http), root: String = "auto", path: Option[String] = None, query: String, file_limit: Option[Int] = None, include_deleted: Option[Boolean] = None, method: HttpMethod = GET)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = 
None): Future[List[ContentMetadata]] = { import spray.http.FormData import spray.http.HttpMethods.POST import spray.json.CollectionFormats import ContentMetadataJsonProtocol.contentMetadataFormat import SprayJsonSupport.sprayJsonUnmarshaller import DefaultJsonProtocol._ val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[List[ContentMetadata]] ) val payload = Seq(Some("query", query), file_limit map ("file_limit" -> _.toString), include_deleted map ("include_deleted" -> _.toString), locale map ("locale" -> _.toLanguageTag)) flatMap (f ⇒ f) val searchUri = Uri(Seq(Some("https://api.dropbox.com/1/search"), Some(root), path).flatten.mkString("/")) pipeline { method match { case GET ⇒ Get(searchUri withQuery (payload: _*)) case POST ⇒ Post(searchUri, FormData(payload)) } } } def shares(conduit: ActorRef = IO(Http), root: String = "auto", path: String, short_url: Option[Boolean] = None)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[LinkWithExpiry] = { import LinkWithExpiryJsonProtocol.linkWithExpiryFormat import SprayJsonSupport.sprayJsonUnmarshaller import spray.http.FormData val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[LinkWithExpiry] ) val payload = Seq( short_url map ("short_url" -> _.toString), locale map ("locale" -> _.toLanguageTag) ) flatMap (f ⇒ f) pipeline { Post(Uri(s"https://api.dropbox.com/1/shares/$root/$path"), FormData(payload)) } } def media(conduit: ActorRef = IO(Http), root: String = "auto", path: String)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[LinkWithExpiry] = { import LinkWithExpiryJsonProtocol.linkWithExpiryFormat import SprayJsonSupport.sprayJsonUnmarshaller import spray.http.FormData val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[LinkWithExpiry] ) val payload = Seq(locale map ("locale" -> _.toLanguageTag)) flatMap (f ⇒ f) pipeline { Post(Uri(s"https://api.dropbox.com/1/media/$root/$path"), FormData(payload)) } } def copy_ref(conduit: ActorRef = IO(Http), root: String = "auto", path: String)(implicit timeout: Timeout = 60 seconds): Future[ReferenceWithExpiry] = { import ReferenceWithExpiryJsonProtocol.referenceWithExpiryFormat import SprayJsonSupport.sprayJsonUnmarshaller import DefaultJsonProtocol._ val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[ReferenceWithExpiry] ) pipeline { Get(Uri(s"https://api.dropbox.com/1/copy_ref/$root/$path")) } } import Format._ import Size._ def thumbnails(conduit: ActorRef = IO(Http), root: String = "auto", path: String, format: Option[Format] = None, size: Option[Size] = None)(implicit timeout: Timeout = 15 minutes, maxChunkSize: Long = 1048576): Future[Tuple2[ContentMetadata, Stream[HttpData]]] = { implicit val FileUnmarshaller = new FromResponseUnmarshaller[Tuple2[ContentMetadata, Stream[HttpData]]] { import spray.json._ import DefaultJsonProtocol._ import ContentMetadataJsonProtocol.contentMetadataFormat def apply(response: HttpResponse) = { val metadataHeader = response.headers.find(_.name == "x-dropbox-metadata") Right(Tuple2(metadataHeader.get.value.asJson.convertTo, response.entity.data.toChunkStream(maxChunkSize))) } } val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[(ContentMetadata, Stream[HttpData])] ) val q = Seq(format map ("format" -> _.toString), size map ("size" -> _.toString)) flatMap (f ⇒ f) pipeline { 
Get(Uri(s"https://api-content.dropbox.com/1/thumbnails/$root/$path") withQuery (q: _*)) } } import scalaz.effect.IoExceptionOr import scalaz.iteratee.EnumeratorT import scalaz.effect.{ IO ⇒ zIO } def chunked_upload(conduit: ActorRef = IO(Http), contents: EnumeratorT[IoExceptionOr[(Array[Byte], Int)], zIO], idAndOffset: Option[Tuple2[String, Long]] = None)(implicit timeout: Timeout = 15 minutes): Future[UploadWithExpiry] = { import UploadWithExpiryJsonProtocol.uploadWithExpiryFormat import SprayJsonSupport.sprayJsonUnmarshaller import MetaMarshallers._ import Arrays._ implicit def boundArray2HttpData(t: (Array[Byte], Int)): HttpData = HttpData(t._1 takeT t._2) val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[UploadWithExpiry] ) val q = Seq(idAndOffset map ("upload_id" -> _._1), idAndOffset map ("offset" -> _._2.toString)) flatMap (f ⇒ f) pipeline { Put(Uri(s"https://api-content.dropbox.com/1/chunked_upload") withQuery (q: _*), contents) } } def commit_chunked_upload(conduit: ActorRef = IO(Http), root: String = "auto", path: String, upload_id: String, parent_rev: Option[String] = None, overwrite: Option[Boolean] = None)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[ContentMetadata] = { import ContentMetadataJsonProtocol.contentMetadataFormat import SprayJsonSupport.sprayJsonUnmarshaller val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[ContentMetadata] ) val q = Seq(Some("upload_id", upload_id), parent_rev map ("parent_rev" ->), overwrite map ("overwrite" -> _.toString), locale map ("locale" -> _.toLanguageTag)) flatMap (f ⇒ f) pipeline { Post(Uri(s"https://api-content.dropbox.com/1/commit_chunked_upload/$root/$path") withQuery (q: _*)) } } def copy(conduit: ActorRef = IO(Http), root: String = "auto", to_path: String, from_path: Option[String] = None, from_copy_ref: Option[String] = None)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[ContentMetadata] = { import ContentMetadataJsonProtocol.contentMetadataFormat import SprayJsonSupport.sprayJsonUnmarshaller import spray.http.FormData val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[ContentMetadata] ) val payload = Seq( Some("root", root), Some("to_path", to_path), from_path map ("from_path" ->), from_copy_ref map ("from_copy_ref" ->), locale map ("locale" -> _.toLanguageTag) ) flatMap (f ⇒ f) pipeline { Post(Uri("https://api.dropbox.com/1/fileops/copy"), FormData(payload)) } } def create_folder(conduit: ActorRef = IO(Http), root: String = "auto", path: String)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[ContentMetadata] = { import ContentMetadataJsonProtocol.contentMetadataFormat import SprayJsonSupport.sprayJsonUnmarshaller import spray.http.FormData val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[ContentMetadata] ) val payload = Seq( Some("root", root), Some("path", path), locale map ("locale" -> _.toLanguageTag) ) flatMap (f ⇒ f) pipeline { Post(Uri("https://api.dropbox.com/1/fileops/create_folder"), FormData(payload)) } } def delete(conduit: ActorRef = IO(Http), root: String = "auto", path: String)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[ContentMetadata] = { import ContentMetadataJsonProtocol.contentMetadataFormat import SprayJsonSupport.sprayJsonUnmarshaller import spray.http.FormData val pipeline = ( addUserAgent ~> addAuthorization ~> 
sendReceive(conduit) ~> unmarshal[ContentMetadata] ) val payload = Seq( Some("root", root), Some("path", path), locale map ("locale" -> _.toLanguageTag) ) flatMap (f ⇒ f) pipeline { Post(Uri("https://api.dropbox.com/1/fileops/delete"), FormData(payload)) } } def move(conduit: ActorRef = IO(Http), root: String = "auto", to_path: String, from_path: String)(implicit timeout: Timeout = 60 seconds, locale: Option[Locale] = None): Future[ContentMetadata] = { import ContentMetadataJsonProtocol.contentMetadataFormat import SprayJsonSupport.sprayJsonUnmarshaller import spray.http.FormData val pipeline = ( addUserAgent ~> addAuthorization ~> sendReceive(conduit) ~> unmarshal[ContentMetadata] ) val payload = Seq( Some("root", root), Some("to_path", to_path), Some("from_path", from_path), locale map ("locale" -> _.toLanguageTag) ) flatMap (f ⇒ f) pipeline { Post(Uri("https://api.dropbox.com/1/fileops/move"), FormData(payload)) } } def shutdown(): Unit = { import akka.pattern.ask import spray.util.pimpFuture IO(Http).ask(Http.CloseAll)(3 seconds).await system shutdown } }
timezra/dropbox-sdk-scala
dropbox-sdk-scala/src/main/scala/timezra/dropbox/core/Dropbox.scala
Scala
mit
24,676
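A hedged usage sketch of the client defined above; the client identifier and access token are placeholders, and the call relies on the defaults visible in the signatures (the IO(Http) conduit and the 60-second timeout), so this is illustrative rather than a recipe:

import scala.concurrent.Await
import scala.concurrent.duration._
import timezra.dropbox.core._

object DropboxExample extends App {
  val dropbox = Dropbox("example-app", "example-access-token") // placeholder credentials
  val info = Await.result(dropbox.accountInfo(), 60.seconds)   // default conduit and timeout
  println(s"${info.display_name} uses ${info.quota_info.normal} of ${info.quota_info.quota} bytes")
  dropbox.shutdown()
}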
package com.datastax.spark.connector.writer import com.datastax.driver.core._ import com.datastax.spark.connector.types.{ColumnType, Unset} import com.datastax.spark.connector.util.{CodecRegistryUtil, Logging} /** * Class for binding row-like objects into prepared statements. prefixVals * is used for binding constant values into each bound statement. This supports parametrized * .where clauses in [[com.datastax.spark.connector.rdd.CassandraJoinRDD]] */ private[connector] class BoundStatementBuilder[T]( val rowWriter: RowWriter[T], val preparedStmt: PreparedStatement, val prefixVals: Seq[Any] = Seq.empty, val ignoreNulls: Boolean = false, val protocolVersion: ProtocolVersion) extends Logging { private val columnNames = rowWriter.columnNames.toIndexedSeq private val columnTypes = columnNames.map(preparedStmt.getVariables.getType) private val converters = columnTypes.map(ColumnType.converterToCassandra(_)) private val buffer = Array.ofDim[Any](columnNames.size) require(ignoreNulls == false || protocolVersion.toInt >= ProtocolVersion.V4.toInt, s""" |Protocol Version $protocolVersion does not support ignoring null values and leaving |parameters unset. This is only supported in ${ProtocolVersion.V4} and greater. """.stripMargin) var logUnsetToNullWarning = false val UnsetToNullWarning = s"""Unset values can only be used with C* >= 2.2. They have been replaced |with nulls. Found protocol version ${protocolVersion}. |${ProtocolVersion.V4} or greater required" """.stripMargin private def maybeLeaveUnset( boundStatement: BoundStatement, columnName: String): Unit = protocolVersion match { case pv if pv.toInt <= ProtocolVersion.V3.toInt => { boundStatement.setToNull(columnName) logUnsetToNullWarning = true } case _ => } private def bindColumnNull( boundStatement: BoundStatement, columnName: String, columnType: DataType, columnValue: AnyRef): Unit = { if (columnValue == Unset || (ignoreNulls && columnValue == null)) { boundStatement.setToNull(columnName) logUnsetToNullWarning = true } else { val codec = CodecRegistryUtil.codecFor(columnType, columnValue) boundStatement.set(columnName, columnValue, codec) } } private def bindColumnUnset( boundStatement: BoundStatement, columnName: String, columnType: DataType, columnValue: AnyRef): Unit = { if (columnValue == Unset || (ignoreNulls && columnValue == null)) { //Do not bind } else { val codec = CodecRegistryUtil.codecFor(columnType, columnValue) boundStatement.set(columnName, columnValue, codec) } } /** * If the protocol version is greater than V3 (C* 2.2 and Greater) then * we can leave values in the prepared statement unset. If the version is * less than V3 then we need to place a `null` in the bound statement. 
*/ val bindColumn: (BoundStatement, String, DataType, AnyRef) => Unit = protocolVersion match { case pv if pv.toInt <= ProtocolVersion.V3.toInt => bindColumnNull case _ => bindColumnUnset } private val prefixConverted = for { prefixIndex: Int <- 0 until prefixVals.length prefixVal = prefixVals(prefixIndex) prefixType = preparedStmt.getVariables.getType(prefixIndex) prefixConverter = ColumnType.converterToCassandra(prefixType) } yield prefixConverter.convert(prefixVal) /** Creates `BoundStatement` from the given data item */ def bind(row: T): RichBoundStatement = { val boundStatement = new RichBoundStatement(preparedStmt) boundStatement.bind(prefixConverted: _*) rowWriter.readColumnValues(row, buffer) var bytesCount = 0 for (i <- 0 until columnNames.size) { val converter = converters(i) val columnName = columnNames(i) val columnType = columnTypes(i) val columnValue = converter.convert(buffer(i)) bindColumn(boundStatement, columnName, columnType, columnValue) val serializedValue = boundStatement.getBytesUnsafe(i) if (serializedValue != null) bytesCount += serializedValue.remaining() } boundStatement.bytesCount = bytesCount boundStatement } } private[connector] object BoundStatementBuilder { /** Calculate bound statement size in bytes. */ def calculateDataSize(stmt: BoundStatement): Int = { var size = 0 for (i <- 0 until stmt.preparedStatement().getVariables.size()) if (!stmt.isNull(i)) size += stmt.getBytesUnsafe(i).remaining() size } }
ponkin/spark-cassandra-connector
spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/writer/BoundStatementBuilder.scala
Scala
apache-2.0
4,554
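The interesting design point above is that the null-versus-unset decision is made once per builder, when bindColumn is resolved from the protocol version, instead of branching on every bound row. A self-contained sketch of that shape in plain Scala (not the connector or driver API):

// Pick the strategy once from the version, then reuse it for every column and row.
def chooseBinder(protocolVersionNumber: Int): String => String =
  if (protocolVersionNumber <= 3) (col: String) => s"$col := null"    // pre-V4: must write an explicit null
  else (col: String) => s"$col left unset"                            // V4+: simply skip the parameter

val bindMissing = chooseBinder(4) // assumption: illustrative version number
println(bindMissing("title"))     // prints "title left unset"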
/* * Copyright 2022 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package featureswitch.core.config import javax.inject.Singleton import play.api.inject.{Binding, Module} import play.api.{Configuration, Environment} import featureswitch.core.models.FeatureSwitch @Singleton class FeatureSwitchingModule extends Module with FeatureSwitchRegistry { val switches = Seq( StubIncorpIdJourney, StubPersonalDetailsValidation, StubEmailVerification, StubAlf, StubIcl, StubSoleTraderIdentification, StubUpscan, StubBars, StubPartnershipIdentification, StubMinorEntityIdentification, TrafficManagementPredicate, UseSoleTraderIdentification, UseUpscan, SaveAndContinueLater, ShortOrgName, MultipleRegistrations, LandAndProperty, FullAgentJourney ) override def bindings(environment: Environment, configuration: Configuration): Seq[Binding[_]] = { Seq( bind[FeatureSwitchRegistry].to(this).eagerly() ) } } case object StubIncorpIdJourney extends FeatureSwitch { val configName = "feature-switch.stub-incorp-id" val displayName = "Stub incorporated entity identification flow" } case object StubEmailVerification extends FeatureSwitch { val configName = "feature-switch.stub-email-verification" val displayName = "Stub email verification flow" } case object StubPersonalDetailsValidation extends FeatureSwitch { val configName = "feature-switch.stub-personal-details-validation" val displayName = "Stub personal details validation flow" } case object StubIcl extends FeatureSwitch { val configName = "feature-switch.stub-icl" val displayName = "Stub ICL flow" } case object StubSoleTraderIdentification extends FeatureSwitch { val configName = "feature-switch.stub-sole-trader-identification" val displayName = "Stub sole trader identification journey" } case object StubUpscan extends FeatureSwitch { val configName = "feature-switch.stub-upscan" val displayName = "Stub Upscan flow" } case object StubAlf extends FeatureSwitch { val configName = "feature-switch.stub-alf" val displayName = "Stub Address Lookup Frontend" } case object StubBars extends FeatureSwitch { val configName = "feature-switch.stub-bars" val displayName = "Stub Bank Account Reputation" } case object StubPartnershipIdentification extends FeatureSwitch { val configName = "feature-switch.partnership-identification" val displayName = "Stub Partnership Identification" } case object StubMinorEntityIdentification extends FeatureSwitch { val configName = "feature-switch.minor-entity-identification" val displayName = "Stub Minor Entity Identification" } case object TrafficManagementPredicate extends FeatureSwitch { val configName = "feature-switch.traffic-management-predicate" val displayName = "Enable traffic management check in auth predicate (Must match the \"Use traffic management\" feature switch)" } case object UseSoleTraderIdentification extends FeatureSwitch { val configName = "feature-switch.use-sole-trader-identification" val displayName = "Use sole trader identification journey" } case object UseUpscan extends FeatureSwitch { val configName = 
"feature-switch.use-upscan" val displayName = "Use Upscan flow" } case object SaveAndContinueLater extends FeatureSwitch { val configName = "feature-switch.save-and-continue-later" val displayName = "Enable Save and Continue Later" } case object ShortOrgName extends FeatureSwitch { val configName: String = "feature-switch.short-org-name" val displayName: String = "Enable Short Org Name page (use with BE FS)" } case object MultipleRegistrations extends FeatureSwitch { val configName: String = "feature-switch.multiple-registrations" val displayName: String = "Enable multiple registrations" } case object LandAndProperty extends FeatureSwitch { override val configName: String = "feature-switch.land-and-property-fe" override val displayName: String = "Enable land and property page (USE WITH ELIGIBILITY L&P FEATURE)" } case object FullAgentJourney extends FeatureSwitch { override val configName: String = "feature-switch.full-agent-journey" override val displayName: String = "Enable full agent journey" }
hmrc/vat-registration-frontend
app/featureswitch/core/config/FeatureSwitchingModule.scala
Scala
apache-2.0
4,765
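Adding a switch follows the same two-step shape as the entries above: define a case object and list it in the registry. A sketch with an invented switch name (StubNewDownstreamService is hypothetical):

case object StubNewDownstreamService extends FeatureSwitch {
  val configName = "feature-switch.stub-new-downstream-service"
  val displayName = "Stub new downstream service flow"
}
// It only becomes visible to the registry once it is appended to the
// `switches` sequence in FeatureSwitchingModule.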
package korolev import korolev.VDom._ import scala.annotation.tailrec import scala.collection.mutable import scala.language.implicitConversions trait Shtml { import ShtmlMisc._ implicit def toTextNode(text: String): Text = Text(text) implicit def toOptionNode(opt: Option[VDom]): VDom = opt match { case Some(nl) => nl case None => <> } implicit def toVDoms(xs: Iterable[VDom]): VDoms = VDoms(xs.toList) implicit def ShtmlSymbolOps(name: Symbol): ShtmlSymbolOps = new ShtmlSymbolOps(name) val <> = VDom.Empty } private[korolev] object ShtmlMisc { // Should be concurrent val nameCache = mutable.Map.empty[Symbol, String] val twoWayBindingDefaultEvents = Seq("input", "change") def htmlName(x: Symbol): String = { nameCache.getOrElseUpdate( x, x.name.replaceAll("([A-Z]+)", "-$1").toLowerCase) } final class ShtmlSymbolOps(val self: Symbol) extends AnyVal { def apply(vdom: VDom*): Node = { @tailrec def loop(children: List[NodeLike], attrs: List[Attr], misc: List[Misc], tl: List[VDom]): Node = tl match { case Nil => Node(htmlName(self), children.reverse, attrs.reverse, misc.reverse) case (x: NodeLike) :: xs => loop(x :: children, attrs, misc, xs) case (x: Attr) :: xs => loop(children, x :: attrs, misc, xs) case (x: Misc) :: xs => loop(children, attrs, x :: misc, xs) case VDoms(nodes) :: xs => loop(children, attrs, misc, nodes ::: xs) case _ :: xs => loop(children, attrs, misc, xs) } loop(Nil, Nil, Nil, vdom.toList) } def :=(value: Any): Attr = Attr(self.name, value) def when(value: Boolean): VDom = if (value) Attr(htmlName(self), "true", isProperty = false) else VDom.Empty def /=(value: String): Attr = Attr(htmlName(self), value, isProperty = false) } }
PhilAndrew/JumpMicro
JMSangriaGraphql/src/main/scala/korolev/Shtml.scala
Scala
mit
1,909
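The symbol syntax above is easiest to see with a small, hedged sketch; it assumes an object mixing in Shtml so the implicit conversions (toTextNode, ShtmlSymbolOps) are in scope, and that Text and Attr are the VDom node kinds imported from korolev.VDom:

object ShtmlExample extends Shtml {
  // 'div(...) builds a Node; /= sets a plain attribute, := sets a property,
  // and the bare String is lifted to a Text node by toTextNode.
  val greeting = 'div(
    'id /= "greeting",
    'span("Hello, Korolev")
  )
}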
package tifmo.document import tifmo.dcstree.{ SemRole, Quantifier, Relation, Selection } import scala.collection.{ mutable, immutable } class TokenNode(val token: Token) { /** * Set to `false` if the word is negated. */ var sign = true /** * The calculation order of the semantic roles of the token. */ var rseq: immutable.Seq[SemRole] = null /** * The output semantic role. */ var outRole = null: SemRole /** * The universal quantifier. If none set it to `null`. */ var quantifier = null: Quantifier /** * The relation with its parent. If none set it to `null`. * The direction of this relation is determined by `relationFromParent`. */ var relation = null: Relation /** * Indicate the direction of relation. * If set to true, the parent of the current node would be the first argument of the relation, * while the node itself being second, and vice versa. */ var relationFromParent: Boolean = false /** * The selection operator. If none set it to `null`. */ var selection = null: Selection /** * Whether the node is in conjunction with its parent. */ var conj = false /** * If this is the root node, setting `rootNeg` to `true` can negate the whole sentence. */ var rootNeg = false /////////////////////////////////////////////////////// private var prt = null: TokenNode private val cs = mutable.Set.empty[(SemRole, TokenNode)] /** * Get parent node. */ def parent = prt /** * Get all children (including conjunction). */ def children = cs.toSet def locRoles: Set[SemRole] = { var tmp = for ((r, n) <- children; if !n.conj) yield r if (outRole != null) tmp += outRole if (selection != null) selection match { case SelSup(nm, r) => tmp += r case SelNum(nm, r) => tmp += r case _ => {} } if (conj) tmp ++= parent.locRoles tmp } /** * Cut this node from its parent. */ def cut() { if (prt != null) { prt.cs.retain(_._2 != this) } prt = null } /** * Add a child. */ def addChild(r: SemRole, n: TokenNode) { if (n == this) { // ignore } else if (n.parent == this) { cs.retain(_._2 != n) cs += ((r, n)) } else { assert(n.parent == null) cs += ((r, n)) n.prt = this } } /** * Add a conjunction. */ def addConjunction(n: TokenNode) { if (n == this) { // ignore } else if (n.parent == this) { cs.retain(_._2 != n) cs += ((null, n)) n.conj = true } else { assert(n.parent == null) cs += ((null, n)) n.prt = this n.conj = true } } }
tomtung/tifmo
src/main/scala/tifmo/document/TokenNode.scala
Scala
bsd-2-clause
2,686
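A hedged sketch of the parent/child bookkeeping above; someToken, otherToken and agentRole are placeholders for values built elsewhere in tifmo, since Token and SemRole constructors are not shown in this file:

def wire(someToken: Token, otherToken: Token, agentRole: SemRole): Unit = {
  val parent = new TokenNode(someToken)
  val child  = new TokenNode(otherToken)
  parent.addChild(agentRole, child)                       // child now reachable from parent
  assert(child.parent == parent)
  assert(parent.children.contains((agentRole, child)))
  child.cut()                                             // detaches the child again
  assert(child.parent == null)
}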
/** * The peloba zero-knowledge library * Copyright (C) 2013 peloba UG & Co. KG * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package de.peloba package util import de.peloba.math.Matrix import java.util.zip.ZipFile import java.io.InputStream import it.unisa.dia.gas.jpbc.{Element, Field} object Helper { //def elementFromFile(fileName: String, field: Field): Element = field.newElement().getImmutable def vectorFromZipFile(file: ZipFile, entryName: String, vectorSize: Int, field: Field[_ <: Element]) = { //val res = new Matrix(1, vectorSize, field) val res = new Matrix(vectorSize, 1, field) bytesFromZipFile(file, entryName).map(res.setFromBytes(_)) getOrElse res } def curveElementFromBytes(field: Field[_ <: Element], data: Array[Byte]) = { val element = field.newElement() element.setFromBytes(data) element } def curveElementFromZipFile(file: ZipFile, entryName: String, field: Field[_ <: Element]): Option[Element] = bytesFromZipFile(file, entryName).map(curveElementFromBytes(field, _)) def bytesFromInputStream(is: InputStream, length: Int): Option[Array[Byte]] = { val res = new Array[Byte](length) var offset = 0 var numRead = 0 while (offset < res.length && { numRead = is.read(res, offset, res.length - offset); numRead} >= 0) offset += numRead if (offset < res.length) return None Some(res) } def bytesFromZipFile(file: ZipFile, entryName: String): Option[Array[Byte]] = { val entry = file.getEntry(entryName) if(entry == null) return None if(entry.getSize > Int.MaxValue) return None val is = file.getInputStream(entry) val res = bytesFromInputStream(is, entry.getSize.toInt) is.close() res } def stringToInt(data: String, defaultValue: Int): Int = { try { data.toInt } catch { case _: Throwable => defaultValue } } def stringToInt(data: String): Int = stringToInt(data, 0) }
peloba/zk-library
src/main/scala/de/peloba/util/Helper.scala
Scala
agpl-3.0
2,518
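The pure members above can be exercised without any zip file; a small sketch:

import java.io.ByteArrayInputStream

val ok  = Helper.stringToInt("42")        // 42
val bad = Helper.stringToInt("oops", -1)  // parse failure falls back to the default, -1

// bytesFromInputStream reads exactly `length` bytes, or returns None if the stream is shorter:
val bytes = Helper.bytesFromInputStream(new ByteArrayInputStream("hello".getBytes("UTF-8")), 5)
assert(bytes.exists(_.length == 5))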
package adt

import scala.language.higherKinds

package object bson {

  @deprecated("Use adt.bson.mongodb.DBCompanion instead.", "1.3.0")
  type DBCompanion[-A, T] = mongodb.DBCompanion[A, T]

  @deprecated("Use adt.bson.mongodb.DBExtractor instead.", "1.3.0")
  type DBExtractor[T] = mongodb.DBExtractor[T]

  @deprecated("Use adt.bson.mongodb.DBValue instead.", "1.3.0")
  type DBValue = mongodb.DBValue

  @deprecated("Use adt.bson.mongodb.DBValueCompanion instead.", "1.3.0")
  type DBValueCompanion = mongodb.DBValueCompanion

  @deprecated("Use adt.bson.mongodb.DBValueExtractors instead.", "1.3.0")
  type DBValueExtractors = mongodb.DBValueExtractors

  @deprecated("Use adt.bson.mongodb.DefaultDBValueExtractor instead.", "1.3.0")
  type DefaultDBValueExtractor = mongodb.DefaultDBValueExtractor
}
jeffmay/bson-adt
bson-adt-mongo2/src/main/scala/adt/bson/package.scala
Scala
apache-2.0
808

/** * Copyright 2014 Gianluca Amato <gamato@unich.it> * * This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains * JANDOM is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * JANDOM is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of a * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with JANDOM. If not, see <http://www.gnu.org/licenses/>. */ package it.unich.jandom.domains.objects /** * A test trait for object domains with precise operators. * @author Gianluca Amato <gamato@unich.it> * */ trait PreciseObjectDomain extends ObjectDomainSuite { describe("The assignVariableToField method") { it("returns bottom when the destination variable is definitively null") { forAll(someAssignVarToField) { (prop, dst, field, src) => whenever(prop.mustBeNull(dst)) { assert(prop.assignVariableToField(dst, field, src).isBottom) } } } } describe("The assignFieldToVariable method") { it("returns bottom when the source variable is definitively null") { forAll(someAssignFieldToVar) { (prop, dst, src, field) => whenever(prop.mustBeNull(src)) { assert(prop.assignFieldToVariable(dst, src, field).isBottom) } } } } describe("The testNotNull method") { it("returns bottom if applied to a definite null variable") { forAll(somePropertiesAndVars) { (prop, v) => if (prop.mustBeNull(v)) assert(prop.testNotNull(v).isBottom) } } } describe("The testNull method") { it("is identitity if applied to a definite null variable") { forAll(somePropertiesAndVars) { (prop, v) => if (prop.mustBeNull(v)) assert(prop.testNull(v) === prop) } } it("is equivalent to assignNull for top") { forAll(someFibersAndVars) { (fiber, v) => assert(dom.top(fiber).testNull(v) === dom.top(fiber).assignNull(v)) } } } }
amato-gianluca/Jandom
core/src/test/scala/it/unich/jandom/domains/objects/PreciseObjectDomain.scala
Scala
lgpl-3.0
2,303
/* __ *\\ ** ________ ___ / / ___ __ ____ Scala.js API ** ** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL ** ** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-lang.org/ ** ** /____/\\___/_/ |_/____/_/ | |__/ /____/ ** ** |/____/ ** \\* */ /** * All doc-comments marked as "MDN" are by Mozilla Contributors, * distributed under the Creative Commons Attribution-ShareAlike license from * https://developer.mozilla.org/en-US/docs/Web/Reference/API */ package scala.scalajs.js /** * The JSON object contains methods for converting values to JavaScript Object * Notation (JSON) and for converting JSON to values. * * MDN */ @native object JSON extends Object { /** * Parse a string as JSON, optionally transforming the value produced by parsing. * @param text The string to parse as JSON. See the JSON object for a * description of JSON syntax. * @param reviver If a function, prescribes how the value originally produced * by parsing is transformed, before being returned. * * MDN */ def parse(text: String, reviver: Function2[Any, Any, Any] = ???): Dynamic = native /** * Convert a value to JSON, optionally replacing values if a replacer function * is specified, or optionally including only the specified properties if a * replacer array is specified. * * @param value The value to convert to a JSON string. * @param replacer If a function, transforms values and properties encountered * while stringifying; if an array, specifies the set of * properties included in objects in the final string. * @param space Causes the resulting string to be pretty-printed. * * MDN */ def stringify(value: Any, replacer: Function2[String, Any, Any] = ???, space: Any = ???): String = native def stringify(value: Any, replacer: Array[Any]): String = native def stringify(value: Any, replacer: Array[Any], space: Any): String = native }
lrytz/scala-js
library/src/main/scala/scala/scalajs/js/JSON.scala
Scala
bsd-3-clause
2,252
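A short usage sketch of the facade above; it only runs under Scala.js, since JSON is the JavaScript global object:

import scala.scalajs.js.JSON

val parsed = JSON.parse("""{"name": "widget", "count": 3}""")  // js.Dynamic
val count  = parsed.count.asInstanceOf[Int]                     // dynamic field access
val text   = JSON.stringify(parsed)                             // back to a JSON string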
package spark.rdd import java.io.{ObjectOutputStream, IOException} import spark._ private[spark] class CartesianPartition( idx: Int, @transient rdd1: RDD[_], @transient rdd2: RDD[_], s1Index: Int, s2Index: Int ) extends Partition { var s1 = rdd1.partitions(s1Index) var s2 = rdd2.partitions(s2Index) override val index: Int = idx @throws(classOf[IOException]) private def writeObject(oos: ObjectOutputStream) { // Update the reference to parent split at the time of task serialization s1 = rdd1.partitions(s1Index) s2 = rdd2.partitions(s2Index) oos.defaultWriteObject() } } private[spark] class CartesianRDD[T: ClassManifest, U:ClassManifest]( sc: SparkContext, var rdd1 : RDD[T], var rdd2 : RDD[U]) extends RDD[Pair[T, U]](sc, Nil) with Serializable { val numPartitionsInRdd2 = rdd2.partitions.size override def getPartitions: Array[Partition] = { // create the cross product split val array = new Array[Partition](rdd1.partitions.size * rdd2.partitions.size) for (s1 <- rdd1.partitions; s2 <- rdd2.partitions) { val idx = s1.index * numPartitionsInRdd2 + s2.index array(idx) = new CartesianPartition(idx, rdd1, rdd2, s1.index, s2.index) } array } override def getPreferredLocations(split: Partition): Seq[String] = { val currSplit = split.asInstanceOf[CartesianPartition] rdd1.preferredLocations(currSplit.s1) ++ rdd2.preferredLocations(currSplit.s2) } override def compute(split: Partition, context: TaskContext) = { val currSplit = split.asInstanceOf[CartesianPartition] for (x <- rdd1.iterator(currSplit.s1, context); y <- rdd2.iterator(currSplit.s2, context)) yield (x, y) } override def getDependencies: Seq[Dependency[_]] = List( new NarrowDependency(rdd1) { def getParents(id: Int): Seq[Int] = List(id / numPartitionsInRdd2) }, new NarrowDependency(rdd2) { def getParents(id: Int): Seq[Int] = List(id % numPartitionsInRdd2) } ) override def clearDependencies() { super.clearDependencies() rdd1 = null rdd2 = null } }
koeninger/spark
core/src/main/scala/spark/rdd/CartesianRDD.scala
Scala
bsd-3-clause
2,119
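The partition bookkeeping above is simple index arithmetic; worked with plain numbers, no Spark required:

val numPartitionsInRdd2 = 4
val idx = 2 * numPartitionsInRdd2 + 3   // CartesianPartition for (s1.index = 2, s2.index = 3) gets index 11
assert(idx / numPartitionsInRdd2 == 2)  // the rdd1 NarrowDependency recovers s1.index
assert(idx % numPartitionsInRdd2 == 3)  // the rdd2 NarrowDependency recovers s2.index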
/** * Copyright 2011-2016 GatlingCorp (http://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.charts.template import io.gatling.commons.stats.ErrorStats import io.gatling.commons.util.StringHelper._ import io.gatling.charts.component.Statistics import io.gatling.charts.component.Statistics.printable import io.gatling.charts.component.{ GroupedCount, RequestStatistics } import io.gatling.core.stats.writer.ConsoleErrorsWriter import io.gatling.core.stats.writer.ConsoleSummary._ import com.dongxiguo.fastring.Fastring.Implicits._ private[charts] object ConsoleTemplate { def writeRequestCounters[T: Numeric](statistics: Statistics[T]): Fastring = { import statistics._ fast"> ${name.rightPad(OutputLength - 32)} ${printable(total).leftPad(7)} (OK=${printable(success).rightPad(6)} KO=${printable(failure).rightPad(6)})" } def writeGroupedCounters(groupedCount: GroupedCount): Fastring = { import groupedCount._ fast"> ${name.rightPad(OutputLength - 32)} ${count.toString.leftPad(7)} (${percentage.toString.leftPad(3)}%)" } def writeErrorsAndEndBlock(errors: Seq[ErrorStats]): Fastring = { if (errors.isEmpty) fast"$NewBlock" else fast"""${writeSubTitle("Errors")} ${errors.map(ConsoleErrorsWriter.writeError).mkFastring(Eol)} $NewBlock""" } def println(requestStatistics: RequestStatistics, errors: Seq[ErrorStats]): String = { import requestStatistics._ fast""" $NewBlock ${writeSubTitle("Global Information")} ${writeRequestCounters(numberOfRequestsStatistics)} ${writeRequestCounters(minResponseTimeStatistics)} ${writeRequestCounters(maxResponseTimeStatistics)} ${writeRequestCounters(meanStatistics)} ${writeRequestCounters(stdDeviationStatistics)} ${writeRequestCounters(percentiles1)} ${writeRequestCounters(percentiles2)} ${writeRequestCounters(percentiles3)} ${writeRequestCounters(percentiles4)} ${writeRequestCounters(meanNumberOfRequestsPerSecondStatistics)} ${writeSubTitle("Response Time Distribution")} ${groupedCounts.map(writeGroupedCounters).mkFastring(Eol)} ${writeErrorsAndEndBlock(errors)} """.toString } }
GabrielPlassard/gatling
gatling-charts/src/main/scala/io/gatling/charts/template/ConsoleTemplate.scala
Scala
apache-2.0
2,640
/* * Copyright 2016 Tamer AbdulRadi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package troy package macros import java.util.UUID import com.datastax.driver.core.Session import org.scalatest.FreeSpec class SelectValidationSpec extends FreeSpec { implicit def session: Session = ??? // Required by generated code import troy.driver.DSL._ import troy.dsl._ case class Post(id: UUID, author_name: String, title: String) "The Macro should" - { "refuse SELECT DISTINCT on non partition level columns" in { assertTypeError(""" withSchema { (postId: UUID) => cql"SELECT DISTINCT post_rating FROM test.posts;".prepared } """) assertTypeError(""" withSchema { (postId: UUID) => cql"SELECT DISTINCT author_name, post_rating FROM test.posts;".prepared } """) assertCompiles(""" withSchema { (postId: UUID) => cql"SELECT DISTINCT author_name FROM test.posts;".prepared } """) assertCompiles(""" withSchema { (postId: UUID) => cql"SELECT DISTINCT author_id FROM test.posts;".prepared } """) assertCompiles(""" withSchema { (postId: UUID) => cql"SELECT DISTINCT author_id, author_name FROM test.posts;".prepared } """) } // // "allow query by partition key" in { // assertCompiles(""" // withSchema { (authorId: UUID) => // cql"SELECT post_id FROM test.posts WHERE author_id = $authorId;".prepared // } // """) // } // // "refuse query by clustering column only" in { // assertTypeError(""" // withSchema { (postId: UUID) => // cql"SELECT post_id FROM test.posts WHERE post_id = $postId;".prepared // } // """) // } // // "allow query by clustering column only if allow filtering was enabled" in { // assertCompiles(""" // withSchema { (postId: UUID) => // cql"SELECT post_id FROM test.posts WHERE post_id = $postId ALLOW FILTERING;".prepared // } // """) // } // // "allow query by whole primary key" in { // assertCompiles(""" // withSchema { (authorId: UUID, postId: UUID) => // cql"SELECT post_id FROM test.posts WHERE author_id = $authorId AND post_id = $postId;".prepared // } // """) // } // // "refuse query by normal unindexed column" in { // assertTypeError(""" // withSchema { (authorName: String) => // cql"SELECT post_id FROM test.posts WHERE author_name = $authorName;".prepared // } // """) // // assertTypeError(""" // withSchema { (rating: Int) => // cql"SELECT post_id FROM test.posts WHERE post_rating = $rating;".prepared // } // """) // } // // "allow query by normal unindexed column if allow filtering was enabled" in { // assertCompiles(""" // withSchema { (authorName: String) => // cql"SELECT post_id FROM test.posts WHERE author_name = $authorName ALLOW FILTERING;".prepared // } // """) // // assertTypeError(""" // withSchema { (rating: Int) => // cql"SELECT post_id FROM test.posts WHERE post_rating = $rating ALLOW FILTERING;".prepared // } // """) // } // // "refuse query by normal unindexed column, even if whole primary key is specified" in { // assertTypeError(""" // withSchema { (authorId: UUID, postId: UUID, authorName: String) => // cql"SELECT post_id FROM test.posts WHERE author_id = $authorId AND post_id = $postId AND 
author_name = $authorName;".prepared // } // """) // } // // "allow query by unindexed column + whole primary key if allow filtering was enabled" in { // assertCompiles(""" // withSchema { (authorId: UUID, postId: UUID, authorName: String) => // cql"SELECT post_id FROM test.posts WHERE author_id = $authorId AND post_id = $postId AND author_name = $authorName ALLOW FILTERING;".prepared // } // """) // } // // "allow query on indexed columns" in { // assertCompiles(""" // withSchema { (title: String) => // cql"SELECT post_id FROM test.posts WHERE post_title = $title;".prepared // } // """) // } // // "allow query on indexed columns, combined with partition key" in { // assertCompiles(""" // withSchema { (title: String, authorId: UUID) => // cql"SELECT post_id FROM test.posts WHERE post_title = $title AND author_id = $authorId;".prepared // } // """) // } // // "allow query by indexed column + whole primary key" in { // assertCompiles(""" // withSchema { (authorId: UUID, postId: UUID, title: String) => // cql"SELECT post_id FROM test.posts WHERE author_id = $authorId AND post_id = $postId AND post_title = $title;".prepared // } // """) // } } }
schemasafe/troy
troy-macro/src/test/scala/troy/macros/SelectValidationSpec.scala
Scala
apache-2.0
5,844
/*
 * Copyright (C)2014 D. Plaindoux.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published
 * by the Free Software Foundation; either version 2, or (at your option) any
 * later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program; see the file COPYING. If not, write to
 * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
 */

package @OPT[|@USE::package.|]core

object Type {

  type ToType[E] = JSon => E

  def list[E](f: ToType[E]): ToType[List[E]] = _.asInstanceOf[ArrayData].data.map(f)

  def primitive[E]: ToType[E] = _.toRaw.asInstanceOf[E]

  def map: ToType[Map[String, Any]] = primitive[Map[String, Any]]

  def integer: ToType[Int] = primitive[Int]

  def string: ToType[String] = primitive[String]

  def boolean: ToType[Boolean] = primitive[Boolean]

  def data[E](f: ToType[E]): ToType[E] = f
}
d-plaindoux/rapido
src/main/resources/scala/core/Type.scala
Scala
lgpl-2.1
1,251
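The combinators above compose into decoders against the JSon model generated in the same package; a hedged sketch, with the actual JSon value left abstract because its construction is not shown here:

val readTags: Type.ToType[List[String]] = Type.list(Type.string)
// Given a JSon value that is an ArrayData of string nodes:
// val tags: List[String] = readTags(json)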
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.partial import org.apache.commons.math3.distribution.{NormalDistribution, TDistribution} import org.apache.spark.util.StatCounter /** * An ApproximateEvaluator for sums. It estimates the mean and the count and multiplies them * together, then uses the formula for the variance of two independent random variables to get * a variance for the result and compute a confidence interval. */ private[spark] class SumEvaluator(totalOutputs: Int, confidence: Double) extends ApproximateEvaluator[StatCounter, BoundedDouble] { // modified in merge var outputsMerged = 0 val counter = new StatCounter override def merge(outputId: Int, taskResult: StatCounter) { outputsMerged += 1 counter.merge(taskResult) } override def currentResult(): BoundedDouble = { if (outputsMerged == totalOutputs) { new BoundedDouble(counter.sum, 1.0, counter.sum, counter.sum) } else if (outputsMerged == 0 || counter.count == 0) { new BoundedDouble(0, 0.0, Double.NegativeInfinity, Double.PositiveInfinity) } else { val p = outputsMerged.toDouble / totalOutputs val meanEstimate = counter.mean val countEstimate = (counter.count + 1 - p) / p val sumEstimate = meanEstimate * countEstimate val meanVar = counter.sampleVariance / counter.count // branch at this point because counter.count == 1 implies counter.sampleVariance == Nan // and we don't want to ever return a bound of NaN if (meanVar.isNaN || counter.count == 1) { new BoundedDouble(sumEstimate, confidence, Double.NegativeInfinity, Double.PositiveInfinity) } else { val countVar = (counter.count + 1) * (1 - p) / (p * p) val sumVar = (meanEstimate * meanEstimate * countVar) + (countEstimate * countEstimate * meanVar) + (meanVar * countVar) val sumStdev = math.sqrt(sumVar) val confFactor = if (counter.count > 100) { new NormalDistribution().inverseCumulativeProbability(1 - (1 - confidence) / 2) } else { // note that if this goes to 0, TDistribution will throw an exception. // Hence special casing 1 above. val degreesOfFreedom = (counter.count - 1).toInt new TDistribution(degreesOfFreedom).inverseCumulativeProbability(1 - (1 - confidence) / 2) } val low = sumEstimate - confFactor * sumStdev val high = sumEstimate + confFactor * sumStdev new BoundedDouble(sumEstimate, confidence, low, high) } } } }
gioenn/xSpark
core/src/main/scala/org/apache/spark/partial/SumEvaluator.scala
Scala
apache-2.0
3,342
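The evaluator above extrapolates a total sum from a partial StatCounter. A minimal worked sketch of that arithmetic, using made-up inputs (p, count, mean and meanVar are assumed for illustration, not taken from the file):

// Hypothetical inputs: half of the outputs merged, 1000 rows seen with mean 2.0.
val p = 0.5
val count = 1000L
val meanEstimate = 2.0
val meanVar = 0.04                               // sampleVariance / count (assumed)

val countEstimate = (count + 1 - p) / p          // ~2001 rows expected in total
val sumEstimate = meanEstimate * countEstimate   // ~4002

// Variance of the product of the two estimates, mirroring currentResult()
val countVar = (count + 1) * (1 - p) / (p * p)
val sumVar = meanEstimate * meanEstimate * countVar +
  countEstimate * countEstimate * meanVar +
  meanVar * countVar
val sumStdev = math.sqrt(sumVar)
// The reported interval is then sumEstimate +/- confFactor * sumStdev,
// with confFactor drawn from a normal or Student t distribution.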
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.streaming.state import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.execution.ObjectOperator import org.apache.spark.sql.execution.streaming.GroupStateImpl.NO_TIMESTAMP import org.apache.spark.sql.types._ object FlatMapGroupsWithStateExecHelper { val supportedVersions = Seq(1, 2) val legacyVersion = 1 /** * Class to capture deserialized state and timestamp return by the state manager. * This is intended for reuse. */ case class StateData( var keyRow: UnsafeRow = null, var stateRow: UnsafeRow = null, var stateObj: Any = null, var timeoutTimestamp: Long = -1) { private[FlatMapGroupsWithStateExecHelper] def withNew( newKeyRow: UnsafeRow, newStateRow: UnsafeRow, newStateObj: Any, newTimeout: Long): this.type = { keyRow = newKeyRow stateRow = newStateRow stateObj = newStateObj timeoutTimestamp = newTimeout this } } /** Interface for interacting with state data of FlatMapGroupsWithState */ sealed trait StateManager extends Serializable { def stateSchema: StructType def getState(store: StateStore, keyRow: UnsafeRow): StateData def putState(store: StateStore, keyRow: UnsafeRow, state: Any, timeoutTimestamp: Long): Unit def removeState(store: StateStore, keyRow: UnsafeRow): Unit def getAllState(store: StateStore): Iterator[StateData] } def createStateManager( stateEncoder: ExpressionEncoder[Any], shouldStoreTimestamp: Boolean, stateFormatVersion: Int): StateManager = { stateFormatVersion match { case 1 => new StateManagerImplV1(stateEncoder, shouldStoreTimestamp) case 2 => new StateManagerImplV2(stateEncoder, shouldStoreTimestamp) case _ => throw new IllegalArgumentException(s"Version $stateFormatVersion is invalid") } } // =============================================================================================== // =========================== Private implementations of StateManager =========================== // =============================================================================================== /** Common methods for StateManager implementations */ private abstract class StateManagerImplBase(shouldStoreTimestamp: Boolean) extends StateManager { protected def stateSerializerExprs: Seq[Expression] protected def stateDeserializerExpr: Expression protected def timeoutTimestampOrdinalInRow: Int /** Get deserialized state and corresponding timeout timestamp for a key */ override def getState(store: StateStore, keyRow: UnsafeRow): StateData = { val stateRow = store.get(keyRow) stateDataForGets.withNew(keyRow, stateRow, getStateObject(stateRow), getTimestamp(stateRow)) } /** Put state and timeout timestamp for a key */ override def putState(store: StateStore, key: UnsafeRow, state: Any, 
timestamp: Long): Unit = { val stateRow = getStateRow(state) setTimestamp(stateRow, timestamp) store.put(key, stateRow) } override def removeState(store: StateStore, keyRow: UnsafeRow): Unit = { store.remove(keyRow) } override def getAllState(store: StateStore): Iterator[StateData] = { val stateData = StateData() store.getRange(None, None).map { p => stateData.withNew(p.key, p.value, getStateObject(p.value), getTimestamp(p.value)) } } private lazy val stateSerializerFunc = ObjectOperator.serializeObjectToRow(stateSerializerExprs) private lazy val stateDeserializerFunc = { ObjectOperator.deserializeRowToObject(stateDeserializerExpr, stateSchema.toAttributes) } private lazy val stateDataForGets = StateData() protected def getStateObject(row: UnsafeRow): Any = { if (row != null) stateDeserializerFunc(row) else null } protected def getStateRow(obj: Any): UnsafeRow = { stateSerializerFunc(obj) } /** Returns the timeout timestamp of a state row is set */ private def getTimestamp(stateRow: UnsafeRow): Long = { if (shouldStoreTimestamp && stateRow != null) { stateRow.getLong(timeoutTimestampOrdinalInRow) } else NO_TIMESTAMP } /** Set the timestamp in a state row */ private def setTimestamp(stateRow: UnsafeRow, timeoutTimestamps: Long): Unit = { if (shouldStoreTimestamp) stateRow.setLong(timeoutTimestampOrdinalInRow, timeoutTimestamps) } } /** * Version 1 of the StateManager which stores the user-defined state as flattened columns in * the UnsafeRow. Say the user-defined state has 3 fields - col1, col2, col3. The * unsafe rows will look like this. * * UnsafeRow[ col1 | col2 | col3 | timestamp ] * * The limitation of this format is that timestamp cannot be set when the user-defined * state has been removed. This is because the columns cannot be collectively marked to be * empty/null. */ private class StateManagerImplV1( stateEncoder: ExpressionEncoder[Any], shouldStoreTimestamp: Boolean) extends StateManagerImplBase(shouldStoreTimestamp) { private val timestampTimeoutAttribute = AttributeReference("timeoutTimestamp", dataType = IntegerType, nullable = false)() private val stateAttributes: Seq[Attribute] = { val encSchemaAttribs = stateEncoder.schema.toAttributes if (shouldStoreTimestamp) encSchemaAttribs :+ timestampTimeoutAttribute else encSchemaAttribs } override val stateSchema: StructType = stateAttributes.toStructType override val timeoutTimestampOrdinalInRow: Int = { stateAttributes.indexOf(timestampTimeoutAttribute) } override val stateSerializerExprs: Seq[Expression] = { val encoderSerializer = stateEncoder.namedExpressions if (shouldStoreTimestamp) { encoderSerializer :+ Literal(NO_TIMESTAMP) } else { encoderSerializer } } override val stateDeserializerExpr: Expression = { // Note that this must be done in the driver, as resolving and binding of deserializer // expressions to the encoded type can be safely done only in the driver. stateEncoder.resolveAndBind().deserializer } override protected def getStateRow(obj: Any): UnsafeRow = { require(obj != null, "State object cannot be null") super.getStateRow(obj) } } /** * Version 2 of the StateManager which stores the user-defined state as a nested struct * in the UnsafeRow. Say the user-defined state has 3 fields - col1, col2, col3. The * unsafe rows will look like this. 
* ___________________________ * | | * | V * UnsafeRow[ nested-struct | timestamp | UnsafeRow[ col1 | col2 | col3 ] ] * * This allows the entire user-defined state to be collectively marked as empty/null, * thus allowing timestamp to be set without requiring the state to be present. */ private class StateManagerImplV2( stateEncoder: ExpressionEncoder[Any], shouldStoreTimestamp: Boolean) extends StateManagerImplBase(shouldStoreTimestamp) { /** Schema of the state rows saved in the state store */ override val stateSchema: StructType = { var schema = new StructType().add("groupState", stateEncoder.schema, nullable = true) if (shouldStoreTimestamp) schema = schema.add("timeoutTimestamp", LongType, nullable = false) schema } // Ordinals of the information stored in the state row private val nestedStateOrdinal = 0 override val timeoutTimestampOrdinalInRow = 1 override val stateSerializerExprs: Seq[Expression] = { val boundRefToSpecificInternalRow = BoundReference( 0, stateEncoder.serializer.head.collect { case b: BoundReference => b.dataType }.head, true) val nestedStateSerExpr = CreateNamedStruct(stateEncoder.namedExpressions.flatMap(e => Seq(Literal(e.name), e))) val nullSafeNestedStateSerExpr = { val nullLiteral = Literal(null, nestedStateSerExpr.dataType) CaseWhen(Seq(IsNull(boundRefToSpecificInternalRow) -> nullLiteral), nestedStateSerExpr) } if (shouldStoreTimestamp) { Seq(nullSafeNestedStateSerExpr, Literal(NO_TIMESTAMP)) } else { Seq(nullSafeNestedStateSerExpr) } } override val stateDeserializerExpr: Expression = { // Note that this must be done in the driver, as resolving and binding of deserializer // expressions to the encoded type can be safely done only in the driver. val boundRefToNestedState = BoundReference(nestedStateOrdinal, stateEncoder.schema, nullable = true) val deserExpr = stateEncoder.resolveAndBind().deserializer.transformUp { case BoundReference(ordinal, _, _) => GetStructField(boundRefToNestedState, ordinal) } val nullLiteral = Literal(null, deserExpr.dataType) CaseWhen(Seq(IsNull(boundRefToNestedState) -> nullLiteral), elseValue = deserExpr) } } }
wangmiao1981/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/FlatMapGroupsWithStateExecHelper.scala
Scala
apache-2.0
9,962
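To make the two row layouts described in the V1/V2 scaladoc above concrete, here is a small sketch that builds both state-row schemas for a hypothetical state type with fields count: Int and label: String (the field names are invented for illustration):

import org.apache.spark.sql.types._

val userState = new StructType()
  .add("count", IntegerType)
  .add("label", StringType)

// V1: user-defined fields flattened next to the timeout column
val v1Schema = userState.add("timeoutTimestamp", IntegerType, nullable = false)

// V2: user-defined fields nested in one nullable struct, so the whole state
// can be marked null while a timeout timestamp is still set
val v2Schema = new StructType()
  .add("groupState", userState, nullable = true)
  .add("timeoutTimestamp", LongType, nullable = false)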
package codecheck.github package events import org.json4s.jackson.JsonMethods trait PullRequestEventJson { val pullRequestEventJson = JsonMethods.parse( """{ | "action": "opened", | "number": 1, | "pull_request": { | "url": "https://api.github.com/repos/baxterthehacker/public-repo/pulls/1", | "id": 34778301, | "html_url": "https://github.com/baxterthehacker/public-repo/pull/1", | "diff_url": "https://github.com/baxterthehacker/public-repo/pull/1.diff", | "patch_url": "https://github.com/baxterthehacker/public-repo/pull/1.patch", | "issue_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/1", | "number": 1, | "state": "open", | "locked": false, | "title": "Update the README with new information", | "user": { | "login": "baxterthehacker", | "id": 6752317, | "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", | "gravatar_id": "", | "url": "https://api.github.com/users/baxterthehacker", | "html_url": "https://github.com/baxterthehacker", | "followers_url": "https://api.github.com/users/baxterthehacker/followers", | "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", | "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", | "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", | "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", | "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", | "repos_url": "https://api.github.com/users/baxterthehacker/repos", | "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", | "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", | "type": "User", | "site_admin": false | }, | "body": "This is a pretty simple change that we need to pull into master.", | "created_at": "2015-05-05T23:40:27Z", | "updated_at": "2015-05-05T23:40:27Z", | "closed_at": null, | "merged_at": null, | "merge_commit_sha": null, | "assignee": null, | "milestone": null, | "commits_url": "https://api.github.com/repos/baxterthehacker/public-repo/pulls/1/commits", | "review_comments_url": "https://api.github.com/repos/baxterthehacker/public-repo/pulls/1/comments", | "review_comment_url": "https://api.github.com/repos/baxterthehacker/public-repo/pulls/comments{/number}", | "comments_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/1/comments", | "statuses_url": "https://api.github.com/repos/baxterthehacker/public-repo/statuses/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c", | "head": { | "label": "baxterthehacker:changes", | "ref": "changes", | "sha": "0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c", | "user": { | "login": "baxterthehacker", | "id": 6752317, | "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", | "gravatar_id": "", | "url": "https://api.github.com/users/baxterthehacker", | "html_url": "https://github.com/baxterthehacker", | "followers_url": "https://api.github.com/users/baxterthehacker/followers", | "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", | "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", | "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", | "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", | "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", | "repos_url": "https://api.github.com/users/baxterthehacker/repos", | "events_url": 
"https://api.github.com/users/baxterthehacker/events{/privacy}", | "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", | "type": "User", | "site_admin": false | }, | "repo": { | "id": 35129377, | "name": "public-repo", | "full_name": "baxterthehacker/public-repo", | "owner": { | "login": "baxterthehacker", | "id": 6752317, | "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", | "gravatar_id": "", | "url": "https://api.github.com/users/baxterthehacker", | "html_url": "https://github.com/baxterthehacker", | "followers_url": "https://api.github.com/users/baxterthehacker/followers", | "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", | "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", | "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", | "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", | "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", | "repos_url": "https://api.github.com/users/baxterthehacker/repos", | "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", | "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", | "type": "User", | "site_admin": false | }, | "private": false, | "html_url": "https://github.com/baxterthehacker/public-repo", | "description": "", | "fork": false, | "url": "https://api.github.com/repos/baxterthehacker/public-repo", | "forks_url": "https://api.github.com/repos/baxterthehacker/public-repo/forks", | "keys_url": "https://api.github.com/repos/baxterthehacker/public-repo/keys{/key_id}", | "collaborators_url": "https://api.github.com/repos/baxterthehacker/public-repo/collaborators{/collaborator}", | "teams_url": "https://api.github.com/repos/baxterthehacker/public-repo/teams", | "hooks_url": "https://api.github.com/repos/baxterthehacker/public-repo/hooks", | "issue_events_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/events{/number}", | "events_url": "https://api.github.com/repos/baxterthehacker/public-repo/events", | "assignees_url": "https://api.github.com/repos/baxterthehacker/public-repo/assignees{/user}", | "branches_url": "https://api.github.com/repos/baxterthehacker/public-repo/branches{/branch}", | "tags_url": "https://api.github.com/repos/baxterthehacker/public-repo/tags", | "blobs_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/blobs{/sha}", | "git_tags_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/tags{/sha}", | "git_refs_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/refs{/sha}", | "trees_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/trees{/sha}", | "statuses_url": "https://api.github.com/repos/baxterthehacker/public-repo/statuses/{sha}", | "languages_url": "https://api.github.com/repos/baxterthehacker/public-repo/languages", | "stargazers_url": "https://api.github.com/repos/baxterthehacker/public-repo/stargazers", | "contributors_url": "https://api.github.com/repos/baxterthehacker/public-repo/contributors", | "subscribers_url": "https://api.github.com/repos/baxterthehacker/public-repo/subscribers", | "subscription_url": "https://api.github.com/repos/baxterthehacker/public-repo/subscription", | "commits_url": "https://api.github.com/repos/baxterthehacker/public-repo/commits{/sha}", | "git_commits_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/commits{/sha}", | 
"comments_url": "https://api.github.com/repos/baxterthehacker/public-repo/comments{/number}", | "issue_comment_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/comments{/number}", | "contents_url": "https://api.github.com/repos/baxterthehacker/public-repo/contents/{+path}", | "compare_url": "https://api.github.com/repos/baxterthehacker/public-repo/compare/{base}...{head}", | "merges_url": "https://api.github.com/repos/baxterthehacker/public-repo/merges", | "archive_url": "https://api.github.com/repos/baxterthehacker/public-repo/{archive_format}{/ref}", | "downloads_url": "https://api.github.com/repos/baxterthehacker/public-repo/downloads", | "issues_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues{/number}", | "pulls_url": "https://api.github.com/repos/baxterthehacker/public-repo/pulls{/number}", | "milestones_url": "https://api.github.com/repos/baxterthehacker/public-repo/milestones{/number}", | "notifications_url": "https://api.github.com/repos/baxterthehacker/public-repo/notifications{?since,all,participating}", | "labels_url": "https://api.github.com/repos/baxterthehacker/public-repo/labels{/name}", | "releases_url": "https://api.github.com/repos/baxterthehacker/public-repo/releases{/id}", | "created_at": "2015-05-05T23:40:12Z", | "updated_at": "2015-05-05T23:40:12Z", | "pushed_at": "2015-05-05T23:40:26Z", | "git_url": "git://github.com/baxterthehacker/public-repo.git", | "ssh_url": "git@github.com:baxterthehacker/public-repo.git", | "clone_url": "https://github.com/baxterthehacker/public-repo.git", | "svn_url": "https://github.com/baxterthehacker/public-repo", | "homepage": null, | "size": 0, | "stargazers_count": 0, | "watchers_count": 0, | "language": null, | "has_issues": true, | "has_downloads": true, | "has_wiki": true, | "has_pages": true, | "forks_count": 0, | "mirror_url": null, | "open_issues_count": 1, | "forks": 0, | "open_issues": 1, | "watchers": 0, | "default_branch": "master" | } | }, | "base": { | "label": "baxterthehacker:master", | "ref": "master", | "sha": "9049f1265b7d61be4a8904a9a27120d2064dab3b", | "user": { | "login": "baxterthehacker", | "id": 6752317, | "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", | "gravatar_id": "", | "url": "https://api.github.com/users/baxterthehacker", | "html_url": "https://github.com/baxterthehacker", | "followers_url": "https://api.github.com/users/baxterthehacker/followers", | "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", | "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", | "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", | "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", | "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", | "repos_url": "https://api.github.com/users/baxterthehacker/repos", | "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", | "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", | "type": "User", | "site_admin": false | }, | "repo": { | "id": 35129377, | "name": "public-repo", | "full_name": "baxterthehacker/public-repo", | "owner": { | "login": "baxterthehacker", | "id": 6752317, | "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", | "gravatar_id": "", | "url": "https://api.github.com/users/baxterthehacker", | "html_url": "https://github.com/baxterthehacker", | "followers_url": 
"https://api.github.com/users/baxterthehacker/followers", | "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", | "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", | "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", | "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", | "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", | "repos_url": "https://api.github.com/users/baxterthehacker/repos", | "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", | "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", | "type": "User", | "site_admin": false | }, | "private": false, | "html_url": "https://github.com/baxterthehacker/public-repo", | "description": "", | "fork": false, | "url": "https://api.github.com/repos/baxterthehacker/public-repo", | "forks_url": "https://api.github.com/repos/baxterthehacker/public-repo/forks", | "keys_url": "https://api.github.com/repos/baxterthehacker/public-repo/keys{/key_id}", | "collaborators_url": "https://api.github.com/repos/baxterthehacker/public-repo/collaborators{/collaborator}", | "teams_url": "https://api.github.com/repos/baxterthehacker/public-repo/teams", | "hooks_url": "https://api.github.com/repos/baxterthehacker/public-repo/hooks", | "issue_events_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/events{/number}", | "events_url": "https://api.github.com/repos/baxterthehacker/public-repo/events", | "assignees_url": "https://api.github.com/repos/baxterthehacker/public-repo/assignees{/user}", | "branches_url": "https://api.github.com/repos/baxterthehacker/public-repo/branches{/branch}", | "tags_url": "https://api.github.com/repos/baxterthehacker/public-repo/tags", | "blobs_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/blobs{/sha}", | "git_tags_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/tags{/sha}", | "git_refs_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/refs{/sha}", | "trees_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/trees{/sha}", | "statuses_url": "https://api.github.com/repos/baxterthehacker/public-repo/statuses/{sha}", | "languages_url": "https://api.github.com/repos/baxterthehacker/public-repo/languages", | "stargazers_url": "https://api.github.com/repos/baxterthehacker/public-repo/stargazers", | "contributors_url": "https://api.github.com/repos/baxterthehacker/public-repo/contributors", | "subscribers_url": "https://api.github.com/repos/baxterthehacker/public-repo/subscribers", | "subscription_url": "https://api.github.com/repos/baxterthehacker/public-repo/subscription", | "commits_url": "https://api.github.com/repos/baxterthehacker/public-repo/commits{/sha}", | "git_commits_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/commits{/sha}", | "comments_url": "https://api.github.com/repos/baxterthehacker/public-repo/comments{/number}", | "issue_comment_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/comments{/number}", | "contents_url": "https://api.github.com/repos/baxterthehacker/public-repo/contents/{+path}", | "compare_url": "https://api.github.com/repos/baxterthehacker/public-repo/compare/{base}...{head}", | "merges_url": "https://api.github.com/repos/baxterthehacker/public-repo/merges", | "archive_url": "https://api.github.com/repos/baxterthehacker/public-repo/{archive_format}{/ref}", | 
"downloads_url": "https://api.github.com/repos/baxterthehacker/public-repo/downloads", | "issues_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues{/number}", | "pulls_url": "https://api.github.com/repos/baxterthehacker/public-repo/pulls{/number}", | "milestones_url": "https://api.github.com/repos/baxterthehacker/public-repo/milestones{/number}", | "notifications_url": "https://api.github.com/repos/baxterthehacker/public-repo/notifications{?since,all,participating}", | "labels_url": "https://api.github.com/repos/baxterthehacker/public-repo/labels{/name}", | "releases_url": "https://api.github.com/repos/baxterthehacker/public-repo/releases{/id}", | "created_at": "2015-05-05T23:40:12Z", | "updated_at": "2015-05-05T23:40:12Z", | "pushed_at": "2015-05-05T23:40:26Z", | "git_url": "git://github.com/baxterthehacker/public-repo.git", | "ssh_url": "git@github.com:baxterthehacker/public-repo.git", | "clone_url": "https://github.com/baxterthehacker/public-repo.git", | "svn_url": "https://github.com/baxterthehacker/public-repo", | "homepage": null, | "size": 0, | "stargazers_count": 0, | "watchers_count": 0, | "language": null, | "has_issues": true, | "has_downloads": true, | "has_wiki": true, | "has_pages": true, | "forks_count": 0, | "mirror_url": null, | "open_issues_count": 1, | "forks": 0, | "open_issues": 1, | "watchers": 0, | "default_branch": "master" | } | }, | "_links": { | "self": { | "href": "https://api.github.com/repos/baxterthehacker/public-repo/pulls/1" | }, | "html": { | "href": "https://github.com/baxterthehacker/public-repo/pull/1" | }, | "issue": { | "href": "https://api.github.com/repos/baxterthehacker/public-repo/issues/1" | }, | "comments": { | "href": "https://api.github.com/repos/baxterthehacker/public-repo/issues/1/comments" | }, | "review_comments": { | "href": "https://api.github.com/repos/baxterthehacker/public-repo/pulls/1/comments" | }, | "review_comment": { | "href": "https://api.github.com/repos/baxterthehacker/public-repo/pulls/comments{/number}" | }, | "commits": { | "href": "https://api.github.com/repos/baxterthehacker/public-repo/pulls/1/commits" | }, | "statuses": { | "href": "https://api.github.com/repos/baxterthehacker/public-repo/statuses/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c" | } | }, | "merged": false, | "mergeable": null, | "mergeable_state": "unknown", | "merged_by": null, | "comments": 0, | "review_comments": 0, | "commits": 1, | "additions": 1, | "deletions": 1, | "changed_files": 1 | }, | "repository": { | "id": 35129377, | "name": "public-repo", | "full_name": "baxterthehacker/public-repo", | "owner": { | "login": "baxterthehacker", | "id": 6752317, | "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", | "gravatar_id": "", | "url": "https://api.github.com/users/baxterthehacker", | "html_url": "https://github.com/baxterthehacker", | "followers_url": "https://api.github.com/users/baxterthehacker/followers", | "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", | "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", | "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", | "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", | "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", | "repos_url": "https://api.github.com/users/baxterthehacker/repos", | "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", | "received_events_url": 
"https://api.github.com/users/baxterthehacker/received_events", | "type": "User", | "site_admin": false | }, | "private": false, | "html_url": "https://github.com/baxterthehacker/public-repo", | "description": "", | "fork": false, | "url": "https://api.github.com/repos/baxterthehacker/public-repo", | "forks_url": "https://api.github.com/repos/baxterthehacker/public-repo/forks", | "keys_url": "https://api.github.com/repos/baxterthehacker/public-repo/keys{/key_id}", | "collaborators_url": "https://api.github.com/repos/baxterthehacker/public-repo/collaborators{/collaborator}", | "teams_url": "https://api.github.com/repos/baxterthehacker/public-repo/teams", | "hooks_url": "https://api.github.com/repos/baxterthehacker/public-repo/hooks", | "issue_events_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/events{/number}", | "events_url": "https://api.github.com/repos/baxterthehacker/public-repo/events", | "assignees_url": "https://api.github.com/repos/baxterthehacker/public-repo/assignees{/user}", | "branches_url": "https://api.github.com/repos/baxterthehacker/public-repo/branches{/branch}", | "tags_url": "https://api.github.com/repos/baxterthehacker/public-repo/tags", | "blobs_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/blobs{/sha}", | "git_tags_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/tags{/sha}", | "git_refs_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/refs{/sha}", | "trees_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/trees{/sha}", | "statuses_url": "https://api.github.com/repos/baxterthehacker/public-repo/statuses/{sha}", | "languages_url": "https://api.github.com/repos/baxterthehacker/public-repo/languages", | "stargazers_url": "https://api.github.com/repos/baxterthehacker/public-repo/stargazers", | "contributors_url": "https://api.github.com/repos/baxterthehacker/public-repo/contributors", | "subscribers_url": "https://api.github.com/repos/baxterthehacker/public-repo/subscribers", | "subscription_url": "https://api.github.com/repos/baxterthehacker/public-repo/subscription", | "commits_url": "https://api.github.com/repos/baxterthehacker/public-repo/commits{/sha}", | "git_commits_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/commits{/sha}", | "comments_url": "https://api.github.com/repos/baxterthehacker/public-repo/comments{/number}", | "issue_comment_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/comments{/number}", | "contents_url": "https://api.github.com/repos/baxterthehacker/public-repo/contents/{+path}", | "compare_url": "https://api.github.com/repos/baxterthehacker/public-repo/compare/{base}...{head}", | "merges_url": "https://api.github.com/repos/baxterthehacker/public-repo/merges", | "archive_url": "https://api.github.com/repos/baxterthehacker/public-repo/{archive_format}{/ref}", | "downloads_url": "https://api.github.com/repos/baxterthehacker/public-repo/downloads", | "issues_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues{/number}", | "pulls_url": "https://api.github.com/repos/baxterthehacker/public-repo/pulls{/number}", | "milestones_url": "https://api.github.com/repos/baxterthehacker/public-repo/milestones{/number}", | "notifications_url": "https://api.github.com/repos/baxterthehacker/public-repo/notifications{?since,all,participating}", | "labels_url": "https://api.github.com/repos/baxterthehacker/public-repo/labels{/name}", | "releases_url": 
"https://api.github.com/repos/baxterthehacker/public-repo/releases{/id}", | "created_at": "2015-05-05T23:40:12Z", | "updated_at": "2015-05-05T23:40:12Z", | "pushed_at": "2015-05-05T23:40:26Z", | "git_url": "git://github.com/baxterthehacker/public-repo.git", | "ssh_url": "git@github.com:baxterthehacker/public-repo.git", | "clone_url": "https://github.com/baxterthehacker/public-repo.git", | "svn_url": "https://github.com/baxterthehacker/public-repo", | "homepage": null, | "size": 0, | "stargazers_count": 0, | "watchers_count": 0, | "language": null, | "has_issues": true, | "has_downloads": true, | "has_wiki": true, | "has_pages": true, | "forks_count": 0, | "mirror_url": null, | "open_issues_count": 1, | "forks": 0, | "open_issues": 1, | "watchers": 0, | "default_branch": "master" | }, | "sender": { | "login": "baxterthehacker", | "id": 6752317, | "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", | "gravatar_id": "", | "url": "https://api.github.com/users/baxterthehacker", | "html_url": "https://github.com/baxterthehacker", | "followers_url": "https://api.github.com/users/baxterthehacker/followers", | "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", | "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", | "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", | "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", | "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", | "repos_url": "https://api.github.com/users/baxterthehacker/repos", | "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", | "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", | "type": "User", | "site_admin": false | } |}""".stripMargin) }
code-check/github-api-scala
src/test/scala/events/PullRequestEventJson.scala
Scala
mit
27,764
package troy
package macros

import troy.cql.ast.CqlParser
import troy.schema._

object CqlOps {
  import VParseResultImplicits._

  def parseQuery(queryString: String) =
    CqlParser
      .parseDML(queryString)
      .toV(f => Messages.QueryParseFailure(f.msg, f.next.pos.line, f.next.pos.column))
}
schemasafe/troy
troy-macro/src/main/scala/troy/macros/CqlOps.scala
Scala
apache-2.0
302
package com.datastax.spark.connector.embedded import java.net.{ConnectException, Socket} import java.nio.file.{FileAlreadyExistsException, Files, Paths} import scala.annotation.tailrec import scala.util.{Failure, Success, Try} case class DynamicCassandraPorts(basePort: Int) extends CassandraPorts { import DynamicCassandraPorts._ def getRpcPort(index: Int): Int = basePort + RPC_OFFSET + index def getJmxPort(index: Int): Int = basePort + JMX_OFFSET + index def getSslStoragePort(index: Int): Int = basePort + SSL_STORAGE_OFFSET + index def getStoragePort(index: Int): Int = basePort + STORAGE_OFFSET + index def release(): Unit = DynamicCassandraPorts.releaseBasePort(basePort) } object DynamicCassandraPorts { val MaxInstances = 5 val RPC_OFFSET = 0 val JMX_OFFSET = MaxInstances val SSL_STORAGE_OFFSET = MaxInstances * 2 val STORAGE_OFFSET = MaxInstances * 3 private lazy val basePath = Paths.get(sys.props("baseDir"), "target", "ports") private lazy val basePorts = 10000 until 20000 by 50 private def portLockFile(port: Int) = basePath.resolve(s"port_$port.lock") def apply(): DynamicCassandraPorts = DynamicCassandraPorts(chooseBasePort()) def areAllPortsAvailable(host: String, ports: Seq[Int]): Boolean = { ports.par.forall { port => Try(new Socket(host, port)) match { case Success(socket) => socket.close() false case Failure(ex: ConnectException) => true case _ => false } } } @tailrec def chooseBasePort(): Int = { Files.createDirectories(basePath) val foundPort = basePorts.find { basePort => val file = portLockFile(basePort) Try(Files.createFile(file)) match { case Success(filePath) => require(filePath == file) if (areAllPortsAvailable("127.0.0.1", basePort until (basePort + 100))) { true } else { releaseBasePort(basePort) false } case Failure(ex: FileAlreadyExistsException) => false case Failure(ex) => throw ex } } if (foundPort.isDefined) foundPort.get else { Thread.sleep(1000) chooseBasePort() } } def releaseBasePort(basePort: Int): Unit = { val file = portLockFile(basePort) Files.deleteIfExists(file) } }
ponkin/spark-cassandra-connector
spark-cassandra-connector-embedded/src/main/scala/com/datastax/spark/connector/embedded/DynamicCassandraPorts.scala
Scala
apache-2.0
2,365
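The fixed offsets above carve one block of ports out of the chosen base port per Cassandra instance. A quick worked example with an assumed base port of 10000 and instance index 2:

// With MaxInstances = 5:
//   RPC_OFFSET = 0, JMX_OFFSET = 5, SSL_STORAGE_OFFSET = 10, STORAGE_OFFSET = 15
val ports = DynamicCassandraPorts(10000)
val rpc = ports.getRpcPort(2)               // 10000 + 0  + 2 = 10002
val jmx = ports.getJmxPort(2)               // 10000 + 5  + 2 = 10007
val sslStorage = ports.getSslStoragePort(2) // 10000 + 10 + 2 = 10012
val storage = ports.getStoragePort(2)       // 10000 + 15 + 2 = 10017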
/* * Copyright 2011-2021 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.spark.compiler package spi import java.util.ServiceLoader import scala.collection.mutable import scala.collection.JavaConversions._ import org.objectweb.asm.Type import com.asakusafw.lang.compiler.model.graph.{ Operator, UserOperator } import com.asakusafw.spark.compiler.operator.ViewFields import com.asakusafw.spark.tools.asm.ClassBuilder trait AggregationCompiler { def of: Class[_] def compile( operator: UserOperator)( implicit context: AggregationCompiler.Context): Type } object AggregationCompiler { trait Context extends CompilerContext with ClassLoaderProvider with DataModelLoaderProvider with ViewFields.Context private def getCompiler( operator: Operator)( implicit context: AggregationCompiler.Context): Option[AggregationCompiler] = { operator match { case op: UserOperator => apply(context.classLoader) .get(op.getAnnotation.resolve(context.classLoader).annotationType) case _ => None } } def support( operator: Operator)( implicit context: AggregationCompiler.Context): Boolean = { getCompiler(operator).isDefined } def compile( operator: Operator)( implicit context: AggregationCompiler.Context): Type = { getCompiler(operator) match { case Some(compiler) => compiler.compile(operator.asInstanceOf[UserOperator]) case _ => throw new AssertionError() } } private[this] val aggregationCompilers: mutable.Map[ClassLoader, Map[Class[_], AggregationCompiler]] = // scalastyle:ignore mutable.WeakHashMap.empty private[this] def apply(classLoader: ClassLoader): Map[Class[_], AggregationCompiler] = { aggregationCompilers.getOrElse(classLoader, reload(classLoader)) } private[this] def reload(classLoader: ClassLoader): Map[Class[_], AggregationCompiler] = { val ors = ServiceLoader.load(classOf[AggregationCompiler], classLoader).map { resolver => resolver.of -> resolver }.toMap[Class[_], AggregationCompiler] aggregationCompilers(classLoader) = ors ors } }
asakusafw/asakusafw-spark
compiler/src/main/scala/com/asakusafw/spark/compiler/spi/AggregationCompiler.scala
Scala
apache-2.0
2,696
/*******************************************************************************
 * Copyright (c) 2015 Guillaume DUBUISSON DUPLESSIS <guillaume.dubuisson_duplessis@insa-rouen.fr>.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the GNU Public License v3.0
 * which accompanies this distribution, and is available at
 * http://www.gnu.org/licenses/gpl.html
 *
 * Contributors:
 *     Guillaume DUBUISSON DUPLESSIS <guillaume.dubuisson_duplessis@insa-rouen.fr> - initial API and implementation
 ******************************************************************************/
package graphs.P89

import org.scalatest.Args
import fr.dubuissonduplessis.graph.impl.graph.DefaultGraphs

class sol01Graph extends P89Graph with DefaultGraphs {
  def isBipartite(g: Graph): Boolean =
    g.isBipartite
}
GuillaumeDD/scala99problems
src/main/scala/graphs/P89/sol01Graph.scala
Scala
gpl-3.0
856
package demoproduct

import scala.concurrent.duration._

import io.gatling.core.Predef._
import io.gatling.http.Predef._
import io.gatling.jdbc.Predef._

object RefSEA {

  val headers_0 = Map("Upgrade-Insecure-Requests" -> "1")

  val refSEA =
    //exec(flushCookieJar)
    exec(http("get refSEA")
      .get("/refSEA/v1")
      .check(responseTimeInMillis.lessThan(1000))
      .headers(headers_0))
      .pause(2)
      .exec(http("create account")
        .post("/refSEA/v1/accounts/")
        .check(status.is(200))
        .headers(headers_0)
        .formParam("name", "account")
        .formParam("iban", "DE00000000000000000000"))
      .pause(1)
      .exec(http("get refSEA (JSON)")
        .get("/refSEA/v1?media=json")
        .check(responseTimeInMillis.lessThan(500))
        .headers(headers_0))
      .pause(2)
}
evandor/skysail
skysail.product.demo.e2e.gatling/src/gatling/scala/demoproduct/RefSEA.scala
Scala
apache-2.0
879
/* __ *\\ ** ________ ___ / / ___ __ ____ Scala.js API ** ** / __/ __// _ | / / / _ | __ / // __/ (c) 2013-2015, LAMP/EPFL ** ** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-lang.org/ ** ** /____/\\___/_/ |_/____/_/ | |__/ /____/ ** ** |/____/ ** \\* */ package scala.scalajs.js import scala.language.implicitConversions /** Value of type A or B (union type). * * Scala does not have union types, but they are important to many * interoperability scenarios. This type provides a (partial) encoding of * union types using implicit evidences. */ @scala.scalajs.js.annotation.RawJSType // Don't do this at home! sealed trait |[A, B] // scalastyle:ignore object | { // scalastyle:ignore /** Evidence that `A <: B`, taking top-level `|`-types into account. */ sealed trait Evidence[-A, +B] /** A unique (and typically dead-code-eliminated away) instance of * `Evidence`. */ private object ReusableEvidence extends Evidence[scala.Any, scala.Any] abstract sealed class EvidenceLowestPrioImplicits { /** If `A <: B2`, then `A <: B1 | B2`. */ implicit def right[A, B1, B2](implicit ev: Evidence[A, B2]): Evidence[A, B1 | B2] = ReusableEvidence.asInstanceOf[Evidence[A, B1 | B2]] } abstract sealed class EvidenceLowPrioImplicits extends EvidenceLowestPrioImplicits { /** `Int <: Double`, because that's true in Scala.js. */ implicit def intDouble: Evidence[Int, Double] = ReusableEvidence.asInstanceOf[Evidence[Int, Double]] /** If `A <: B1`, then `A <: B1 | B2`. */ implicit def left[A, B1, B2](implicit ev: Evidence[A, B1]): Evidence[A, B1 | B2] = ReusableEvidence.asInstanceOf[Evidence[A, B1 | B2]] } object Evidence extends EvidenceLowPrioImplicits { /** `A <: A`. */ implicit def base[A]: Evidence[A, A] = ReusableEvidence.asInstanceOf[Evidence[A, A]] /** If `A1 <: B` and `A2 <: B`, then `A1 | A2 <: B`. */ implicit def allSubtypes[A1, A2, B]( implicit ev1: Evidence[A1, B], ev2: Evidence[A2, B]): Evidence[A1 | A2, B] = ReusableEvidence.asInstanceOf[Evidence[A1 | A2, B]] } /** Upcast `A` to `B1 | B2`. * * This needs evidence that `A <: B1 | B2`. */ implicit def from[A, B1, B2](a: A)(implicit ev: Evidence[A, B1 | B2]): B1 | B2 = a.asInstanceOf[B1 | B2] /** Operations on union types. */ implicit class UnionOps[A <: _ | _](val self: A) extends AnyVal { /** Explicitly merge a union type to a supertype (which might not be a * union type itself). * * This needs evidence that `A <: B`. */ def merge[B](implicit ev: |.Evidence[A, B]): B = self.asInstanceOf[B] } }
CapeSepias/scala-js
library/src/main/scala/scala/scalajs/js/Union.scala
Scala
bsd-3-clause
2,919
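A brief usage sketch of the union encoding above (the concrete types are chosen for illustration): the implicit `from` conversion admits any value whose type has an `Evidence` for the union, and `merge` collapses a union back to a common supertype.

import scala.scalajs.js.|

val a: Int | String = 42            // Evidence.left + base
val b: Int | String = "forty-two"   // Evidence.right + base
val c: Double | String = 1          // via the special intDouble evidence

val widened: Any = a.merge[Any]     // Evidence.allSubtypes collapses both branches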
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources.jdbc import java.util.Properties import scala.collection.mutable.ArrayBuffer import org.apache.spark.Partition import org.apache.spark.rdd.RDD import org.apache.spark.sql.sources._ import org.apache.spark.sql.types.StructType import org.apache.spark.sql.{DataFrame, Row, SQLContext, SaveMode} /** * Instructions on how to partition the table among workers. */ private[sql] case class JDBCPartitioningInfo( column: String, lowerBound: Long, upperBound: Long, numPartitions: Int) private[sql] object JDBCRelation { /** * Given a partitioning schematic (a column of integral type, a number of * partitions, and upper and lower bounds on the column's value), generate * WHERE clauses for each partition so that each row in the table appears * exactly once. The parameters minValue and maxValue are advisory in that * incorrect values may cause the partitioning to be poor, but no data * will fail to be represented. */ def columnPartition(partitioning: JDBCPartitioningInfo): Array[Partition] = { if (partitioning == null) return Array[Partition](JDBCPartition(null, 0)) val numPartitions = partitioning.numPartitions val column = partitioning.column if (numPartitions == 1) return Array[Partition](JDBCPartition(null, 0)) // Overflow and silliness can happen if you subtract then divide. // Here we get a little roundoff, but that's (hopefully) OK. 
val stride: Long = (partitioning.upperBound / numPartitions - partitioning.lowerBound / numPartitions) var i: Int = 0 var currentValue: Long = partitioning.lowerBound var ans = new ArrayBuffer[Partition]() while (i < numPartitions) { val lowerBound = if (i != 0) s"$column >= $currentValue" else null currentValue += stride val upperBound = if (i != numPartitions - 1) s"$column < $currentValue" else null val whereClause = if (upperBound == null) { lowerBound } else if (lowerBound == null) { upperBound } else { s"$lowerBound AND $upperBound" } ans += JDBCPartition(whereClause, i) i = i + 1 } ans.toArray } } private[sql] case class JDBCRelation( url: String, table: String, parts: Array[Partition], properties: Properties = new Properties())(@transient val sqlContext: SQLContext) extends BaseRelation with PrunedFilteredScan with InsertableRelation { override val needConversion: Boolean = false override val schema: StructType = JDBCRDD.resolveTable(url, table, properties) override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = { // Rely on a type erasure hack to pass RDD[InternalRow] back as RDD[Row] JDBCRDD.scanTable( sqlContext.sparkContext, schema, url, properties, table, requiredColumns, filters, parts).asInstanceOf[RDD[Row]] } override def insert(data: DataFrame, overwrite: Boolean): Unit = { data.write .mode(if (overwrite) SaveMode.Overwrite else SaveMode.Append) .jdbc(url, table, properties) } }
chenc10/Spark-PAF
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRelation.scala
Scala
apache-2.0
3,988
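As a worked example of columnPartition (bounds invented for illustration, and note that both types are private[sql], so this only compiles inside that package): for column "id" with lowerBound = 0, upperBound = 100 and numPartitions = 4, the stride is 25 and the generated WHERE clauses are open-ended at both extremes, so no row is dropped:

// partition 0: "id < 25"                 (no lower bound on the first slice)
// partition 1: "id >= 25 AND id < 50"
// partition 2: "id >= 50 AND id < 75"
// partition 3: "id >= 75"                (no upper bound on the last slice)
val parts = JDBCRelation.columnPartition(
  JDBCPartitioningInfo(column = "id", lowerBound = 0L, upperBound = 100L, numPartitions = 4))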
/**
 * Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
 */
package achilles.backend.services

import akka.actor.Actor

trait RequireMsg

case class QueryRecom(uid: String, content: String, loc: String) extends RequireMsg

case class checkDB() extends RequireMsg

trait RecResult

case class BasicRecResult(uid: String, rec: String, rank: Int) extends RecResult
yinxusen/achilles
src/main/scala/achilles/backend/services/RequireMsg.scala
Scala
apache-2.0
377
package org.tobster.dpp.datamodel import scala.collection.mutable.HashMap import org.joda.time.DateTime object FlightLookup { print("FlightLookup: ") private val FileNames = List( "cfi_all_spo.csv", "cfi_all_spo_delta.csv" ) // Immutable Map: Origin -> List[Flight] private val allFlights: Map[String, List[Flight]] = { println("Initialize available flights...") val flights = new HashMap[String, List[Flight]] // mutable HashMap! def insertFlights(unsortedFlights: List[Flight]) = { def insert(flight: Flight) = { if (flights.contains(flight.orig)) { flights(flight.orig) = flight :: flights(flight.orig) } else { flights(flight.orig) = List(flight) } } unsortedFlights.foreach(insert) } FileNames.foreach(f => insertFlights(CSVParser.parse(f))) flights.toMap } private def getFlightsByOrigin(orig: String): List[Flight] = allFlights.contains(orig) match { case true => allFlights(orig) case _ => Nil } def searchFlight(orig: String, dest: String, pax: Int, date: DateTime, accuracy: Int) = getFlightsByOrigin(orig) .filter(_.dest == dest) .filter(_.seats >= pax) .filter(_.compareDate(date, accuracy)) def totalOrigins = allFlights.keys.size def totalDestinations = allFlights.values.map(xs => xs.map(_.dest).toSet).reduce(_ ++ _) def orig2Dest = allFlights.map(t => (t._1, t._2.map(_.dest).toSet)) def totalFlights = allFlights.values.map(_.size).foldLeft(0)(_ + _) def prettyPrint = allFlights.map(t => (t._1, t._2.size)) }
tobeydee/dynamicpackagingserverprototype
src/main/scala/org/tobster/dpp/datamodel/FlightLookup.scala
Scala
mit
1,661
package c3.w2 import org.junit.runner.RunWith import org.scalameter.{Key, Warmer, config} import org.scalatest.FunSuite import org.scalatest.junit.JUnitRunner import scala.util.Random @RunWith(classOf[JUnitRunner]) class MergeSortSuite extends FunSuite { trait mergeSort { val maxDepth = 7 val xs = new Array[Int](1024 * 1024) def initialize(xs: Array[Int]) { var i = xs.length while (i != 0) { i -= 1 xs(i) = Random.nextInt() } } initialize(xs) val standardConfig = config( Key.exec.minWarmupRuns -> 20, Key.exec.maxWarmupRuns -> 60, Key.exec.benchRuns -> 60, Key.verbose -> true ) withWarmer new Warmer.Default val mergeSort = new MergeSort } test("Unit test sort") { new mergeSort { mergeSort.quickSort(xs, 0, xs.length) assert(xs(0) == xs.min) assert(xs(xs.length-1) == xs.max) } } test("Unit test parallel sort") { new mergeSort { mergeSort.parallelMergeSort(xs, maxDepth) assert(xs(0) == xs.min) assert(xs(xs.length-1) == xs.max) } } test("Merge sort measure sequential and parallel") { new mergeSort { val seqtime = standardConfig setUp { _ => initialize(xs) } measure { mergeSort.quickSort(xs, 0, xs.length) } println(s"sequential sum time: $seqtime ms") val partime = standardConfig setUp { _ => initialize(xs) } measure { mergeSort.parallelMergeSort(xs, maxDepth) } val speedup = seqtime.value / partime.value println(s"fork/join time: $partime ms") println(s"speedup: {$speedup}") } } }
lwo/lwo.github.io
src/test/scala/c3/w2/MergeSortSuite.scala
Scala
gpl-3.0
1,663
package se.ramn.bottfarmen.example.idle

import collection.JavaConverters._
import collection.immutable.Seq

import se.ramn.bottfarmen.api
import se.ramn.bottfarmen.api.Command
import se.ramn.bottfarmen.api.Move
import se.ramn.bottfarmen.api.BotCommander
import se.ramn.bottfarmen.api.GameState
import se.ramn.bottfarmen.example.BaseCommander
import se.ramn.bottfarmen.example.BaseBot

class Bot(var underlying: api.Bot) extends BaseBot

class IdleCommander extends BaseCommander[Bot] {
  val name = "Idle"

  override def makeBot(serverSideBot: api.Bot) = new Bot(serverSideBot)

  override def selectCommands(gameState: GameState): Seq[Command] = {
    Seq.empty[Command]
  }
}
ramn/bottfarmen
common/src/main/scala/example/idle/IdleCommander.scala
Scala
gpl-3.0
682
package scaladex.data package maven private[maven] object PomConvert { def apply(model: org.apache.maven.model.Model): ArtifactModel = { import model._ import scala.jdk.CollectionConverters._ import scala.util.Try def list[T](l: java.util.List[T]): List[T] = Option(l).map(_.asScala.toList).getOrElse(List.empty[T]) def map(l: java.util.Properties): Map[String, String] = Option(l).map(_.asScala.toMap).getOrElse(Map()) def convert( contributor: org.apache.maven.model.Contributor ): Contributor = { import contributor._ Contributor( Option(getName), Option(getEmail), Option(getUrl), Option(getOrganization), Option(getOrganizationUrl), list(getRoles), Option(getTimezone), map(getProperties), Option(getId) ) } ArtifactModel( getGroupId, getArtifactId, getVersion, getPackaging, Option(getName), Option(getDescription), Try(getInceptionYear).flatMap(y => Try(y.toInt)).toOption, Option(getUrl), Option(getScm).map { scm => import scm._ SourceCodeManagment( Option(getConnection), Option(getDeveloperConnection), Option(getUrl), Option(getTag) ) }, Option(getIssueManagement).map { im => import im._ IssueManagement( getSystem, Option(getUrl) ) }, list(getMailingLists).map { ml => import ml._ MailingList( getName, Option(getSubscribe), Option(getUnsubscribe), Option(getPost), Option(getArchive), list(getOtherArchives) ) }, list(getContributors).map(convert), list(getDevelopers).map(convert), list(getLicenses).map { l => import l._ License( getName, Option(getUrl), Option(getDistribution), Option(getComments) ) }, list(getDependencies).map { d => import d._ Dependency( getGroupId, getArtifactId, getVersion, getProperties.asScala.toMap, Option(getScope), getExclusions.asScala .map(e => Exclusion(e.getGroupId, e.getArtifactId)) .toSet ) }, list(getRepositories).map { r => import r._ def bool(v: String) = if (v == null) false else v.toBoolean def convert(policy: org.apache.maven.model.RepositoryPolicy) = RepositoryPolicy( policy.getChecksumPolicy, bool(policy.getEnabled), policy.getUpdatePolicy ) Repository( getId, getLayout, getName, Option(getUrl), Option(getSnapshots).map(convert), Option(getReleases).map(convert) ) }, Option(getOrganization).map { o => import o._ Organization( getName, Option(getUrl) ) }, { val properties = getProperties.asScala.toMap for { scalaVersion <- properties.get("scalaVersion") sbtVersion <- properties.get("sbtVersion") } yield SbtPluginTarget(scalaVersion, sbtVersion) } ) } }
scalacenter/scaladex
modules/data/src/main/scala/scaladex/data/maven/PomConvert.scala
Scala
bsd-3-clause
3,369
package com.kalmanb.sbt.extra import scala.xml.Elem import scala.xml.Node import scala.xml.transform.RewriteRule import com.kalmanb.sbt.Jenkins import sbt.Keys._ import sbt._ object JenkinsGitPlugin { import com.kalmanb.sbt.JenkinsPlugin._ import Git._ val jenChangeJobBranch = InputKey[Unit]("jenChangeJobBranch", "<jobName> <branch> change a jobs git branch setting") val jenChangeViewBranch = InputKey[Unit]("jenChangeViewBranch", "<viewName> <branch> change all jobs in the view to a new git branch setting") val jenChangeJobsBranch = InputKey[Unit]("jenChangeJobsBranch", "<regex> <branch> change all jobs that match a regex to a new git branch setting") lazy val jenkinsGitSettings = Seq( jenChangeJobBranch <<= jenkinsTask(2, (baseUrl, args) ⇒ Jenkins(baseUrl).updateJob(args.head, changeJobGitBranch(args(1)))), jenChangeViewBranch <<= jenkinsTask(2, (baseUrl, args) ⇒ changeViewGitBranch(baseUrl, args.head, args(1))), jenChangeJobsBranch <<= jenkinsTask(2, (baseUrl, args) ⇒ changeJobsGitBranch(baseUrl, args.head, args(1))) ) object Git { def changeJobGitBranch(newBranch: String)(config: Seq[scala.xml.Node]): Seq[scala.xml.Node] = { val updated = new RewriteRule { override def transform(n: scala.xml.Node): Seq[scala.xml.Node] = n match { case Elem(prefix, "hudson.plugins.git.BranchSpec", attribs, scope, child @ _*) ⇒ Elem(prefix, "hudson.plugins.git.BranchSpec", attribs, scope, <name>{ newBranch }</name>: _*) case elem: Elem ⇒ elem copy (child = elem.child flatMap (this transform)) case other ⇒ other } } transform config updated } def changeViewGitBranch(baseUrl: String, view: String, newBranch: String): Unit = { val jenkins = Jenkins(baseUrl) jenkins.getJobsInView(view).foreach(jenkins.updateJob(_, changeJobGitBranch(newBranch))) } def changeJobsGitBranch(baseUrl: String, regex: String, newBranch: String): Unit = { val jenkins = Jenkins(baseUrl) val pattern = new scala.util.matching.Regex(regex) jenkins.getAllJobs().filter( job ⇒ pattern findFirstIn job isDefined).foreach { job ⇒ println("Changing branch to " + newBranch + " for job " + job + ".") jenkins.updateJob(job, changeJobGitBranch(newBranch)) } } } }
kalmanb/sbt-jenkins-manager
src/main/scala/com/kalmanb/sbt/extra/JenkinsGitPlugin.scala
Scala
apache-2.0
2,381
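A small sketch (with a hypothetical, cut-down job config) of what the RewriteRule in changeJobGitBranch does: every hudson.plugins.git.BranchSpec element has its children replaced by a single <name> holding the new branch.

import com.kalmanb.sbt.extra.JenkinsGitPlugin.Git

val before =
  <scm class="hudson.plugins.git.GitSCM">
    <branches>
      <hudson.plugins.git.BranchSpec><name>master</name></hudson.plugins.git.BranchSpec>
    </branches>
  </scm>

val after = Git.changeJobGitBranch("feature/new-branch")(before)
// `after` now contains <hudson.plugins.git.BranchSpec><name>feature/new-branch</name>...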
package scalajsreact.select.example.components import japgolly.scalajs.react._ import japgolly.scalajs.react.extra.Reusability import japgolly.scalajs.react.extra.router.RouterCtl import japgolly.scalajs.react.vdom.prefix_<^._ import scala.scalajs.js.{Any, UndefOr} import scalacss.Defaults._ import scalacss.ScalaCssReact._ import scalajsreact.select.example.routes.Item object LeftNav { object Style extends StyleSheet.Inline { import dsl._ val container = style(display.flex, flexDirection.column, listStyle := "none", padding.`0` ) val menuItem = styleF.bool(selected => styleS( lineHeight(48.px), padding :=! "0 25px", cursor.pointer, textDecoration := "none", mixinIfElse(selected)(color.red, fontWeight._500) (color.black, &.hover(color(c"#555555"), backgroundColor(c"#ecf0f1"))) )) } case class Props(menus: Vector[Item], selectedPage: Item, ctrl: RouterCtl[Item]) implicit val currentPageReuse = Reusability.by_==[Item] implicit val propsReuse = Reusability.by((_: Props).selectedPage) val component = ReactComponentB[Props]("LeftNav") .render_P { P => <.ul(Style.container)( P.menus.map(item => <.li(^.key := item.title, Style.menuItem(item == P.selectedPage), item.title, P.ctrl setOnClick item)) ) } .configure(Reusability.shouldComponentUpdate) .build def apply(props: Props, ref: UndefOr[String] = "", key: Any = {}) = component.set(key, ref)(props) }
lvitaly/scalajs-react-select-example
src/main/scala/scalajsreact/select/example/components/LeftNav.scala
Scala
apache-2.0
1,569
// Partially applied functions (see the note on partial functions at the bottom)
import java.util.Date

object LogWithDate {
  def main(args: Array[String]) {
    val date = new Date
    val LogWithDateBound = log(date, _: String)

    LogWithDateBound("message1")
    Thread.sleep(1000) // milliseconds
    LogWithDateBound("message2")
    Thread.sleep(1000)
    LogWithDateBound("message3")
  }

  def log(date: Date, message: String) {
    println(date + "------" + message)
  }
}

/*
Partial functions:
val f: PartialFunction[Char, Int] = { case '+' => 1; case '-' => -1 }
f('-')              // returns -1
f.isDefinedAt('0')  // no match, returns false
f('0')              // no match, throws an exception
*/
PengLiangWang/Scala
function/LogWithDate.scala
Scala
gpl-3.0
664
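Since the file above touches two related ideas, here is a short sketch separating them: a partially applied function fixes some arguments of an ordinary method, while a PartialFunction is defined only for part of its input domain.

import java.util.Date

def log(date: Date, message: String): Unit = println(date + "------" + message)

// Partially applied: the date argument is fixed, the message is left open.
val logNow: String => Unit = log(new Date, _: String)
logNow("message1")

// Partial function: only '+' and '-' are in its domain.
val sign: PartialFunction[Char, Int] = { case '+' => 1; case '-' => -1 }
sign('-')             // -1
sign.isDefinedAt('0') // false
// sign('0')          // would throw a MatchError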
/* * Copyright 2013 David Savage * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.chronologicalthought.modula.scratch import org.chronologicalthought.modula.Module import org.chronologicalthought.modula.ModuleContext._ import java.lang.IllegalStateException /** * User: dave */ class ServiceIdeas { def serviceUsage { class Foo { def bar = "bar" } val reg = this.whenRegistered(classOf[Foo]) { foo => println("New " + foo.bar) } this.registerMy(new Foo) println("Found " + this.withAny(classOf[Foo])(_.bar).getOrElse(throw new IllegalStateException("Missing foo service"))) reg.unregister() this.register(new Foo) } def moduleUsage { this.whenRegistered(classOf[Module]) { module => { module.resources(_ == "OSGI-INF/persistence.xml").foreach { url => { val node = xml.XML.load(url) node match { case <foo> {content} </foo> => println("Found " + content) } } } } } } }
davemssavage/modula
scratch/src/org/chronologicalthought/modula/scratch/ServiceIdeas.scala
Scala
apache-2.0
1,598
/* * Copyright 2022 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package services import connectors.PartnershipIdConnector import models.api.PartyType import models.external.PartnershipIdEntity import models.external.partnershipid.PartnershipIdJourneyConfig import uk.gov.hmrc.http.HeaderCarrier import javax.inject.{Inject, Singleton} import scala.concurrent.Future @Singleton class PartnershipIdService @Inject()(partnershipIdConnector: PartnershipIdConnector) { def createJourney(journeyConfig: PartnershipIdJourneyConfig, partyType: PartyType)(implicit hc: HeaderCarrier): Future[String] = { partnershipIdConnector.createJourney(journeyConfig, partyType) } def getDetails(journeyId: String)(implicit hc: HeaderCarrier): Future[PartnershipIdEntity] = { partnershipIdConnector.getDetails(journeyId) } }
hmrc/vat-registration-frontend
app/services/PartnershipIdService.scala
Scala
apache-2.0
1,367
package com.basdek.mailchimp_v3.operations.lists import com.basdek.mailchimp_v3.dto.MailChimpList import com.basdek.mailchimp_v3.{Config, MailChimpResultFuture, SimpleAuthenticate} import com.basdek.mailchimp_v3.operations.Operation import com.ning.http.client.Response import dispatch._, Defaults._ import org.json4s._, native.JsonMethods._, native.Serialization.{write => jsonWrite} /** * Implementation of the POST /lists operation. * @param cfg A Config instance. * @param data A list DTO object. */ class CreateListOperation(val cfg: Config, val data : MailChimpList) extends Operation with SimpleAuthenticate { private def transformer(res: Response) : MailChimpList = { val responseBody = res.getResponseBody parse(responseBody).extract[MailChimpList] } def execute : MailChimpResultFuture = { val req = addAuth(:/(s"${cfg.apiEndpoint}/lists").POST.secure) .setBody(jsonWrite(data)) .setContentType("application/json", "utf-8") httpToResult(req, transformer) } }
basdek/mailchimp_v3
src/main/scala/com/basdek/mailchimp_v3/operations/lists/CreateListOperation.scala
Scala
mit
1,021
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.expressions import java.util.Locale import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, UnresolvedException} import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess} import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateFunction, DeclarativeAggregate, NoOp} import org.apache.spark.sql.types._ /** * The trait of the Window Specification (specified in the OVER clause or WINDOW clause) for * Window Functions. */ sealed trait WindowSpec /** * The specification for a window function. * * @param partitionSpec It defines the way that input rows are partitioned. * @param orderSpec It defines the ordering of rows in a partition. * @param frameSpecification It defines the window frame in a partition. */ case class WindowSpecDefinition( partitionSpec: Seq[Expression], orderSpec: Seq[SortOrder], frameSpecification: WindowFrame) extends Expression with WindowSpec with Unevaluable { override def children: Seq[Expression] = partitionSpec ++ orderSpec :+ frameSpecification override lazy val resolved: Boolean = childrenResolved && checkInputDataTypes().isSuccess && frameSpecification.isInstanceOf[SpecifiedWindowFrame] override def nullable: Boolean = true override def foldable: Boolean = false override def dataType: DataType = throw new UnsupportedOperationException("dataType") override def checkInputDataTypes(): TypeCheckResult = { frameSpecification match { case UnspecifiedFrame => TypeCheckFailure( "Cannot use an UnspecifiedFrame. This should have been converted during analysis. 
" + "Please file a bug report.") case f: SpecifiedWindowFrame if f.frameType == RangeFrame && !f.isUnbounded && orderSpec.isEmpty => TypeCheckFailure( "A range window frame cannot be used in an unordered window specification.") case f: SpecifiedWindowFrame if f.frameType == RangeFrame && f.isValueBound && orderSpec.size > 1 => TypeCheckFailure( s"A range window frame with value boundaries cannot be used in a window specification " + s"with multiple order by expressions: ${orderSpec.mkString(",")}") case f: SpecifiedWindowFrame if f.frameType == RangeFrame && f.isValueBound && !isValidFrameType(f.valueBoundary.head.dataType) => TypeCheckFailure( s"The data type '${orderSpec.head.dataType.simpleString}' used in the order " + "specification does not match the data type " + s"'${f.valueBoundary.head.dataType.simpleString}' which is used in the range frame.") case _ => TypeCheckSuccess } } override def sql: String = { def toSql(exprs: Seq[Expression], prefix: String): Seq[String] = { Seq(exprs).filter(_.nonEmpty).map(_.map(_.sql).mkString(prefix, ", ", "")) } val elements = toSql(partitionSpec, "PARTITION BY ") ++ toSql(orderSpec, "ORDER BY ") ++ Seq(frameSpecification.sql) elements.mkString("(", " ", ")") } private def isValidFrameType(ft: DataType): Boolean = (orderSpec.head.dataType, ft) match { case (DateType, IntegerType) => true case (TimestampType, CalendarIntervalType) => true case (a, b) => a == b } } /** * A Window specification reference that refers to the [[WindowSpecDefinition]] defined * under the name `name`. */ case class WindowSpecReference(name: String) extends WindowSpec /** * The trait used to represent the type of a Window Frame. */ sealed trait FrameType { def inputType: AbstractDataType def sql: String } /** * RowFrame treats rows in a partition individually. Values used in a row frame are considered * to be physical offsets. * For example, `ROW BETWEEN 1 PRECEDING AND 1 FOLLOWING` represents a 3-row frame, * from the row that precedes the current row to the row that follows the current row. */ case object RowFrame extends FrameType { override def inputType: AbstractDataType = IntegerType override def sql: String = "ROWS" } /** * RangeFrame treats rows in a partition as groups of peers. All rows having the same `ORDER BY` * ordering are considered as peers. Values used in a range frame are considered to be logical * offsets. * For example, assuming the value of the current row's `ORDER BY` expression `expr` is `v`, * `RANGE BETWEEN 1 PRECEDING AND 1 FOLLOWING` represents a frame containing rows whose values * `expr` are in the range of [v-1, v+1]. * * If `ORDER BY` clause is not defined, all rows in the partition are considered as peers * of the current row. */ case object RangeFrame extends FrameType { override def inputType: AbstractDataType = TypeCollection.NumericAndInterval override def sql: String = "RANGE" } /** * The trait used to represent special boundaries used in a window frame. */ sealed trait SpecialFrameBoundary extends Expression with Unevaluable { override def children: Seq[Expression] = Nil override def dataType: DataType = NullType override def foldable: Boolean = false override def nullable: Boolean = false } /** UNBOUNDED boundary. */ case object UnboundedPreceding extends SpecialFrameBoundary { override def sql: String = "UNBOUNDED PRECEDING" } case object UnboundedFollowing extends SpecialFrameBoundary { override def sql: String = "UNBOUNDED FOLLOWING" } /** CURRENT ROW boundary. 
*/ case object CurrentRow extends SpecialFrameBoundary { override def sql: String = "CURRENT ROW" } /** * Represents a window frame. */ sealed trait WindowFrame extends Expression with Unevaluable { override def children: Seq[Expression] = Nil override def dataType: DataType = throw new UnsupportedOperationException("dataType") override def foldable: Boolean = false override def nullable: Boolean = false } /** Used as a placeholder when a frame specification is not defined. */ case object UnspecifiedFrame extends WindowFrame /** * A specified Window Frame. The val lower/uppper can be either a foldable [[Expression]] or a * [[SpecialFrameBoundary]]. */ case class SpecifiedWindowFrame( frameType: FrameType, lower: Expression, upper: Expression) extends WindowFrame { override def children: Seq[Expression] = lower :: upper :: Nil lazy val valueBoundary: Seq[Expression] = children.filterNot(_.isInstanceOf[SpecialFrameBoundary]) override def checkInputDataTypes(): TypeCheckResult = { // Check lower value. val lowerCheck = checkBoundary(lower, "lower") if (lowerCheck.isFailure) { return lowerCheck } // Check upper value. val upperCheck = checkBoundary(upper, "upper") if (upperCheck.isFailure) { return upperCheck } // Check combination (of expressions). (lower, upper) match { case (l: Expression, u: Expression) if !isValidFrameBoundary(l, u) => TypeCheckFailure(s"Window frame upper bound '$upper' does not followes the lower bound " + s"'$lower'.") case (l: SpecialFrameBoundary, _) => TypeCheckSuccess case (_, u: SpecialFrameBoundary) => TypeCheckSuccess case (l: Expression, u: Expression) if l.dataType != u.dataType => TypeCheckFailure( s"Window frame bounds '$lower' and '$upper' do no not have the same data type: " + s"'${l.dataType.catalogString}' <> '${u.dataType.catalogString}'") case (l: Expression, u: Expression) if isGreaterThan(l, u) => TypeCheckFailure( "The lower bound of a window frame must be less than or equal to the upper bound") case _ => TypeCheckSuccess } } override def sql: String = { val lowerSql = boundarySql(lower) val upperSql = boundarySql(upper) s"${frameType.sql} BETWEEN $lowerSql AND $upperSql" } def isUnbounded: Boolean = lower == UnboundedPreceding && upper == UnboundedFollowing def isValueBound: Boolean = valueBoundary.nonEmpty def isOffset: Boolean = (lower, upper) match { case (l: Expression, u: Expression) => frameType == RowFrame && l == u case _ => false } private def boundarySql(expr: Expression): String = expr match { case e: SpecialFrameBoundary => e.sql case UnaryMinus(n) => n.sql + " PRECEDING" case e: Expression => e.sql + " FOLLOWING" } private def isGreaterThan(l: Expression, r: Expression): Boolean = { GreaterThan(l, r).eval().asInstanceOf[Boolean] } private def checkBoundary(b: Expression, location: String): TypeCheckResult = b match { case _: SpecialFrameBoundary => TypeCheckSuccess case e: Expression if !e.foldable => TypeCheckFailure(s"Window frame $location bound '$e' is not a literal.") case e: Expression if !frameType.inputType.acceptsType(e.dataType) => TypeCheckFailure( s"The data type of the $location bound '${e.dataType.simpleString}' does not match " + s"the expected data type '${frameType.inputType.simpleString}'.") case _ => TypeCheckSuccess } private def isValidFrameBoundary(l: Expression, u: Expression): Boolean = { (l, u) match { case (UnboundedFollowing, _) => false case (_, UnboundedPreceding) => false case _ => true } } } case class UnresolvedWindowExpression( child: Expression, windowSpec: WindowSpecReference) extends UnaryExpression 
with Unevaluable { override def dataType: DataType = throw new UnresolvedException(this, "dataType") override def foldable: Boolean = throw new UnresolvedException(this, "foldable") override def nullable: Boolean = throw new UnresolvedException(this, "nullable") override lazy val resolved = false } case class WindowExpression( windowFunction: Expression, windowSpec: WindowSpecDefinition) extends Expression with Unevaluable { override def children: Seq[Expression] = windowFunction :: windowSpec :: Nil override def dataType: DataType = windowFunction.dataType override def foldable: Boolean = windowFunction.foldable override def nullable: Boolean = windowFunction.nullable override def toString: String = s"$windowFunction $windowSpec" override def sql: String = windowFunction.sql + " OVER " + windowSpec.sql } /** * A window function is a function that can only be evaluated in the context of a window operator. */ trait WindowFunction extends Expression { /** Frame in which the window operator must be executed. */ def frame: WindowFrame = UnspecifiedFrame } /** * Case objects that describe whether a window function is a SQL window function or a Python * user-defined window function. */ sealed trait WindowFunctionType object WindowFunctionType { case object SQL extends WindowFunctionType case object Python extends WindowFunctionType def functionType(windowExpression: NamedExpression): WindowFunctionType = { val t = windowExpression.collectFirst { case _: WindowFunction | _: AggregateFunction => SQL case udf: PythonUDF if PythonUDF.isWindowPandasUDF(udf) => Python } // Normally a window expression would either have a SQL window function, a SQL // aggregate function or a python window UDF. However, sometimes the optimizer will replace // the window function if the value of the window function can be predetermined. // For example, for query: // // select count(NULL) over () from values 1.0, 2.0, 3.0 T(a) // // The window function will be replaced by expression literal(0) // To handle this case, if a window expression doesn't have a regular window function, we // consider its type to be SQL as literal(0) is also a SQL expression. t.getOrElse(SQL) } } /** * An offset window function is a window function that returns the value of the input column offset * by a number of rows within the partition. For instance: an OffsetWindowfunction for value x with * offset -2, will get the value of x 2 rows back in the partition. */ abstract class OffsetWindowFunction extends Expression with WindowFunction with Unevaluable with ImplicitCastInputTypes { /** * Input expression to evaluate against a row which a number of rows below or above (depending on * the value and sign of the offset) the current row. */ val input: Expression /** * Default result value for the function when the `offset`th row does not exist. */ val default: Expression /** * (Foldable) expression that contains the number of rows between the current row and the row * where the input expression is evaluated. */ val offset: Expression /** * Direction of the number of rows between the current row and the row where the input expression * is evaluated. */ val direction: SortDirection override def children: Seq[Expression] = Seq(input, offset, default) /* * The result of an OffsetWindowFunction is dependent on the frame in which the * OffsetWindowFunction is executed, the input expression and the default expression. Even when * both the input and the default expression are foldable, the result is still not foldable due to * the frame. 
*/ override def foldable: Boolean = false override def nullable: Boolean = default == null || default.nullable || input.nullable override lazy val frame: WindowFrame = { val boundary = direction match { case Ascending => offset case Descending => UnaryMinus(offset) match { case e: Expression if e.foldable => Literal.create(e.eval(EmptyRow), e.dataType) case o => o } } SpecifiedWindowFrame(RowFrame, boundary, boundary) } override def checkInputDataTypes(): TypeCheckResult = { val check = super.checkInputDataTypes() if (check.isFailure) { check } else if (!offset.foldable) { TypeCheckFailure(s"Offset expression '$offset' must be a literal.") } else { TypeCheckSuccess } } override def dataType: DataType = input.dataType override def inputTypes: Seq[AbstractDataType] = Seq(AnyDataType, IntegerType, TypeCollection(input.dataType, NullType)) override def toString: String = s"$prettyName($input, $offset, $default)" } /** * The Lead function returns the value of `input` at the `offset`th row after the current row in * the window. Offsets start at 0, which is the current row. The offset must be constant * integer value. The default offset is 1. When the value of `input` is null at the `offset`th row, * null is returned. If there is no such offset row, the `default` expression is evaluated. * * @param input expression to evaluate `offset` rows after the current row. * @param offset rows to jump ahead in the partition. * @param default to use when the offset is larger than the window. The default value is null. */ @ExpressionDescription( usage = """ _FUNC_(input[, offset[, default]]) - Returns the value of `input` at the `offset`th row after the current row in the window. The default value of `offset` is 1 and the default value of `default` is null. If the value of `input` at the `offset`th row is null, null is returned. If there is no such an offset row (e.g., when the offset is 1, the last row of the window does not have any subsequent row), `default` is returned. """) case class Lead(input: Expression, offset: Expression, default: Expression) extends OffsetWindowFunction { def this(input: Expression, offset: Expression) = this(input, offset, Literal(null)) def this(input: Expression) = this(input, Literal(1)) def this() = this(Literal(null)) override val direction = Ascending } /** * The Lag function returns the value of `input` at the `offset`th row before the current row in * the window. Offsets start at 0, which is the current row. The offset must be constant * integer value. The default offset is 1. When the value of `input` is null at the `offset`th row, * null is returned. If there is no such offset row, the `default` expression is evaluated. * * @param input expression to evaluate `offset` rows before the current row. * @param offset rows to jump back in the partition. * @param default to use when the offset row does not exist. */ @ExpressionDescription( usage = """ _FUNC_(input[, offset[, default]]) - Returns the value of `input` at the `offset`th row before the current row in the window. The default value of `offset` is 1 and the default value of `default` is null. If the value of `input` at the `offset`th row is null, null is returned. If there is no such offset row (e.g., when the offset is 1, the first row of the window does not have any previous row), `default` is returned. 
""") case class Lag(input: Expression, offset: Expression, default: Expression) extends OffsetWindowFunction { def this(input: Expression, offset: Expression) = this(input, offset, Literal(null)) def this(input: Expression) = this(input, Literal(1)) def this() = this(Literal(null)) override val direction = Descending } abstract class AggregateWindowFunction extends DeclarativeAggregate with WindowFunction { self: Product => override val frame = SpecifiedWindowFrame(RowFrame, UnboundedPreceding, CurrentRow) override def dataType: DataType = IntegerType override def nullable: Boolean = true override lazy val mergeExpressions = throw new UnsupportedOperationException("Window Functions do not support merging.") } abstract class RowNumberLike extends AggregateWindowFunction { override def children: Seq[Expression] = Nil protected val zero = Literal(0) protected val one = Literal(1) protected val rowNumber = AttributeReference("rowNumber", IntegerType, nullable = false)() override val aggBufferAttributes: Seq[AttributeReference] = rowNumber :: Nil override val initialValues: Seq[Expression] = zero :: Nil override val updateExpressions: Seq[Expression] = Add(rowNumber, one) :: Nil } /** * A [[SizeBasedWindowFunction]] needs the size of the current window for its calculation. */ trait SizeBasedWindowFunction extends AggregateWindowFunction { // It's made a val so that the attribute created on driver side is serialized to executor side. // Otherwise, if it's defined as a function, when it's called on executor side, it actually // returns the singleton value instantiated on executor side, which has different expression ID // from the one created on driver side. val n: AttributeReference = SizeBasedWindowFunction.n } object SizeBasedWindowFunction { val n = AttributeReference("window__partition__size", IntegerType, nullable = false)() } /** * The RowNumber function computes a unique, sequential number to each row, starting with one, * according to the ordering of rows within the window partition. * * This documentation has been based upon similar documentation for the Hive and Presto projects. */ @ExpressionDescription( usage = """ _FUNC_() - Assigns a unique, sequential number to each row, starting with one, according to the ordering of rows within the window partition. """) case class RowNumber() extends RowNumberLike { override val evaluateExpression = rowNumber override def prettyName: String = "row_number" } /** * The CumeDist function computes the position of a value relative to all values in the partition. * The result is the number of rows preceding or equal to the current row in the ordering of the * partition divided by the total number of rows in the window partition. Any tie values in the * ordering will evaluate to the same position. * * This documentation has been based upon similar documentation for the Hive and Presto projects. */ @ExpressionDescription( usage = """ _FUNC_() - Computes the position of a value relative to all values in the partition. """) case class CumeDist() extends RowNumberLike with SizeBasedWindowFunction { override def dataType: DataType = DoubleType // The frame for CUME_DIST is Range based instead of Row based, because CUME_DIST must // return the same value for equal values in the partition. 
override val frame = SpecifiedWindowFrame(RangeFrame, UnboundedPreceding, CurrentRow) override val evaluateExpression = Divide(Cast(rowNumber, DoubleType), Cast(n, DoubleType)) override def prettyName: String = "cume_dist" } /** * The NTile function divides the rows for each window partition into `n` buckets ranging from 1 to * at most `n`. Bucket values will differ by at most 1. If the number of rows in the partition does * not divide evenly into the number of buckets, then the remainder values are distributed one per * bucket, starting with the first bucket. * * The NTile function is particularly useful for the calculation of tertiles, quartiles, deciles and * other common summary statistics * * The function calculates two variables during initialization: The size of a regular bucket, and * the number of buckets that will have one extra row added to it (when the rows do not evenly fit * into the number of buckets); both variables are based on the size of the current partition. * During the calculation process the function keeps track of the current row number, the current * bucket number, and the row number at which the bucket will change (bucketThreshold). When the * current row number reaches bucket threshold, the bucket value is increased by one and the * threshold is increased by the bucket size (plus one extra if the current bucket is padded). * * This documentation has been based upon similar documentation for the Hive and Presto projects. * * @param buckets number of buckets to divide the rows in. Default value is 1. */ @ExpressionDescription( usage = """ _FUNC_(n) - Divides the rows for each window partition into `n` buckets ranging from 1 to at most `n`. """) case class NTile(buckets: Expression) extends RowNumberLike with SizeBasedWindowFunction { def this() = this(Literal(1)) override def children: Seq[Expression] = Seq(buckets) // Validate buckets. Note that this could be relaxed, the bucket value only needs to constant // for each partition. 
override def checkInputDataTypes(): TypeCheckResult = { if (!buckets.foldable) { return TypeCheckFailure(s"Buckets expression must be foldable, but got $buckets") } if (buckets.dataType != IntegerType) { return TypeCheckFailure(s"Buckets expression must be integer type, but got $buckets") } val i = buckets.eval().asInstanceOf[Int] if (i > 0) { TypeCheckSuccess } else { TypeCheckFailure(s"Buckets expression must be positive, but got: $i") } } private val bucket = AttributeReference("bucket", IntegerType, nullable = false)() private val bucketThreshold = AttributeReference("bucketThreshold", IntegerType, nullable = false)() private val bucketSize = AttributeReference("bucketSize", IntegerType, nullable = false)() private val bucketsWithPadding = AttributeReference("bucketsWithPadding", IntegerType, nullable = false)() private def bucketOverflow(e: Expression) = If(GreaterThanOrEqual(rowNumber, bucketThreshold), e, zero) override val aggBufferAttributes = Seq( rowNumber, bucket, bucketThreshold, bucketSize, bucketsWithPadding ) override val initialValues = Seq( zero, zero, zero, Cast(Divide(n, buckets), IntegerType), Cast(Remainder(n, buckets), IntegerType) ) override val updateExpressions = Seq( Add(rowNumber, one), Add(bucket, bucketOverflow(one)), Add(bucketThreshold, bucketOverflow( Add(bucketSize, If(LessThan(bucket, bucketsWithPadding), one, zero)))), NoOp, NoOp ) override val evaluateExpression = bucket } /** * A RankLike function is a WindowFunction that changes its value based on a change in the value of * the order of the window in which is processed. For instance, when the value of `input` changes * in a window ordered by `input` the rank function also changes. The size of the change of the * rank function is (typically) not dependent on the size of the change in `input`. * * This documentation has been based upon similar documentation for the Hive and Presto projects. */ abstract class RankLike extends AggregateWindowFunction { /** Store the values of the window 'order' expressions. */ protected val orderAttrs = children.map { expr => AttributeReference(expr.sql, expr.dataType)() } /** Predicate that detects if the order attributes have changed. */ protected val orderEquals = children.zip(orderAttrs) .map(EqualNullSafe.tupled) .reduceOption(And) .getOrElse(Literal(true)) protected val orderInit = children.map(e => Literal.create(null, e.dataType)) protected val rank = AttributeReference("rank", IntegerType, nullable = false)() protected val rowNumber = AttributeReference("rowNumber", IntegerType, nullable = false)() protected val zero = Literal(0) protected val one = Literal(1) protected val increaseRowNumber = Add(rowNumber, one) /** * Different RankLike implementations use different source expressions to update their rank value. * Rank for instance uses the number of rows seen, whereas DenseRank uses the number of changes. */ protected def rankSource: Expression = rowNumber /** Increase the rank when the current rank == 0 or when the one of order attributes changes. 
*/ protected val increaseRank = If(And(orderEquals, Not(EqualTo(rank, zero))), rank, rankSource) override val aggBufferAttributes: Seq[AttributeReference] = rank +: rowNumber +: orderAttrs override val initialValues = zero +: one +: orderInit override val updateExpressions = increaseRank +: increaseRowNumber +: children override val evaluateExpression: Expression = rank override def sql: String = s"${prettyName.toUpperCase(Locale.ROOT)}()" def withOrder(order: Seq[Expression]): RankLike } /** * The Rank function computes the rank of a value in a group of values. The result is one plus the * number of rows preceding or equal to the current row in the ordering of the partition. The values * will produce gaps in the sequence. * * This documentation has been based upon similar documentation for the Hive and Presto projects. * * @param children to base the rank on; a change in the value of one the children will trigger a * change in rank. This is an internal parameter and will be assigned by the * Analyser. */ @ExpressionDescription( usage = """ _FUNC_() - Computes the rank of a value in a group of values. The result is one plus the number of rows preceding or equal to the current row in the ordering of the partition. The values will produce gaps in the sequence. """) case class Rank(children: Seq[Expression]) extends RankLike { def this() = this(Nil) override def withOrder(order: Seq[Expression]): Rank = Rank(order) } /** * The DenseRank function computes the rank of a value in a group of values. The result is one plus * the previously assigned rank value. Unlike [[Rank]], [[DenseRank]] will not produce gaps in the * ranking sequence. * * This documentation has been based upon similar documentation for the Hive and Presto projects. * * @param children to base the rank on; a change in the value of one the children will trigger a * change in rank. This is an internal parameter and will be assigned by the * Analyser. */ @ExpressionDescription( usage = """ _FUNC_() - Computes the rank of a value in a group of values. The result is one plus the previously assigned rank value. Unlike the function rank, dense_rank will not produce gaps in the ranking sequence. """) case class DenseRank(children: Seq[Expression]) extends RankLike { def this() = this(Nil) override def withOrder(order: Seq[Expression]): DenseRank = DenseRank(order) override protected def rankSource = Add(rank, one) override val updateExpressions = increaseRank +: children override val aggBufferAttributes = rank +: orderAttrs override val initialValues = zero +: orderInit override def prettyName: String = "dense_rank" } /** * The PercentRank function computes the percentage ranking of a value in a group of values. The * result the rank of the minus one divided by the total number of rows in the partition minus one: * (r - 1) / (n - 1). If a partition only contains one row, the function will return 0. * * The PercentRank function is similar to the CumeDist function, but it uses rank values instead of * row counts in the its numerator. * * This documentation has been based upon similar documentation for the Hive and Presto projects. * * @param children to base the rank on; a change in the value of one of the children will trigger a * change in rank. This is an internal parameter and will be assigned by the * Analyser. */ @ExpressionDescription( usage = """ _FUNC_() - Computes the percentage ranking of a value in a group of values. 
""") case class PercentRank(children: Seq[Expression]) extends RankLike with SizeBasedWindowFunction { def this() = this(Nil) override def withOrder(order: Seq[Expression]): PercentRank = PercentRank(order) override def dataType: DataType = DoubleType override val evaluateExpression = If(GreaterThan(n, one), Divide(Cast(Subtract(rank, one), DoubleType), Cast(Subtract(n, one), DoubleType)), Literal(0.0d)) override def prettyName: String = "percent_rank" }
lxsmnv/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
Scala
apache-2.0
30,287
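/*
 * The expressions above are what Catalyst builds internally when a query uses an OVER clause.
 * A hedged usage sketch of the user-facing side via the public DataFrame API; the column names
 * and sample data are made up, and the local SparkSession setup is only for illustration.
 */
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._

object WindowUsageSketch extends App {
  val spark = SparkSession.builder().master("local[*]").appName("window-sketch").getOrCreate()
  import spark.implicits._

  val sales = Seq(("a", 1, 10.0), ("a", 2, 12.0), ("b", 1, 7.0), ("b", 2, 9.0))
    .toDF("shop", "day", "amount")

  // Partition by shop, order by day; offsets in a ROWS frame are physical row offsets.
  val byShop = Window.partitionBy("shop").orderBy("day")

  sales
    .withColumn("prev_amount", lag("amount", 1).over(byShop))   // Lag: value one row back, null if absent
    .withColumn("next_amount", lead("amount", 1).over(byShop))  // Lead: value one row ahead
    .withColumn("rank", rank().over(byShop))                    // a RankLike window function
    .withColumn("running_total",
      sum("amount").over(byShop.rowsBetween(Window.unboundedPreceding, Window.currentRow)))
    .show()

  spark.stop()
}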
package hu.blackbelt.cd.bintray.deploy import java.nio.file.{Files, StandardCopyOption} import java.util.{Properties, UUID} import awscala.s3.S3 import com.amazonaws.regions.Regions import com.amazonaws.services.s3.model.GetObjectRequest import hu.blackbelt.cd.bintray.VFS.FS object Access { val bintray_organization = "bintray.organization" val bintray_user = "bintray.user" val bintray_apikey = "bintray.apikey" val aws_accessKeyId = "aws.accessKeyId" val aws_secretKey = "aws.secretKey" def collect = { implicit val s3 = S3()(com.amazonaws.regions.Region.getRegion(Regions.EU_CENTRAL_1)) val destination = FS.getPath(s"/tmp/${UUID.randomUUID().toString}") Files.createDirectories(destination) val s3Object = s3.getObject(new GetObjectRequest("blackbelt-secrets", "bintray-deploy/access.properties")) Files.copy(s3Object.getObjectContent, destination, StandardCopyOption.REPLACE_EXISTING) import scala.collection.JavaConverters._ val prop = new Properties() prop.load(Files.newInputStream(destination)) prop.entrySet().asScala.foreach { (entry) => { sys.props += ((entry.getKey.asInstanceOf[String], entry.getValue.asInstanceOf[String])) } } } }
tsechov/s3-bintray-deploy
src/main/scala/hu/blackbelt/cd/bintray/deploy/Access.scala
Scala
apache-2.0
1,231
package filodb.core.memstore import scala.collection.mutable.ArrayBuffer import scala.concurrent.duration._ import scala.util.Random import com.googlecode.javaewah.IntIterator import org.scalatest.BeforeAndAfter import filodb.core._ import filodb.core.binaryrecord2.RecordBuilder import filodb.core.metadata.Dataset import filodb.core.query.{ColumnFilter, Filter} import filodb.memory.format.UnsafeUtils.ZeroPointer import filodb.memory.format.UTF8Wrapper import filodb.memory.format.ZeroCopyUTF8String._ import org.scalatest.funspec.AnyFunSpec import org.scalatest.matchers.should.Matchers class PartKeyLuceneIndexSpec extends AnyFunSpec with Matchers with BeforeAndAfter { import Filter._ import GdeltTestData._ val keyIndex = new PartKeyLuceneIndex(dataset6.ref, dataset6.schema.partition, 0, 1.hour) val partBuilder = new RecordBuilder(TestData.nativeMem) def partKeyOnHeap(dataset: Dataset, base: Any, offset: Long): Array[Byte] = dataset.partKeySchema.asByteArray(base, offset) before { keyIndex.reset() keyIndex.refreshReadersBlocking() } after { partBuilder.removeAndFreeContainers(partBuilder.allContainers.length) } implicit class RichIntIterator(ii: IntIterator) { def toSeq: Seq[Int] = { val newSeq = new collection.mutable.ArrayBuffer[Int]() while (ii.hasNext) { newSeq += ii.next } newSeq } } it("should add part keys and parse filters correctly") { val start = System.currentTimeMillis() // Add the first ten keys and row numbers partKeyFromRecords(dataset6, records(dataset6, readers.take(10)), Some(partBuilder)) .zipWithIndex.foreach { case (addr, i) => keyIndex.addPartKey(partKeyOnHeap(dataset6, ZeroPointer, addr), i, System.currentTimeMillis())() } val end = System.currentTimeMillis() keyIndex.refreshReadersBlocking() // Should get empty iterator when passing no filters val partNums1 = keyIndex.partIdsFromFilters(Nil, start, end) partNums1 shouldEqual debox.Buffer(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) val filter2 = ColumnFilter("Actor2Code", Equals("GOV".utf8)) val partNums2 = keyIndex.partIdsFromFilters(Seq(filter2), start, end) partNums2 shouldEqual debox.Buffer(7, 8, 9) val filter3 = ColumnFilter("Actor2Name", Equals("REGIME".utf8)) val partNums3 = keyIndex.partIdsFromFilters(Seq(filter3), start, end) partNums3 shouldEqual debox.Buffer(8, 9) val filter4 = ColumnFilter("Actor2Name", Equals("REGIME".utf8)) val partNums4 = keyIndex.partIdsFromFilters(Seq(filter4), 10, start-1) partNums4 shouldEqual debox.Buffer.empty[Int] val filter5 = ColumnFilter("Actor2Name", Equals("REGIME".utf8)) val partNums5 = keyIndex.partIdsFromFilters(Seq(filter5), end + 100, end + 100000) partNums5 should not equal debox.Buffer.empty[Int] val filter6 = ColumnFilter("Actor2Name", Equals("REGIME".utf8)) val partNums6 = keyIndex.partIdsFromFilters(Seq(filter6), start - 10000, end ) partNums6 should not equal debox.Buffer.empty[Int] val filter7 = ColumnFilter("Actor2Name", Equals("REGIME".utf8)) val partNums7 = keyIndex.partIdsFromFilters(Seq(filter7), (start + end)/2, end + 1000 ) partNums7 should not equal debox.Buffer.empty[Int] } it("should fetch part key records from filters correctly") { // Add the first ten keys and row numbers val pkrs = partKeyFromRecords(dataset6, records(dataset6, readers.take(10)), Some(partBuilder)) .zipWithIndex.map { case (addr, i) => val pk = partKeyOnHeap(dataset6, ZeroPointer, addr) keyIndex.addPartKey(pk, i, i, i + 10)() PartKeyLuceneIndexRecord(pk, i, i + 10) } keyIndex.refreshReadersBlocking() val filter2 = ColumnFilter("Actor2Code", Equals("GOV".utf8)) val result = 
keyIndex.partKeyRecordsFromFilters(Seq(filter2), 0, Long.MaxValue) val expected = Seq(pkrs(7), pkrs(8), pkrs(9)) result.map(_.partKey.toSeq) shouldEqual expected.map(_.partKey.toSeq) result.map( p => (p.startTime, p.endTime)) shouldEqual expected.map( p => (p.startTime, p.endTime)) } it("should upsert part keys with endtime and foreachPartKeyStillIngesting should work") { // Add the first ten keys and row numbers partKeyFromRecords(dataset6, records(dataset6, readers.take(10)), Some(partBuilder)) .zipWithIndex.foreach { case (addr, i) => val time = System.currentTimeMillis() keyIndex.addPartKey(partKeyOnHeap(dataset6, ZeroPointer, addr), i, time)() if (i%2 == 0) keyIndex.upsertPartKey(partKeyOnHeap(dataset6, ZeroPointer, addr), i, time, time + 300)() } keyIndex.refreshReadersBlocking() keyIndex.indexNumEntries shouldEqual 10 val partIdsIngesting = ArrayBuffer[Int]() val numHits = keyIndex.foreachPartKeyStillIngesting { (id, key) => partIdsIngesting += id } numHits shouldEqual 5 partIdsIngesting shouldEqual Seq(1, 3, 5, 7, 9) } it("should add part keys and fetch startTimes correctly for more than 1024 keys") { val numPartIds = 3000 // needs to be more than 1024 to test the lucene term limit val start = System.currentTimeMillis() // we dont care much about the partKey here, but the startTime against partId. val partKeys = Stream.continually(readers.head).take(numPartIds).toList partKeyFromRecords(dataset6, records(dataset6, partKeys), Some(partBuilder)) .zipWithIndex.foreach { case (addr, i) => keyIndex.addPartKey(partKeyOnHeap(dataset6, ZeroPointer, addr), i, start + i)() } keyIndex.refreshReadersBlocking() val startTimes = keyIndex.startTimeFromPartIds((0 until numPartIds).iterator) for { i <- 0 until numPartIds} { startTimes(i) shouldEqual start + i } } it("should add part keys and fetch partIdsEndedBefore and removePartKeys correctly for more than 1024 keys") { val numPartIds = 3000 // needs to be more than 1024 to test the lucene term limit val start = 1000 // we dont care much about the partKey here, but the startTime against partId. 
val partKeys = Stream.continually(readers.head).take(numPartIds).toList partKeyFromRecords(dataset6, records(dataset6, partKeys), Some(partBuilder)) .zipWithIndex.foreach { case (addr, i) => keyIndex.addPartKey(partKeyOnHeap(dataset6, ZeroPointer, addr), i, start + i, start + i + 100)() } keyIndex.refreshReadersBlocking() val pIds = keyIndex.partIdsEndedBefore(start + 200) val pIdsList = pIds.toList() for { i <- 0 until numPartIds} { pIdsList.contains(i) shouldEqual (if (i <= 100) true else false) } keyIndex.removePartKeys(pIds) keyIndex.refreshReadersBlocking() for { i <- 0 until numPartIds} { keyIndex.partKeyFromPartId(i).isDefined shouldEqual (if (i <= 100) false else true) } } it("should update part keys with endtime and parse filters correctly") { val start = System.currentTimeMillis() // Add the first ten keys and row numbers partKeyFromRecords(dataset6, records(dataset6, readers.take(10)), Some(partBuilder)) .zipWithIndex.foreach { case (addr, i) => val time = System.currentTimeMillis() keyIndex.addPartKey(partKeyOnHeap(dataset6, ZeroPointer, addr), i, time)() keyIndex.refreshReadersBlocking() // updates need to be able to read startTime from index, so commit keyIndex.updatePartKeyWithEndTime(partKeyOnHeap(dataset6, ZeroPointer, addr), i, time + 10000)() } val end = System.currentTimeMillis() keyIndex.refreshReadersBlocking() // Should get empty iterator when passing no filters val partNums1 = keyIndex.partIdsFromFilters(Nil, start, end) partNums1 shouldEqual debox.Buffer(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) val filter2 = ColumnFilter("Actor2Code", Equals("GOV".utf8)) val partNums2 = keyIndex.partIdsFromFilters(Seq(filter2), start, end) partNums2 shouldEqual debox.Buffer(7, 8, 9) val filter3 = ColumnFilter("Actor2Name", Equals("REGIME".utf8)) val partNums3 = keyIndex.partIdsFromFilters(Seq(filter3), start, end) partNums3 shouldEqual debox.Buffer(8, 9) val filter4 = ColumnFilter("Actor2Name", Equals("REGIME".utf8)) val partNums4 = keyIndex.partIdsFromFilters(Seq(filter4), 10, start-1) partNums4 shouldEqual debox.Buffer.empty[Int] val filter5 = ColumnFilter("Actor2Name", Equals("REGIME".utf8)) val partNums5 = keyIndex.partIdsFromFilters(Seq(filter5), end + 20000, end + 100000) partNums5 shouldEqual debox.Buffer.empty[Int] val filter6 = ColumnFilter("Actor2Name", Equals("REGIME".utf8)) val partNums6 = keyIndex.partIdsFromFilters(Seq(filter6), start - 10000, end-1 ) partNums6 should not equal debox.Buffer.empty[Int] val filter7 = ColumnFilter("Actor2Name", Equals("REGIME".utf8)) val partNums7 = keyIndex.partIdsFromFilters(Seq(filter7), (start + end)/2, end + 1000 ) partNums7 should not equal debox.Buffer.empty[Int] } it("should parse filters with UTF8Wrapper and string correctly") { // Add the first ten keys and row numbers partKeyFromRecords(dataset6, records(dataset6, readers.take(10)), Some(partBuilder)) .zipWithIndex.foreach { case (addr, i) => keyIndex.addPartKey(partKeyOnHeap(dataset6, ZeroPointer, addr), i, System.currentTimeMillis())() } keyIndex.refreshReadersBlocking() val filter2 = ColumnFilter("Actor2Name", Equals(UTF8Wrapper("REGIME".utf8))) val partNums2 = keyIndex.partIdsFromFilters(Seq(filter2), 0, Long.MaxValue) partNums2 shouldEqual debox.Buffer(8, 9) val filter3 = ColumnFilter("Actor2Name", Equals("REGIME")) val partNums3 = keyIndex.partIdsFromFilters(Seq(filter3), 0, Long.MaxValue) partNums3 shouldEqual debox.Buffer(8, 9) } it("should obtain indexed names and values") { // Add the first ten keys and row numbers partKeyFromRecords(dataset6, records(dataset6, 
readers.take(10)), Some(partBuilder)) .zipWithIndex.foreach { case (addr, i) => keyIndex.addPartKey(partKeyOnHeap(dataset6, ZeroPointer, addr), i, System.currentTimeMillis())() } keyIndex.refreshReadersBlocking() keyIndex.indexNames(10).toList shouldEqual Seq("Actor2Code", "Actor2Name") keyIndex.indexValues("not_found").toSeq should equal (Nil) val infos = Seq("AFR", "CHN", "COP", "CVL", "EGYEDU").map(_.utf8).map(TermInfo(_, 1)) val top2infos = Seq(TermInfo("GOV".utf8, 3), TermInfo("AGR".utf8, 2)) // top 2 items by frequency keyIndex.indexValues("Actor2Code", 2) shouldEqual top2infos val allValues = keyIndex.indexValues("Actor2Code") allValues take 2 shouldEqual top2infos allValues.drop(2).toSet shouldEqual infos.toSet } it("should be able to AND multiple filters together") { // Add the first ten keys and row numbers partKeyFromRecords(dataset6, records(dataset6, readers.take(10)), Some(partBuilder)) .zipWithIndex.foreach { case (addr, i) => keyIndex.addPartKey(partKeyOnHeap(dataset6, ZeroPointer, addr), i, System.currentTimeMillis())() } keyIndex.refreshReadersBlocking() val filters1 = Seq(ColumnFilter("Actor2Code", Equals("GOV".utf8)), ColumnFilter("Actor2Name", Equals("REGIME".utf8))) val partNums1 = keyIndex.partIdsFromFilters(filters1, 0, Long.MaxValue) partNums1 shouldEqual debox.Buffer(8, 9) val filters2 = Seq(ColumnFilter("Actor2Code", Equals("GOV".utf8)), ColumnFilter("Actor2Name", Equals("CHINA".utf8))) val partNums2 = keyIndex.partIdsFromFilters(filters2, 0, Long.MaxValue) partNums2 shouldEqual debox.Buffer.empty[Int] } it("should ignore unsupported columns and return empty filter") { val index2 = new PartKeyLuceneIndex(dataset1.ref, dataset1.schema.partition, 0, 1.hour) partKeyFromRecords(dataset1, records(dataset1, readers.take(10))).zipWithIndex.foreach { case (addr, i) => index2.addPartKey(partKeyOnHeap(dataset6, ZeroPointer, addr), i, System.currentTimeMillis())() } keyIndex.refreshReadersBlocking() val filters1 = Seq(ColumnFilter("Actor2Code", Equals("GOV".utf8)), ColumnFilter("Year", Equals(1979))) val partNums1 = index2.partIdsFromFilters(filters1, 0, Long.MaxValue) partNums1 shouldEqual debox.Buffer.empty[Int] } it("should be able to fetch partKey from partId and partId from partKey") { // Add the first ten keys and row numbers partKeyFromRecords(dataset6, records(dataset6, readers.take(10)), Some(partBuilder)) .zipWithIndex.foreach { case (addr, i) => val partKeyBytes = partKeyOnHeap(dataset6, ZeroPointer, addr) keyIndex.addPartKey(partKeyBytes, i, System.currentTimeMillis())() keyIndex.refreshReadersBlocking() keyIndex.partKeyFromPartId(i).get.bytes shouldEqual partKeyBytes // keyIndex.partIdFromPartKey(new BytesRef(partKeyBytes)) shouldEqual i } } it("should be able to sort results by endTime, startTime") { val addedKeys = partKeyFromRecords(dataset6, records(dataset6, readers.take(100)), Some(partBuilder)) .zipWithIndex.map { case (addr, i) => val start = Math.abs(Random.nextLong()) keyIndex.addPartKey(partKeyOnHeap(dataset6, ZeroPointer, addr), i, start)() keyIndex.refreshReadersBlocking() // updates need to be able to read startTime from index, so commit val end = start + Random.nextInt() keyIndex.updatePartKeyWithEndTime(partKeyOnHeap(dataset6, ZeroPointer, addr), i, end)() (end, start, i) } keyIndex.refreshReadersBlocking() for { from <- 0 until 99 // for various from values limit <- 3 to 100-from // for various limit values } { val sortedKeys = addedKeys.sorted val dropFrom = sortedKeys.drop(from) val partNums3 = keyIndex.partIdsOrderedByEndTime(limit, 
fromEndTime = dropFrom(0)._1) partNums3.toArray.toSeq shouldEqual dropFrom.map(_._3).take(limit).sorted val untilTimePartIds = keyIndex.partIdsOrderedByEndTime(10, toEndTime = dropFrom(0)._1 - 1) untilTimePartIds.toArray.toSeq shouldEqual sortedKeys.take(from).map(_._3).take(10).sorted } } }
tuplejump/FiloDB
core/src/test/scala/filodb.core/memstore/PartKeyLuceneIndexSpec.scala
Scala
apache-2.0
14,112
/**
 * sbt-application - application builder with ProGuard and JavaFX support
 *
 * Copyright (c) 2012 Alexey Aksenov ezh@ezh.msk.ru
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package sbt.application.javafx

import java.io.File
import java.lang.Boolean
import java.lang.String
import java.net.URLClassLoader

import org.apache.tools.ant.Project

class CSSToBinTask(val project: Project, val classLoader: URLClassLoader) extends AntTask {
  lazy val mainClass = classLoader.loadClass("com.sun.javafx.tools.ant.DeployFXTask")
  lazy val setVerboseMethod = mainClass.getMethod("setVerbose", classOf[java.lang.Boolean])
  lazy val setOutdirMethod = mainClass.getMethod("setOutdir", classOf[java.lang.String])
  //lazy val createFilesetMethod = mainClass.getMethod("createFileset", classOf[String], classOf[String], classOf[java.util.List])
  lazy val instance = Class.forName("com.sun.javafx.tools.ant.DeployFXTask", true, classLoader).newInstance()

  def setVerbose(verbose: java.lang.Boolean): Unit = setVerboseMethod.invoke(instance, verbose)
  // The reflected setter is declared with a String parameter, so pass the path rather than the File itself.
  def setOutdir(outDir: File): Unit = setOutdirMethod.invoke(instance, outDir.getPath)
  //def createFileset(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
}
sbt-android-mill/sbt-application
src/main/scala/sbt/application/javafx/CSSToBinTask.scala
Scala
apache-2.0
1,727
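/*
 * The wrapper above drives an Ant task purely through reflection so the JavaFX tools jar does
 * not have to be on the plugin's compile classpath. A minimal sketch of that reflective-setter
 * pattern against a hypothetical Greeter class (class and method names are illustrative only).
 * The key detail, as in the task above, is that the argument passed to invoke must match the
 * parameter type declared in getMethod.
 */
import java.net.{URL, URLClassLoader}

object ReflectiveSetterSketch {
  // Load a class from an external jar and call its setters without compiling against it.
  def run(jar: URL): Unit = {
    val loader   = new URLClassLoader(Array(jar), getClass.getClassLoader)
    val clazz    = loader.loadClass("com.example.Greeter") // hypothetical class
    val instance = clazz.getDeclaredConstructor().newInstance()

    val setName = clazz.getMethod("setName", classOf[String]) // hypothetical setter
    val greet   = clazz.getMethod("greet")

    setName.invoke(instance, "world")
    println(greet.invoke(instance))
  }
}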
package com.outr.arango.api.model import io.circe.Json case class GeneralGraphEdgeDefinitionAddHttpExamplesRc202(error: Boolean, code: Option[Int] = None, graph: Option[GraphRepresentation] = None)
outr/arangodb-scala
api/src/main/scala/com/outr/arango/api/model/GeneralGraphEdgeDefinitionAddHttpExamplesRc202.scala
Scala
mit
316
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.optimizer import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.dsl.plans._ import org.apache.spark.sql.catalyst.expressions.{Alias, IntegerLiteral, Literal} import org.apache.spark.sql.catalyst.plans.PlanTest import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation, Project} import org.apache.spark.sql.catalyst.rules.Rule import org.apache.spark.sql.internal.SQLConf /** * A dummy optimizer rule for testing that decrements integer literals until 0. */ object DecrementLiterals extends Rule[LogicalPlan] { def apply(plan: LogicalPlan): LogicalPlan = plan transformExpressions { case IntegerLiteral(i) if i > 0 => Literal(i - 1) } } class OptimizerSuite extends PlanTest { test("Optimizer exceeds max iterations") { val iterations = 5 val maxIterationsNotEnough = 3 val maxIterationsEnough = 10 val analyzed = Project(Alias(Literal(iterations), "attr")() :: Nil, OneRowRelation()).analyze withSQLConf(SQLConf.OPTIMIZER_MAX_ITERATIONS.key -> maxIterationsNotEnough.toString) { val optimizer = new SimpleTestOptimizer() { override def defaultBatches: Seq[Batch] = Batch("test", fixedPoint, DecrementLiterals) :: Nil } val message1 = intercept[RuntimeException] { optimizer.execute(analyzed) }.getMessage assert(message1.startsWith(s"Max iterations ($maxIterationsNotEnough) reached for batch " + s"test, please set '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}' to a larger value.")) withSQLConf(SQLConf.OPTIMIZER_MAX_ITERATIONS.key -> maxIterationsEnough.toString) { try { optimizer.execute(analyzed) } catch { case ex: AnalysisException if ex.getMessage.contains(SQLConf.OPTIMIZER_MAX_ITERATIONS.key) => fail("optimizer.execute should not reach max iterations.") } } val message2 = intercept[RuntimeException] { optimizer.execute(analyzed) }.getMessage assert(message2.startsWith(s"Max iterations ($maxIterationsNotEnough) reached for batch " + s"test, please set '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}' to a larger value.")) } } }
maropu/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerSuite.scala
Scala
apache-2.0
3,060
package com.webtrends.harness.component.kafka.actor import akka.actor.ActorRef import com.webtrends.harness.app.HActor import com.webtrends.harness.component.kafka.actor.KafkaTopicManager.DownSources import com.webtrends.harness.component.kafka.util.KafkaSettings import com.webtrends.harness.component.metrics.MetricsAdapter import com.webtrends.harness.component.metrics.metrictype.Counter import org.apache.http.client.config.RequestConfig import org.apache.http.client.methods.HttpGet import org.apache.http.impl.client.HttpClientBuilder import scala.io.Source import scala.util.Try import scala.util.parsing.json.{JSON, JSONObject} import scala.collection.mutable /** * Allows one to setup monitoring on any source nodes to watch for scheduled downtime and put workers into a NORMAL state * instead of DEGRADED/CRITICAL when it is expected for them to go down */ case class HostList(hosts: List[String]) case class LastMessage() class SourceMonitor(component: String) extends HActor with KafkaSettings with MetricsAdapter { val nagiosServer = Try { kafkaConfig.getString("nagios-host") } getOrElse "http://nagios:8080/host/" val downSources = mutable.Set.empty[String] // Calls to nagios should timeout if the API becomes despondent val nagiosConnectTimeout = Try { kafkaConfig.getInt("nagios-timeout-millis") } getOrElse 5000 val requestBuilder = RequestConfig.custom().setConnectTimeout(nagiosConnectTimeout) .setConnectionRequestTimeout(nagiosConnectTimeout).build() val nagiosSourceChecks = Counter(s"nagios.source.checks.$component") /** * These two receives are designed in such a way that we will only ever process the last message * in the mailbox (ie the most recently received) as there is no reason to process a bunch of * intermediate host lists. As a result, one should spin up a separate SourceMonitor for each * place it is used */ def waitingOnHostList: Receive = { case HostList(hosts) => self ! LastMessage() context.become(processLastHostList(hosts.toSet, sender())) } def processLastHostList(lastHostList: Set[String], itsSender: ActorRef): Receive = { case HostList(hosts) => context.become(processLastHostList(hosts.toSet, sender())) case LastMessage() => itsSender ! DownSources(lastHostList.filter(host => !hostIsGreen(host.split('.')(0)))) context.become(waitingOnHostList) } override def receive: Receive = waitingOnHostList def hostIsGreen(host: String): Boolean = { try { val response = getRestContent(s"$nagiosServer$host") response match { case Some(resp) => nagiosSourceChecks.incr val parsed = JSON.parseRaw(resp).get.asInstanceOf[JSONObject].obj("content").asInstanceOf[JSONObject] if (parsed.obj("scheduled_downtime_depth") != "0") downSources.add(host) else downSources.remove(host) parsed.obj("scheduled_downtime_depth") == "0" case None => log.warn(s"$component: Did not get a nagios response for $host, using last status of ${downSources.contains(host)}") !downSources.contains(host) // Were we able to get through to nagios last time and see the host was down } } catch { case e: NoSuchElementException => log.error(e, s"$component: Error parsing nagios response for host $host") !downSources.contains(host) // Nagios status call returns something unexpected case e: Exception => log.error(e, s"$component: Error getting nagios response for host $host") !downSources.contains(host) } } // Returns the text content from a REST URL. Returns a None if there is a problem. 
def getRestContent(url:String): Option[String] = { var content: Option[String] = None val httpClient = HttpClientBuilder.create().setDefaultRequestConfig(requestBuilder).build() try { val httpResponse = httpClient.execute(new HttpGet(url)) val entity = httpResponse.getEntity if (entity != null) { val inputStream = entity.getContent content = Some(Source.fromInputStream(inputStream).getLines().mkString) inputStream.close() } } finally { httpClient.close() } content } }
malibuworkcrew/wookiee-kafka
src/main/scala/com/webtrends/harness/component/kafka/actor/SourceMonitor.scala
Scala
apache-2.0
4,217
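/*
 * The two receive blocks in SourceMonitor implement a "process only the latest message" pattern:
 * each new HostList replaces the pending one, and the self-sent LastMessage marker is what finally
 * triggers processing. A stripped-down sketch of the same idea; the actor and message names here
 * are hypothetical and the payload is just a String.
 */
import akka.actor.{Actor, ActorSystem, Props}

case class Work(payload: String)
case object Flush

class LatestOnlyActor extends Actor {
  def receive: Receive = idle

  def idle: Receive = {
    case Work(p) =>
      self ! Flush               // marker: anything already queued ahead of it may still supersede p
      context.become(pending(p))
  }

  def pending(latest: String): Receive = {
    case Work(p) => context.become(pending(p)) // a newer message wins; no work done yet
    case Flush =>
      println(s"processing only: $latest")     // only the most recent payload is processed
      context.become(idle)
  }
}

object LatestOnlyDemo extends App {
  val system = ActorSystem("latest-only")
  val ref = system.actorOf(Props(new LatestOnlyActor), "latestOnly")
  (1 to 5).foreach(i => ref ! Work(s"host-list-$i"))
}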
package gitbucket.core.api

/**
 * Path for an API URL. If the path is set to '/repos/aa/bb', it expands to
 * 'http://server:port/repos/aa/bb' when converted to JSON.
 */
case class ApiPath(path: String)
intermezzo-fr/gitbucket
src/main/scala/gitbucket/core/api/ApiPath.scala
Scala
apache-2.0
187
import cats.free.Free import scala.collection.mutable import cats.free.Free.liftF import cats.{~>, Id} object InteractionKv { // DSL Interaction sealed trait Interaction[A] case object Ask extends Interaction[String] case class Tell(input: String) extends Interaction[Unit] type InteractionAlgebra[A] = Free[Interaction, A] def ask(question: String): InteractionAlgebra[String] = tell(question).flatMap(_ => Free.liftF(Ask)) def tell(input: String): InteractionAlgebra[Unit] = Free.liftF(Tell(input)) // Interpretation of our program def interactionInterpreter: (Interaction ~> Id) = new (Interaction ~> Id) { def apply[A](fa: Interaction[A]): Id[A] = fa match { case Ask => scala.io.StdIn.readLine case Tell(input) => println(input) } } // DSL KV sealed trait KVStoreA[A] case class Put[T](key: String, value: T) extends KVStoreA[Unit] case class Get[T](key: String) extends KVStoreA[Option[T]] case class Delete(key: String) extends KVStoreA[Unit] type KVStore[A] = Free[KVStoreA, A] def put[T](key: String, value: T): KVStore[Unit] = liftF[KVStoreA, Unit](Put[T](key, value)) def get[T](key: String): KVStore[Option[T]] = liftF[KVStoreA, Option[T]](Get[T](key)) def delete(key: String): KVStore[Unit] = liftF(Delete(key)) def kvInterpreter: (KVStoreA ~> Id) = new (KVStoreA ~> Id) { val kvs = mutable.Map.empty[String, Any] def apply[A](fa: KVStoreA[A]): Id[A] = fa match { case Put(key, value) => kvs(key) = value case Get(key) => kvs.get(key) case Delete(key) => kvs.remove(key); () } } }
PaNaVTEC/Katas
key-value-pair/src/main/scala/InteractionKv.scala
Scala
apache-2.0
1,607
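/*
 * Both DSLs above lift their operations into Free and pair them with an Id interpreter, but the
 * file stops short of composing and running a program. A small sketch of that last step for the
 * key-value DSL; the program itself is made up, and the object is assumed to sit next to
 * InteractionKv in the same (default) package.
 */
import cats.Id
import InteractionKv._

object FreeProgramSketch extends App {
  // A pure description of the computation: nothing runs until an interpreter folds it.
  def program: KVStore[Option[Int]] =
    for {
      _ <- put("wild-cats", 2)
      _ <- put("tame-cats", 5)
      n <- get[Int]("wild-cats")
      _ <- delete("tame-cats")
    } yield n

  // foldMap walks the Free structure, handing each KVStoreA instruction to the interpreter.
  val result: Id[Option[Int]] = program.foldMap(kvInterpreter)
  println(result) // expected Some(2) with the mutable-map interpreter above
}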
package jigg.pipeline /* Copyright 2013-2015 Takafumi Sakakibara and Hiroshi Noji Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import jigg.nlp.ccg.{ParserModel, ParserRunner, SuperTaggerRunner} import jigg.nlp.ccg.lexicon.{PoSTaggedSentence, Derivation, Point} import jigg.nlp.ccg.tagger.{MaxEntMultiTagger} import jigg.nlp.ccg.parser.{TransitionBasedParser, KBestDecoder} import jigg.util.PropertiesUtil import jigg.util.XMLUtil.RichNode import java.util.Properties import scala.xml._ import scala.collection.mutable.HashMap import scala.collection.mutable.ArrayBuffer class CCGParseAnnotator(override val name: String, override val props: Properties) extends SentencesAnnotator { @Prop(gloss = "Path to the trained model (you can omit this if you load a jar which packs models)") var model = "" @Prop(gloss = "Pruning parameter in supertagging") var beta = 0.001 @Prop(gloss = "Maximum category candidates for each by supertagging, -1 for infinity") var maxK = -1 @Prop(gloss = "Beam size (usually 64 is ok); Recommend to use the same beam size as training. This is automatically done if the model is loaded from a model jar.") var beam = 64 @Prop(gloss = "Outputs k-best derivations if this value > 1") var kBest = 1 @Prop(gloss = "Output connected derivations first even when unconnected trees have higher scores") var preferConnected = false readProps() val taggerParams = SuperTaggerRunner.Params(beta, maxK) val params = ParserRunner.Params(beam, preferConnected, taggerParams) lazy val parserModel = loadModel() lazy val parser = new ParserRunner(parserModel, params) // A hack to prevent loading when -help is given. override def init() = { parserModel parser } def loadModel() = try { model match { case "" => ParserModel.loadFromJar(beam) case path => ParserModel.loadFrom(path) } } catch { case e: Exception => val errorMsg = s"""Failed to start CCG parser. Make sure the model file of CCG is already installed. 
If not, execute the following command in jigg directory: ./script/download_models.sh """ argumentError("model", errorMsg) } override def newSentenceAnnotation(sentence: Node) = { val sentenceID = (sentence \\ "@id").toString // s12 val tokens = sentence \\ "tokens" val tokenSeq = tokens \\ "token" val posTaggedSentence = SentenceConverter.toTaggedSentence(tokenSeq) val derivs: Seq[(Derivation, Double)] = getDerivations(posTaggedSentence) val point2id = getPoint2id(derivs.unzip._1) def ccgAnnotation(derivID: Int, deriv: Derivation, score: Double): Node = { val ccgID = sentenceID + "_ccg" + derivID // e.g., s12_ccg0 val spans = new ArrayBuffer[Node] def spanID(pointid: Int) = sentenceID + "_sp" + pointid deriv.roots foreach { root => deriv foreachPoint({ point => val pid = point2id(derivID, point) val rule = deriv.get(point).get val ruleSymbol = rule.ruleSymbol match { case "" => None case symbol => Some(Text(symbol)) } val childIDs = rule.childPoint.points map { p => spanID(point2id(derivID, p)) } match { case Seq() => tokenSeq(point.x).attribute("id") case ids => Some(Text(ids.mkString(" "))) } spans += <span id={ spanID(pid) } begin={ point.x.toString } end={ point.y.toString } symbol={ point.category.toString } rule={ ruleSymbol } children={ childIDs } /> }, root) } val rootIDs = deriv.roots.map { p => spanID(point2id(derivID, p)) }.mkString(" ") <ccg annotators={ name } root={ rootIDs } id={ ccgID } score={ score.toString }>{ spans }</ccg> } val ccgs = derivs.zipWithIndex map { case ((deriv, score), i) => ccgAnnotation(i, deriv, score) } sentence addChild ccgs } object SentenceConverter { val dict = parserModel.taggerModel.dict def toTaggedSentence(tokenSeq: NodeSeq) = { val terminalSeq = tokenSeq map { token => val form = dict.getWordOrCreate(token \\ "@form" toString()) val lemma = dict.getWordOrCreate(token \\ "@lemma" toString()) val cForm = token \\ "@cForm" toString() match { case "*" => "_"; case x => x } val posSeq = Seq("@pos", "@pos1", "@pos2", "@pos3") map { token \\ _ toString() } val pos = posSeq.indexOf("*") match { case -1 => posSeq.mkString("-") case idx => posSeq.take(idx).mkString("-") } val combinedPoS = dict.getPoSOrCreate(pos + "/" + cForm) (form, lemma, combinedPoS) } new PoSTaggedSentence(terminalSeq.map(_._1), terminalSeq.map(_._2), terminalSeq.map(_._3)) } } def getDerivations(sentence: PoSTaggedSentence): Seq[(Derivation, Double)] = parser.kBestDerivations(sentence, kBest) def getPoint2id(derivs: Seq[Derivation]): Map[(Int, Point), Int] = { val map = new HashMap[(Int, Point), Int] var i = 0 derivs.zipWithIndex foreach { case (deriv, derivID) => deriv.roots foreach { root => deriv foreachPoint({ point => map += (derivID, point) -> i i += 1 }, root) } } map.toMap } override def requires = Set(JaRequirement.TokenizeWithIPA) override def requirementsSatisfied = Set(JaRequirement.CCGDerivation) }
tomeken-yoshinaga/jigg
src/main/scala/jigg/pipeline/CCGParseAnnotator.scala
Scala
apache-2.0
5,848
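The annotator above attaches <ccg> and <span> elements whose ids are derived from the sentence id. A tiny standalone check of that id scheme, using only the string concatenations visible in ccgAnnotation and spanID; the object name and the concrete values are hypothetical.

object CcgIdSchemeDemo extends App {
  val sentenceID = "s12"                          // taken from <sentence id="s12">
  def ccgID(derivID: Int): String  = sentenceID + "_ccg" + derivID
  def spanID(pointID: Int): String = sentenceID + "_sp" + pointID

  println(ccgID(0))    // s12_ccg0  -- first derivation of sentence s12
  println(spanID(3))   // s12_sp3   -- fourth span point across all derivations
}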
package org.cg.scala.dhc.domelments import org.cg.scala.dhc.util.FileInfo /** * Created by ssmertnig on 4/28/17. */ case class TargetExtension(override val file: FileInfo, val extensions: Array[Extension]) extends Extension(file, extensions) { override def getItems(): List[Item] = { val systems = (xml \\ "targetSystems" \\ "targetSystem") val items = systems.flatMap(s => s \\ "targetItems" \\ "item") items.map(i => Item(i)).toList } override def getErrors(): List[String] = { val items = getItems() /* Canonical <-> Target Itemname item/canonicalItemSource Attributename item/attribute/transformationExpression (spel)/spel items */ val canonicalItemAttributeMap = canonicalItemsToMap val itemsWithoutCanonicalReference = items.filter(i => canonicalItemAttributeMap.get(i.canonicalItemSource.getOrElse("")).isEmpty) val itemsWithCanonicalReference = items.toSet -- itemsWithoutCanonicalReference.toSet val attributeNSpelItems = flattenAttributesNSpelItems(itemsWithCanonicalReference) val spelItemsWithParseError = getSpelItemsWithParseError(attributeNSpelItems) val canonicalItemRefsFromSpelExpr = flattenItemAttributeNSpelItemSubStructure(attributeNSpelItems).toList val invalidCanonicalItemRefsFromSpelExpr = canonicalItemRefsFromSpelExpr.filter(i => canonicalItemAttributeMap.get(i.refItemName).isEmpty) val validCanonicalItemRefsFromSpelExpr = canonicalItemRefsFromSpelExpr.toSet -- invalidCanonicalItemRefsFromSpelExpr val scanonicalItemRefsFromSpelExpr = canonicalItemRefsFromSpelExpr.toString() val sinvalidCanonicalItemRefsFromSpelExpr = invalidCanonicalItemRefsFromSpelExpr.toString() val flatAttributeNSpelItems = flattenItemAttributeNSpelItem(attributeNSpelItems) ++ validCanonicalItemRefsFromSpelExpr val spelItemsWithoutRefAttributes = flatAttributeNSpelItems.filter(a => canonicalItemAttributeMap(a.refItemName).get(a.spelIdentifier).isEmpty) (getErrors4ItemsWithoutCanonicalReference(itemsWithoutCanonicalReference) ++ getErrorMsgs4SpelParsingErrors(spelItemsWithParseError) ++ getErrors4MissingRawRefAttribute(spelItemsWithoutRefAttributes) ++ getErrors4InvalidCanonicalItemRefsFromSpelExpr(invalidCanonicalItemRefsFromSpelExpr)) .toList } protected def getSpelItemsWithParseError(attributeNSpelItems: Set[(Item, Attribute, SpelItem)]) = { attributeNSpelItems .filter { case (_, att, spelItem) => spelItem.error.isDefined } .map { case (_, att, spelItem) => (att.name, att.ln, spelItem.error.get) } } protected def flattenAttributesNSpelItems(itemsWithCanonicalReference: Set[Item]) = { for (item <- itemsWithCanonicalReference; attribute <- item.attributes; transformation <- attribute.transformation ) yield (item, attribute, transformation.spelItem) } val SRC_MISSING = "Canonical item source missing" protected def flattenItemAttributeNSpelItem(attributeNSpelItems: Set[(Item, Attribute, SpelItem)]) = { attributeNSpelItems .filter { case (_, _, spelItemCheck) => spelItemCheck.error.isEmpty } .flatMap { case (item, att, spelItem) => spelItem.identifiers .map(spelIdentifier => FlatItemAttributeNSpelItem(item.canonicalItemSource.getOrElse(SRC_MISSING), att.name, att.ln, spelIdentifier)) } } protected def flattenItemAttributeNSpelItemSubStructure(attributeNSpelItems: Set[(Item, Attribute, SpelItem)]) = { attributeNSpelItems .filter { case (_, _, spelItemCheck) => spelItemCheck.itemReferences.isDefined } .flatMap { case (item, att, spelItem) => spelItem.itemReferences.get .map(itemRef => FlatItemAttributeNSpelItem(itemRef.itemName, att.name, att.ln, itemRef.attributeName)) } } protected def 
getErrors4MissingRawRefAttribute(spelItemsWithoutRefAttributes: Set[FlatItemAttributeNSpelItem]) = { spelItemsWithoutRefAttributes.map(i => s"E02 Target attribute '${i.attributeName}' at ln ${i.ln}: no canonical reference for identifier '${i.spelIdentifier}' found") } private def getErrors4InvalidCanonicalItemRefsFromSpelExpr(invalidCanonicalItemRefsFromSpelExpr: List[FlatItemAttributeNSpelItem]) = invalidCanonicalItemRefsFromSpelExpr.map(i => s"E01 Target item '${i.refItemName}' in spel expression for attribute '${i.attributeName}' at ln ${i.ln}: no canonical item found") }
curiosag/datahubchecker
datahubchecker-utility/src/main/scala/org/cg/scala/dhc/domelments/TargetExtension.scala
Scala
unlicense
4,476
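The checks above repeatedly flatten nested (item, attribute, SpEL) structures with for-comprehensions before filtering for missing references. A simplified, self-contained sketch of that flattening pattern; Spel, Attribute, Item and FlattenDemo are toy stand-ins for the real domain classes, not the project's types.

object FlattenDemo extends App {
  case class Spel(identifiers: List[String])
  case class Attribute(name: String, spel: Spel)
  case class Item(name: String, attributes: List[Attribute])

  val items = Set(
    Item("order", List(Attribute("total", Spel(List("price", "qty"))))))

  // One triple per leaf identifier, same shape as flattenItemAttributeNSpelItem.
  val triples = for {
    item <- items
    att  <- item.attributes
    id   <- att.spel.identifiers
  } yield (item.name, att.name, id)

  triples.foreach(println)   // (order,total,price) and (order,total,qty)
}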
package breeze.linalg /* Copyright 2012 David Hall Licensed under the Apache License, Version 2.0 (the "License") you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import scala.{specialized=>spec} import breeze.generic._ import breeze.linalg.support._ import breeze.linalg.operators._ import breeze.math._ import breeze.util.{ArrayUtil, Isomorphism} import breeze.storage.Zero import scala.reflect.ClassTag import com.github.fommil.netlib.BLAS.{getInstance => blas} import breeze.macros.expand import scala.math.BigInt import spire.syntax.cfor._ import CanTraverseValues.ValuesVisitor import CanZipAndTraverseValues.PairValuesVisitor import java.io.ObjectStreamException /** * A DenseVector is the "obvious" implementation of a Vector, with one twist. * The underlying data may have more data than the Vector, represented using an offset * into the array (for the 0th element), and a stride that is how far elements are apart * from one another. * * The i'th element is at offset + i * stride * * @author dlwh * * @param data data array * @param offset index of the 0'th element * @param stride separation between elements * @param length number of elements */ @SerialVersionUID(1L) // TODO: scala doesn't propagate this to specialized subclasses. Sigh. class DenseVector[@spec(Double, Int, Float, Long) V](val data: Array[V], val offset: Int, val stride: Int, val length: Int) extends StorageVector[V] with VectorLike[V, DenseVector[V]] with Serializable{ def this(data: Array[V]) = this(data, 0, 1, data.length) def this(data: Array[V], offset: Int) = this(data, offset, 1, data.length) def this(length: Int)(implicit man: ClassTag[V]) = this(new Array[V](length), 0, 1, length) // uncomment to get all the ridiculous places where specialization fails. // if(data.isInstanceOf[Array[Double]] && getClass.getName() == "breeze.linalg.DenseVector") throw new Exception("...") // ensure that operators are all loaded. DenseVector.init() def repr: DenseVector[V] = this def activeSize = length def apply(i: Int): V = { if(i < - size || i >= size) throw new IndexOutOfBoundsException(i + " not in [-"+size+","+size+")") val trueI = if(i<0) i+size else i data(offset + trueI * stride) } def update(i: Int, v: V): Unit = { if(i < - size || i >= size) throw new IndexOutOfBoundsException(i + " not in [-"+size+","+size+")") val trueI = if(i<0) i+size else i data(offset + trueI * stride) = v } private val noOffsetOrStride = offset == 0 && stride == 1 def unsafeUpdate(i: Int, v: V): Unit = if (noOffsetOrStride) data(i) = v else data(offset+i*stride) = v def activeIterator: Iterator[(Int, V)] = iterator def activeValuesIterator: Iterator[V] = valuesIterator def activeKeysIterator: Iterator[Int] = keysIterator override def equals(p1: Any) = p1 match { case y: DenseVector[_] => y.length == length && ArrayUtil.nonstupidEquals(data, offset, stride, length, y.data, y.offset, y.stride, y.length) case _ => super.equals(p1) } override def toString = { valuesIterator.mkString("DenseVector(",", ", ")") } /** * Returns a copy of this DenseVector. stride will always be 1, offset will always be 0. 
* @return */ def copy: DenseVector[V] = { implicit val man = ClassTag[V](data.getClass.getComponentType.asInstanceOf[Class[V]]) val r = new DenseVector(new Array[V](length)) r := this r } /** * same as apply(i). Gives the value at the underlying offset. * @param i index into the data array * @return apply(i) */ def valueAt(i: Int): V = apply(i) /** * Unsafe version of above, a way to skip the checks. */ def unsafeValueAt(i: Int): V = data(offset + i * stride) /** * Gives the logical index from the physical index. * @param i * @return i */ def indexAt(i: Int): Int = i /** * Always returns true. * * Some storages (namely HashStorage) won't have active * indices packed. This lets you know if the bin is * actively in use. * @param i index into index/data arrays * @return */ def isActive(i: Int): Boolean = true /** * Always returns true. * @return */ def allVisitableIndicesActive: Boolean = true /** * Faster foreach * @param fn * @tparam U */ override def foreach[@spec(Unit) U](fn: (V) => U): Unit = { if (stride == 1) { // ABCE stuff cforRange(offset until (offset + length)) { j => fn(data(j)) } } else { var i = offset cforRange(0 until length) { j => fn(data(i)) i += stride } } } /** * Slices the DenseVector, in the range [start,end] with a stride stride. * @param start * @param end * @param stride */ def slice(start: Int, end: Int, stride: Int=1): DenseVector[V] = { if(start > end || start < 0) throw new IllegalArgumentException("Slice arguments " + start +", " +end +" invalid.") if(end > length || end < 0) throw new IllegalArgumentException("End " + end + "is out of bounds for slice of DenseVector of length " + length) new DenseVector(data, start * this.stride + offset, stride * this.stride, (end-start)/stride) } // <editor-fold defaultstate="collapsed" desc=" Conversions (DenseMatrix, Array, Scala Vector) "> /** Creates a copy of this DenseVector that is represented as a 1 by length DenseMatrix */ def toDenseMatrix: DenseMatrix[V] = { copy.asDenseMatrix } /** Creates a view of this DenseVector that is represented as a 1 by length DenseMatrix */ def asDenseMatrix: DenseMatrix[V] = { new DenseMatrix[V](1, length, data, offset, stride) } override def toArray(implicit cm: ClassTag[V]): Array[V] = if(stride == 1){ ArrayUtil.copyOfRange(data, offset, offset + length) } else { val arr = new Array[V](length) var i = 0 var off = offset while(i < length) { arr(i) = data(off) off += stride i += 1 } arr } /**Returns copy of this [[breeze.linalg.DenseVector]] as a [[scala.Vector]]*/ def toScalaVector()(implicit cm: ClassTag[V]): scala.Vector[V] = this.toArray.toVector // </editor-fold> @throws(classOf[ObjectStreamException]) protected def writeReplace(): Object = { new DenseVector.SerializedForm(data, offset, stride, length) } } object DenseVector extends VectorConstructors[DenseVector] with DenseVector_GenericOps with DenseVectorOps with DenseVector_OrderingOps with DenseVector_SpecialOps { def zeros[@spec(Double, Int, Float, Long) V: ClassTag : Zero](size: Int): DenseVector[V] = { val data = new Array[V](size) if(size != 0 && data(0) != implicitly[Zero[V]].zero) ArrayUtil.fill(data, 0, data.length, implicitly[Zero[V]].zero) new DenseVector(data) } def apply[@spec(Double, Int, Float, Long) V](values: Array[V]): DenseVector[V] = new DenseVector(values) def ones[@spec(Double, Int, Float, Long) V: ClassTag:Semiring](size: Int): DenseVector[V] = fill[V](size, implicitly[Semiring[V]].one) def fill[@spec(Double, Int, Float, Long) V: ClassTag:Semiring](size: Int, v: V): DenseVector[V] = { val r = apply(new 
Array[V](size)) assert(r.stride == 1) ArrayUtil.fill(r.data, r.offset, r.length, v) r } // concatenation /** * Horizontal concatenation of two or more vectors into one matrix. * @throws IllegalArgumentException if vectors have different sizes */ def horzcat[V: ClassTag:Zero](vectors: DenseVector[V]*): DenseMatrix[V] = { val size = vectors.head.size if (!(vectors forall (_.size == size))) throw new IllegalArgumentException("All vectors must have the same size!") val result = DenseMatrix.zeros[V](size, vectors.size) for ((v, col) <- vectors.zipWithIndex) result(::, col) := v result } /** * Vertical concatenation of two or more column vectors into one large vector. */ def vertcat[V](vectors: DenseVector[V]*)(implicit canSet: OpSet.InPlaceImpl2[DenseVector[V], DenseVector[V]], vman: ClassTag[V], zero: Zero[V]): DenseVector[V] = { val size = vectors.foldLeft(0)(_ + _.size) val result = zeros[V](size) var offset = 0 for (v <- vectors) { result.slice(offset, offset + v.size) := v offset += v.size } result } // capabilities implicit def canCreateZerosLike[V:ClassTag:Zero]:CanCreateZerosLike[DenseVector[V], DenseVector[V]] = new CanCreateZerosLike[DenseVector[V], DenseVector[V]] { def apply(v1: DenseVector[V]): DenseVector[V] = { zeros[V](v1.length) } } implicit def canCopyDenseVector[V:ClassTag]: CanCopy[DenseVector[V]] = { new CanCopy[DenseVector[V]] { def apply(v1: DenseVector[V]): DenseVector[V] = { v1.copy } } } implicit def negFromScale[V](implicit scale: OpMulScalar.Impl2[DenseVector[V], V, DenseVector[V]], field: Ring[V]) = { new OpNeg.Impl[DenseVector[V], DenseVector[V]] { override def apply(a : DenseVector[V]): DenseVector[V] = { scale(a, field.negate(field.one)) } } } implicit def canMapValues[@specialized(Int, Float, Double) V, @specialized(Int, Float, Double) V2](implicit man: ClassTag[V2]): CanMapValues[DenseVector[V], V, V2, DenseVector[V2]] = { new CanMapValues[DenseVector[V], V, V2, DenseVector[V2]] { /**Maps all key-value pairs from the given collection. */ def map(from: DenseVector[V], fn: (V) => V2): DenseVector[V2] = { // this is slow // DenseVector.tabulate(from.length)(i => fn(from(i))) val arr = new Array[V2](from.length) val d = from.data val stride = from.stride val off = from.offset // https://wikis.oracle.com/display/HotSpotInternals/RangeCheckElimination if (stride == 1) { if (off == 0) { cforRange(0 until arr.length) { j => arr(j) = fn(d(j)) } } else { cforRange(0 until arr.length) { j => arr(j) = fn(d(j + off)) } } } else { var i = 0 var j = off while(i < arr.length) { arr(i) = fn(d(j)) i += 1 j += stride } } new DenseVector[V2](arr) } /**Maps all active key-value pairs from the given collection. */ def mapActive(from: DenseVector[V], fn: (V) => V2): DenseVector[V2] = { map(from, fn) } } } implicit def handholdCMV[T]= new CanMapValues.HandHold[DenseVector[T], T] implicit def canIterateValues[V]: CanTraverseValues[DenseVector[V], V] = new CanTraverseValues[DenseVector[V], V] { def isTraversableAgain(from: DenseVector[V]): Boolean = true /** Iterates all key-value pairs from the given collection. */ def traverse(from: DenseVector[V], fn: ValuesVisitor[V]): Unit = { fn.visitArray(from.data, from.offset, from.length, from.stride) } } implicit def canTraverseZipValues[V,W]: CanZipAndTraverseValues[DenseVector[V], DenseVector[W], V,W] = new CanZipAndTraverseValues[DenseVector[V], DenseVector[W], V,W] { /** Iterates all key-value pairs from the given collection. 
*/ def traverse(from1: DenseVector[V], from2: DenseVector[W], fn: PairValuesVisitor[V,W]): Unit = { if (from1.size != from2.size) { throw new IllegalArgumentException("Vectors to be zipped must have same size") } cfor(0)(i => i < from1.size, i => i+1)(i => { fn.visit(from1.unsafeValueAt(i), from2.unsafeValueAt(i)) }) } } implicit def canTraverseKeyValuePairs[V]: CanTraverseKeyValuePairs[DenseVector[V], Int, V] = new CanTraverseKeyValuePairs[DenseVector[V], Int, V] { def isTraversableAgain(from: DenseVector[V]): Boolean = true /** Iterates all key-value pairs from the given collection. */ def traverse(from: DenseVector[V], fn: CanTraverseKeyValuePairs.KeyValuePairsVisitor[Int, V]): Unit = { import from._ fn.visitArray((ind: Int)=> (ind - offset)/stride, data, offset, length, stride) } } implicit def canTransformValues[@specialized(Int, Float, Double) V]: CanTransformValues[DenseVector[V], V] = new CanTransformValues[DenseVector[V], V] { def transform(from: DenseVector[V], fn: (V) => V) { val d = from.data val length = from.length val stride = from.stride val offset = from.offset if (stride == 1) { cforRange(offset until offset + length) { j => d(j) = fn(d(j)) } } else { val end = offset + stride * length var j = offset while (j != end) { d(j) = fn(d(j)) j += stride } } } def transformActive(from: DenseVector[V], fn: (V) => V) { transform(from, fn) } } implicit def canMapPairs[V, V2](implicit man: ClassTag[V2]):CanMapKeyValuePairs[DenseVector[V], Int, V, V2, DenseVector[V2]] = new CanMapKeyValuePairs[DenseVector[V], Int, V, V2, DenseVector[V2]] { /**Maps all key-value pairs from the given collection. */ def map(from: DenseVector[V], fn: (Int, V) => V2): DenseVector[V2] = { // slow: DenseVector.tabulate(from.length)(i => fn(i, from(i))) val arr = new Array[V2](from.length) val d = from.data val stride = from.stride var i = 0 var j = from.offset while(i < arr.length) { arr(i) = fn(i, d(j)) i += 1 j += stride } new DenseVector[V2](arr) } /**Maps all active key-value pairs from the given collection. 
*/ def mapActive(from: DenseVector[V], fn: (Int, V) => V2): DenseVector[V2] = { map(from, fn) } } // slicing implicit def canSlice[V]: CanSlice[DenseVector[V], Range, DenseVector[V]] = __canSlice.asInstanceOf[CanSlice[DenseVector[V], Range, DenseVector[V]]] private val __canSlice: CanSlice[DenseVector[Any], Range, DenseVector[Any]] = { new CanSlice[DenseVector[Any], Range, DenseVector[Any]] { def apply(v: DenseVector[Any], re: Range): DenseVector[Any] = { val r: Range = re.getRangeWithoutNegativeIndexes( v.length ) require(r.isEmpty || r.last < v.length, s"Processed slice range must be empty (=${r.isEmpty}) " + s"or the last entry (=${r.last}) must be less than target dimension length v.length (=${v.length})") require(r.isEmpty || r.start >= 0, s"Processed slice range must be empty (=${r.isEmpty}) " + s"or start (=${r.start}) must be >=0") new DenseVector(v.data, offset = v.offset + v.stride * r.start, stride = v.stride * r.step, length = r.length) } } } // implicit def canSliceExtender[V]: CanSlice[DenseVector[V], RangeExtender, DenseVector[V]] = __canSliceExtender.asInstanceOf[CanSlice[DenseVector[V], RangeExtender, DenseVector[V]]] // // private val __canSliceExtender = { // new CanSlice[DenseVector[Any], RangeExtender, DenseVector[Any]] { // def apply(v: DenseVector[Any], re: RangeExtender) = { // canSlice(v, re.getRange(v.length) ) // } // } // } implicit def canTransposeComplex: CanTranspose[DenseVector[Complex], DenseMatrix[Complex]] = { new CanTranspose[DenseVector[Complex], DenseMatrix[Complex]] { def apply(from: DenseVector[Complex]): DenseMatrix[Complex] = { new DenseMatrix(data = from.data map { _.conjugate }, offset = from.offset, cols = from.length, rows = 1, majorStride = from.stride) } } } // There's a bizarre error specializing float's here. class CanZipMapValuesDenseVector[@spec(Double, Int, Float, Long) V, @spec(Int, Double) RV:ClassTag] extends CanZipMapValues[DenseVector[V],V,RV,DenseVector[RV]] { def create(length : Int) = new DenseVector(new Array[RV](length)) /**Maps all corresponding values from the two collection. */ def map(from: DenseVector[V], from2: DenseVector[V], fn: (V, V) => RV): DenseVector[RV] = { require(from.length == from2.length, s"Vectors must have same length: ${from.length} != ${from2.length}") val result = create(from.length) var i = 0 while (i < from.length) { result.data(i) = fn(from(i), from2(i)) i += 1 } result } } implicit def zipMap[V, R:ClassTag]: CanZipMapValuesDenseVector[V, R] = new CanZipMapValuesDenseVector[V, R] implicit val zipMap_d: CanZipMapValuesDenseVector[Double, Double] = new CanZipMapValuesDenseVector[Double, Double] implicit val zipMap_f: CanZipMapValuesDenseVector[Float, Float] = new CanZipMapValuesDenseVector[Float, Float] implicit val zipMap_i: CanZipMapValuesDenseVector[Int, Int] = new CanZipMapValuesDenseVector[Int, Int] class CanZipMapKeyValuesDenseVector[@spec(Double, Int, Float, Long) V, @spec(Int, Double) RV:ClassTag] extends CanZipMapKeyValues[DenseVector[V],Int, V,RV,DenseVector[RV]] { def create(length : Int) = new DenseVector(new Array[RV](length)) /**Maps all corresponding values from the two collection. 
*/ def map(from: DenseVector[V], from2: DenseVector[V], fn: (Int, V, V) => RV): DenseVector[RV] = { require(from.length == from2.length, "Vector lengths must match!") val result = create(from.length) var i = 0 while (i < from.length) { result.data(i) = fn(i, from(i), from2(i)) i += 1 } result } override def mapActive(from: DenseVector[V], from2: DenseVector[V], fn: ((Int), V, V) => RV): DenseVector[RV] = { map(from, from2, fn) } } implicit def zipMapKV[V, R:ClassTag]: CanZipMapKeyValuesDenseVector[V, R] = new CanZipMapKeyValuesDenseVector[V, R] implicit val canAddIntoD: OpAdd.InPlaceImpl2[DenseVector[Double], DenseVector[Double]] = { new OpAdd.InPlaceImpl2[DenseVector[Double], DenseVector[Double]] { def apply(a: DenseVector[Double], b: DenseVector[Double]) = { require(a.length == b.length, s"Vectors must have same length: ${a.length} != ${b.length}") // negative strides want the offset to be the *last* logical element, not the first. so weird. val boff = if (b.stride >= 0) b.offset else (b.offset + b.stride * (b.length - 1)) val aoff = if (a.stride >= 0) a.offset else (a.offset + a.stride * (a.length - 1)) blas.daxpy( a.length, 1.0, b.data, boff, b.stride, a.data, aoff, a.stride) } implicitly[BinaryUpdateRegistry[Vector[Double], Vector[Double], OpAdd.type]].register(this) } } implicit object canDaxpy extends scaleAdd.InPlaceImpl3[DenseVector[Double], Double, DenseVector[Double]] with Serializable { def apply(y: DenseVector[Double], a: Double, x: DenseVector[Double]) { require(x.length == y.length, s"Vectors must have same length: ${x.length} != ${y.length}") val xoff = if (x.stride >= 0) x.offset else (x.offset + x.stride * (x.length - 1)) val yoff = if (y.stride >= 0) y.offset else (y.offset + y.stride * (y.length - 1)) blas.daxpy( x.length, a, x.data, xoff, x.stride, y.data, yoff, y.stride) } } implicitly[TernaryUpdateRegistry[Vector[Double], Double, Vector[Double], scaleAdd.type]].register(canDaxpy) implicit val canAddD: OpAdd.Impl2[DenseVector[Double], DenseVector[Double], DenseVector[Double]] = { pureFromUpdate_Double(canAddIntoD) } implicitly[BinaryRegistry[Vector[Double], Vector[Double], OpAdd.type, Vector[Double]]].register(canAddD) implicit val canSubIntoD: OpSub.InPlaceImpl2[DenseVector[Double], DenseVector[Double]] = { new OpSub.InPlaceImpl2[DenseVector[Double], DenseVector[Double]] { def apply(a: DenseVector[Double], b: DenseVector[Double]) = { require(a.length == b.length, s"Vectors must have same length: ${a.length} != ${b.length}") val boff = if (b.stride >= 0) b.offset else (b.offset + b.stride * (b.length - 1)) val aoff = if (a.stride >= 0) a.offset else (a.offset + a.stride * (a.length - 1)) blas.daxpy( a.length, -1.0, b.data, boff, b.stride, a.data, aoff, a.stride) } implicitly[BinaryUpdateRegistry[Vector[Double], Vector[Double], OpSub.type]].register(this) } } implicit val canSubD: OpSub.Impl2[DenseVector[Double], DenseVector[Double], DenseVector[Double]] = { pureFromUpdate_Double(canSubIntoD) } implicitly[BinaryRegistry[Vector[Double], Vector[Double], OpSub.type, Vector[Double]]].register(canSubD) implicit object canDotD extends OpMulInner.Impl2[DenseVector[Double], DenseVector[Double], Double] { def apply(a: DenseVector[Double], b: DenseVector[Double]) = { require(a.length == b.length, s"Vectors must have same length: ${a.length} != ${b.length}") if (a.length < 200) { // benchmarks suggest breakeven point is around length 200 if (a.offset == 0 && b.offset == 0 && a.stride == 1 && b.stride == 1) { DenseVectorSupportMethods.smallDotProduct_Double(a.data, b.data, 
a.length); // val ad = a.data // val bd = b.data // var sum = 0.0 // cforRange(0 until a.length) { i => // sum += ad(i) * bd(i) // } // sum } else { var sum = 0.0 cforRange(0 until a.length) { i => sum += a(i) * b(i) } sum } } else { val boff = if (b.stride >= 0) b.offset else (b.offset + b.stride * (b.length - 1)) val aoff = if (a.stride >= 0) a.offset else (a.offset + a.stride * (a.length - 1)) blas.ddot( a.length, b.data, boff, b.stride, a.data, aoff, a.stride) } } } implicitly[BinaryRegistry[Vector[Double], Vector[Double], OpMulInner.type, Double]].register(canDotD) implicit val canScaleIntoD: OpMulScalar.InPlaceImpl2[DenseVector[Double], Double] = { new OpMulScalar.InPlaceImpl2[DenseVector[Double], Double] { def apply(a: DenseVector[Double], b: Double) = { // in stark contrast to the above, this works the way you expect w.r.t. negative strides. // Fuck BLAS blas.dscal( a.length, b, a.data, a.offset, a.stride) } implicitly[BinaryUpdateRegistry[Vector[Double], Double, OpMulScalar.type]].register(this) } } implicit val canScaleD: OpMulScalar.Impl2[DenseVector[Double], Double, DenseVector[Double]] = { binaryOpFromUpdateOp(implicitly[CanCopy[DenseVector[Double]]], canScaleIntoD, implicitly[ClassTag[Double]]) } implicitly[BinaryRegistry[Vector[Double], Double, OpMulScalar.type, Vector[Double]]].register(canScaleD) implicit val canSetD: OpSet.InPlaceImpl2[DenseVector[Double], DenseVector[Double]] = new OpSet.InPlaceImpl2[DenseVector[Double], DenseVector[Double]] { def apply(a: DenseVector[Double], b: DenseVector[Double]) { require(a.length == b.length, s"Vectors must have same length: ${a.length} != ${b.length}") val boff = if (b.stride >= 0) b.offset else (b.offset + b.stride * (b.length - 1)) val aoff = if (a.stride >= 0) a.offset else (a.offset + a.stride * (a.length - 1)) blas.dcopy( a.length, b.data, boff, b.stride, a.data, aoff, a.stride) } implicitly[BinaryUpdateRegistry[Vector[Double], Vector[Double], OpSet.type]].register(this) } /* TODO: scaladoc crashes on this. I don't know why. It makes me want to die a little. Returns the k-norm of this Vector. */ @expand @expand.valify implicit def canNorm[@expand.args(Int, Float, Long, BigInt, Complex) T]: norm.Impl2[DenseVector[T], Double, Double] = { new norm.Impl2[DenseVector[T], Double, Double] { def apply(v: DenseVector[T], n: Double): Double = { import v._ if (n == 1) { var sum = 0.0 foreach (v => sum += v.abs.toDouble ) sum } else if (n == 2) { var sum = 0.0 foreach (v => { val nn = v.abs.toDouble; sum += nn * nn }) math.sqrt(sum) } else if (n == Double.PositiveInfinity) { var max = 0.0 foreach (v => { val nn = v.abs.toDouble; if (nn > max) max = nn }) max } else { var sum = 0.0 foreach (v => { val nn = v.abs.toDouble; sum += math.pow(nn,n) }) math.pow(sum, 1.0 / n) } } } } /** * Returns the p-norm of this Vector (specialized for Double). 
*/ implicit def canNorm_Double: norm.Impl2[DenseVector[Double], Double, Double] = { new norm.Impl2[DenseVector[Double], Double, Double] { def apply(v: DenseVector[Double], p: Double): Double = { if (p == 2) { var sq = 0.0 v.foreach (x => sq += x * x) math.sqrt(sq) } else if (p == 1) { var sum = 0.0 v.foreach (x => sum += math.abs(x)) sum } else if (p == Double.PositiveInfinity) { var max = 0.0 v.foreach (x => max = math.max(max, math.abs(x))) max } else if (p == 0) { var nnz = 0 v.foreach (x => if (x != 0) nnz += 1) nnz } else { var sum = 0.0 v.foreach (x => sum += math.pow(math.abs(x), p)) math.pow(sum, 1.0 / p) } } } } implicit def canDim[E]: dim.Impl[DenseVector[E],Int] = new dim.Impl[DenseVector[E],Int] { def apply(v: DenseVector[E]): Int = v.length } // this produces bad spaces for builtins (inefficient because of bad implicit lookup) implicit def space[E](implicit field: Field[E], man: ClassTag[E]): MutableFiniteCoordinateField[DenseVector[E],Int,E] = { import field._ implicit val cmv = canMapValues[E,E] MutableFiniteCoordinateField.make[DenseVector[E],Int,E] } implicit val space_Double: MutableFiniteCoordinateField[DenseVector[Double], Int, Double] = { MutableFiniteCoordinateField.make[DenseVector[Double],Int,Double] } implicit val space_Float: MutableFiniteCoordinateField[DenseVector[Float], Int, Float] = { MutableFiniteCoordinateField.make[DenseVector[Float],Int,Float] } implicit val space_Int: MutableFiniteCoordinateField[DenseVector[Int], Int, Int] = { MutableFiniteCoordinateField.make[DenseVector[Int],Int,Int] } implicit val space_Long: MutableFiniteCoordinateField[DenseVector[Long], Int, Long] = { MutableFiniteCoordinateField.make[DenseVector[Long],Int,Long] } object TupleIsomorphisms { implicit object doubleIsVector extends Isomorphism[Double,DenseVector[Double]] { def forward(t: Double) = DenseVector(t) def backward(t: DenseVector[Double]) = { assert(t.size == 1); t(0)} } implicit object pdoubleIsVector extends Isomorphism[(Double,Double),DenseVector[Double]] { def forward(t: (Double,Double)) = DenseVector(t._1,t._2) def backward(t: DenseVector[Double]) = { assert(t.size == 2); (t(0),t(1))} } } /** * This class exists because @specialized instances don't respect the serial * @param data * @param offset * @param stride * @param length */ @SerialVersionUID(1L) case class SerializedForm(data: Array[_], offset: Int, stride: Int, length: Int) extends Serializable { @throws(classOf[ObjectStreamException]) def readResolve():Object = { data match {//switch to make specialized happy case x: Array[Int] => new DenseVector(x, offset, stride, length) case x: Array[Long] => new DenseVector(x, offset, stride, length) case x: Array[Double] => new DenseVector(x, offset, stride, length) case x: Array[Float] => new DenseVector(x, offset, stride, length) case x: Array[Short] => new DenseVector(x, offset, stride, length) case x: Array[Byte] => new DenseVector(x, offset, stride, length) case x: Array[Char] => new DenseVector(x, offset, stride, length) case x: Array[_] => new DenseVector(x, offset, stride, length) } } } // used to make sure the operators are loaded @noinline private def init() = {} }
calippo/breeze
math/src/main/scala/breeze/linalg/DenseVector.scala
Scala
apache-2.0
28,908
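The class documentation above explains that element i lives at offset + i * stride and that slices are views over the same array. A small usage sketch of that layout, assuming breeze on the classpath; DenseVectorLayoutDemo is an assumed name, the API calls (apply, slice, update) are the ones defined in the file above.

import breeze.linalg.DenseVector

object DenseVectorLayoutDemo extends App {
  val v = DenseVector(0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0)

  // slice(1, 7, 2) shares v's data array: offset 1, stride 2, length 3.
  val s = v.slice(1, 7, 2)
  println(s)                       // DenseVector(1.0, 3.0, 5.0)
  println((s.offset, s.stride))    // (1,2)

  // Writing through the view updates the shared array, so v(1) changes too.
  s(0) = 42.0
  println(v(1))                    // 42.0
}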
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.batch.sql import org.apache.flink.api.common.typeinfo.TypeInformation import org.apache.flink.api.scala._ import org.apache.flink.table.api.Types import org.apache.flink.table.api.config.ExecutionConfigOptions import org.apache.flink.table.plan.stats.TableStats import org.apache.flink.table.planner.plan.rules.physical.batch.BatchExecSortMergeJoinRule import org.apache.flink.table.planner.plan.rules.physical.batch.BatchPhysicalSortRule.TABLE_EXEC_RANGE_SORT_ENABLED import org.apache.flink.table.planner.plan.stats.FlinkStatistic import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedTableFunctions.StringSplit import org.apache.flink.table.planner.utils.{TableFunc1, TableTestBase} import com.google.common.collect.ImmutableSet import org.junit.{Before, Test} class RemoveCollationTest extends TableTestBase { private val util = batchTestUtil() @Before def setup(): Unit = { util.addTableSource("x", Array[TypeInformation[_]](Types.INT, Types.LONG, Types.STRING), Array("a", "b", "c"), FlinkStatistic.builder().tableStats(new TableStats(100L)).build() ) util.addTableSource("y", Array[TypeInformation[_]](Types.INT, Types.LONG, Types.STRING), Array("d", "e", "f"), FlinkStatistic.builder().tableStats(new TableStats(100L)).build() ) util.addTableSource("t1", Array[TypeInformation[_]](Types.INT, Types.LONG, Types.STRING), Array("a1", "b1", "c1"), FlinkStatistic.builder().tableStats(new TableStats(100L)).build() ) util.addTableSource("t2", Array[TypeInformation[_]](Types.INT, Types.LONG, Types.STRING), Array("d1", "e1", "f1"), FlinkStatistic.builder().tableStats(new TableStats(100L)).build() ) util.tableEnv.getConfig.getConfiguration.setBoolean( BatchExecSortMergeJoinRule.TABLE_OPTIMIZER_SMJ_REMOVE_SORT_ENABLED, true) } @Test def testRemoveCollation_OverWindowAgg(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "NestedLoopJoin,SortMergeJoin,HashAgg") val sqlQuery = """ | SELECT | SUM(b) sum_b, | AVG(SUM(b)) OVER (PARTITION BY a order by a) avg_b, | RANK() OVER (PARTITION BY a ORDER BY a) rn | FROM x | GROUP BY a """.stripMargin util.verifyExecPlan(sqlQuery) } @Test def testRemoveCollation_Aggregate(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin,NestedLoopJoin") val sqlQuery = """ |WITH r AS (SELECT * FROM x, y WHERE a = d AND c LIKE 'He%') |SELECT sum(b) FROM r group by a """.stripMargin util.verifyExecPlan(sqlQuery) } @Test def testRemoveCollation_Aggregate_1(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin,NestedLoopJoin") val sqlQuery = """ |WITH r AS 
(SELECT * FROM x, y WHERE a = d AND c LIKE 'He%') |SELECT sum(b) FROM r group by d """.stripMargin util.verifyExecPlan(sqlQuery) } @Test def testRemoveCollation_Sort(): Unit = { util.tableEnv.getConfig.getConfiguration.setBoolean(TABLE_EXEC_RANGE_SORT_ENABLED, true) val sqlQuery = """ |WITH r AS (SELECT a, b, COUNT(c) AS cnt FROM x GROUP BY a, b) |SELECT * FROM r ORDER BY a """.stripMargin // exec node does not support range sort yet, so we verify rel plan here util.verifyRelPlan(sqlQuery) } @Test def testRemoveCollation_Aggregate_3(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg") util.tableEnv.getConfig.getConfiguration.setBoolean(TABLE_EXEC_RANGE_SORT_ENABLED, true) val sqlQuery = """ |WITH r AS (SELECT * FROM x ORDER BY a, b) |SELECT a, b, COUNT(c) AS cnt FROM r GROUP BY a, b """.stripMargin util.verifyExecPlan(sqlQuery) } @Test def testRemoveCollation_Rank_1(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg") val sqlQuery = """ |SELECT a, SUM(b) FROM ( | SELECT * FROM ( | SELECT a, b, RANK() OVER(PARTITION BY a ORDER BY b) rk FROM x) | WHERE rk <= 10 |) GROUP BY a """.stripMargin util.verifyExecPlan(sqlQuery) } @Test def testRemoveCollation_Rank_2(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg") val sqlQuery = """ |SELECT a, b, MAX(c) FROM ( | SELECT * FROM ( | SELECT a, b, c, RANK() OVER(PARTITION BY a ORDER BY b) rk FROM x) | WHERE rk <= 10 |) GROUP BY a, b """.stripMargin util.verifyExecPlan(sqlQuery) } @Test def testRemoveCollation_Rank_3(): Unit = { // TODO remove local rank for single distribution input val sqlQuery = """ |SELECT * FROM ( | SELECT a, b, c, RANK() OVER(PARTITION BY a ORDER BY b) rk FROM ( | SELECT a, b, c FROM x ORDER BY a, b | ) |) WHERE rk <= 10 """.stripMargin util.verifyExecPlan(sqlQuery) } @Test def testRemoveCollation_Rank_4(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg") val sqlQuery = """ |SELECT * FROM ( | SELECT a, c, RANK() OVER(PARTITION BY a ORDER BY a) rk FROM ( | SELECT a, COUNT(c) AS c FROM x GROUP BY a | ) |) WHERE rk <= 10 """.stripMargin util.verifyExecPlan(sqlQuery) } @Test def testRemoveCollation_Rank_Singleton(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashAgg") val sqlQuery = """ |SELECT COUNT(a), SUM(b) FROM ( | SELECT * FROM ( | SELECT a, b, RANK() OVER(ORDER BY b) rk FROM x) | WHERE rk <= 10 |) """.stripMargin util.verifyExecPlan(sqlQuery) } @Test def testRemoveCollation_MultipleSortMergeJoins1(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin,NestedLoopJoin") val sql = """ |select * from | x join y on a = d | join t1 on a = a1 | left outer join t2 on a = d1 """.stripMargin util.verifyExecPlan(sql) } @Test def testRemoveCollation_MultipleSortMergeJoins_MultiJoinKeys1(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin,NestedLoopJoin") val sql = """ |select * from | x join y on a = d and b = e | join t1 on a = a1 and b = b1 | left outer join t2 on a = d1 and b = e1 """.stripMargin util.verifyExecPlan(sql) } @Test def testRemoveCollation_MultipleSortMergeJoins2(): Unit = { 
util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin,NestedLoopJoin") val sql = """ |select * from | x join y on a = d | join t1 on d = a1 | left outer join t2 on a1 = d1 """.stripMargin util.verifyExecPlan(sql) } @Test def testRemoveCollation_MultipleSortMergeJoins_MultiJoinKeys2(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin,NestedLoopJoin") val sql = """ |select * from | x join y on a = d and b = e | join t1 on d = a1 and e = b1 | left outer join t2 on a1 = d1 and b1 = e1 """.stripMargin util.verifyExecPlan(sql) } @Test def testRemoveCollation_MultipleSortMergeJoins3(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin,NestedLoopJoin") util.addTableSource("tb1", Array[TypeInformation[_]]( Types.STRING, Types.STRING, Types.STRING, Types.STRING, Types.STRING), Array("id", "key", "tb2_ids", "tb3_ids", "name"), FlinkStatistic.builder().uniqueKeys(ImmutableSet.of(ImmutableSet.of("id"))).build() ) util.addTableSource("tb2", Array[TypeInformation[_]](Types.STRING, Types.STRING), Array("id", "name"), FlinkStatistic.builder().uniqueKeys(ImmutableSet.of(ImmutableSet.of("id"))).build() ) util.addTableSource("tb3", Array[TypeInformation[_]](Types.STRING, Types.STRING), Array("id", "name"), FlinkStatistic.builder().uniqueKeys(ImmutableSet.of(ImmutableSet.of("id"))).build() ) util.addTableSource("tb4", Array[TypeInformation[_]](Types.STRING, Types.STRING), Array("id", "name"), FlinkStatistic.builder().uniqueKeys(ImmutableSet.of(ImmutableSet.of("id"))).build() ) util.addTableSource("tb5", Array[TypeInformation[_]](Types.STRING, Types.STRING), Array("id", "name"), FlinkStatistic.builder().uniqueKeys(ImmutableSet.of(ImmutableSet.of("id"))).build() ) util.addFunction("split", new StringSplit()) val sql = """ |with v1 as ( | select id, tb2_id from tb1, LATERAL TABLE(split(tb2_ids)) AS T(tb2_id) |), |v2 as ( | select id, tb3_id from tb1, LATERAL TABLE(split(tb3_ids)) AS T(tb3_id) |), | |join_tb2 as ( | select tb1_id, LISTAGG(tb2_name, ',') as tb2_names | from ( | select v1.id as tb1_id, tb2.name as tb2_name | from v1 left outer join tb2 on tb2_id = tb2.id | ) group by tb1_id |), | |join_tb3 as ( | select tb1_id, LISTAGG(tb3_name, ',') as tb3_names | from ( | select v2.id as tb1_id, tb3.name as tb3_name | from v2 left outer join tb3 on tb3_id = tb3.id | ) group by tb1_id |) | |select | tb1.id, | tb1.tb2_ids, | tb1.tb3_ids, | tb1.name, | tb2_names, | tb3_names, | tb4.name, | tb5.name | from tb1 | left outer join join_tb2 on tb1.id = join_tb2.tb1_id | left outer join join_tb3 on tb1.id = join_tb3.tb1_id | left outer join tb4 on tb1.key = tb4.id | left outer join tb5 on tb1.key = tb5.id """.stripMargin util.verifyExecPlan(sql) } @Test def testRemoveCollation_Correlate1(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin,NestedLoopJoin,HashAgg") util.addFunction("split", new TableFunc1) val sqlQuery = """ |WITH r AS (SELECT f, count(f) as cnt FROM y GROUP BY f), | v as (SELECT f1, f, cnt FROM r, LATERAL TABLE(split(f)) AS T(f1)) |SELECT * FROM x, v WHERE c = f """.stripMargin util.verifyExecPlan(sqlQuery) } @Test def testRemoveCollation_Correlate2(): Unit = { util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin,NestedLoopJoin,HashAgg") 
util.addFunction("split", new TableFunc1) val sqlQuery = """ |WITH r AS (SELECT f, count(f) as cnt FROM y GROUP BY f), | v as (SELECT f, f1 FROM r, LATERAL TABLE(split(f)) AS T(f1)) |SELECT * FROM x, v WHERE c = f AND f LIKE '%llo%' """.stripMargin util.verifyExecPlan(sqlQuery) } @Test def testRemoveCollation_Correlate3(): Unit = { // do not remove shuffle util.tableEnv.getConfig.getConfiguration.setString( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin,NestedLoopJoin,HashAgg") util.addFunction("split", new TableFunc1) val sqlQuery = """ |WITH r AS (SELECT f, count(f) as cnt FROM y GROUP BY f), | v as (SELECT f1 FROM r, LATERAL TABLE(split(f)) AS T(f1)) |SELECT * FROM x, v WHERE c = f1 """.stripMargin util.verifyExecPlan(sqlQuery) } }
aljoscha/flink
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/RemoveCollationTest.scala
Scala
apache-2.0
13,244
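A hypothetical extra case in the same style as the test class above; it is a sketch, not a standalone file, and assumes it sits inside RemoveCollationTest so it can reuse the util field, the x/y table sources and the existing imports. The method name and SQL are invented, the config key and helper calls are the ones the class already uses.

  @Test
  def testRemoveCollation_Aggregate_OnJoinKey(): Unit = {
    // Disable hash operators so the planner must choose SortMergeJoin + SortAgg,
    // then verify the plan for an aggregate keyed on the join key d.
    util.tableEnv.getConfig.getConfiguration.setString(
      ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin,NestedLoopJoin,HashAgg")
    val sqlQuery =
      """
        |WITH r AS (SELECT * FROM x, y WHERE a = d)
        |SELECT d, COUNT(b) AS cnt FROM r GROUP BY d
      """.stripMargin
    util.verifyExecPlan(sqlQuery)
  }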
/** * Copyright (c) 2002-2012 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.cypher.internal.pipes.matching import scala.collection.JavaConverters._ import org.neo4j.graphdb.{DynamicRelationshipType, Direction, Node} import collection.{Iterable, Traversable} import collection.Map import org.neo4j.cypher.internal.commands.Predicate import org.neo4j.helpers.ThisShouldNotHappenError import org.neo4j.cypher.internal.pipes.ExecutionContext /* This class performs simpler join operations, but faster than the full matcher. By linking together a number of these joiners, the whole pattern can be matched. Joiners can't handle loops or optional elements. Right now, they can't handle variable length relationships, but there's nothing stopping that, in theory. */ class Joiner(source: Linkable, start: String, dir: Direction, end: String, relType: Seq[String], relName: String, predicate: Predicate) extends Linkable { def getResult(m: ExecutionContext): Traversable[ExecutionContext] = source.getResult(m).flatMap(getSingleResult) def getSingleResult(m: ExecutionContext): Iterable[ExecutionContext] = { val startNode = m.get(start) match { case Some(x: Node) => x case _ => throw new ThisShouldNotHappenError("Andres Taylor", "The start node has to come from the underlying pipe!") } val rels = if (relType.isEmpty) startNode.getRelationships(dir).asScala else startNode.getRelationships(dir, relType.map(t => DynamicRelationshipType.withName(t)):_*) .asScala val between = rels.flatMap(rel => { val otherNode = rel.getOtherNode(startNode) val otherAlreadyFound = m.exists { case (key, value) => key != start && value == otherNode } if (otherAlreadyFound) { None } else { val product = Map(relName -> rel, end -> otherNode) Some(m.newWith(product)) } }) between.filter(predicate.isMatch) } def providesKeys(): Seq[String] = source.providesKeys() ++ Seq(relName, end) } class Start(val providesKeys: Seq[String]) extends Linkable { def getResult(m: ExecutionContext): Traversable[ExecutionContext] = Seq(m) } trait Linkable { def getResult(m: ExecutionContext): Traversable[ExecutionContext] def providesKeys(): Seq[String] }
dksaputra/community
cypher/src/main/scala/org/neo4j/cypher/internal/pipes/matching/Joiner.scala
Scala
gpl-3.0
3,095
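The comment in the file above says joiners are linked together so that each stage expands the bindings produced by its source, skipping nodes already bound under another key. A toy, self-contained illustration of that chaining idea over a hard-coded graph; JoinerChainDemo, Stage, Seed and Expand are simplified stand-ins, not Neo4j types.

object JoinerChainDemo extends App {
  type Binding = Map[String, String]

  trait Stage { def results(seed: Binding): Iterable[Binding] }

  class Seed extends Stage {
    def results(seed: Binding): Iterable[Binding] = Seq(seed)
  }

  // Pretend graph: adjacency by node name.
  val edges = Map("a" -> Seq("b", "c"), "b" -> Seq("c"))

  class Expand(source: Stage, from: String, to: String) extends Stage {
    def results(seed: Binding): Iterable[Binding] =
      source.results(seed).flatMap { m =>
        edges.getOrElse(m(from), Seq.empty)
          // mirror the "otherAlreadyFound" check: skip nodes bound under another key
          .filterNot(n => m.exists { case (k, v) => k != from && v == n })
          .map(n => m + (to -> n))
      }
  }

  val chain = new Expand(new Expand(new Seed, "x", "y"), "y", "z")
  chain.results(Map("x" -> "a")).foreach(println)   // Map(x -> a, y -> b, z -> c)
}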
/* __ *\ ** ________ ___ / / ___ __ ____ Scala.js Test Suite ** ** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL ** ** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ ** ** /____/\___/_/ |_/____/_/ | |__/ /____/ ** ** |/____/ ** \* */ package org.scalajs.testsuite.compiler import org.scalajs.jasminetest.JasmineTest import scala.scalajs.js object BooleanTest extends JasmineTest { describe("Boolean primitives") { it("&, | and ^ on booleans should return booleans") { expect(js.typeOf(true & false)).toEqual("boolean") expect(js.typeOf(true | false)).toEqual("boolean") expect(js.typeOf(true ^ false)).toEqual("boolean") } it("&, | and ^ on booleans should return correct results") { expect(false & false).toBeFalsy expect(false & true).toBeFalsy expect(true & false).toBeFalsy expect(true & true).toBeTruthy expect(false | false).toBeFalsy expect(true | false).toBeTruthy expect(false | true).toBeTruthy expect(true | true).toBeTruthy expect(false ^ false).toBeFalsy expect(true ^ false).toBeTruthy expect(false ^ true).toBeTruthy expect(true ^ true).toBeFalsy } } }
colinrgodsey/scala-js
test-suite/src/test/scala/org/scalajs/testsuite/compiler/BooleanTest.scala
Scala
bsd-3-clause
1,477
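The suite above exercises the eager &, | and ^ operators on booleans. A plain-Scala companion check, not part of the Scala.js suite, showing the other property of these operators: unlike && and ||, they evaluate both operands. EagerBooleanOpsDemo and rhs are made-up names.

object EagerBooleanOpsDemo extends App {
  var evaluated = false
  def rhs(): Boolean = { evaluated = true; true }

  val shortCircuited = false && rhs()
  println(evaluated)                   // false: && skipped rhs()

  val eager = false & rhs()
  println(evaluated)                   // true: & evaluated rhs() anyway
  println(s"$shortCircuited $eager")   // false false
}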
/* * Copyright 2015 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.circuitbreaker import org.scalatest.concurrent.{Eventually, ScalaFutures} import org.scalatest.{Matchers, WordSpecLike} import scala.concurrent.Future class WithCircuitBreakerSpec extends WordSpecLike with Matchers with Eventually with ScalaFutures { private def returnOk = Future.successful(true) "WithCircuitBreaker" should { "return the function result when no exception is thrown" in new UsingCircuitBreaker { lazy override val circuitBreakerName = "someServiceCircuitBreaker" lazy override val numberOfCallsToTriggerStateChange: Option[Int] = None lazy override val unhealthyServiceUnavailableDuration: Option[Long] = None lazy override val turbulencePeriodDuration: Option[Long] = None whenReady(withCircuitBreaker[Boolean](returnOk)) { actualResult => actualResult shouldBe true } } def throwException = throw new Exception("some exception") "return a circuit breaker exception when the function throws an exception" in new UsingCircuitBreaker { lazy override val circuitBreakerName = "test_2" lazy override val numberOfCallsToTriggerStateChange: Option[Int] = None lazy override val unhealthyServiceUnavailableDuration: Option[Long] = None lazy override val turbulencePeriodDuration: Option[Long] = None intercept[Exception] { withCircuitBreaker[Boolean](throwException) } intercept[Exception] { withCircuitBreaker[Boolean](throwException) } intercept[Exception] { withCircuitBreaker[Boolean](throwException) } Repository.circuitBreaker(circuitBreakerName).currentState.name shouldBe "HEALTHY" intercept[Exception] { withCircuitBreaker[Boolean](throwException) } Repository.circuitBreaker(circuitBreakerName).currentState.name shouldBe "UNHEALTHY" intercept[UnhealthyServiceException] { withCircuitBreaker[Boolean](throwException) }.getMessage shouldBe "test_2" Repository.circuitBreaker(circuitBreakerName).currentState.name shouldBe "UNHEALTHY" } "return a false canServiceBeInvoked when in an unhealthy state" in new UsingCircuitBreaker { lazy override val circuitBreakerName = "test_3" lazy override val numberOfCallsToTriggerStateChange: Option[Int] = Some(1) lazy override val unhealthyServiceUnavailableDuration: Option[Long] = Some(20) lazy override val turbulencePeriodDuration: Option[Long] = None intercept[Exception] { withCircuitBreaker[Boolean](throwException) } Repository.circuitBreaker(circuitBreakerName).currentState.name shouldBe "UNHEALTHY" canServiceBeInvoked shouldBe false } "return a true canServiceBeInvoked when in a healthy state" in new UsingCircuitBreaker { lazy override val circuitBreakerName = "test_4" lazy override val numberOfCallsToTriggerStateChange: Option[Int] = None lazy override val unhealthyServiceUnavailableDuration: Option[Long] = None lazy override val turbulencePeriodDuration: Option[Long] = None Repository.circuitBreaker(circuitBreakerName).currentState.name shouldBe "HEALTHY" canServiceBeInvoked shouldBe true } } }
hugocf/reactive-circuit-breaker
src/test/scala/uk/gov/hmrc/circuitbreaker/WithCircuitBreakerSpec.scala
Scala
apache-2.0
3,856
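The spec above drives a circuit breaker from HEALTHY to UNHEALTHY by repeated failures. A hypothetical connector sketch built only from the members the spec exercises (circuitBreakerName, the three Option settings, withCircuitBreaker, and the threshold behaviour); the class name, setting values and callRemoteService are assumptions, not part of the library's documented API.

import scala.concurrent.Future
import uk.gov.hmrc.circuitbreaker.UsingCircuitBreaker

class CatalogueConnector extends UsingCircuitBreaker {
  lazy override val circuitBreakerName = "catalogueCircuitBreaker"
  lazy override val numberOfCallsToTriggerStateChange: Option[Int] = Some(4)
  lazy override val unhealthyServiceUnavailableDuration: Option[Long] = Some(5000L)
  lazy override val turbulencePeriodDuration: Option[Long] = None

  // Stand-in for a real downstream call.
  private def callRemoteService(): Future[String] = Future.successful("ok")

  // Failures are counted; once the threshold is hit the breaker goes UNHEALTHY
  // and further calls fail fast with UnhealthyServiceException, as in the spec.
  def fetch(): Future[String] = withCircuitBreaker(callRemoteService())
}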
package org.jetbrains.plugins.scala.lang.refactoring.util import com.intellij.openapi.util.TextRange import com.intellij.psi._ import com.intellij.psi.util.PsiTreeUtil import org.jetbrains.plugins.scala.ScalaBundle import org.jetbrains.plugins.scala.extensions.ResolvesTo import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScBindingPattern, ScCaseClause} import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScEnumerator, ScGenerator} import org.jetbrains.plugins.scala.lang.psi.api.statements._ import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScClassParameter, ScParameter} import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScMember, ScTypeDefinition} import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaRefactoringUtil._ import scala.collection.mutable.ArrayBuffer /** * User: Alexander Podkhalyuzin * Date: 24.06.2008 */ object ScalaVariableValidator { def empty = new ScalaVariableValidator(null, true, null, null) def apply(file: PsiFile, element: PsiElement, occurrences: Array[TextRange]): ScalaVariableValidator = { val container = enclosingContainer(commonParent(file, occurrences: _*)) val containerOne = enclosingContainer(element) new ScalaVariableValidator(element, occurrences.isEmpty, container, containerOne) } } class ScalaVariableValidator(selectedElement: PsiElement, noOccurrences: Boolean, enclosingContainerAll: PsiElement, enclosingOne: PsiElement) extends ScalaValidator(selectedElement, noOccurrences, enclosingContainerAll, enclosingOne) { protected override def findConflictsImpl(name: String, allOcc: Boolean): Seq[(PsiNamedElement, String)] = { //returns declaration and message val container = enclosingContainer(allOcc) if (container == null) return Seq.empty val buf = new ArrayBuffer[(PsiNamedElement, String)] buf ++= validateDown(container, name, allOcc) buf ++= validateReference(selectedElement, name) var cl = container while (cl != null && !cl.isInstanceOf[ScTypeDefinition]) cl = cl.getParent if (cl != null) { cl match { case x: ScTypeDefinition => for (member <- x.members) { member match { case x: ScVariable => for (el <- x.declaredElements if el.name == name) buf += ((el, messageForMember(el.name))) case x: ScValue => for (el <- x.declaredElements if el.name == name) buf += ((el, messageForMember(el.name))) case _ => } } for (function <- x.functions; if function.name == name) { buf += ((x, messageForMember(function.name))) } x match { case scClass: ScClass => for { constructor <- scClass.constructor parameter <- constructor.parameters if parameter.name == name } { buf += ((parameter, messageForClassParameter(parameter.name))) } case _ => } } } buf } private def validateReference(context: PsiElement, name: String): Seq[(PsiNamedElement, String)] = { ScalaPsiElementFactory.createExpressionFromText(name, context) match { case ResolvesTo(elem@ScalaPsiUtil.inNameContext(nameCtx)) => val message = nameCtx match { case _: ScClassParameter => messageForClassParameter(name) case _: ScParameter => messageForParameter(name) case m: ScMember if m.isLocal => if (m.getTextOffset < context.getTextOffset) messageForLocal(name) else "" case _: ScCaseClause | _: ScGenerator | _: ScEnumerator => messageForLocal(name) case _: PsiMember => messageForMember(name) case _ => "" } if (message != "") Seq((elem, message)) else Seq.empty case _ => Seq.empty } } private def validateDown(element: PsiElement, name: 
String, allOcc: Boolean): Seq[(PsiNamedElement, String)] = { val container = enclosingContainer(allOcc) val buf = new ArrayBuffer[(PsiNamedElement, String)] for (child <- element.getChildren) { child match { case x: ScClassParameter if x.name == name => buf += ((x, messageForClassParameter(x.name))) case x: ScParameter if x.name == name => buf += ((x, messageForParameter(x.name))) case x: ScFunctionDefinition if x.name == name => buf += (if (x.isLocal) (x, messageForLocal(x.name)) else (x, messageForMember(x.name))) case x: ScBindingPattern if x.name == name => buf += (if (x.isClassMember) (x, messageForMember(x.name)) else (x, messageForLocal(x.name))) case _ => } } if (element != container) for (child <- element.getChildren) { buf ++= validateDown(child, name, allOcc) } else { var from = { var parent: PsiElement = if (allOcc) { selectedElement //todo: } else { selectedElement } if (PsiTreeUtil.isAncestor(container, parent, true)) while (parent.getParent != null && parent.getParent != container) parent = parent.getParent else parent = container.getFirstChild parent } var fromDoubles = from.getPrevSibling var i = 0 while (fromDoubles != null) { i = i + 1 fromDoubles match { case x: ScVariableDefinition => val elems = x.declaredElements for (elem <- elems; if elem.name == name) buf += (if (x.isLocal) (elem, messageForLocal(elem.name)) else (elem, messageForMember(elem.name))) case x: ScPatternDefinition => val elems = x.declaredElements for (elem <- elems; if elem.name == name) buf += (if (x.isLocal) (elem, messageForLocal(elem.name)) else (elem, messageForMember(elem.name))) case _ => } fromDoubles = fromDoubles.getPrevSibling } while (from != null) { buf ++= validateDown(from, name, allOcc) from = from.getNextSibling } } buf } private def messageForMember(name: String) = ScalaBundle.message("introduced.variable.will.conflict.with.field", name) private def messageForLocal(name: String) = ScalaBundle.message("introduced.variable.will.conflict.with.local", name) private def messageForParameter(name: String) = ScalaBundle.message("introduced.variable.will.conflict.with.parameter", name) private def messageForClassParameter(name: String) = ScalaBundle.message("introduced.variable.will.conflict.with.class.parameter", name) }
ilinum/intellij-scala
src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaVariableValidator.scala
Scala
apache-2.0
6,792
package model

case class FeeBucket(index: Int, minFee: Int, maxFee: Int, count: Int, memCount: Int)

case class FeePrediction(fee: Long, minDelay: Int, maxDelay: Int, minMinutes: Int, maxMinutes: Int)

case class FeeSummary(
  predictions: Array[FeePrediction],
  feeByDelay: Map[Int, Long],
  feeByMinutes: Map[Int, Long]) {

  private val delays = feeByDelay.keys.toSeq.sortBy(delay => delay)
  private val minutes = feeByMinutes.keys.toSeq.sortBy(delay => delay)

  val fastestFee: Long = feeByDelay(delays.head).max(feeByMinutes(minutes.head))

  private val halfHourDelay = minutes.filter(_ <= 30).lastOption.getOrElse(minutes.head)
  val halfHourFee: Long = feeByMinutes(halfHourDelay)

  private val hourDelay = minutes.filter(_ <= 60).lastOption.getOrElse(minutes.head)
  val hourFee: Long = feeByMinutes(hourDelay)
}

object Fees {

  def getFeeStructure(): Array[FeeBucket] = {
    val fees = Transactions.transactionsByFeeToday().sortBy(_._1)

    // get bucket size and maximum buckets
    val txCount = fees.map(_._2).sum
    val counts = fees.map(_._2).scan(0)((p, v) => p + v)
    val most = (txCount * 0.95).toInt
    val mostIndex = counts.indexWhere(_ >= most)
    val maxBucket = fees(mostIndex)._1.toInt.max(100) // at least 100 satoshis max
    val bucketSize = (maxBucket / 190 + 1) * 10
    val lastBucket = (maxBucket + bucketSize - 1) / bucketSize
    val maxFee = fees.last._1.toInt

    // group fees into buckets
    val countByBucket = fees.groupBy(f => Math.min((f._1 + bucketSize - 1) / bucketSize, lastBucket)).
      map(f => (f._1, f._2.map(_._2).sum))

    // group mempool fees into buckets
    val memFees = Transactions.transactionsByFeeMempool()
    val memCountByBucket = memFees.groupBy(f => Math.min((f._1 + bucketSize - 1) / bucketSize, lastBucket)).
      map(f => (f._1, f._2.map(_._2).sum))

    // generate bucket metadata
    val buckets = (0 to lastBucket).map { index =>
      val bucketCount = countByBucket.getOrElse(index, 0)
      val bucketMemCount = memCountByBucket.getOrElse(index, 0)
      val bucketMax = if (index == lastBucket) maxFee else bucketSize * index
      FeeBucket(index, Math.max(0, bucketSize * (index - 1) + 1), bucketMax, bucketCount, bucketMemCount)
    }

    buckets.toArray
  }

  def getFeePredictions(predictor: Predictor, feeBuckets: Array[FeeBucket]): FeeSummary = {
    val predictions = feeBuckets.map { bucket => getFeePrediction(predictor, bucket.maxFee) }

    // never predict zero conf transactions
    val feeByDelay = predictions.groupBy(_.maxDelay).map {
      case (maxDelay, list) => (maxDelay, list.map(_.fee).min.max(10))
    } filter { _._2 < 1000 }

    val feeByMinutes = predictions.groupBy(_.maxMinutes).map {
      case (maxDelay, list) => (maxDelay, list.map(_.fee).min.max(10))
    } filter { _._2 < 1000 }

    FeeSummary(predictions, feeByDelay, feeByMinutes)
  }

  def getFeePrediction(predictor: Predictor, fee: Long): FeePrediction = {
    val confirmations = predictor.confirmations(fee)
    val delays = confirmations.map(_.delay).sortBy(c => c)
    val minutes = confirmations.map(_.minutes).sortBy(c => c)
    val (minDelay, maxDelay) = getInterval(delays)
    val (minMinutes, maxMinutes) = getInterval(minutes)
    FeePrediction(fee, minDelay, maxDelay, rounded(minMinutes, false), rounded(maxMinutes, true))
  }

  private def rounded(minutes: Int, up: Boolean): Int = {
    val factor = if (minutes <= 60) 5 else if (minutes <= 120) 10 else 60
    if (up) ((minutes + factor - 1) / factor) * factor
    else (minutes / factor) * factor
  }

  private def getInterval(confirmations: Array[Int]): (Int, Int) = {
    val dropCount = confirmations.length / 20
    val min = confirmations(dropCount)
    val max = confirmations(confirmations.length - 1 - dropCount)
    (min, max)
  }
}
papauschek/cointape
model/src/main/scala/model/Fees.scala
Scala
mit
3,902
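Illustrative sketch (not part of the cointape sources): the bucket-size rule and the minute-rounding helper in Fees.scala above are terse, so the standalone object below repeats just that arithmetic with made-up inputs; the object name and the sample values are assumptions for illustration only.

object FeeMathSketch extends App {
  // Same formula as in getFeeStructure: bucket widths are multiples of 10 satoshis,
  // chosen so that roughly 190 buckets span the 95th-percentile fee.
  def bucketSize(maxBucket: Int): Int = (maxBucket / 190 + 1) * 10

  // Same rounding as Fees.rounded: 5-minute steps up to an hour,
  // 10-minute steps up to two hours, then whole hours.
  def rounded(minutes: Int, up: Boolean): Int = {
    val factor = if (minutes <= 60) 5 else if (minutes <= 120) 10 else 60
    if (up) ((minutes + factor - 1) / factor) * factor else (minutes / factor) * factor
  }

  println(bucketSize(100))         // 10
  println(bucketSize(5000))        // 270
  println(rounded(47, up = false)) // 45 (round down)
  println(rounded(47, up = true))  // 50 (round up)
  println(rounded(130, up = true)) // 180 (60-minute steps past two hours)
}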
package org.scaladebugger.api.profiles.traits.info.events

import com.sun.jdi.event.MonitorContendedEnteredEvent
import org.scaladebugger.api.profiles.traits.info.{ObjectInfo, ThreadInfo}

/**
 * Represents the interface that needs to be implemented to provide
 * an abstraction over the JDI monitor contended entered event interface.
 */
trait MonitorContendedEnteredEventInfo extends MonitorEventInfo {
  /**
   * Returns the JDI representation this profile instance wraps.
   *
   * @return The JDI instance
   */
  override def toJdiInstance: MonitorContendedEnteredEvent

  /**
   * Returns the monitor that was entered.
   *
   * @return The information profile about the monitor object
   */
  override def monitor: ObjectInfo

  /**
   * Returns the thread where this event occurred.
   *
   * @return The information profile about the thread
   */
  override def thread: ThreadInfo
}
chipsenkbeil/scala-debugger
scala-debugger-api/src/main/scala/org/scaladebugger/api/profiles/traits/info/events/MonitorContendedEnteredEventInfo.scala
Scala
apache-2.0
893
package io.react2.reactiveaws.core

import com.typesafe.config.ConfigFactory

/**
 * @author dbalduini
 */
trait HasConfig {
  lazy val config = ConfigFactory.load()
}
dbalduini/ReactiveAws
core/src/main/scala/io/react2/reactiveaws/core/HasConfig.scala
Scala
apache-2.0
168
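Hypothetical usage sketch (not from the ReactiveAws repository): HasConfig only exposes a lazily loaded Typesafe Config, so a component mixes it in and reads keys from application.conf; the key name below is an assumption for illustration.

import io.react2.reactiveaws.core.HasConfig

object ConfigSketch extends App with HasConfig {
  // "aws.region" is an assumed key; ConfigFactory.load() reads application.conf
  // from the classpath, and getString throws ConfigException.Missing if the key is absent.
  val region = config.getString("aws.region")
  println(s"configured region: $region")
}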
/*
 * Copyright 2014–2018 SlamData Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package matryoshka.example

import org.specs2.mutable._
import slamdata.Predef._
import matryoshka._
import matryoshka.data._
import matryoshka.implicits._
import scalaz._

sealed trait IntList[A]

object IntList {
  case class IntCons[A](h: Int, tail: A) extends IntList[A]
  case class IntNil[A]() extends IntList[A]

  implicit val intListFunctor: Functor[IntList] = new Functor[IntList] {
    def map[A, B](list: IntList[A])(f: A => B): IntList[B] = list match {
      case IntNil() => IntNil()
      case IntCons(h, t) => IntCons(h, f(t))
    }
  }

  val to: Coalgebra[IntList, List[Int]] = {
    case Nil => IntNil()
    case h :: t => IntCons(h, t)
  }

  val from: Algebra[IntList, List[Int]] = {
    case IntNil() => Nil
    case IntCons(h, t) => h :: t
  }

  val sum: Algebra[IntList, Int] = {
    case IntNil() => 0
    case IntCons(h, t) => h + t
  }

  val len: Algebra[IntList, Int] = {
    case IntNil() => 0
    case IntCons(_, t) => t + 1
  }

  def filter(f: Int => Boolean): Algebra[IntList, List[Int]] = {
    case IntNil() => Nil
    case IntCons(h, t) => if (f(h)) h :: t else t
  }

  def lessThan(i: Int): IntList ~> IntList = new (IntList ~> IntList) {
    def apply[A](l: IntList[A]): IntList[A] = l match {
      case IntNil() => IntNil()
      case l @ IntCons(h, t) => if (h < i) l else IntNil()
    }
  }

  def mapHead(f: Int => Int): GCoalgebra[Fix[IntList] \/ ?, IntList, Fix[IntList]] = {
    case Fix(IntNil()) => IntNil()
    case Fix(IntCons(h, t)) => IntCons(f(h), \/.left(t))
  }

  val infinite: Coalgebra[IntList, Int] = n => IntCons(n, n + 1)
}

class IntListSpec extends Specification {
  import IntList._

  val intList = Fix(IntCons(1, Fix(IntCons(2, Fix(IntNil[Fix[IntList]]())))))

  "construct an IntList" >> {
    List(1, 2).ana[Fix[IntList]](to) should ===(intList)
  }

  "convert an IntList to a List" >> {
    intList.cata(from) should ===(List(1, 2))
  }

  "filter an IntList" >> {
    (0 until 10).toList.hylo(filter(_ < 5), to) should ===((0 until 5).toList)
  }

  "map the head of an IntList" >> {
    intList.apo.apply(mapHead(_ * 3)).cata(from) should ===(List(3, 2))
  }

  // "short circuit the creation of infinite IntList" >> {
  //   1.postpro[Fix[IntList]](IntList.lessThan(10), infinite).cata(len) should ===(9)
  // }

  "short circuit the fold of an IntList" >> {
    intList.prepro(IntList.lessThan(2), sum) should ===(1)
  }
}
slamdata/matryoshka
tests/shared/src/test/scala/matryoshka/example/intList.scala
Scala
apache-2.0
3,005
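Illustrative aside (not part of the matryoshka test file): with the IntList definitions above in scope, a hylomorphism fuses the unfold (to) with a fold (sum or len), so the intermediate Fix[IntList] is never materialized; this mirrors the hylo(filter(...), to) call already used in the spec. The object name is a made-up placeholder.

import matryoshka.implicits._
import matryoshka.example.IntList._

object HyloSketch extends App {
  val total = List(1, 2, 3).hylo(sum, to) // 6: unfold the list and add while refolding
  val size  = List(1, 2, 3).hylo(len, to) // 3: same traversal, counting instead
  println((total, size))
}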
package org.scalaide.ui.internal.actions

import org.eclipse.jface.action.IAction
import org.eclipse.jface.viewers.ISelection
import org.eclipse.ui.IObjectActionDelegate
import org.eclipse.ui.IWorkbenchPart
import org.eclipse.ui.IWorkbenchWindow
import org.scalaide.core.internal.logging.LogManager
import org.eclipse.ui.IWorkbenchWindowActionDelegate
import org.scalaide.ui.internal.diagnostic
import org.scalaide.util.eclipse.SWTUtils
import org.scalaide.util.eclipse.EclipseUtils

class RunDiagnosticAction extends IObjectActionDelegate with IWorkbenchWindowActionDelegate {
  private var parentWindow: IWorkbenchWindow = null

  val RUN_DIAGNOSTICS = "org.scala-ide.sdt.ui.runDiag.action"
  val REPORT_BUG = "org.scala-ide.sdt.ui.reportBug.action"
  val OPEN_LOG_FILE = "org.scala-ide.sdt.ui.openLogFile.action"

  override def init(window: IWorkbenchWindow): Unit = {
    parentWindow = window
  }

  override def dispose = { }

  override def selectionChanged(action: IAction, selection: ISelection): Unit = { }

  override def run(action: IAction): Unit = {
    EclipseUtils.withSafeRunner("Error occurred while trying to create diagnostic dialog") {
      action.getId match {
        case RUN_DIAGNOSTICS =>
          val shell = if (parentWindow == null) SWTUtils.getShell else parentWindow.getShell
          new diagnostic.DiagnosticDialog(shell).open
        case REPORT_BUG =>
          val shell = if (parentWindow == null) SWTUtils.getShell else parentWindow.getShell
          new diagnostic.ReportBugDialog(shell).open
        case OPEN_LOG_FILE =>
          OpenExternalFile(LogManager.logFile).open()
        case _ =>
      }
    }
  }

  override def setActivePart(action: IAction, targetPart: IWorkbenchPart): Unit = { }
}
Kwestor/scala-ide
org.scala-ide.sdt.core/src/org/scalaide/ui/internal/actions/RunDiagnosticAction.scala
Scala
bsd-3-clause
1,754
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.system.chooser import java.util.Arrays import org.apache.samza.system.IncomingMessageEnvelope import org.apache.samza.system.SystemStreamPartition import org.apache.samza.Partition import org.apache.samza.system.SystemStream import org.apache.samza.system.SystemStreamMetadata import org.apache.samza.system.SystemStreamMetadata.SystemStreamPartitionMetadata import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.runners.Parameterized.Parameters import scala.collection.JavaConversions._ import scala.collection.immutable.Queue @RunWith(value = classOf[Parameterized]) class TestBootstrappingChooser(getChooser: (MessageChooser, Map[SystemStream, SystemStreamMetadata]) => MessageChooser) { val envelope1 = new IncomingMessageEnvelope(new SystemStreamPartition("kafka", "stream", new Partition(0)), null, null, 1); val envelope2 = new IncomingMessageEnvelope(new SystemStreamPartition("kafka", "stream1", new Partition(1)), null, null, 2); val envelope3 = new IncomingMessageEnvelope(new SystemStreamPartition("kafka", "stream1", new Partition(0)), null, null, 3); val envelope4 = new IncomingMessageEnvelope(new SystemStreamPartition("kafka", "stream", new Partition(0)), "123", null, 4); /** * Helper function to create metadata for a single envelope with a single offset. */ private def getMetadata(envelope: IncomingMessageEnvelope, newestOffset: String, futureOffset: Option[String] = None) = { new SystemStreamMetadata( envelope.getSystemStreamPartition.getStream, Map(envelope.getSystemStreamPartition.getPartition -> new SystemStreamPartitionMetadata(null, newestOffset, futureOffset.getOrElse(null)))) } @Test def testChooserShouldIgnoreStreamsThatArentInOffsetMap { val mock = new MockMessageChooser val chooser = getChooser(mock, Map()) chooser.register(envelope1.getSystemStreamPartition, "foo") chooser.start assertEquals(1, mock.starts) assertEquals("foo", mock.registers(envelope1.getSystemStreamPartition)) chooser.update(envelope1) assertEquals(envelope1, chooser.choose) assertNull(chooser.choose) chooser.stop assertEquals(1, mock.stops) } @Test def testChooserShouldEliminateCaughtUpStreamsOnRegister { val mock = new MockMessageChooser val metadata = getMetadata(envelope1, "100", Some("123")) val chooser = getChooser(mock, Map(envelope1.getSystemStreamPartition.getSystemStream -> metadata)) // Even though envelope1's SSP is registered as a bootstrap stream, since // 123=123, it should be marked as "caught up" and treated like a normal // stream. This means that non-bootstrap stream envelope should be allowed // to be chosen. 
chooser.register(envelope1.getSystemStreamPartition, "123") chooser.register(envelope2.getSystemStreamPartition, "321") chooser.start chooser.update(envelope2) assertEquals(envelope2, chooser.choose) assertNull(chooser.choose) } @Test def testChooserShouldEliminateCaughtUpStreamsAfterRegister { val mock = new MockMessageChooser val metadata = getMetadata(envelope1, "123") val chooser = getChooser(mock, Map(envelope1.getSystemStreamPartition.getSystemStream -> metadata)) // Even though envelope1's SSP is registered as a bootstrap stream, since // 123=123, it should be marked as "caught up" and treated like a normal // stream. This means that non-bootstrap stream envelope should be allowed // to be chosen. chooser.register(envelope1.getSystemStreamPartition, "1") chooser.register(envelope2.getSystemStreamPartition, null) chooser.start chooser.update(envelope2) // Choose should not return anything since bootstrapper is blocking // wrapped.choose until it gets an update from envelope1's SSP. assertNull(chooser.choose) chooser.update(envelope1) // Now that we have an update from the required SSP, the mock chooser // should be called, and return. assertEquals(envelope2, chooser.choose) // The chooser still has an envelope from envelope1's SSP, so it should // return. assertEquals(envelope1, chooser.choose) // No envelope for envelope1's SSP has been given, so it should block. chooser.update(envelope2) assertNull(chooser.choose) // Now we're giving an envelope with the proper last offset (123), so no // envelope1's SSP should be treated no differently than envelope2's. chooser.update(envelope4) assertEquals(envelope2, chooser.choose) assertEquals(envelope4, chooser.choose) assertNull(chooser.choose) // Should not block here since there are no more lagging bootstrap streams. chooser.update(envelope2) assertEquals(envelope2, chooser.choose) assertNull(chooser.choose) chooser.update(envelope2) assertEquals(envelope2, chooser.choose) assertNull(chooser.choose) } @Test def testChooserShouldWorkWithTwoBootstrapStreams { val mock = new MockMessageChooser val metadata1 = getMetadata(envelope1, "123") val metadata2 = getMetadata(envelope2, "321") val chooser = getChooser(mock, Map(envelope1.getSystemStreamPartition.getSystemStream -> metadata1, envelope2.getSystemStreamPartition.getSystemStream -> metadata2)) chooser.register(envelope1.getSystemStreamPartition, "1") chooser.register(envelope2.getSystemStreamPartition, "1") chooser.register(envelope3.getSystemStreamPartition, "1") chooser.start chooser.update(envelope1) assertNull(chooser.choose) chooser.update(envelope3) assertNull(chooser.choose) chooser.update(envelope2) // Fully loaded now. assertEquals(envelope1, chooser.choose) // Can't pick again because envelope1's SSP is missing. assertNull(chooser.choose) chooser.update(envelope1) // Can pick again. assertEquals(envelope3, chooser.choose) // Can still pick since envelope3.SSP isn't being tracked. assertEquals(envelope2, chooser.choose) // Can't pick since envelope2.SSP needs an envelope now. assertNull(chooser.choose) chooser.update(envelope2) // Now we get envelope1 again. assertEquals(envelope1, chooser.choose) // Can't pick again. assertNull(chooser.choose) // Now use envelope4, to trigger "all caught up" for envelope1.SSP. chooser.update(envelope4) // Chooser's contents is currently: e2, e4 (System.err.println(mock.getEnvelopes)) // Add envelope3, whose SSP isn't being tracked. 
chooser.update(envelope3) assertEquals(envelope2, chooser.choose) assertNull(chooser.choose) chooser.update(envelope2) // Chooser's contents is currently: e4, e3, e2 (System.err.println(mock.getEnvelopes)) assertEquals(envelope4, chooser.choose) // This should be allowed, even though no message from envelope1.SSP is // available, since envelope4 triggered "all caught up" because its offset // matches the offset map for this SSP, and we still have an envelope for // envelope2.SSP in the queue. assertEquals(envelope3, chooser.choose) assertEquals(envelope2, chooser.choose) assertNull(chooser.choose) // Fin. } @Test def testChooserRegisteredCorrectSsps { val mock = new MockMessageChooser val metadata1 = getMetadata(envelope1, "123") val metadata2 = getMetadata(envelope2, "321") val chooser = new BootstrappingChooser(mock, Map(envelope1.getSystemStreamPartition.getSystemStream -> metadata1, envelope2.getSystemStreamPartition.getSystemStream -> metadata2)) chooser.register(envelope1.getSystemStreamPartition, "1") chooser.register(envelope2.getSystemStreamPartition, "1") chooser.start // it should only contain stream partition 0 and stream1 partition 1 val expectedLaggingSsps = Set(envelope1.getSystemStreamPartition, envelope2.getSystemStreamPartition) assertEquals(expectedLaggingSsps, chooser.laggingSystemStreamPartitions) val expectedSystemStreamLagCounts = Map(envelope1.getSystemStreamPartition.getSystemStream -> 1, envelope2.getSystemStreamPartition.getSystemStream -> 1) assertEquals(expectedSystemStreamLagCounts, chooser.systemStreamLagCounts) } } object TestBootstrappingChooser { // Test both BootstrappingChooser and DefaultChooser here. DefaultChooser with // just bootstrap stream metadata defined should behave just like a plain // BootstrappingChooser. @Parameters def parameters: java.util.Collection[Array[(MessageChooser, Map[SystemStream, SystemStreamMetadata]) => MessageChooser]] = Arrays.asList( Array((wrapped: MessageChooser, bootstrapStreamMetadata: Map[SystemStream, SystemStreamMetadata]) => new BootstrappingChooser(wrapped, bootstrapStreamMetadata)), Array((wrapped: MessageChooser, bootstrapStreamMetadata: Map[SystemStream, SystemStreamMetadata]) => new DefaultChooser(wrapped, bootstrapStreamMetadata = bootstrapStreamMetadata))) }
zcan/samza
samza-core/src/test/scala/org/apache/samza/system/chooser/TestBootstrappingChooser.scala
Scala
apache-2.0
9,753