code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package joint.shapes.devs
import japgolly.scalajs.react.CallbackTo
import japgolly.scalajs.react.vdom.html_<^.VdomElement
import joint.dia._
import joint.shapes.basic.Generic
import joint.shapes.chs.nodes.{Extender, ExtenderOps}
import scala.scalajs.js
import scala.scalajs.js.annotation.ScalaJSDefined
@js.native
trait Model extends Generic[ModelOptions, Model] {
  /** Moves the element by (x, y).
    * FIX: the return type was declared as `js.native`, which is a value, not a
    * type, and does not compile. The jointjs Element API returns the element
    * itself from translate (enabling chaining) — TODO confirm against the
    * jointjs version in use. */
  def translate(x: Int, y: Int): Model = js.native
  /** Backbone-style attribute bag for this element. */
  override def attributes: ModelOptions = js.native
  /** Reads a named attribute; declared here as returning a Link. */
  def get(name: String): Link = js.native
  /** Serializes the model (jointjs returns a plain object; typed as Model here). */
  def toJSON(): Model = js.native
  /** Adds an input port; returns the model for chaining. */
  def addInPort(port: String, opt: js.UndefOr[js.Any] = js.undefined): Model = js.native
  /** Adds an output port; returns the model for chaining. */
  def addOutPort(port: String, opt: js.UndefOr[js.Any] = js.undefined): Model = js.native
  /** Removes an output port; returns the model for chaining. */
  def removeOutPort(port: String, opt: js.UndefOr[js.Any] = js.undefined): Model = js.native
  /** Removes an input port; returns the model for chaining. */
  def removeInPort(port: String, opt: js.UndefOr[js.Any] = js.undefined): Model = js.native
  /** Opens the dialog associated with the given cell view (project extension). */
  def openDialog(cellView: CellView): Unit = js.native
  /** Renders the dialog for this model as a scalajs-react element (project extension). */
  def dialog(props: Props): VdomElement = js.native
  /** SVG markup template used to render the element. */
  val markup: String = js.native
}
object Model extends ExtenderOps {
  /** The underlying `joint.shapes.devs.Model` constructor from the JS global namespace. */
  private def jsCtor: js.Dynamic = js.Dynamic.global.joint.shapes.devs.Model

  /** Exposes the JS constructor as an Extender so it can be subclassed via `.extend`. */
  override val extender: Extender = jsCtor.asInstanceOf[Extender]

  /** Instantiates a new devs Model with the given options. */
  def apply(props: ModelOptions): Model =
    js.Dynamic.newInstance(jsCtor)(props).asInstanceOf[Model]
}
@ScalaJSDefined
trait ModelOptions extends Options {
  // All fields are optional; when left `js.undefined`, behaviour falls back to
  // the JS library's defaults (defined in jointjs, not visible here).
  // SVG markup template for the element body.
  var markup: js.UndefOr[String] = js.undefined
  // Markup template used to render each port.
  var portMarkup: js.UndefOr[String] = js.undefined
  // Markup template used to render each port label.
  var portLabelMarkup: js.UndefOr[String] = js.undefined
  // Names of the output ports to create.
  var outPorts: js.UndefOr[js.Array[String]] = js.undefined
  // Names of the input ports to create.
  var inPorts: js.UndefOr[js.Array[String]] = js.undefined
  // Detailed per-port configuration.
  var ports: js.UndefOr[PortOptions] = js.undefined
  // Application-specific payload attached to the node (see NodeMetadata).
  var nodeMetadata: js.UndefOr[NodeMetadata] = js.undefined
}
/** Application payload attached to a devs node.
  * NOTE(review): carries the same data in two encodings — presumably a binary
  * (byte-string) serialisation plus its JSON rendering; confirm with callers. */
@ScalaJSDefined
class NodeMetadata(val byteString: String, val json: String) extends js.Object
case class Props(isOpen: Boolean,
cellView: js.UndefOr[CellView],
close: CallbackTo[Unit]) | abdheshkumar/scalajs-practices | client/src/main/scala/joint/shapes/devs/Model.scala | Scala | apache-2.0 | 2,010 |
package com.sksamuel.elastic4s.analyzers
import org.elasticsearch.common.xcontent.{XContentBuilder, XContentFactory}
/** Base class for analyzers that have custom parameters set.
  *
  * Subclasses implement [[build]] to write their settings; [[json]] wraps the
  * output in a standalone JSON object.
  */
abstract class AnalyzerDefinition(val name: String) {

  /** Writes this analyzer's settings into the given builder. */
  def build(source: XContentBuilder): Unit

  /** Renders this analyzer as a complete JSON object. */
  def json: XContentBuilder = {
    val output = XContentFactory.jsonBuilder
    output.startObject()
    build(output)
    output.endObject()
    output
  }
}
/** Analyzer of type "stop": like the standard analyzer but filtering the given stopwords. */
case class StopAnalyzerDefinition(override val name: String,
                                  stopwords: Iterable[String] = Nil) extends AnalyzerDefinition(name) {

  def build(source: XContentBuilder): Unit = {
    source.field("type", "stop")
    val words = stopwords.toArray[String]
    source.field("stopwords", words: _*)
  }

  /** Returns a copy with the stopword list replaced. */
  def stopwords(stopwords: Iterable[String]): StopAnalyzerDefinition =
    this.copy(stopwords = stopwords)

  /** Varargs convenience overload of `stopwords`. */
  def stopwords(stopwords: String, rest: String*): StopAnalyzerDefinition =
    this.copy(stopwords = stopwords +: rest)
}
/** Analyzer of type "standard": grammar-based tokenization with optional stopwords
  * and a maximum token length. */
case class StandardAnalyzerDefinition(override val name: String,
                                      stopwords: Iterable[String] = Nil,
                                      maxTokenLength: Int = 255) extends AnalyzerDefinition(name) {

  def build(source: XContentBuilder): Unit = {
    source.field("type", "standard")
    val words = stopwords.toArray[String]
    source.field("stopwords", words: _*)
    source.field("max_token_length", maxTokenLength)
  }

  /** Returns a copy with the stopword list replaced. */
  def stopwords(stopwords: Iterable[String]): StandardAnalyzerDefinition =
    this.copy(stopwords = stopwords)

  /** Varargs convenience overload of `stopwords`. */
  def stopwords(stopwords: String, rest: String*): StandardAnalyzerDefinition =
    this.copy(stopwords = stopwords +: rest)

  /** Returns a copy with the maximum token length replaced. */
  def maxTokenLength(maxTokenLength: Int): StandardAnalyzerDefinition =
    this.copy(maxTokenLength = maxTokenLength)
}
/** Analyzer of type "pattern": splits text using the given regular expression,
  * optionally lowercasing the resulting terms. */
case class PatternAnalyzerDefinition(override val name: String,
                                     regex: String,
                                     lowercase: Boolean = true) extends AnalyzerDefinition(name) {

  def build(source: XContentBuilder): Unit = {
    source.field("type", "pattern")
    source.field("lowercase", lowercase)
    source.field("pattern", regex)
  }

  /** Returns a copy with the lowercase flag replaced. */
  def lowercase(lowercase: Boolean): PatternAnalyzerDefinition =
    this.copy(lowercase = lowercase)
}
/** Analyzer of type "snowball": standard tokenization plus snowball stemming for
  * the given language. The stopwords field is only emitted when non-empty. */
case class SnowballAnalyzerDefinition(override val name: String,
                                      lang: String = "English",
                                      stopwords: Iterable[String] = Nil) extends AnalyzerDefinition(name) {

  def build(source: XContentBuilder): Unit = {
    source.field("type", "snowball")
    source.field("language", lang)
    if (stopwords.nonEmpty) {
      val words = stopwords.toArray[String]
      source.field("stopwords", words: _*)
    }
  }

  /** Returns a copy with the stemmer language replaced. */
  def language(lang: String): SnowballAnalyzerDefinition =
    this.copy(lang = lang)

  /** Returns a copy with the stopword list replaced. */
  def stopwords(stopwords: Iterable[String]): SnowballAnalyzerDefinition =
    this.copy(stopwords = stopwords)

  /** Varargs convenience overload of `stopwords`. */
  def stopwords(stopwords: String, rest: String*): SnowballAnalyzerDefinition =
    this.copy(stopwords = stopwords +: rest)
}
/** Analyzer of type "custom": a tokenizer combined with token and/or char filters.
  * Token filters and char filters are emitted under separate JSON keys. */
case class CustomAnalyzerDefinition(override val name: String,
                                    tokenizer: Tokenizer,
                                    filters: Seq[AnalyzerFilter] = Nil) extends AnalyzerDefinition(name) {

  def build(source: XContentBuilder): Unit = {
    source.field("type", "custom")
    source.field("tokenizer", tokenizer.name)
    // Split the mixed filter list by kind, preserving order within each kind.
    val tokens = filters.collect { case t: TokenFilter => t }
    val chars = filters.collect { case c: CharFilter => c }
    if (tokens.nonEmpty) {
      source.field("filter", tokens.map(_.name).toArray: _*)
    }
    if (chars.nonEmpty) {
      source.field("char_filter", chars.map(_.name).toArray: _*)
    }
  }

  /** Returns a copy with the filter list replaced. */
  def filters(filters: Seq[AnalyzerFilter]): CustomAnalyzerDefinition =
    this.copy(filters = filters)

  /** Returns a copy with one more filter appended. */
  def addFilter(filter: AnalyzerFilter): CustomAnalyzerDefinition =
    this.copy(filters = filters :+ filter)
}
object CustomAnalyzerDefinition {
  /** Varargs constructor requiring at least one filter. */
  def apply(name: String,
            tokenizer: Tokenizer,
            first: AnalyzerFilter,
            rest: AnalyzerFilter*): CustomAnalyzerDefinition =
    CustomAnalyzerDefinition(name, tokenizer, first +: rest)
}
| k4200/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/analyzers/AnalyzerDefinition.scala | Scala | apache-2.0 | 4,153 |
package com.dominikgruber.scalatorrent.bencode
import scala.util.parsing.combinator._
/**
 * Parser for bencoded data, as used by the BitTorrent protocol.
 *
 * Descriptions taken from the specification:
 * https://wiki.theory.org/BitTorrentSpecification#Bencoding
 */
object BencodeParser extends RegexParsers {

  // Bencoded strings carry their own length prefix, so whitespace is payload
  // data and must never be skipped.
  override def skipWhitespace = false

  /**
   * Bencoded strings are encoded as follows:
   * <string length encoded in base ten ASCII>:<string data>
   * Note that there is no constant beginning delimiter, and no ending
   * delimiter. A length of zero ("0:") denotes the empty string.
   *
   * FIX: the length regex was written as `[1-9]\\d*` inside a triple-quoted
   * string; `\\` there is a literal escaped backslash in the regex, so no
   * valid input ever matched. It also rejected the valid empty string "0:".
   */
  def string: Parser[String] = ("""(0|[1-9]\d*)""".r <~ ":") into { count =>
    // Read exactly `count` characters; '.' does not match '\n', hence the alternative.
    repN(count.toInt, ".|\\n".r) ^^ (_.mkString)
  }

  /**
   * Integers are encoded as follows: i<integer encoded in base ten ASCII>e
   * The initial i and trailing e are beginning and ending delimiters. You can
   * have negative numbers such as i-3e. Only the significant digits should be
   * used, one cannot pad the Integer with zeroes, such as i04e. However, i0e is
   * valid (and i-0e is not).
   *
   * FIX: same triple-quoted `\\` escaping bug as in `string`.
   * NOTE(review): values are parsed as Int; real-world torrents can carry
   * 64-bit sizes, which would overflow here — interface kept for compatibility.
   */
  def integer: Parser[Int] = "i" ~> """(0|-?[1-9]\d*)""".r <~ "e" ^^ (_.toInt)

  /**
   * Lists are encoded as follows: l<bencoded values>e
   * The initial l and trailing e are beginning and ending delimiters. Lists may
   * contain any bencoded type, including integers, strings, dictionaries, and
   * even lists within other lists.
   *
   * FIX: uses rep (not rep1) so the valid empty list "le" is accepted.
   */
  def list: Parser[List[Any]] = "l" ~> rep(bencodeElem) <~ "e"

  /**
   * Dictionaries are encoded as follows: d<bencoded string><bencoded element>e
   * The initial d and trailing e are the beginning and ending delimiters. Keys
   * must be bencoded strings; values may be any bencoded type. Keys must appear
   * in sorted order (sorted as raw strings, not alphanumerics).
   *
   * FIX: uses rep (not rep1) so the valid empty dictionary "de" is accepted.
   * @todo Ensure keys appear in sorted order
   */
  def dictionary: Parser[Map[String, Any]] =
    "d" ~> rep(string ~ bencodeElem) <~ "e" ^^ (_.map { case key ~ value => key -> value }.toMap)

  /** Any single bencoded element. */
  def bencodeElem: Parser[Any] = string | integer | list | dictionary

  /** Parses a complete bencoded document (input must be consumed entirely). */
  def apply(input: String): ParseResult[Any] = parseAll(bencodeElem, input)
}
| TheDom/scala-torrent | src/main/scala/com/dominikgruber/scalatorrent/bencode/BencodeParser.scala | Scala | mit | 2,205 |
// The MIT License (MIT)
// Copyright (c) 2012 Ram Hardy & Elad Hemar
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
// documentation files (the "Software"), to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
// and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions
// of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
// CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
package models
import play.api.db.DB
import play.api.Play.current
import anorm._
import anorm.SqlParser._
import com.lambdaworks.crypto.SCryptUtil
/**
* Created with IntelliJ IDEA.
* User: Ram Hardy
* Date: 6/8/12
* Time: 4:02 PM
*/
case class User(id:Pk[Long], name:String, email:String, password:String, accessToken:String, resetPasswordToken:Option[String]) {

  /**
   * Verifies a clear-text password against the stored scrypt hash.
   * Returns false when the password does not match or the stored hash is
   * malformed; fatal errors are no longer swallowed.
   */
  def checkPassword(password: String): Boolean = {
    try {
      SCryptUtil.check(password, this.password)
    } catch {
      // FIX: was `case _ => false`, which also caught fatal errors such as
      // OutOfMemoryError; only non-fatal exceptions should map to "no match".
      case scala.util.control.NonFatal(_) => false
    }
  }
}
object User {

  // Anorm row parser mapping a `users` table row onto the User case class.
  val user={
    get[Pk[Long]]("id")~get[String]("name")~get[String]("email")~get[String]("password")~get[String]("access_token")~get[Option[String]]("reset_password_token") map {
      case id~name~email~password~access_token~reset_password_token=> User(id, name, email, password, access_token, reset_password_token)
    }
  }

  // Inserts a new user. The password is hashed with scrypt (N=16384, r=8, p=1)
  // and the email is normalised to lower case. Returns the generated id, if any.
  def create(email:String, name:String, password:String, accessToken:String, resetPasswordToken:String):Option[Long]=DB.withConnection { implicit c=>
    val hash=SCryptUtil.scrypt(password, 16384, 8, 1)
    SQL("INSERT INTO users (name, email, password, access_token, reset_password_token, time_created) VALUES ({name}, {email}, {password}, {access_token}, {reset_password_token}, NOW())").on('email->email.toLowerCase, 'name->name, 'password->hash, 'access_token->accessToken, 'reset_password_token->resetPasswordToken).executeInsert()
  }

  // Looks a user up by primary key; None when no row matches.
  def findById(userId:Long):Option[User]=DB.withConnection { implicit c=>
    SQL("SELECT * FROM users WHERE id={id}").on('id->userId).as(user*).headOption;
  }

  // Looks a user up by (lower-cased) email; None when no row matches.
  def findByEmail(email:String):Option[User]=DB.withConnection { implicit c=>
    SQL("SELECT * FROM users WHERE email={email}").on('email->email.toLowerCase).as(user*).headOption;
  }

  // Replaces the user's email (normalised to lower case).
  def updateEmail(userId:Long, email:String)=DB.withConnection { implicit c=>
    SQL("UPDATE users SET email={email} WHERE id={id}").on('email->email.toLowerCase, 'id->userId).executeUpdate()
  }

  // Replaces the user's password; same scrypt parameters as in create.
  def updatePassword(userId:Long, password:String)=DB.withConnection { implicit c=>
    val hash=SCryptUtil.scrypt(password, 16384, 8, 1)
    SQL("UPDATE users SET password={password} WHERE id={id}").on('password->hash, 'id->userId).executeUpdate()
  }

  // Replaces the user's display name.
  def updateName(userId:Long, name:String)=DB.withConnection { implicit c=>
    SQL("UPDATE users SET name={name} WHERE id={id}").on('name->name, 'id->userId).executeUpdate()
  }

  // Sets or clears (None -> SQL NULL) the password-reset token.
  def updateResetPasswordToken(userId:Long, tokenOption:Option[String])=DB.withConnection { implicit c=>
    tokenOption match {
      case Some(token)=> SQL("UPDATE users SET reset_password_token={token} WHERE id={id}").on('token->token, 'id->userId).executeUpdate()
      case None=> SQL("UPDATE users SET reset_password_token=NULL WHERE id={id}").on('id->userId).executeUpdate()
    }
  }
}
| computerpunc/demo-website-with-play | app/models/User.scala | Scala | mit | 3,957 |
import java.io.{File}
import scala.collection.immutable.{ Map, Set, Seq }
import scalaxb.compiler._
import scalaxb.compiler.xsd.Driver
import scalaxb.compiler.ConfigEntry._
/** Integration test: generates scalaxb bindings for general.xsd (plus its
  * imported schemas) and checks the output compiles together with the
  * hand-written usage sources. */
object GeneralTest extends TestBase {
  // Log.configureLogger(true)
  override val module: Module = new Driver
  // Input schemas under test.
  val inFile = new File("integration/src/test/resources/general.xsd")
  val importFile = new File("integration/src/test/resources/general_import.xsd")
  val mimeFile = new File("integration/src/test/resources/xmlmime.xsd")
  // Hand-written sources exercising the generated code; copied into tmp below.
  val usageFile = new File(tmp, "GeneralUsage.scala")
  val custumFile = new File(tmp, "CustomizationUsage.scala")
  // Maps each XML namespace (None = no namespace) onto a Scala package name.
  val config = Config.default.update(PackageNames(
    Map(None -> Some("general"),
      Some("http://www.w3.org/2005/05/xmlmime") -> Some("xmlmime"),
      Some("http://www.example.com/general_import") -> Some("gimport")))).
    update(Outdir(tmp))
  // Generated sources; lazy so generation runs only when first referenced.
  lazy val generated = module.processFiles(Seq(inFile, mimeFile, importFile),
    config)
  copyFileFromResource("GeneralUsage.scala", usageFile)
  copyFileFromResource("CustomizationUsage.scala", custumFile)
  "general.scala file must compile together with GeneralUsage.scala" in {
    (List("GeneralUsage.allTests"),
      usageFile :: generated) must evaluateTo(true,
      outdir = "./tmp")
  }
  "general.scala file must compile together with CustomizationUsage.scala" in {
    (List("CustomizationUsage.allTests"),
      custumFile :: generated) must evaluateTo(true,
      outdir = "./tmp")
  }
}
| briantopping/scalaxb | integration/src/test/scala/GeneralTest.scala | Scala | mit | 1,497 |
package metaconfig
import scala.language.dynamics
/**
 * Wraps a `Configured[Conf]` so nested `Conf.Obj` fields can be traversed with
 * plain member syntax, e.g. `conf.a.b.as[Int]`.
 */
class ConfDynamic(val asConf: Configured[Conf]) extends Dynamic {

  /** Decodes the currently selected value as a `T` using the implicit decoder. */
  def as[T](implicit ev: ConfDecoder[T]): Configured[T] =
    asConf.andThen(_.as[T])

  /** Selects field `name`; fails with a missing-field or type-mismatch error
    * when the current value is not an object containing that field. */
  def selectDynamic(name: String): ConfDynamic = {
    val selected = asConf.andThen {
      case obj @ Conf.Obj(fields) =>
        fields
          .collectFirst { case (`name`, value) => Configured.Ok(value) }
          .getOrElse(ConfError.missingField(obj, name).notOk)
      case other =>
        ConfError
          .typeMismatch(s"Conf.Obj (with field $name)", other, name)
          .notOk
    }
    ConfDynamic(selected)
  }
}
object ConfDynamic {
  /** Convenience factory mirroring the class constructor. */
  def apply(conf: Configured[Conf]): ConfDynamic =
    new ConfDynamic(conf)
}
| olafurpg/metaconfig | metaconfig-core/shared/src/main/scala/metaconfig/ConfDynamic.scala | Scala | apache-2.0 | 795 |
package sbtazurepack.test.sprayapp
import akka.actor._
import akka.io.IO
import akka.pattern.ask
import akka.util.Timeout
import spray.can.Http
import spray.http.MediaTypes._
import spray.routing._
import scala.concurrent.duration._
import scala.util.Properties
/** Spray HTTP service actor serving a single static HTML page at "/" for GET. */
class AzureService extends HttpServiceActor {
  override def receive = runRoute {
    path("") {
      get {
        respondWithMediaType(`text/html`) {
          complete {
            // Inline scala.xml literal rendered as the response body.
            <html>
              <body>
                <p>sprayapp</p>
              </body>
            </html>
          }
        }
      }
    }
  }
}
/** Entry point: binds the spray-can HTTP server to the configured address/port. */
object Main extends App {
  implicit val system = ActorSystem("on-spray-can")

  // The actor handling all incoming HTTP requests.
  val service = system.actorOf(Props[AzureService], "azure-service")

  // Retrieving address and port from environment variables
  // or falling back to default values.
  val serviceAddress = Properties.envOrElse("SERVICE_ADDRESS", "localhost")
  // FIX: a malformed SERVICE_PORT previously crashed startup with a
  // NumberFormatException from `.toInt`; fall back to the default instead.
  val servicePort = scala.util.Try(Properties.envOrElse("SERVICE_PORT", "8080").toInt).getOrElse(8080)

  implicit val timeout = Timeout(5.seconds)
  // NOTE(review): the Future returned by the ask is discarded, so a failed
  // bind is only visible in the actor-system logs — consider handling it.
  IO(Http) ? Http.Bind(service, interface = serviceAddress, port = servicePort)
}
| kostrse/sbt-azurepack | src/sbt-test/sbt-azurepack/simple/Main.scala | Scala | mit | 1,120 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.client
import java.lang.{Boolean => JBoolean, Integer => JInteger, Long => JLong}
import java.lang.reflect.{InvocationTargetException, Method, Modifier}
import java.net.URI
import java.util.{ArrayList => JArrayList, List => JList, Locale, Map => JMap, Set => JSet}
import java.util.concurrent.TimeUnit
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.metastore.IMetaStoreClient
import org.apache.hadoop.hive.metastore.TableType
import org.apache.hadoop.hive.metastore.api.{Database, EnvironmentContext, Function => HiveFunction, FunctionType, MetaException, PrincipalType, ResourceType, ResourceUri}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.io.AcidUtils
import org.apache.hadoop.hive.ql.metadata.{Hive, HiveException, Partition, Table}
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc
import org.apache.hadoop.hive.ql.processors.{CommandProcessor, CommandProcessorFactory}
import org.apache.hadoop.hive.ql.session.SessionState
import org.apache.hadoop.hive.serde.serdeConstants
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.analysis.NoSuchPermanentFunctionException
import org.apache.spark.sql.catalyst.catalog.{CatalogFunction, CatalogTablePartition, CatalogUtils, FunctionResource, FunctionResourceType}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{AtomicType, IntegralType, StringType}
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.Utils
/**
* A shim that defines the interface between [[HiveClientImpl]] and the underlying Hive library used
* to talk to the metastore. Each Hive version has its own implementation of this class, defining
* version-specific version of needed functions.
*
* The guideline for writing shims is:
* - always extend from the previous version unless really not possible
* - initialize methods in lazy vals, both for quicker access for multiple invocations, and to
* avoid runtime errors due to the above guideline.
*/
private[client] sealed abstract class Shim {

  /**
   * Set the current SessionState to the given SessionState. Also, set the context classloader of
   * the current thread to the one set in the HiveConf of this given `state`.
   */
  def setCurrentSessionState(state: SessionState): Unit

  /**
   * This shim is necessary because the return type is different on different versions of Hive.
   * All parameters are the same, though.
   */
  def getDataLocation(table: Table): Option[String]

  def setDataLocation(table: Table, loc: String): Unit

  // Partition listing / filtering: predicate pushdown support varies by version.
  def getAllPartitions(hive: Hive, table: Table): Seq[Partition]

  def getPartitionsByFilter(hive: Hive, table: Table, predicates: Seq[Expression]): Seq[Partition]

  def getCommandProcessor(token: String, conf: HiveConf): CommandProcessor

  def getDriverResults(driver: Driver): Seq[String]

  def getMetastoreClientConnectRetryDelayMillis(conf: HiveConf): Long

  def alterTable(hive: Hive, tableName: String, table: Table): Unit

  def alterPartitions(hive: Hive, tableName: String, newParts: JList[Partition]): Unit

  def getTablesByType(
      hive: Hive,
      dbName: String,
      pattern: String,
      tableType: TableType): Seq[String]

  def createPartitions(
      hive: Hive,
      db: String,
      table: String,
      parts: Seq[CatalogTablePartition],
      ignoreIfExists: Boolean): Unit

  // Data-loading operations; parameter sets grew across Hive versions.
  def loadPartition(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean,
      isSrcLocal: Boolean): Unit

  def loadTable(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      replace: Boolean,
      isSrcLocal: Boolean): Unit

  def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit

  // Permanent-function catalog operations (unsupported before Hive 0.13).
  def createFunction(hive: Hive, db: String, func: CatalogFunction): Unit

  def dropFunction(hive: Hive, db: String, name: String): Unit

  def renameFunction(hive: Hive, db: String, oldName: String, newName: String): Unit

  def alterFunction(hive: Hive, db: String, func: CatalogFunction): Unit

  def getFunctionOption(hive: Hive, db: String, name: String): Option[CatalogFunction]

  def listFunctions(hive: Hive, db: String, pattern: String): Seq[String]

  def dropIndex(hive: Hive, dbName: String, tableName: String, indexName: String): Unit

  def dropTable(
      hive: Hive,
      dbName: String,
      tableName: String,
      deleteData: Boolean,
      ignoreIfNotExists: Boolean,
      purge: Boolean): Unit

  def dropPartition(
      hive: Hive,
      dbName: String,
      tableName: String,
      part: JList[String],
      deleteData: Boolean,
      purge: Boolean): Unit

  def getDatabaseOwnerName(db: Database): String

  def setDatabaseOwnerName(db: Database, owner: String): Unit

  // Resolves a static method reflectively, failing fast if it is not static.
  protected def findStaticMethod(klass: Class[_], name: String, args: Class[_]*): Method = {
    val method = findMethod(klass, name, args: _*)
    require(Modifier.isStatic(method.getModifiers()),
      s"Method $name of class $klass is not static.")
    method
  }

  // Access to the raw metastore client; retrieval mechanism differs per version.
  def getMSC(hive: Hive): IMetaStoreClient

  // Reflective public-method lookup; throws NoSuchMethodException when absent.
  protected def findMethod(klass: Class[_], name: String, args: Class[_]*): Method = {
    klass.getMethod(name, args: _*)
  }
}
private[client] class Shim_v0_12 extends Shim with Logging {

  // See HIVE-12224, HOLD_DDLTIME was broken as soon as it landed
  protected lazy val holdDDLTime = JBoolean.FALSE

  // deletes the underlying data along with metadata
  protected lazy val deleteDataInDropIndex = JBoolean.TRUE

  protected lazy val getMSCMethod = {
    // Since getMSC() in Hive 0.12 is private, findMethod() could not work here
    val msc = classOf[Hive].getDeclaredMethod("getMSC")
    msc.setAccessible(true)
    msc
  }

  override def getMSC(hive: Hive): IMetaStoreClient = {
    getMSCMethod.invoke(hive).asInstanceOf[IMetaStoreClient]
  }

  // Reflective handles to Hive 0.12 methods whose signatures changed in later
  // versions; resolved lazily so lookup failures surface only on first use.
  private lazy val startMethod =
    findStaticMethod(
      classOf[SessionState],
      "start",
      classOf[SessionState])
  private lazy val getDataLocationMethod = findMethod(classOf[Table], "getDataLocation")
  private lazy val setDataLocationMethod =
    findMethod(
      classOf[Table],
      "setDataLocation",
      classOf[URI])
  private lazy val getAllPartitionsMethod =
    findMethod(
      classOf[Hive],
      "getAllPartitionsForPruner",
      classOf[Table])
  private lazy val getCommandProcessorMethod =
    findStaticMethod(
      classOf[CommandProcessorFactory],
      "get",
      classOf[String],
      classOf[HiveConf])
  private lazy val getDriverResultsMethod =
    findMethod(
      classOf[Driver],
      "getResults",
      classOf[JArrayList[String]])
  private lazy val createPartitionMethod =
    findMethod(
      classOf[Hive],
      "createPartition",
      classOf[Table],
      classOf[JMap[String, String]],
      classOf[Path],
      classOf[JMap[String, String]],
      classOf[String],
      classOf[String],
      JInteger.TYPE,
      classOf[JList[Object]],
      classOf[String],
      classOf[JMap[String, String]],
      classOf[JList[Object]],
      classOf[JList[Object]])
  private lazy val loadPartitionMethod =
    findMethod(
      classOf[Hive],
      "loadPartition",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadTableMethod =
    findMethod(
      classOf[Hive],
      "loadTable",
      classOf[Path],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadDynamicPartitionsMethod =
    findMethod(
      classOf[Hive],
      "loadDynamicPartitions",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val dropIndexMethod =
    findMethod(
      classOf[Hive],
      "dropIndex",
      classOf[String],
      classOf[String],
      classOf[String],
      JBoolean.TYPE)
  private lazy val alterTableMethod =
    findMethod(
      classOf[Hive],
      "alterTable",
      classOf[String],
      classOf[Table])
  private lazy val alterPartitionsMethod =
    findMethod(
      classOf[Hive],
      "alterPartitions",
      classOf[String],
      classOf[JList[Partition]])

  override def setCurrentSessionState(state: SessionState): Unit = {
    // Starting from Hive 0.13, setCurrentSessionState will internally override
    // the context class loader of the current thread by the class loader set in
    // the conf of the SessionState. So, for this Hive 0.12 shim, we add the same
    // behavior and make shim.setCurrentSessionState of all Hive versions have the
    // consistent behavior.
    Thread.currentThread().setContextClassLoader(state.getConf.getClassLoader)
    startMethod.invoke(null, state)
  }

  override def getDataLocation(table: Table): Option[String] =
    Option(getDataLocationMethod.invoke(table)).map(_.toString())

  override def setDataLocation(table: Table, loc: String): Unit =
    setDataLocationMethod.invoke(table, new URI(loc))

  // Follows exactly the same logic of DDLTask.createPartitions in Hive 0.12
  override def createPartitions(
      hive: Hive,
      database: String,
      tableName: String,
      parts: Seq[CatalogTablePartition],
      ignoreIfExists: Boolean): Unit = {
    val table = hive.getTable(database, tableName)
    parts.foreach { s =>
      val location = s.storage.locationUri.map(
        uri => new Path(table.getPath, new Path(uri))).orNull
      val params = if (s.parameters.nonEmpty) s.parameters.asJava else null
      val spec = s.spec.asJava
      if (hive.getPartition(table, spec, false) != null && ignoreIfExists) {
        // Ignore this partition since it already exists and ignoreIfExists == true
      } else {
        if (location == null && table.isView()) {
          throw new HiveException("LOCATION clause illegal for view partition");
        }

        createPartitionMethod.invoke(
          hive,
          table,
          spec,
          location,
          params, // partParams
          null, // inputFormat
          null, // outputFormat
          -1: JInteger, // numBuckets
          null, // cols
          null, // serializationLib
          null, // serdeParams
          null, // bucketCols
          null) // sortCols
      }
    }
  }

  override def getAllPartitions(hive: Hive, table: Table): Seq[Partition] =
    getAllPartitionsMethod.invoke(hive, table).asInstanceOf[JSet[Partition]].asScala.toSeq

  override def getPartitionsByFilter(
      hive: Hive,
      table: Table,
      predicates: Seq[Expression]): Seq[Partition] = {
    // getPartitionsByFilter() doesn't support binary comparison ops in Hive 0.12.
    // See HIVE-4888.
    logDebug("Hive 0.12 doesn't support predicate pushdown to metastore. " +
      "Please use Hive 0.13 or higher.")
    getAllPartitions(hive, table)
  }

  override def getCommandProcessor(token: String, conf: HiveConf): CommandProcessor =
    getCommandProcessorMethod.invoke(null, token, conf).asInstanceOf[CommandProcessor]

  override def getDriverResults(driver: Driver): Seq[String] = {
    val res = new JArrayList[String]()
    getDriverResultsMethod.invoke(driver, res)
    res.asScala
  }

  override def getMetastoreClientConnectRetryDelayMillis(conf: HiveConf): Long = {
    // Hive stores the delay in seconds; Spark callers expect milliseconds.
    conf.getIntVar(HiveConf.ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY) * 1000L
  }

  override def getTablesByType(
      hive: Hive,
      dbName: String,
      pattern: String,
      tableType: TableType): Seq[String] = {
    throw new UnsupportedOperationException("Hive 2.2 and lower versions don't support " +
      "getTablesByType. Please use Hive 2.3 or higher version.")
  }

  override def loadPartition(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadPartitionMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      JBoolean.FALSE, inheritTableSpecs: JBoolean, isSkewedStoreAsSubdir: JBoolean)
  }

  override def loadTable(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      replace: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadTableMethod.invoke(hive, loadPath, tableName, replace: JBoolean, holdDDLTime)
  }

  override def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit = {
    loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      numDP: JInteger, holdDDLTime, listBucketingEnabled: JBoolean)
  }

  override def dropIndex(hive: Hive, dbName: String, tableName: String, indexName: String): Unit = {
    dropIndexMethod.invoke(hive, dbName, tableName, indexName, deleteDataInDropIndex)
  }

  // PURGE is not available in Hive 0.12; fail fast rather than silently ignoring it.
  override def dropTable(
      hive: Hive,
      dbName: String,
      tableName: String,
      deleteData: Boolean,
      ignoreIfNotExists: Boolean,
      purge: Boolean): Unit = {
    if (purge) {
      throw new UnsupportedOperationException("DROP TABLE ... PURGE")
    }
    hive.dropTable(dbName, tableName, deleteData, ignoreIfNotExists)
  }

  override def alterTable(hive: Hive, tableName: String, table: Table): Unit = {
    alterTableMethod.invoke(hive, tableName, table)
  }

  override def alterPartitions(hive: Hive, tableName: String, newParts: JList[Partition]): Unit = {
    alterPartitionsMethod.invoke(hive, tableName, newParts)
  }

  // PURGE is not available in Hive 0.12; fail fast rather than silently ignoring it.
  override def dropPartition(
      hive: Hive,
      dbName: String,
      tableName: String,
      part: JList[String],
      deleteData: Boolean,
      purge: Boolean): Unit = {
    if (purge) {
      throw new UnsupportedOperationException("ALTER TABLE ... DROP PARTITION ... PURGE")
    }
    hive.dropPartition(dbName, tableName, part, deleteData)
  }

  // Permanent functions did not exist before Hive 0.13; mutations fail, lookups
  // report absence.
  override def createFunction(hive: Hive, db: String, func: CatalogFunction): Unit = {
    throw new AnalysisException("Hive 0.12 doesn't support creating permanent functions. " +
      "Please use Hive 0.13 or higher.")
  }

  def dropFunction(hive: Hive, db: String, name: String): Unit = {
    throw new NoSuchPermanentFunctionException(db, name)
  }

  def renameFunction(hive: Hive, db: String, oldName: String, newName: String): Unit = {
    throw new NoSuchPermanentFunctionException(db, oldName)
  }

  def alterFunction(hive: Hive, db: String, func: CatalogFunction): Unit = {
    throw new NoSuchPermanentFunctionException(db, func.identifier.funcName)
  }

  def getFunctionOption(hive: Hive, db: String, name: String): Option[CatalogFunction] = {
    None
  }

  def listFunctions(hive: Hive, db: String, pattern: String): Seq[String] = {
    Seq.empty[String]
  }

  // Database ownership is not tracked in Hive 0.12.
  override def getDatabaseOwnerName(db: Database): String = ""

  override def setDatabaseOwnerName(db: Database, owner: String): Unit = {}
}
/**
 * Shim for Hive 0.13.x. Extends the 0.12 shim and uses reflective method
 * handles for the Hive APIs whose signatures changed in this release
 * (session state, table data location, partition listing and filtering,
 * command processors, driver results, and database ownership).
 *
 * All handles are `lazy val`s so that reflection lookups are deferred until
 * first use and only fail when a method is actually needed.
 */
private[client] class Shim_v0_13 extends Shim_v0_12 {

  // --- Reflective handles to version-specific Hive methods -----------------

  // static SessionState.setCurrentSessionState(SessionState)
  private lazy val setCurrentSessionStateMethod =
    findStaticMethod(
      classOf[SessionState],
      "setCurrentSessionState",
      classOf[SessionState])
  // Table.setDataLocation(Path) — takes a Path in 0.13 (URI in older versions)
  private lazy val setDataLocationMethod =
    findMethod(
      classOf[Table],
      "setDataLocation",
      classOf[Path])
  // Hive.getAllPartitionsOf(Table) — returns a java.util.Set[Partition]
  private lazy val getAllPartitionsMethod =
    findMethod(
      classOf[Hive],
      "getAllPartitionsOf",
      classOf[Table])
  // Hive.getPartitionsByFilter(Table, String) — server-side partition pruning
  private lazy val getPartitionsByFilterMethod =
    findMethod(
      classOf[Hive],
      "getPartitionsByFilter",
      classOf[Table],
      classOf[String])
  // static CommandProcessorFactory.get(String[], HiveConf)
  private lazy val getCommandProcessorMethod =
    findStaticMethod(
      classOf[CommandProcessorFactory],
      "get",
      classOf[Array[String]],
      classOf[HiveConf])
  // Driver.getResults(java.util.List[Object]) — fills the supplied list
  private lazy val getDriverResultsMethod =
    findMethod(
      classOf[Driver],
      "getResults",
      classOf[JList[Object]])
  // Database.getOwnerName() — owner metadata only exists from 0.13 onwards
  private lazy val getDatabaseOwnerNameMethod =
    findMethod(
      classOf[Database],
      "getOwnerName")
  // Database.setOwnerName(String)
  private lazy val setDatabaseOwnerNameMethod =
    findMethod(
      classOf[Database],
      "setOwnerName",
      classOf[String])

  // Static method: first invoke argument is null (no receiver).
  override def setCurrentSessionState(state: SessionState): Unit =
    setCurrentSessionStateMethod.invoke(null, state)

  override def setDataLocation(table: Table, loc: String): Unit =
    setDataLocationMethod.invoke(table, new Path(loc))

  /**
   * Creates metastore partitions in a single batched call via
   * AddPartitionDesc (one descriptor holding every partition spec).
   */
  override def createPartitions(
      hive: Hive,
      db: String,
      table: String,
      parts: Seq[CatalogTablePartition],
      ignoreIfExists: Boolean): Unit = {
    val addPartitionDesc = new AddPartitionDesc(db, table, ignoreIfExists)
    parts.zipWithIndex.foreach { case (s, i) =>
      // Location may be absent (e.g. managed partitions); Hive accepts null.
      addPartitionDesc.addPartition(
        s.spec.asJava, s.storage.locationUri.map(CatalogUtils.URIToString(_)).orNull)
      if (s.parameters.nonEmpty) {
        addPartitionDesc.getPartition(i).setPartParams(s.parameters.asJava)
      }
    }
    hive.createPartitions(addPartitionDesc)
  }

  override def getAllPartitions(hive: Hive, table: Table): Seq[Partition] =
    getAllPartitionsMethod.invoke(hive, table).asInstanceOf[JSet[Partition]].asScala.toSeq

  /** Converts a Catalyst [[CatalogFunction]] into Hive's metastore function object. */
  private def toHiveFunction(f: CatalogFunction, db: String): HiveFunction = {
    val resourceUris = f.resources.map { resource =>
      new ResourceUri(ResourceType.valueOf(
        resource.resourceType.resourceType.toUpperCase(Locale.ROOT)), resource.uri)
    }
    new HiveFunction(
      f.identifier.funcName,
      db,
      f.className,
      null, // owner name: left unset; metastore fills in defaults
      PrincipalType.USER,
      // createTime is stored as seconds-since-epoch (int) in the metastore
      TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis).toInt,
      FunctionType.JAVA,
      resourceUris.asJava)
  }

  override def createFunction(hive: Hive, db: String, func: CatalogFunction): Unit = {
    hive.createFunction(toHiveFunction(func, db))
  }

  override def dropFunction(hive: Hive, db: String, name: String): Unit = {
    hive.dropFunction(db, name)
  }

  /**
   * Renames by altering the existing function in place: fetch the old
   * definition, rebuild it under the new identifier, then alter.
   * Throws NoSuchPermanentFunctionException if the source does not exist.
   */
  override def renameFunction(hive: Hive, db: String, oldName: String, newName: String): Unit = {
    val catalogFunc = getFunctionOption(hive, db, oldName)
      .getOrElse(throw new NoSuchPermanentFunctionException(db, oldName))
      .copy(identifier = FunctionIdentifier(newName, Some(db)))
    val hiveFunc = toHiveFunction(catalogFunc, db)
    hive.alterFunction(db, oldName, hiveFunc)
  }

  override def alterFunction(hive: Hive, db: String, func: CatalogFunction): Unit = {
    hive.alterFunction(db, func.identifier.funcName, toHiveFunction(func, db))
  }

  /** Inverse of [[toHiveFunction]]; rejects resource types Spark does not model. */
  private def fromHiveFunction(hf: HiveFunction): CatalogFunction = {
    val name = FunctionIdentifier(hf.getFunctionName, Option(hf.getDbName))
    val resources = hf.getResourceUris.asScala.map { uri =>
      val resourceType = uri.getResourceType() match {
        case ResourceType.ARCHIVE => "archive"
        case ResourceType.FILE => "file"
        case ResourceType.JAR => "jar"
        case r => throw new AnalysisException(s"Unknown resource type: $r")
      }
      FunctionResource(FunctionResourceType.fromString(resourceType), uri.getUri())
    }
    CatalogFunction(name, hf.getClassName, resources)
  }

  /**
   * Looks up a function, translating Hive's "does not exist" failure into
   * None. Hive 0.13 signals absence via an exception whose message (possibly
   * nested in a cause chain) mentions the function name, hence the string
   * matching below instead of a typed exception check.
   */
  override def getFunctionOption(hive: Hive, db: String, name: String): Option[CatalogFunction] = {
    try {
      Option(hive.getFunction(db, name)).map(fromHiveFunction)
    } catch {
      case NonFatal(e) if isCausedBy(e, s"$name does not exist") =>
        None
    }
  }

  // Walks the cause chain looking for a message substring.
  // (sic: "matchMassage" is a pre-existing misspelling of "matchMessage",
  // kept as-is here since this is a private helper.)
  private def isCausedBy(e: Throwable, matchMassage: String): Boolean = {
    if (e.getMessage.contains(matchMassage)) {
      true
    } else if (e.getCause != null) {
      isCausedBy(e.getCause, matchMassage)
    } else {
      false
    }
  }

  override def listFunctions(hive: Hive, db: String, pattern: String): Seq[String] = {
    hive.getFunctions(db, pattern).asScala
  }

  /**
   * Converts catalyst expression to the format that Hive's getPartitionsByFilter() expects, i.e.
   * a string that represents partition predicates like "str_key=\\"value\\" and int_key=1 ...".
   *
   * Unsupported predicates are skipped.
   */
  def convertFilters(table: Table, filters: Seq[Expression]): String = {
    /**
     * An extractor that matches all binary comparison operators except null-safe equality.
     *
     * Null-safe equality is not supported by Hive metastore partition predicate pushdown
     */
    object SpecialBinaryComparison {
      def unapply(e: BinaryComparison): Option[(Expression, Expression)] = e match {
        case _: EqualNullSafe => None
        case _ => Some((e.left, e.right))
      }
    }

    // Extracts a pushable literal as its Hive filter-string rendering.
    // Only integral and string literals are supported by the metastore parser.
    object ExtractableLiteral {
      def unapply(expr: Expression): Option[String] = expr match {
        case Literal(null, _) => None // `null`s can be cast as other types; we want to avoid NPEs.
        case Literal(value, _: IntegralType) => Some(value.toString)
        case Literal(value, _: StringType) => Some(quoteStringLiteral(value.toString))
        case _ => None
      }
    }

    // Extracts the value list of an IN expression, dropping NULLs (see below).
    object ExtractableLiterals {
      def unapply(exprs: Seq[Expression]): Option[Seq[String]] = {
        // SPARK-24879: The Hive metastore filter parser does not support "null", but we still want
        // to push down as many predicates as we can while still maintaining correctness.
        // In SQL, the `IN` expression evaluates as follows:
        //  > `1 in (2, NULL)` -> NULL
        //  > `1 in (1, NULL)` -> true
        //  > `1 in (2)` -> false
        // Since Hive metastore filters are NULL-intolerant binary operations joined only by
        // `AND` and `OR`, we can treat `NULL` as `false` and thus rewrite `1 in (2, NULL)` as
        // `1 in (2)`.
        // If the Hive metastore begins supporting NULL-tolerant predicates and Spark starts
        // pushing down these predicates, then this optimization will become incorrect and need
        // to be changed.
        val extractables = exprs
          .filter {
            case Literal(null, _) => false
            case _ => true
          }.map(ExtractableLiteral.unapply)
        if (extractables.nonEmpty && extractables.forall(_.isDefined)) {
          Some(extractables.map(_.get))
        } else {
          None
        }
      }
    }

    // Same idea as ExtractableLiterals but for InSet, whose values are raw
    // Scala objects rather than Literal expressions.
    object ExtractableValues {
      private lazy val valueToLiteralString: PartialFunction[Any, String] = {
        case value: Byte => value.toString
        case value: Short => value.toString
        case value: Int => value.toString
        case value: Long => value.toString
        case value: UTF8String => quoteStringLiteral(value.toString)
      }
      def unapply(values: Set[Any]): Option[Seq[String]] = {
        val extractables = values.toSeq.map(valueToLiteralString.lift)
        if (extractables.nonEmpty && extractables.forall(_.isDefined)) {
          Some(extractables.map(_.get))
        } else {
          None
        }
      }
    }

    // Matches attributes whose type Hive can filter on server-side.
    object SupportedAttribute {
      // hive varchar is treated as catalyst string, but hive varchar can't be pushed down.
      private val varcharKeys = table.getPartitionKeys.asScala
        .filter(col => col.getType.startsWith(serdeConstants.VARCHAR_TYPE_NAME) ||
          col.getType.startsWith(serdeConstants.CHAR_TYPE_NAME))
        .map(col => col.getName).toSet
      def unapply(attr: Attribute): Option[String] = {
        // Resolver honors the session's case-sensitivity setting.
        val resolver = SQLConf.get.resolver
        if (varcharKeys.exists(c => resolver(c, attr.name))) {
          None
        } else if (attr.dataType.isInstanceOf[IntegralType] || attr.dataType == StringType) {
          Some(attr.name)
        } else {
          None
        }
      }
    }

    // IN is not understood by the metastore parser; expand to OR-ed equalities.
    def convertInToOr(name: String, values: Seq[String]): String = {
      values.map(value => s"$name = $value").mkString("(", " or ", ")")
    }

    val useAdvanced = SQLConf.get.advancedPartitionPredicatePushdownEnabled

    // Strips widening casts so that e.g. `CAST(intCol AS bigint) = 1L` still
    // resolves to the underlying attribute.
    object ExtractAttribute {
      def unapply(expr: Expression): Option[Attribute] = {
        expr match {
          case attr: Attribute => Some(attr)
          case Cast(child @ AtomicType(), dt: AtomicType, _)
            if Cast.canUpCast(child.dataType.asInstanceOf[AtomicType], dt) => unapply(child)
          case _ => None
        }
      }
    }

    // Returns the Hive filter string for one predicate, or None if it cannot
    // be pushed down (unsupported predicates are simply dropped).
    def convert(expr: Expression): Option[String] = expr match {
      case In(ExtractAttribute(SupportedAttribute(name)), ExtractableLiterals(values))
        if useAdvanced =>
        Some(convertInToOr(name, values))
      case InSet(ExtractAttribute(SupportedAttribute(name)), ExtractableValues(values))
        if useAdvanced =>
        Some(convertInToOr(name, values))
      case op @ SpecialBinaryComparison(
          ExtractAttribute(SupportedAttribute(name)), ExtractableLiteral(value)) =>
        Some(s"$name ${op.symbol} $value")
      case op @ SpecialBinaryComparison(
          ExtractableLiteral(value), ExtractAttribute(SupportedAttribute(name))) =>
        Some(s"$value ${op.symbol} $name")
      // AND may keep whichever side converted (dropping a conjunct only widens
      // the result, which is safe); OR must convert both sides or nothing.
      case And(expr1, expr2) if useAdvanced =>
        val converted = convert(expr1) ++ convert(expr2)
        if (converted.isEmpty) {
          None
        } else {
          Some(converted.mkString("(", " and ", ")"))
        }
      case Or(expr1, expr2) if useAdvanced =>
        for {
          left <- convert(expr1)
          right <- convert(expr2)
        } yield s"($left or $right)"
      case _ => None
    }

    filters.flatMap(convert).mkString(" and ")
  }

  // Quotes a string for the Hive filter grammar. A value containing both
  // quote characters cannot be represented, so we fail loudly.
  private def quoteStringLiteral(str: String): String = {
    if (!str.contains("\\"")) {
      s""""$str""""
    } else if (!str.contains("'")) {
      s"""'$str'"""
    } else {
      throw new UnsupportedOperationException(
        """Partition filter cannot have both `"` and `'` characters""")
    }
  }

  /**
   * Fetches partitions matching the given predicates, falling back to a full
   * listing when no predicate could be converted or when the metastore
   * rejects the filter and direct SQL is disabled.
   */
  override def getPartitionsByFilter(
      hive: Hive,
      table: Table,
      predicates: Seq[Expression]): Seq[Partition] = {

    // Hive getPartitionsByFilter() takes a string that represents partition
    // predicates like "str_key=\\"value\\" and int_key=1 ..."
    val filter = convertFilters(table, predicates)

    val partitions =
      if (filter.isEmpty) {
        getAllPartitionsMethod.invoke(hive, table).asInstanceOf[JSet[Partition]]
      } else {
        logDebug(s"Hive metastore filter is '$filter'.")
        val tryDirectSqlConfVar = HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL
        // We should get this config value from the metaStore. otherwise hit SPARK-18681.
        // To be compatible with hive-0.12 and hive-0.13, In the future we can achieve this by:
        // val tryDirectSql = hive.getMetaConf(tryDirectSqlConfVar.varname).toBoolean
        val tryDirectSql = hive.getMSC.getConfigValue(tryDirectSqlConfVar.varname,
          tryDirectSqlConfVar.defaultBoolVal.toString).toBoolean
        try {
          // Hive may throw an exception when calling this method in some circumstances, such as
          // when filtering on a non-string partition column when the hive config key
          // hive.metastore.try.direct.sql is false
          getPartitionsByFilterMethod.invoke(hive, table, filter)
            .asInstanceOf[JArrayList[Partition]]
        } catch {
          // Reflection wraps the real MetaException in InvocationTargetException.
          case ex: InvocationTargetException if ex.getCause.isInstanceOf[MetaException] &&
              !tryDirectSql =>
            logWarning("Caught Hive MetaException attempting to get partition metadata by " +
              "filter from Hive. Falling back to fetching all partition metadata, which will " +
              "degrade performance. Modifying your Hive metastore configuration to set " +
              s"${tryDirectSqlConfVar.varname} to true may resolve this problem.", ex)
            // HiveShim clients are expected to handle a superset of the requested partitions
            getAllPartitionsMethod.invoke(hive, table).asInstanceOf[JSet[Partition]]
          case ex: InvocationTargetException if ex.getCause.isInstanceOf[MetaException] &&
              tryDirectSql =>
            throw new RuntimeException("Caught Hive MetaException attempting to get partition " +
              "metadata by filter from Hive. You can set the Spark configuration setting " +
              s"${SQLConf.HIVE_MANAGE_FILESOURCE_PARTITIONS.key} to false to work around this " +
              "problem, however this will result in degraded performance. Please report a bug: " +
              "https://issues.apache.org/jira/browse/SPARK", ex)
        }
      }

    partitions.asScala.toSeq
  }

  // The single token is wrapped into the String[] the 0.13 factory expects.
  override def getCommandProcessor(token: String, conf: HiveConf): CommandProcessor =
    getCommandProcessorMethod.invoke(null, Array(token), conf).asInstanceOf[CommandProcessor]

  // getResults fills the supplied list; rows may be plain strings or Object[]
  // whose first element is the string value, depending on the fetch format.
  override def getDriverResults(driver: Driver): Seq[String] = {
    val res = new JArrayList[Object]()
    getDriverResultsMethod.invoke(driver, res)
    res.asScala.map { r =>
      r match {
        case s: String => s
        case a: Array[Object] => a(0).asInstanceOf[String]
      }
    }
  }

  // Owner may be unset in the metastore; normalize null to "".
  override def getDatabaseOwnerName(db: Database): String = {
    Option(getDatabaseOwnerNameMethod.invoke(db)).map(_.asInstanceOf[String]).getOrElse("")
  }

  override def setDatabaseOwnerName(db: Database, owner: String): Unit = {
    setDatabaseOwnerNameMethod.invoke(db, owner)
  }
}
/**
 * Shim for Hive 0.14.x. This release added ACID and list-bucketing flags to
 * the load/drop APIs; Spark always passes FALSE for both since it performs
 * only non-ACID operations.
 */
private[client] class Shim_v0_14 extends Shim_v0_13 {

  // true if this is an ACID operation
  protected lazy val isAcid = JBoolean.FALSE
  // true if list bucketing enabled
  protected lazy val isSkewedStoreAsSubdir = JBoolean.FALSE

  // Hive.loadPartition with the six boolean flags introduced in 0.14
  private lazy val loadPartitionMethod =
    findMethod(
      classOf[Hive],
      "loadPartition",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadTableMethod =
    findMethod(
      classOf[Hive],
      "loadTable",
      classOf[Path],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadDynamicPartitionsMethod =
    findMethod(
      classOf[Hive],
      "loadDynamicPartitions",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  // dropTable gained ignoreIfNotExists/purge flags in 0.14
  private lazy val dropTableMethod =
    findMethod(
      classOf[Hive],
      "dropTable",
      classOf[String],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  // HiveConf.getTimeVar(ConfVars, TimeUnit) — used for retry-delay lookup
  private lazy val getTimeVarMethod =
    findMethod(
      classOf[HiveConf],
      "getTimeVar",
      classOf[HiveConf.ConfVars],
      classOf[TimeUnit])

  // NOTE: `holdDDLTime` below is inherited from a parent shim (defined
  // outside this class); presumably it is a constant FALSE-like flag.
  override def loadPartition(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean,
      isSrcLocal: Boolean): Unit = {
    // `x: JBoolean` ascriptions force boxing so reflection matches boolean params.
    loadPartitionMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      holdDDLTime, inheritTableSpecs: JBoolean, isSkewedStoreAsSubdir: JBoolean,
      isSrcLocal: JBoolean, isAcid)
  }

  override def loadTable(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      replace: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadTableMethod.invoke(hive, loadPath, tableName, replace: JBoolean, holdDDLTime,
      isSrcLocal: JBoolean, isSkewedStoreAsSubdir, isAcid)
  }

  override def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit = {
    loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      numDP: JInteger, holdDDLTime, listBucketingEnabled: JBoolean, isAcid)
  }

  override def dropTable(
      hive: Hive,
      dbName: String,
      tableName: String,
      deleteData: Boolean,
      ignoreIfNotExists: Boolean,
      purge: Boolean): Unit = {
    dropTableMethod.invoke(hive, dbName, tableName, deleteData: JBoolean,
      ignoreIfNotExists: JBoolean, purge: JBoolean)
  }

  // Reads the metastore client's connect-retry delay, normalized to millis.
  override def getMetastoreClientConnectRetryDelayMillis(conf: HiveConf): Long = {
    getTimeVarMethod.invoke(
      conf,
      HiveConf.ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY,
      TimeUnit.MILLISECONDS).asInstanceOf[Long]
  }
}
private[client] class Shim_v1_0 extends Shim_v0_14
/**
 * Shim for Hive 1.1.x, which added a "throw if missing" flag to dropIndex.
 * (`deleteDataInDropIndex` is inherited from a parent shim.)
 */
private[client] class Shim_v1_1 extends Shim_v1_0 {

  // throws an exception if the index does not exist
  protected lazy val throwExceptionInDropIndex = JBoolean.TRUE

  private lazy val dropIndexMethod =
    findMethod(
      classOf[Hive],
      "dropIndex",
      classOf[String],
      classOf[String],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE)

  override def dropIndex(hive: Hive, dbName: String, tableName: String, indexName: String): Unit = {
    dropIndexMethod.invoke(hive, dbName, tableName, indexName, throwExceptionInDropIndex,
      deleteDataInDropIndex)
  }
}
/**
 * Shim for Hive 1.2.x. loadDynamicPartitions gained a transaction id, and
 * dropPartition switched to the PartitionDropOptions holder class (loaded
 * reflectively since it does not exist in earlier Hive versions).
 */
private[client] class Shim_v1_2 extends Shim_v1_1 {

  // txnId can be 0 unless isAcid == true
  protected lazy val txnIdInLoadDynamicPartitions: JLong = 0L

  private lazy val loadDynamicPartitionsMethod =
    findMethod(
      classOf[Hive],
      "loadDynamicPartitions",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JLong.TYPE)

  // PartitionDropOptions and its public fields, resolved reflectively.
  private lazy val dropOptionsClass =
    Utils.classForName("org.apache.hadoop.hive.metastore.PartitionDropOptions")
  private lazy val dropOptionsDeleteData = dropOptionsClass.getField("deleteData")
  private lazy val dropOptionsPurge = dropOptionsClass.getField("purgeData")
  private lazy val dropPartitionMethod =
    findMethod(
      classOf[Hive],
      "dropPartition",
      classOf[String],
      classOf[String],
      classOf[JList[String]],
      dropOptionsClass)

  override def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit = {
    loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      numDP: JInteger, holdDDLTime, listBucketingEnabled: JBoolean, isAcid,
      txnIdInLoadDynamicPartitions)
  }

  override def dropPartition(
      hive: Hive,
      dbName: String,
      tableName: String,
      part: JList[String],
      deleteData: Boolean,
      purge: Boolean): Unit = {
    // Instantiate the options holder reflectively and set its public fields.
    val dropOptions = dropOptionsClass.getConstructor().newInstance().asInstanceOf[Object]
    dropOptionsDeleteData.setBoolean(dropOptions, deleteData)
    dropOptionsPurge.setBoolean(dropOptions, purge)
    dropPartitionMethod.invoke(hive, dbName, tableName, part, dropOptions)
  }
}
/**
 * Shim for Hive 2.0.x. The `holdDDLTime` flag was removed from the load
 * APIs, so each load method loses one boolean compared with the 1.x shims.
 */
private[client] class Shim_v2_0 extends Shim_v1_2 {

  private lazy val loadPartitionMethod =
    findMethod(
      classOf[Hive],
      "loadPartition",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadTableMethod =
    findMethod(
      classOf[Hive],
      "loadTable",
      classOf[Path],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadDynamicPartitionsMethod =
    findMethod(
      classOf[Hive],
      "loadDynamicPartitions",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JLong.TYPE)

  override def loadPartition(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadPartitionMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      inheritTableSpecs: JBoolean, isSkewedStoreAsSubdir: JBoolean,
      isSrcLocal: JBoolean, isAcid)
  }

  override def loadTable(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      replace: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadTableMethod.invoke(hive, loadPath, tableName, replace: JBoolean, isSrcLocal: JBoolean,
      isSkewedStoreAsSubdir, isAcid)
  }

  override def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit = {
    loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      numDP: JInteger, listBucketingEnabled: JBoolean, isAcid, txnIdInLoadDynamicPartitions)
  }
}
/**
 * Shim for Hive 2.1.x. The load APIs gained a "has following stats task"
 * flag (and an AcidUtils.Operation for dynamic partitions), and
 * alterTable/alterPartitions now take an EnvironmentContext.
 */
private[client] class Shim_v2_1 extends Shim_v2_0 {

  // true if there is any following stats task
  protected lazy val hasFollowingStatsTask = JBoolean.FALSE
  // TODO: Now, always set environmentContext to null. In the future, we should avoid setting
  // hive-generated stats to -1 when altering tables by using environmentContext. See Hive-12730
  protected lazy val environmentContextInAlterTable = null

  private lazy val loadPartitionMethod =
    findMethod(
      classOf[Hive],
      "loadPartition",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadTableMethod =
    findMethod(
      classOf[Hive],
      "loadTable",
      classOf[Path],
      classOf[String],
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE)
  private lazy val loadDynamicPartitionsMethod =
    findMethod(
      classOf[Hive],
      "loadDynamicPartitions",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      JBoolean.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JLong.TYPE,
      JBoolean.TYPE,
      classOf[AcidUtils.Operation])
  private lazy val alterTableMethod =
    findMethod(
      classOf[Hive],
      "alterTable",
      classOf[String],
      classOf[Table],
      classOf[EnvironmentContext])
  private lazy val alterPartitionsMethod =
    findMethod(
      classOf[Hive],
      "alterPartitions",
      classOf[String],
      classOf[JList[Partition]],
      classOf[EnvironmentContext])

  override def loadPartition(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadPartitionMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      inheritTableSpecs: JBoolean, isSkewedStoreAsSubdir: JBoolean,
      isSrcLocal: JBoolean, isAcid, hasFollowingStatsTask)
  }

  override def loadTable(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      replace: Boolean,
      isSrcLocal: Boolean): Unit = {
    loadTableMethod.invoke(hive, loadPath, tableName, replace: JBoolean, isSrcLocal: JBoolean,
      isSkewedStoreAsSubdir, isAcid, hasFollowingStatsTask)
  }

  override def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit = {
    // Spark only performs non-ACID loads, hence Operation.NOT_ACID.
    loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, replace: JBoolean,
      numDP: JInteger, listBucketingEnabled: JBoolean, isAcid, txnIdInLoadDynamicPartitions,
      hasFollowingStatsTask, AcidUtils.Operation.NOT_ACID)
  }

  override def alterTable(hive: Hive, tableName: String, table: Table): Unit = {
    alterTableMethod.invoke(hive, tableName, table, environmentContextInAlterTable)
  }

  override def alterPartitions(hive: Hive, tableName: String, newParts: JList[Partition]): Unit = {
    alterPartitionsMethod.invoke(hive, tableName, newParts, environmentContextInAlterTable)
  }
}
private[client] class Shim_v2_2 extends Shim_v2_1
/**
 * Shim for Hive 2.3.x, which introduced table listing filtered by
 * TableType (e.g. to distinguish views from managed tables).
 * Note this extends Shim_v2_1 directly, bypassing the empty Shim_v2_2.
 */
private[client] class Shim_v2_3 extends Shim_v2_1 {

  private lazy val getTablesByTypeMethod =
    findMethod(
      classOf[Hive],
      "getTablesByType",
      classOf[String],
      classOf[String],
      classOf[TableType])

  override def getTablesByType(
      hive: Hive,
      dbName: String,
      pattern: String,
      tableType: TableType): Seq[String] = {
    getTablesByTypeMethod.invoke(hive, dbName, pattern, tableType)
      .asInstanceOf[JList[String]].asScala
  }
}
/**
 * Shim for Hive 3.0.x. The load APIs were reworked around a LoadFileType
 * enum (REPLACE_ALL vs KEEP_EXISTING) and write/statement ids; the enum
 * class is loaded reflectively since it does not exist in older Hive.
 */
private[client] class Shim_v3_0 extends Shim_v2_3 {

  // Spark supports only non-ACID operations
  protected lazy val isAcidIUDoperation = JBoolean.FALSE
  // Writer ID can be 0 for non-ACID operations
  protected lazy val writeIdInLoadTableOrPartition: JLong = 0L
  // Statement ID
  protected lazy val stmtIdInLoadTableOrPartition: JInteger = 0
  protected lazy val listBucketingLevel: JInteger = 0

  // LoadTableDesc$LoadFileType enum, resolved via the current classloader.
  private lazy val clazzLoadFileType = getClass.getClassLoader.loadClass(
    "org.apache.hadoop.hive.ql.plan.LoadTableDesc$LoadFileType")

  private lazy val loadPartitionMethod =
    findMethod(
      classOf[Hive],
      "loadPartition",
      classOf[Path],
      classOf[Table],
      classOf[JMap[String, String]],
      clazzLoadFileType,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      classOf[JLong],
      JInteger.TYPE,
      JBoolean.TYPE)
  private lazy val loadTableMethod =
    findMethod(
      classOf[Hive],
      "loadTable",
      classOf[Path],
      classOf[String],
      clazzLoadFileType,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      JBoolean.TYPE,
      classOf[JLong],
      JInteger.TYPE,
      JBoolean.TYPE)
  private lazy val loadDynamicPartitionsMethod =
    findMethod(
      classOf[Hive],
      "loadDynamicPartitions",
      classOf[Path],
      classOf[String],
      classOf[JMap[String, String]],
      clazzLoadFileType,
      JInteger.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      JLong.TYPE,
      JInteger.TYPE,
      JBoolean.TYPE,
      classOf[AcidUtils.Operation],
      JBoolean.TYPE)

  override def loadPartition(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      inheritTableSpecs: Boolean,
      isSkewedStoreAsSubdir: Boolean,
      isSrcLocal: Boolean): Unit = {
    // Hive 3.0 loadPartition takes a Table instead of a name; it is resolved
    // against the active session's *current* database — NOTE(review): this
    // assumes `tableName` is unqualified; confirm callers never pass
    // "db.table" here.
    val session = SparkSession.getActiveSession
    assert(session.nonEmpty)
    val database = session.get.sessionState.catalog.getCurrentDatabase
    val table = hive.getTable(database, tableName)
    // Enum constants are matched by name because the class is loaded reflectively.
    val loadFileType = if (replace) {
      clazzLoadFileType.getEnumConstants.find(_.toString.equalsIgnoreCase("REPLACE_ALL"))
    } else {
      clazzLoadFileType.getEnumConstants.find(_.toString.equalsIgnoreCase("KEEP_EXISTING"))
    }
    assert(loadFileType.isDefined)
    // NOTE(review): this passes `isAcid` while loadTable passes
    // `isAcidIUDoperation` — both are FALSE today, but verify which flag is
    // intended if either ever changes.
    loadPartitionMethod.invoke(hive, loadPath, table, partSpec, loadFileType.get,
      inheritTableSpecs: JBoolean, isSkewedStoreAsSubdir: JBoolean,
      isSrcLocal: JBoolean, isAcid, hasFollowingStatsTask,
      writeIdInLoadTableOrPartition, stmtIdInLoadTableOrPartition, replace: JBoolean)
  }

  override def loadTable(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      replace: Boolean,
      isSrcLocal: Boolean): Unit = {
    val loadFileType = if (replace) {
      clazzLoadFileType.getEnumConstants.find(_.toString.equalsIgnoreCase("REPLACE_ALL"))
    } else {
      clazzLoadFileType.getEnumConstants.find(_.toString.equalsIgnoreCase("KEEP_EXISTING"))
    }
    assert(loadFileType.isDefined)
    loadTableMethod.invoke(hive, loadPath, tableName, loadFileType.get, isSrcLocal: JBoolean,
      isSkewedStoreAsSubdir, isAcidIUDoperation, hasFollowingStatsTask,
      writeIdInLoadTableOrPartition, stmtIdInLoadTableOrPartition: JInteger, replace: JBoolean)
  }

  override def loadDynamicPartitions(
      hive: Hive,
      loadPath: Path,
      tableName: String,
      partSpec: JMap[String, String],
      replace: Boolean,
      numDP: Int,
      listBucketingEnabled: Boolean): Unit = {
    // NOTE(review): `listBucketingEnabled` is intentionally unused here in
    // favor of the constant `listBucketingLevel` — confirm this matches the
    // Hive 3.0 parameter meaning (int level vs boolean flag).
    val loadFileType = if (replace) {
      clazzLoadFileType.getEnumConstants.find(_.toString.equalsIgnoreCase("REPLACE_ALL"))
    } else {
      clazzLoadFileType.getEnumConstants.find(_.toString.equalsIgnoreCase("KEEP_EXISTING"))
    }
    assert(loadFileType.isDefined)
    loadDynamicPartitionsMethod.invoke(hive, loadPath, tableName, partSpec, loadFileType.get,
      numDP: JInteger, listBucketingLevel, isAcid, writeIdInLoadTableOrPartition,
      stmtIdInLoadTableOrPartition, hasFollowingStatsTask, AcidUtils.Operation.NOT_ACID,
      replace: JBoolean)
  }
}
private[client] class Shim_v3_1 extends Shim_v3_0
| spark-test/spark | sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala | Scala | apache-2.0 | 46,404 |
package nl.anchormen.sbt
import sbt.Keys._
import sbt._
import sbt.plugins.JvmPlugin
/**
* Created by lawrence on 11-10-16.
*/
/**
 * sbt AutoPlugin that adds two build-environment configurations, `dev` and
 * `prod`, each extending Compile. Both reuse Compile's source/resource
 * directories and add an environment-specific resource directory
 * (src/<env>/resources) via [[EnvironmentSettings]].
 *
 * Triggered automatically for every project (requires only JvmPlugin).
 */
object BuildEnvironmentsPlugin extends AutoPlugin {

  override def requires: JvmPlugin.type = plugins.JvmPlugin

  // Enable without an explicit `enablePlugins` call.
  override def trigger: PluginTrigger = allRequirements

  // Members of autoImport are brought into scope of every build.sbt.
  object autoImport {
    lazy val Dev: Configuration = config("dev") extend Compile
    lazy val Prod: Configuration = config("prod") extend Compile
  }
  import autoImport._

  // Base settings shared by both environments: classpath machinery plus
  // Compile's managed/unmanaged source and resource directories.
  lazy val baseSettings: Seq[Def.Setting[_]] = Classpaths.configSettings ++
    Defaults.baseClasspaths ++
    Defaults.configSettings ++
    Seq(
      managedResourceDirectories := (managedResourceDirectories in Compile).value,
      managedSourceDirectories := (managedSourceDirectories in Compile).value,
      unmanagedSourceDirectories := (unmanagedSourceDirectories in Compile).value,
      unmanagedResourceDirectories := (unmanagedResourceDirectories in Compile).value
    )

  lazy val baseDevSettings: Seq[Def.Setting[_]] = EnvironmentSettings("dev")
  lazy val baseProdSettings: Seq[Def.Setting[_]] = EnvironmentSettings("prod")

  // Scope each environment's settings into its own configuration.
  override lazy val projectSettings: Seq[Def.Setting[_]] = inConfig(Dev)(baseDevSettings) ++ inConfig(Prod)(baseProdSettings)
}
/**
 * Builds the sbt settings for one named build environment: the plugin's
 * shared base settings plus an extra unmanaged resource directory at
 * src/<environmentName>/resources.
 */
object EnvironmentSettings {
  def apply(environmentName: String): Seq[Def.Setting[_]] = {
    val environmentResources =
      unmanagedResourceDirectories += baseDirectory.value / "src" / environmentName / "resources"
    BuildEnvironmentsPlugin.baseSettings :+ environmentResources
  }
}
| Anchormen/sbt-build-environment | src/main/scala/nl/anchormen/sbt/BuildEnvironmentsPlugin.scala | Scala | mit | 1,454 |
package com.thoughtworks.deeplearning
package plugins
import com.thoughtworks.feature.Factory.inject
import com.thoughtworks.feature.{Factory, ImplicitApply, PartialApply}
import com.thoughtworks.raii.asynchronous.Do
import com.thoughtworks.raii.asynchronous.Do._
import shapeless.Witness
/** A plugin to create [[scala.Float]] weights.
*
* @note Custom optimization algorithm for updating [[FloatWeight]] can be implemented by creating a plugin
* that provides an overridden [[FloatOptimizer]] that provides an overridden [[FloatOptimizer.delta]].
*
* @author 杨博 (Yang Bo)
*/
trait FloatWeights extends Weights {

  /**
   * A weight whose data and delta are both `Float`. The optimizer is wired
   * in via injected Factory/PartialApply machinery so that plugins can
   * override the optimization algorithm without changing this trait.
   */
  trait FloatWeightApi extends WeightApi { this: FloatWeight =>

    override type Delta = Float
    override type Data = Float

    override protected type PartiallyAppliedOptimizer = floatPartialApplyOriginalDelta.Rest

    // Applies one optimization step: build an optimizer instance bound to
    // this weight and the raw gradient, ask it for the effective delta, and
    // subtract it from the weight's data.
    override protected def backward[SubtypeOfOptimizer](originalDelta: Float)(
        implicit implicitApplyRest: ImplicitApply.Aux[PartiallyAppliedOptimizer, SubtypeOfOptimizer],
        asOptimizer: SubtypeOfOptimizer <:< OptimizerApi { type Delta <: Float }): Do[Unit] = {
      Do.delay {
        val delta =
          implicitApplyRest(
            floatPartialApplyOriginalDelta(floatPartialApplyWeight(floatOptimizerFactory.newInstance,
                                                                   floatWeightParameter(this)),
                                           floatOriginalDeltaParameter(originalDelta))).delta
        // Weights may be updated from multiple concurrent backward passes;
        // guard the read-modify-write of `data`.
        synchronized {
          data -= delta
        }
      }
    }
  }

  /** @template */
  type FloatWeight <: FloatWeightApi with Weight

  // Injected factory and curried constructor pieces for FloatWeight.
  @inject
  protected val floatWeightFactory: Factory[FloatWeight]

  @inject
  protected val floatPartialApplyData: PartialApply[floatWeightFactory.Constructor, Witness.`"data"`.T]

  @inject
  protected def floatDataParameter: Float <:< floatPartialApplyData.Parameter

  object FloatWeight {

    /** @usecase def apply(data: Float): FloatWeight = ???
      */
    def apply[SubtypeOfWeight, OptimizerFunction, Optimizer](data: Float)(
        implicit implicitApplyRest: ImplicitApply[floatPartialApplyData.Rest]) = {
      implicitApplyRest(floatPartialApplyData(floatWeightFactory.newInstance, floatDataParameter(data)))
    }
  }

  /** Optimizer specialized to `Float` deltas, bound to a [[FloatWeight]]. */
  trait FloatOptimizerApi extends OptimizerApi { this: FloatOptimizer =>

    override type Delta = Float

    val weight: FloatWeight

  }

  /** @template */
  type FloatOptimizer <: FloatOptimizerApi with Optimizer

  // Injected factory and curried constructor pieces for FloatOptimizer:
  // the constructor is partially applied first with "weight", then with
  // "originalDelta" (see backward above).
  @inject
  protected val floatOptimizerFactory: Factory[FloatOptimizer]

  @inject
  protected val floatPartialApplyWeight: PartialApply[floatOptimizerFactory.Constructor, Witness.`"weight"`.T]

  @inject
  protected def floatWeightParameter: FloatWeight <:< floatPartialApplyWeight.Parameter

  @inject
  protected val floatPartialApplyOriginalDelta: PartialApply[floatPartialApplyWeight.Rest, Witness.`"originalDelta"`.T]

  @inject
  protected def floatOriginalDeltaParameter: Float <:< floatPartialApplyOriginalDelta.Parameter

}
| izhangzhihao/DeepLearning.scala | plugins-FloatWeights/src/main/scala/com/thoughtworks/deeplearning/plugins/FloatWeights.scala | Scala | apache-2.0 | 3,033 |
package com.bio4j.dynamograph
import com.bio4j.dynamograph.model.go._
import com.bio4j.dynamograph.model.Properties._
import com.bio4j.dynamograph._
import ohnosequences.typesets._
import ohnosequences.tabula._
import ohnosequences.scarph._
import ohnosequences.tabula.impl._
import com.bio4j.dynamograph.model._
import com.bio4j.dynamograph.reader.VertexReader
import com.bio4j.dynamograph.reader.EdgeReader
/**
 * Minimal test graph model: a single vertex type with one self-referencing
 * edge type, each backed by a DynamoDB table and reader, used to exercise
 * the dynamograph machinery in tests.
 */
object testModel {

  // Properties used by the test schema.
  case object testId extends Property[String]
  case object edgeId extends Property[String]
  case object name extends Property[String]

  object TestVertexType extends VertexTypeWithId(testId, "testVertexType")
  implicit val TestVertexType_id = TestVertexType has testId

  // Self-loop edge type (TestVertexType -> TestVertexType), many-to-many.
  case object TestEdgeType extends EdgeTypeWithId (TestVertexType, sourceId, relationId, "testEdgeType", TestVertexType, targetId)
    with ManyIn with ManyOut

  // Backing DynamoDB tables (EU region) and their readers.
  case object TestVertexTable extends VertexTable(TestVertexType, "TestVertex", EU)
  case object TestEdgeTables extends EdgeTables(TestEdgeType, "TestEdge", EU)

  case object vertexReader extends VertexReader(TestVertexTable, ServiceProvider.dynamoDbExecutor)
  case object edgeReader extends EdgeReader(TestEdgeTables, ServiceProvider.dynamoDbExecutor)

  case object TestVertex extends DynamoVertex(TestVertexType, TestVertexTable, vertexReader)
  case object testEdge extends DynamoEdge(TestVertex, TestEdgeType, TestVertex, TestEdgeTables, edgeReader)
}
| bio4j/dynamograph | src/test/scala/com/bio4j/dynamograph/testModel.scala | Scala | agpl-3.0 | 1,477 |
package com.nulabinc.backlog.migration.importer.core
import com.nulabinc.backlog.migration.common.dsl.ConsoleDSL
import com.nulabinc.backlog.migration.common.utils.Logging
import com.nulabinc.backlog4j.BacklogAPIException
import monix.eval.Task
import monix.execution.Scheduler
case class RetryException(throwables: List[Throwable]) extends Exception(throwables.toString())
/*
Thank you: http://d.hatena.ne.jp/j5ik2o/20120627/1340748199#20120627fn1
*/
/**
 * Generic retry helpers. `retryLimit` is the *total* number of attempts
 * (e.g. retryLimit = 3 means at most 3 executions of `f`); once exhausted,
 * a [[RetryException]] carrying every observed failure is thrown.
 * Exceptions not matched by `shouldCatch` are rethrown immediately.
 */
object RetryUtil extends Logging {

  import scala.util.control.Exception.allCatch

  // Retry any Throwable, no delay between attempts.
  def retry[T](retryLimit: Int)(f: => T)(implicit consoleDSL: ConsoleDSL[Task], s: Scheduler): T =
    retry(retryLimit, 0, classOf[Throwable])(f)

  // Retry any Throwable with a fixed delay (milliseconds) between attempts.
  def retry[T](retryLimit: Int, retryInterval: Int)(
      f: => T
  )(implicit consoleDSL: ConsoleDSL[Task], s: Scheduler): T =
    retry(retryLimit, retryInterval, classOf[Throwable])(f)

  // Retry only exceptions assignable to one of the given classes.
  def retry[T](retryLimit: Int, catchExceptionClasses: Class[_]*)(
      f: => T
  )(implicit consoleDSL: ConsoleDSL[Task], s: Scheduler): T =
    retry(
      retryLimit,
      0,
      e => catchExceptionClasses.exists(_.isAssignableFrom(e.getClass))
    )(f)

  // Retry only exceptions accepted by the predicate, no delay.
  def retry[T](retryLimit: Int, shouldCatch: Throwable => Boolean)(
      f: => T
  )(implicit consoleDSL: ConsoleDSL[Task], s: Scheduler): T =
    retry(retryLimit, 0, shouldCatch)(f)

  def retry[T](
      retryLimit: Int,
      retryInterval: Int,
      catchExceptionClasses: Class[_]*
  )(f: => T)(implicit consoleDSL: ConsoleDSL[Task], s: Scheduler): T =
    retry(
      retryLimit,
      retryInterval,
      e => catchExceptionClasses.exists(_.isAssignableFrom(e.getClass))
    )(f)

  /**
   * Backlog-specific variant: retries any failure except a
   * BacklogAPIException whose message mentions "No comment content",
   * which is treated as permanent and rethrown immediately.
   */
  def retryBacklogAPIException[T](
      retryLimit: Int,
      retryInterval: Int
  )(f: => T)(implicit consoleDSL: ConsoleDSL[Task], s: Scheduler): T =
    retry(
      retryLimit = retryLimit,
      retryInterval = retryInterval,
      shouldCatch = e =>
        e match {
          case ex: BacklogAPIException =>
            !ex.getMessage.contains("No comment content")
          case _ =>
            true
        }
    )(f)

  /**
   * Core implementation. Accumulates failures in `errors` (most recent
   * first) and gives up once `errors.size` reaches `retryLimit - 1`, i.e.
   * after `retryLimit` total attempts. The console warning is emitted
   * synchronously (blocks via runSyncUnsafe) before each sleep.
   */
  def retry[T](
      retryLimit: Int,
      retryInterval: Int,
      shouldCatch: Throwable => Boolean
  )(f: => T)(implicit consoleDSL: ConsoleDSL[Task], s: Scheduler): T = {
    @annotation.tailrec
    def retry0(errors: List[Throwable], f: => T): T = {
      allCatch.either(f) match {
        case Right(r) => r
        case Left(e) =>
          if (shouldCatch(e)) {
            if (retryLimit > 0) {
              val message =
                s"(${errors.size + 1} / $retryLimit) Retrying... ${e.getMessage}"
              ConsoleDSL[Task].warning(message).runSyncUnsafe()
            }
            if (errors.size < retryLimit - 1) {
              // Blocking back-off between attempts.
              Thread.sleep(retryInterval)
              retry0(e :: errors, f)
            } else {
              throw RetryException(e :: errors)
            }
          } else throw e
      }
    }
    retry0(Nil, f)
  }
}
| nulab/backlog-migration-common | importer/src/main/scala/com/nulabinc/backlog/migration/importer/core/RetryUtil.scala | Scala | mit | 2,940 |
/*
* Copyright 2009-2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.norbert
package network
package netty
import java.util.UUID
import org.jboss.netty.channel._
import protos.NorbertProtos
import logging.Logging
import jmx.JMX.MBean
import jmx.JMX
import java.util.concurrent.{ScheduledThreadPoolExecutor, TimeUnit, ConcurrentHashMap}
import com.google.protobuf.ByteString
import cluster.Node
import scala.math._
import client.NetworkClientConfig
import common._
import norbertutils._
import network.client.ResponseHandler
import norbertutils.{Clock, SystemClock}
import java.util.{Map => JMap}
import java.util.concurrent.atomic.{AtomicInteger, AtomicLong}
import util.ProtoUtils
/**
* Netty channel handler shared by all client channels ("all" pipeline coverage).
* Correlates outgoing Request objects with incoming NorbertMessage responses via the
* request UUID, records per-node request statistics, and feeds failures into the
* strategies used to decide whether a node can currently serve requests.
*/
@ChannelPipelineCoverage("all")
class ClientChannelHandler(clientName: Option[String],
serviceName: String,
staleRequestTimeoutMins: Int,
staleRequestCleanupFrequencyMins: Int,
requestStatisticsWindow: Long,
outlierMultiplier: Double,
outlierConstant: Double,
responseHandler: ResponseHandler,
avoidByteStringCopy: Boolean,
stats : CachedNetworkStatistics[Node, UUID],
routeAway: Option[ClientStatisticsRequestStrategy.RoutingAwayCallback] = None)
extends SimpleChannelHandler with Logging {
// outstanding requests awaiting a response, keyed by request UUID
private val requestMap = new ConcurrentHashMap[UUID, Request[_, _]]
/** Periodic task that evicts requests that never received a response within the timeout. */
val cleanupTask = new Runnable() {
val staleRequestTimeoutMillis = TimeUnit.MILLISECONDS.convert(staleRequestTimeoutMins, TimeUnit.MINUTES)
override def run = {
try {
import collection.JavaConversions._
var expiredEntryCount = 0
requestMap.keySet.foreach { uuid =>
// re-read: the entry may have been completed/removed concurrently
val request = Option(requestMap.get(uuid))
val now = System.currentTimeMillis
request.foreach { r =>
if ((now - r.timestamp) > staleRequestTimeoutMillis) {
requestMap.remove(uuid)
stats.endRequest(r.node, r.id)
expiredEntryCount += 1
}
}
}
if (expiredEntryCount > 0) {
log.info("Expired %d stale entries from the request map".format(expiredEntryCount))
}
} catch {
case e: InterruptedException =>
Thread.currentThread.interrupt
log.error(e, "Interrupted exception in cleanup task")
case e: Exception => log.error(e, "Exception caught in cleanup task, ignoring ")
}
}
}
val clock = SystemClock
val cleanupExecutor = new ScheduledThreadPoolExecutor(1)
cleanupExecutor.scheduleAtFixedRate(cleanupTask, staleRequestCleanupFrequencyMins, staleRequestCleanupFrequencyMins, TimeUnit.MINUTES)
// strategies deciding whether a node may serve requests: latency outliers + server-reported backoff
val clientStatsStrategy = new ClientStatisticsRequestStrategy(stats, outlierMultiplier, outlierConstant, clock, routeAway)
val serverErrorStrategy = new SimpleBackoffStrategy(clock)
val clientStatsStrategyJMX = JMX.register(new ClientStatisticsRequestStrategyMBeanImpl(clientName, serviceName, clientStatsStrategy))
val serverErrorStrategyJMX = JMX.register(new ServerErrorStrategyMBeanImpl(clientName, serviceName, serverErrorStrategy))
val strategy = CompositeCanServeRequestStrategy(clientStatsStrategy, serverErrorStrategy)
private val statsJMX = JMX.register(new NetworkClientStatisticsMBeanImpl(clientName, serviceName, stats, clientStatsStrategy))
/** Serializes an outgoing Request into a NorbertMessage and registers it for response correlation. */
override def writeRequested(ctx: ChannelHandlerContext, e: MessageEvent) = {
val request = e.getMessage.asInstanceOf[Request[_, _]]
log.debug("Writing request: %s".format(request))
// requests without a callback are fire-and-forget and are not tracked
if(!request.callback.isEmpty) {
requestMap.put(request.id, request)
stats.beginRequest(request.node, request.id, 0)
}
val message = NorbertProtos.NorbertMessage.newBuilder
message.setRequestIdMsb(request.id.getMostSignificantBits)
message.setRequestIdLsb(request.id.getLeastSignificantBits)
message.setMessageName(request.name)
request.headers.foreach { header =>
message.addHeader(NorbertProtos.NorbertMessage.Header.newBuilder.setKey(header._1).setValue(header._2).build)
}
message.setMessage(ProtoUtils.byteArrayToByteString(request.requestBytes, avoidByteStringCopy))
super.writeRequested(ctx, new DownstreamMessageEvent(e.getChannel, e.getFuture, message.build, e.getRemoteAddress))
}
/** Correlates an incoming NorbertMessage with its pending request and dispatches success/failure. */
override def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent) = {
val message = e.getMessage.asInstanceOf[NorbertProtos.NorbertMessage]
log.debug("Received message: %s".format(message))
val requestId = new UUID(message.getRequestIdMsb, message.getRequestIdLsb)
// NOTE(review): get-then-remove is not atomic; a racing cleanupTask eviction could let
// the same entry be processed twice — consider using requestMap.remove(requestId)'s
// return value instead. Confirm before changing.
requestMap.get(requestId) match {
case null => {
log.warn("Received a response message UUID: [%s] without a corresponding request from %s".format(requestId, ctx.getChannel().getRemoteAddress()))
}
case request =>
requestMap.remove(requestId)
stats.endRequest(request.node, request.id)
if (message.getStatus == NorbertProtos.NorbertMessage.Status.OK) {
responseHandler.onSuccess(request, message)
} else if (message.getStatus == NorbertProtos.NorbertMessage.Status.HEAVYLOAD) {
// server asked for backoff: notify the error strategy so traffic is routed away
serverErrorStrategy.notifyFailure(request.node)
processException(request, "Heavy load")
} else if (message.getStatus == NorbertProtos.NorbertMessage.Status.GC) {
//Don't notify backoff strategy for GC failures
processException(request, "GC")
} else {
processException(request, Option(message.getErrorMessage).getOrElse("<null>"))
}
}
// wraps the failure in a RemoteException that also exposes the originating request
def processException[RequestMsg, ResponseMsg](requestCtx: Request[RequestMsg, ResponseMsg], errorMessage: String) {
responseHandler.onFailure(requestCtx, new RemoteException(requestCtx.name, errorMessage) with RequestAccess[Request[RequestMsg, ResponseMsg]] {
def request: Request[RequestMsg, ResponseMsg] = requestCtx
})
}
}
override def exceptionCaught(ctx: ChannelHandlerContext, e: ExceptionEvent) = {
e.getCause match {
case _:ConnectTimeoutException =>
log.warn("Caught connect timeout in network layer")
case cause =>
log.warn(cause, "Caught exception in network layer")
}
}
/** Stops background work and unregisters the JMX beans; call when the client shuts down. */
def shutdown: Unit = {
responseHandler.shutdown
cleanupExecutor.shutdownNow
statsJMX.foreach { JMX.unregister(_) }
serverErrorStrategyJMX.foreach { JMX.unregister(_) }
clientStatsStrategyJMX.foreach { JMX.unregister(_) }
}
}
/**
* Shared math for turning per-node request statistics into a single health score:
* a size-weighted average of response-time percentiles, optionally combining
* pending and finished request stats.
*/
trait HealthScoreCalculator extends Logging {
/** Size-weighted percentile across both pending (p) and finished (f) stats; 0 when both are empty. */
def doCalculation[T](p: Map[T, StatsEntry], f: Map[T, StatsEntry]): Double = {
val fSize = f.values.map(_.size).sum
val pSize = p.values.map(_.size).sum
val fTotal = f.values.map(v => v.percentile * v.size).sum
val pTotal = p.values.map(v => v.percentile * v.size).sum
val result = safeDivide(fTotal + pTotal, fSize + pSize)(0)
if (result < 0.0) {
log.warn("Found a negative result when calculating weighted median. Pending = %s. Finished = %s. fSize = %s. pSize = %s. fTotal = %s. pTotal = %s"
.format(p, f, fSize, pSize, fTotal, pTotal))
}
result
}
/** Size-weighted percentile over a single stats map; 0.0 when the map is empty. */
def averagePercentiles[T](s: Map[T, StatsEntry]): Double = {
val weight = s.values.map(_.size).sum
val weightedTotal = s.values.map(v => v.percentile * v.size).sum
safeDivide(weightedTotal, weight)(0.0)
}
}
/**
* CanServeRequestStrategy that marks a node unavailable when its median response time
* is an outlier relative to the cluster, i.e. when
* nodeMedian > clusterMedian * outlierMultiplier + outlierConstant.
* The per-node availability map is cached via CacheMaintainer (200L interval —
* presumably milliseconds; confirm against CacheMaintainer) so the per-request
* check stays cheap.
*/
class ClientStatisticsRequestStrategy(val stats: CachedNetworkStatistics[Node, UUID],
@volatile var outlierMultiplier: Double,
@volatile var outlierConstant: Double,
clock: Clock,
routeAway: Option[ClientStatisticsRequestStrategy.RoutingAwayCallback] = None)
extends CanServeRequestStrategy with Logging with HealthScoreCalculator {
// Must be more than outlierMultiplier * average + outlierConstant ms the others by default
val totalNodesMarkedDown = new AtomicLong(0)
val totalNumReroutes = new AtomicLong(0)
// lazily recomputed map: node -> "may this node serve requests?"
val canServeRequests = CacheMaintainer(clock, 200L, () => {
// 0.5 => use the median (50th percentile) of pending/finished request times
val s = stats.getStatistics(0.5)
val (p, f) = (s.map(_.pending).getOrElse(Map.empty), s.map(_.finished).getOrElse(Map.empty))
val clusterMedian = doCalculation(p, f)
f.map { case (n, nodeN) =>
// a node with finished stats but no pending stats contributes an empty pending entry
val nodeP = p.get(n).getOrElse(StatsEntry(0.0, 0, 0))
val nodeMedian = doCalculation(Map(0 -> nodeP),Map(0 -> nodeN))
val available = nodeMedian <= clusterMedian * outlierMultiplier + outlierConstant
if (!available) {
// either invoke the caller-supplied callback or just log the re-route decision
routeAway match {
case Some(callback) => callback(n, nodeMedian, clusterMedian)
case None =>
log.info("Node %s has a median response time of %f. The cluster response time is %f. Routing requests away temporarily.".format(n, nodeMedian, clusterMedian))
}
totalNodesMarkedDown.incrementAndGet
}
(n, available)
}
})
/** True unless the cached statistics mark `node` as a latency outlier; defaults to true when stats are missing. */
def canServeRequest(node: Node) = {
val map = canServeRequests.get
val canServe = map.flatMap(_.get(node)).getOrElse(true)
if (!canServe) {
totalNumReroutes.incrementAndGet
}
canServe
}
}
object ClientStatisticsRequestStrategy extends Logging {
/* receiver node, receiver median, cluster median */
// Invoked (instead of the default log message) whenever a node is marked as an outlier.
type RoutingAwayCallback = (Node, Double, Double) => Unit
}
/** JMX view of a ClientStatisticsRequestStrategy: read and tune the outlier thresholds at runtime. */
trait ClientStatisticsRequestStrategyMBean extends CanServeRequestStrategyMBean {
def getOutlierMultiplier: Double
def getOutlierConstant: Double
def setOutlierMultiplier(m: Double)
def setOutlierConstant(c: Double)
// number of times any node has been marked down as a latency outlier
def getTotalNodesMarkedDown: Long
}
/** JMX adapter exposing a ClientStatisticsRequestStrategy's state and tunables. */
class ClientStatisticsRequestStrategyMBeanImpl(clientName: Option[String], serviceName: String, strategy: ClientStatisticsRequestStrategy)
extends MBean(classOf[ClientStatisticsRequestStrategyMBean], JMX.name(clientName, serviceName))
with ClientStatisticsRequestStrategyMBean {
// JMX-friendly view: node id -> whether requests may currently be routed to it
def getCanServeRequests = {
val availability = strategy.canServeRequests.get.getOrElse(Map.empty)
toJMap(availability.map { case (node, available) => node.id -> available })
}
def getOutlierMultiplier: Double = strategy.outlierMultiplier
def getOutlierConstant: Double = strategy.outlierConstant
def setOutlierMultiplier(m: Double): Unit = { strategy.outlierMultiplier = m }
def setOutlierConstant(c: Double): Unit = { strategy.outlierConstant = c }
def getTotalNodesMarkedDown: Long = strategy.totalNodesMarkedDown.get.abs
}
| thesiddharth/norbert | network/src/main/scala/com/linkedin/norbert/network/netty/ClientChannelHandler.scala | Scala | apache-2.0 | 10,990 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources.v2
import scala.collection.JavaConverters._
import org.apache.spark.SparkException
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
/**
* Shared ALTER TABLE test suite for DataSource V2 catalogs. Concrete suites supply a
* catalog/namespace prefix, a table provider, and a way to read back table metadata.
*/
trait AlterTableTests extends SharedSparkSession {
/** Loads the current metadata for `tableName` from the catalog under test. */
protected def getTableMetadata(tableName: String): Table
// Prefix prepended to table names (e.g. "testcat.ns."); empty when the session catalog is used.
protected val catalogAndNamespace: String
// Table provider used in CREATE TABLE ... USING.
protected val v2Format: String
/**
* Expected fully-qualified name for `tableName` as reported by table metadata.
*
* With no catalog/namespace prefix configured, tables resolve into the session
* catalog's `default` database; otherwise `tableName` already carries the
* catalog-and-namespace prefix and is returned unchanged.
*
* Fix: the else branch previously returned the hard-coded string
* s"${catalogAndNamespace}table_name", ignoring its argument — correct only for
* tables literally named "table_name".
*/
private def fullTableName(tableName: String): String = {
if (catalogAndNamespace.isEmpty) {
s"default.$tableName"
} else {
tableName
}
}
// --- error cases: unresolvable table, and changes rejected by the catalog implementation ---
test("AlterTable: table does not exist") {
val t2 = s"${catalogAndNamespace}fake_table"
withTable(t2) {
sql(s"CREATE TABLE $t2 (id int) USING $v2Format")
// table_name was never created, so the ALTER must fail to resolve it
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE ${catalogAndNamespace}table_name DROP COLUMN id")
}
assert(exc.getMessage.contains(s"${catalogAndNamespace}table_name"))
assert(exc.getMessage.contains("Table or view not found"))
}
}
test("AlterTable: change rejected by implementation") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
val exc = intercept[SparkException] {
sql(s"ALTER TABLE $t DROP COLUMN id")
}
assert(exc.getMessage.contains("Unsupported table change"))
assert(exc.getMessage.contains("Cannot drop all fields")) // from the implementation
// a rejected change must leave the schema untouched
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType().add("id", IntegerType))
}
}
// --- ADD COLUMN / ADD COLUMNS: top-level, with comments, and nested into struct, map key/value, array element ---
test("AlterTable: add top-level column") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
sql(s"ALTER TABLE $t ADD COLUMN data string")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType().add("id", IntegerType).add("data", StringType))
}
}
test("AlterTable: add column with comment") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
sql(s"ALTER TABLE $t ADD COLUMN data string COMMENT 'doc'")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === StructType(Seq(
StructField("id", IntegerType),
StructField("data", StringType).withComment("doc"))))
}
}
test("AlterTable: add multiple columns") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
sql(s"ALTER TABLE $t ADD COLUMNS data string COMMENT 'doc', ts timestamp")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === StructType(Seq(
StructField("id", IntegerType),
StructField("data", StringType).withComment("doc"),
StructField("ts", TimestampType))))
}
}
test("AlterTable: add nested column") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, point struct<x: double, y: double>) USING $v2Format")
sql(s"ALTER TABLE $t ADD COLUMN point.z double")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("point", StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType),
StructField("z", DoubleType)))))
}
}
test("AlterTable: add nested column to map key") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points map<struct<x: double, y: double>, bigint>) " +
s"USING $v2Format")
sql(s"ALTER TABLE $t ADD COLUMN points.key.z double")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", MapType(StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType),
StructField("z", DoubleType))), LongType)))
}
}
test("AlterTable: add nested column to map value") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points map<string, struct<x: double, y: double>>) " +
s"USING $v2Format")
sql(s"ALTER TABLE $t ADD COLUMN points.value.z double")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", MapType(StringType, StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType),
StructField("z", DoubleType))))))
}
}
test("AlterTable: add nested column to array element") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points array<struct<x: double, y: double>>) USING $v2Format")
sql(s"ALTER TABLE $t ADD COLUMN points.element.z double")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", ArrayType(StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType),
StructField("z", DoubleType))))))
}
}
test("AlterTable: add complex column") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
sql(s"ALTER TABLE $t ADD COLUMN points array<struct<x: double, y: double>>")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", ArrayType(StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType))))))
}
}
test("AlterTable: add nested column with comment") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points array<struct<x: double, y: double>>) USING $v2Format")
sql(s"ALTER TABLE $t ADD COLUMN points.element.z double COMMENT 'doc'")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", ArrayType(StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType),
StructField("z", DoubleType).withComment("doc"))))))
}
}
test("AlterTable: add nested column parent must exist") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t ADD COLUMN point.z double")
}
assert(exc.getMessage.contains("point"))
assert(exc.getMessage.contains("missing field"))
}
}
// --- ALTER COLUMN ... TYPE: widening works on leaves; container types must be updated via
// --- their key/value/element paths; missing fields and incompatible casts are rejected ---
test("AlterTable: update column type int -> long") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN id TYPE bigint")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType().add("id", LongType))
}
}
test("AlterTable: update nested type float -> double") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, point struct<x: float, y: double>) USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN point.x TYPE double")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("point", StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType)))))
}
}
test("AlterTable: update column with struct type fails") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, point struct<x: double, y: double>) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t ALTER COLUMN point TYPE struct<x: double, y: double, z: double>")
}
assert(exc.getMessage.contains("point"))
assert(exc.getMessage.contains("update a struct by adding, deleting, or updating its fields"))
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("point", StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType)))))
}
}
test("AlterTable: update column with array type fails") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points array<int>) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t ALTER COLUMN points TYPE array<long>")
}
assert(exc.getMessage.contains("update the element by updating points.element"))
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", ArrayType(IntegerType)))
}
}
test("AlterTable: update column array element type") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points array<int>) USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN points.element TYPE long")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", ArrayType(LongType)))
}
}
test("AlterTable: update column with map type fails") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, m map<string, int>) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t ALTER COLUMN m TYPE map<string, long>")
}
assert(exc.getMessage.contains("update a map by updating m.key or m.value"))
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("m", MapType(StringType, IntegerType)))
}
}
test("AlterTable: update column map value type") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, m map<string, int>) USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN m.value TYPE long")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("m", MapType(StringType, LongType)))
}
}
test("AlterTable: update nested type in map key") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points map<struct<x: float, y: double>, bigint>) " +
s"USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN points.key.x TYPE double")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", MapType(StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType))), LongType)))
}
}
test("AlterTable: update nested type in map value") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points map<string, struct<x: float, y: double>>) " +
s"USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN points.value.x TYPE double")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", MapType(StringType, StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType))))))
}
}
test("AlterTable: update nested type in array") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points array<struct<x: float, y: double>>) USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN points.element.x TYPE double")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", ArrayType(StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType))))))
}
}
test("AlterTable: update column must exist") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t ALTER COLUMN data TYPE string")
}
assert(exc.getMessage.contains("data"))
assert(exc.getMessage.contains("missing field"))
}
}
test("AlterTable: nested update column must exist") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t ALTER COLUMN point.x TYPE double")
}
assert(exc.getMessage.contains("point.x"))
assert(exc.getMessage.contains("missing field"))
}
}
test("AlterTable: update column type must be compatible") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t ALTER COLUMN id TYPE boolean")
}
assert(exc.getMessage.contains("id"))
assert(exc.getMessage.contains("int cannot be cast to boolean"))
}
}
// --- ALTER COLUMN ... COMMENT: on top-level and nested fields (struct, map key/value, array element) ---
test("AlterTable: update column comment") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN id COMMENT 'doc'")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === StructType(Seq(StructField("id", IntegerType).withComment("doc"))))
}
}
test("AlterTable: update column type and comment") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN id TYPE bigint COMMENT 'doc'")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === StructType(Seq(StructField("id", LongType).withComment("doc"))))
}
}
test("AlterTable: update nested column comment") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, point struct<x: double, y: double>) USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN point.y COMMENT 'doc'")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("point", StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType).withComment("doc")))))
}
}
test("AlterTable: update nested column comment in map key") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points map<struct<x: double, y: double>, bigint>) " +
s"USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN points.key.y COMMENT 'doc'")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", MapType(StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType).withComment("doc"))), LongType)))
}
}
test("AlterTable: update nested column comment in map value") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points map<string, struct<x: double, y: double>>) " +
s"USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN points.value.y COMMENT 'doc'")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", MapType(StringType, StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType).withComment("doc"))))))
}
}
test("AlterTable: update nested column comment in array") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points array<struct<x: double, y: double>>) USING $v2Format")
sql(s"ALTER TABLE $t ALTER COLUMN points.element.y COMMENT 'doc'")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", ArrayType(StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType).withComment("doc"))))))
}
}
test("AlterTable: comment update column must exist") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t ALTER COLUMN data COMMENT 'doc'")
}
assert(exc.getMessage.contains("data"))
assert(exc.getMessage.contains("missing field"))
}
}
test("AlterTable: nested comment update column must exist") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t ALTER COLUMN point.x COMMENT 'doc'")
}
assert(exc.getMessage.contains("point.x"))
assert(exc.getMessage.contains("missing field"))
}
}
// --- RENAME COLUMN: top-level and nested fields; renaming a missing field fails ---
test("AlterTable: rename column") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
sql(s"ALTER TABLE $t RENAME COLUMN id TO user_id")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType().add("user_id", IntegerType))
}
}
test("AlterTable: rename nested column") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, point struct<x: double, y: double>) USING $v2Format")
sql(s"ALTER TABLE $t RENAME COLUMN point.y TO t")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("point", StructType(Seq(
StructField("x", DoubleType),
StructField("t", DoubleType)))))
}
}
test("AlterTable: rename nested column in map key") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, point map<struct<x: double, y: double>, bigint>) " +
s"USING $v2Format")
sql(s"ALTER TABLE $t RENAME COLUMN point.key.y TO t")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("point", MapType(StructType(Seq(
StructField("x", DoubleType),
StructField("t", DoubleType))), LongType)))
}
}
test("AlterTable: rename nested column in map value") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points map<string, struct<x: double, y: double>>) " +
s"USING $v2Format")
sql(s"ALTER TABLE $t RENAME COLUMN points.value.y TO t")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", MapType(StringType, StructType(Seq(
StructField("x", DoubleType),
StructField("t", DoubleType))))))
}
}
test("AlterTable: rename nested column in array element") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points array<struct<x: double, y: double>>) USING $v2Format")
sql(s"ALTER TABLE $t RENAME COLUMN points.element.y TO t")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", ArrayType(StructType(Seq(
StructField("x", DoubleType),
StructField("t", DoubleType))))))
}
}
test("AlterTable: rename column must exist") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t RENAME COLUMN data TO some_string")
}
assert(exc.getMessage.contains("data"))
assert(exc.getMessage.contains("missing field"))
}
}
test("AlterTable: nested rename column must exist") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t RENAME COLUMN point.x TO z")
}
assert(exc.getMessage.contains("point.x"))
assert(exc.getMessage.contains("missing field"))
}
}
// --- DROP COLUMN: top-level and nested fields; dropping a missing field fails ---
test("AlterTable: drop column") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, data string) USING $v2Format")
sql(s"ALTER TABLE $t DROP COLUMN data")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType().add("id", IntegerType))
}
}
test("AlterTable: drop nested column") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, point struct<x: double, y: double, t: double>) " +
s"USING $v2Format")
sql(s"ALTER TABLE $t DROP COLUMN point.t")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("point", StructType(Seq(
StructField("x", DoubleType),
StructField("y", DoubleType)))))
}
}
test("AlterTable: drop nested column in map key") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, point map<struct<x: double, y: double>, bigint>) " +
s"USING $v2Format")
sql(s"ALTER TABLE $t DROP COLUMN point.key.y")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("point", MapType(StructType(Seq(
StructField("x", DoubleType))), LongType)))
}
}
test("AlterTable: drop nested column in map value") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points map<string, struct<x: double, y: double>>) " +
s"USING $v2Format")
sql(s"ALTER TABLE $t DROP COLUMN points.value.y")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", MapType(StringType, StructType(Seq(
StructField("x", DoubleType))))))
}
}
test("AlterTable: drop nested column in array element") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int, points array<struct<x: double, y: double>>) USING $v2Format")
sql(s"ALTER TABLE $t DROP COLUMN points.element.y")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.schema === new StructType()
.add("id", IntegerType)
.add("points", ArrayType(StructType(Seq(
StructField("x", DoubleType))))))
}
}
test("AlterTable: drop column must exist") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t DROP COLUMN data")
}
assert(exc.getMessage.contains("data"))
assert(exc.getMessage.contains("missing field"))
}
}
test("AlterTable: nested drop column must exist") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
val exc = intercept[AnalysisException] {
sql(s"ALTER TABLE $t DROP COLUMN point.x")
}
assert(exc.getMessage.contains("point.x"))
assert(exc.getMessage.contains("missing field"))
}
}
// --- SET LOCATION and SET/UNSET TBLPROPERTIES: surface as entries in table.properties ---
test("AlterTable: set location") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
sql(s"ALTER TABLE $t SET LOCATION 's3://bucket/path'")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.properties ===
Map("provider" -> v2Format, "location" -> "s3://bucket/path").asJava)
}
}
test("AlterTable: set table property") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format")
sql(s"ALTER TABLE $t SET TBLPROPERTIES ('test'='34')")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.properties === Map("provider" -> v2Format, "test" -> "34").asJava)
}
}
test("AlterTable: remove table property") {
val t = s"${catalogAndNamespace}table_name"
withTable(t) {
sql(s"CREATE TABLE $t (id int) USING $v2Format TBLPROPERTIES('test' = '34')")
val table = getTableMetadata(t)
assert(table.name === fullTableName(t))
assert(table.properties === Map("provider" -> v2Format, "test" -> "34").asJava)
sql(s"ALTER TABLE $t UNSET TBLPROPERTIES ('test')")
val updated = getTableMetadata(t)
assert(updated.name === fullTableName(t))
assert(updated.properties === Map("provider" -> v2Format).asJava)
}
}
}
| pgandhi999/spark | sql/core/src/test/scala/org/apache/spark/sql/sources/v2/AlterTableTests.scala | Scala | apache-2.0 | 27,949 |
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package swave.core.impl.util
import scala.annotation.tailrec
import swave.core.macros._
/**
 * Intrusive, mutable, singly-linked list: every element carries its own `tail`
 * pointer, so nodes are chained without wrapper cells. The empty list is
 * represented by `null`; see the operations in the companion object.
 */
private[swave] abstract class ImsiList[L >: Null <: ImsiList[L]](final var tail: L)
private[swave] object ImsiList {

  /**
   * Operations on intrusive lists. Since the empty list is encoded as `null`,
   * all emptiness checks use `eq`/`ne null` rather than method calls.
   * NOTE(review): most operations mutate `tail` pointers destructively —
   * callers must not keep references into a list that has been restructured.
   */
  implicit class ImsiListOps[L >: Null <: ImsiList[L]](private val underlying: L) extends AnyVal {
    def isEmpty: Boolean = underlying eq null
    def nonEmpty: Boolean = underlying ne null

    /** Number of nodes in the list; O(n). */
    def size: Int = {
      @tailrec def rec(current: L, count: Int): Int =
        if (current ne null) rec(current.tail, count + 1) else count
      rec(underlying, 0)
    }

    /** Last node of the list; throws `NoSuchElementException` if empty. O(n). */
    def last: L = {
      @tailrec def rec(last: L, current: L): L =
        if (current ne null) rec(current, current.tail) else last
      if (nonEmpty) rec(underlying, underlying.tail) else throw new NoSuchElementException("last of empty list")
    }

    /** Destructively appends `node` (itself a list) and returns the head of the result. */
    def append(node: L): L =
      if (node.nonEmpty) {
        if (nonEmpty) {
          last.tail = node
          underlying
        } else node
      } else underlying

    /** Reverses the list in place and returns the new head. */
    def reverse: L = {
      @tailrec def rec(last: L, current: L): L =
        if (current ne null) {
          val next = current.tail
          current.tail = last
          rec(current, next)
        } else last
      rec(null, underlying)
    }

    /** Maps every node to a (possibly empty) list and concatenates all results. */
    def flatMap[P >: Null <: ImsiList[P]](f: L ⇒ P): P = {
      @tailrec def rec(current: L, result: P, resultLast: P): P =
        if (current ne null) {
          val next = f(current)
          if (result ne null) {
            resultLast.tail = next
            // re-resolve `last` so the invariant holds even when `next` is empty
            rec(current.tail, result, resultLast.last)
          } else rec(current.tail, next, next)
        } else result
      rec(underlying, null, null)
    }

    /** Applies `f` to every node, head to tail. */
    def foreach(f: L ⇒ Unit): Unit = {
      @tailrec def rec(current: L): Unit =
        if (current ne null) {
          f(current)
          rec(current.tail)
        }
      rec(underlying)
    }

    /** Standard left fold over the nodes. */
    def foldLeft[T](zero: T)(f: (T, L) ⇒ T): T = {
      @tailrec def rec(current: L, acc: T): T =
        if (current ne null) rec(current.tail, f(acc, current)) else acc
      rec(underlying, zero)
    }

    /**
     * Partitions the list into two disjunct lists.
     * The first one contains all nodes that DO satisfy the given predicate
     * and the second one all nodes that DO NOT satisfy the predicate.
     */
    def partition(f: L ⇒ Boolean): (L, L) = {
      // `a`/`lastA` track head and tail of the matching list, `b`/`lastB` the rest;
      // every node's `tail` is cleared before being re-linked into its target list.
      @tailrec def rec(current: L, a: L, lastA: L, b: L, lastB: L): (L, L) =
        if (current ne null) {
          val next = current.tail
          current.tail = null
          if (f(current)) {
            if (lastA ne null) {
              lastA.tail = current
              rec(next, a, current, b, lastB)
            } else rec(next, current, current, b, lastB)
          } else {
            if (lastB ne null) {
              lastB.tail = current
              rec(next, a, lastA, b, current)
            } else rec(next, a, lastA, current, current)
          }
        } else (a, b)
      rec(underlying, null, null, null, null)
    }

    /**
     * Splits this list after `count` elements and returns the head of the trailing segment.
     * The underlying segment then forms a list holding `count` elements.
     * Throws an `IllegalArgumentException` if `count == 0 || count > size`.
     * (`underlying.isEmpty` requires special treatment in any case!)
     */
    def drop(count: Int): L = {
      @tailrec def rec(remaining: Int, current: L, last: L): L =
        if (remaining == 0) {
          requireArg(last ne null)
          last.tail = null
          current
        } else {
          requireArg(current ne null)
          rec(remaining - 1, current.tail, current)
        }
      rec(count, underlying, null)
    }

    /**
     * Splits this list at the first element that the given `predicate` returns `true` for
     * and returns this element (along with its tail).
     * The underlying segment then forms a list holding all dropped elements.
     */
    def dropWhile(predicate: L ⇒ Boolean): L = {
      @tailrec def rec(current: L, last: L): L =
        if (current.nonEmpty) {
          if (!predicate(current)) {
            if (last ne null) last.tail = null
            current
          } else rec(current.tail, current)
        } else null // return empty list
      rec(underlying, null)
    }
  }
}
| sirthias/swave | core/src/main/scala/swave/core/impl/util/ImsiList.scala | Scala | mpl-2.0 | 4,578 |
package provingground.interface
import provingground._
import translation._
import scala.util.Try
import ujson.{Js, Obj}
import StanfordParser._
import TreeToMath._
import edu.stanford.nlp.trees.Tree
// import org.scalafmt.Scalafmt.format
import scala.util.Try
import scala.concurrent._
// import scala.concurrent.ExecutionContext.Implicits.global
import cask.main.Routes
import cask.util.Logger
object NLPParser{
  /**
   * Runs the full NLP pipeline on `txt` and packages the outcome as a JSON
   * object with keys "tree" (penn tree + formal expression dump), "expr"
   * (pretty-printed math expression), "parsed" (whether a strict or polyglot
   * parse succeeded) and "deptree" (dependency-tree view).
   */
  def parseResult(txt: String): Obj = {
    val texParsed: TeXParsed = TeXParsed(txt)
    val tree: Tree = texParsed.parsed
    // mathExprTree always yields something; mathExpr only on a strict parse.
    val baseExpr: MathExpr = mathExprTree(tree).get
    val strictParsed = mathExpr(tree).nonEmpty
    // Fallback: first successful parse among the polyglot candidate trees.
    def polyExprOpt : Option[MathExpr] =
      // None
      texParsed.polyParsed.flatMap(mathExpr(_)).headOption
    val expr =
      if (strictParsed) baseExpr
      else {
        // texParsed.polyParsed.foreach(t => println(t))
        polyExprOpt.getOrElse(baseExpr)
      }
    def parsed = strictParsed || polyExprOpt.nonEmpty
    pprint.log(strictParsed)
    pprint.log(parsed)
    val proseTree: NlpProse.ProseTree = texParsed.proseTree
    // println(proseTree.view)
    val code = {
      pprint.PPrinter.BlackWhite(expr, height=500)
    }
    // Try(format(s"object ConstituencyParsed {$expr}").get)
    //   .getOrElse(s"\\n//could not format:\\n$expr\\n\\n//raw above\\n\\n")
    ujson.Obj(
      "tree" -> (tree.pennString + "\\n\\n" + pprint.PPrinter
        .BlackWhite(FormalExpr.translator(tree), height = 500)),
      "expr" -> code.toString,
      "parsed" -> ujson.Bool(parsed),
      "deptree" -> proseTree.view.replace("\\n", "")
    )
  }
  /** Like [[parseResult]] but without the dependency-tree view. */
  def parseView(txt: String): Obj = {
    val obj = parseResult(txt)
    Obj("tree" -> obj("tree"), "expr" -> obj("expr"), "parsed" -> obj("parsed"))
  }
}
import NLPParser._
/** Cask routes serving the natural-language-parsing UI and its parse endpoint. */
case class ParserRoutes()(implicit cc: castor.Context,
                          log: cask.Logger) extends cask.Routes {
  // NOTE(review): this member has the same name as the implicit `log`
  // constructor parameter and replaces it with a fresh console logger —
  // confirm this shadowing is intentional for the cask version in use.
  def log: Logger = new Logger.Console()
  @cask.get("/nlp.html")
  def nlp() = {
    // NOTE(review): `Future(...)` needs an implicit ExecutionContext; the
    // global one is commented out in this file's imports — verify its source.
    Future(parseResult("Hello World")) // waking up the stanford pipeline
    cask.Response(Site.page(mainHTML,
      "resources/",
      "ProvingGround: Natural language translation",
      false),
      headers = Seq("Content-Type" -> "text/html")
    )
  }
  // POST body is the raw text to parse; responds with the JSON parse result.
  @cask.post("/parse")
  def parse(request: cask.Request): String = {
    val txt = new String(request.readAllBytes())
    println(s"parsing: $txt")
    ujson.write(parseResult(txt))
  }
  // Static shell page; the heavy lifting happens client-side in out.js.
  val mainHTML: String =
    """
      |  <link rel="stylesheet" href="resources/css/nlp.css">
      |   <div id="constituency-parser"></div>
      |
      |  <script src="resources/out.js" type="text/javascript" charset="utf-8"></script>
      |  <script>
      |   parser.load()
      |   mantlemenu.add()
      |   leanlib.load()
      |  </script>
    """.stripMargin
  initialize()
}
object ParserCask extends cask.Main {
  /** All HTTP route bundles served by this process. */
  def allRoutes: Seq[Routes] = Seq(ParserRoutes(), MantleRoutes(), LeanRoutes())

  // Port and host come from the environment when set (and, for the port,
  // numeric); otherwise fall back to local-development defaults.
  override def port: Int =
    sys.env.get("PROVINGGROUND_PORT").flatMap(p => Try(p.toInt).toOption).getOrElse(8080)

  override def host: String = sys.env.getOrElse("IP", "localhost")
}
| siddhartha-gadgil/ProvingGround | nlp/src/main/scala/provingground/interface/ParserServer.scala | Scala | mit | 3,195 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.message
import java.io._
import java.nio._
import java.nio.channels._
import java.util.concurrent.atomic._
import kafka.utils._
/**
* An on-disk message set. The set can be opened either mutably or immutably. Mutation attempts
* will fail on an immutable message set. An optional limit and offset can be applied to the message set
* which will control the offset into the file and the effective length into the file from which
* messages will be read
*/
@nonthreadsafe
class FileMessageSet private[kafka](private[message] val channel: FileChannel,
                                    private[message] val offset: Long,
                                    private[message] val limit: Long,
                                    val mutable: Boolean,
                                    val needRecover: AtomicBoolean) extends MessageSet with Logging {
  // Logical size in bytes of this set, and the byte position up to which
  // data has been flushed to disk (readers never see past the high water mark).
  private val setSize = new AtomicLong()
  private val setHighWaterMark = new AtomicLong()
  if(mutable) {
    // A mutable set must cover the whole file: views are read-only by design.
    if(limit < Long.MaxValue || offset > 0)
      throw new IllegalArgumentException("Attempt to open a mutable message set with a view or offset, which is not allowed.")
    if (needRecover.get) {
      // set the file position to the end of the file for appending messages
      val startMs = System.currentTimeMillis
      val truncated = recover()
      info("Recovery succeeded in " + (System.currentTimeMillis - startMs) / 1000 +
        " seconds. " + truncated + " bytes truncated.")
    }
    else {
      setSize.set(channel.size())
      setHighWaterMark.set(sizeInBytes)
      channel.position(channel.size)
    }
  } else {
    // Immutable view: clamp size to the requested window within the file.
    setSize.set(scala.math.min(channel.size(), limit) - offset)
    setHighWaterMark.set(sizeInBytes)
    debug("initializing high water mark in immutable mode: " + highWaterMark)
  }
  /**
   * Create a file message set with no limit or offset
   */
  def this(channel: FileChannel, mutable: Boolean) =
    this(channel, 0, Long.MaxValue, mutable, new AtomicBoolean(false))
  /**
   * Create a file message set with no limit or offset
   */
  def this(file: File, mutable: Boolean) =
    this(Utils.openChannel(file, mutable), mutable)
  /**
   * Create a file message set with no limit or offset
   */
  def this(channel: FileChannel, mutable: Boolean, needRecover: AtomicBoolean) =
    this(channel, 0, Long.MaxValue, mutable, needRecover)
  /**
   * Create a file message set with no limit or offset
   */
  def this(file: File, mutable: Boolean, needRecover: AtomicBoolean) =
    this(Utils.openChannel(file, mutable), mutable, needRecover)
  /**
   * Return a message set which is a view into this set starting from the given offset and with the given size limit.
   * The view is immutable and capped at the current high water mark.
   */
  def read(readOffset: Long, size: Long): MessageSet = {
    new FileMessageSet(channel, this.offset + readOffset, scala.math.min(this.offset + readOffset + size, highWaterMark),
      false, new AtomicBoolean(false))
  }
  /**
   * Write some of this set to the given channel, return the ammount written
   */
  def writeTo(destChannel: GatheringByteChannel, writeOffset: Long, size: Long): Long =
    channel.transferTo(offset + writeOffset, scala.math.min(size, sizeInBytes), destChannel)
  /**
   * Get an iterator over the messages in the set.
   * Each entry is read as a 4-byte size prefix followed by the message payload;
   * iteration stops at the first truncated or undersized entry.
   */
  override def iterator: Iterator[MessageAndOffset] = {
    new IteratorTemplate[MessageAndOffset] {
      var location = offset
      override def makeNext(): MessageAndOffset = {
        // read the size of the item
        val sizeBuffer = ByteBuffer.allocate(4)
        channel.read(sizeBuffer, location)
        if(sizeBuffer.hasRemaining)
          return allDone()
        sizeBuffer.rewind()
        val size: Int = sizeBuffer.getInt()
        if (size < Message.MinHeaderSize)
          return allDone()
        // read the item itself
        val buffer = ByteBuffer.allocate(size)
        channel.read(buffer, location + 4)
        if(buffer.hasRemaining)
          return allDone()
        buffer.rewind()
        // increment the location and return the item
        location += size + 4
        new MessageAndOffset(new Message(buffer), location)
      }
    }
  }
  /**
   * The number of bytes taken up by this file set
   */
  def sizeInBytes(): Long = setSize.get()
  /**
   * The high water mark
   */
  def highWaterMark(): Long = setHighWaterMark.get()
  // Guard used by all mutating operations.
  def checkMutable(): Unit = {
    if(!mutable)
      throw new IllegalStateException("Attempt to invoke mutation on immutable message set.")
  }
  /**
   * Append this message to the message set
   */
  def append(messages: MessageSet): Unit = {
    checkMutable()
    var written = 0L
    // NOTE(review): each retry calls writeTo with writeOffset 0 — assumes the
    // source MessageSet tracks its own position across partial writes; confirm.
    while(written < messages.sizeInBytes)
      written += messages.writeTo(channel, 0, messages.sizeInBytes)
    setSize.getAndAdd(written)
  }
  /**
   * Commit all written data to the physical disk.
   * Also advances the high water mark to the current size.
   */
  def flush() = {
    checkMutable()
    val startTime = SystemTime.milliseconds
    channel.force(true)
    val elapsedTime = SystemTime.milliseconds - startTime
    LogFlushStats.recordFlushRequest(elapsedTime)
    debug("flush time " + elapsedTime)
    setHighWaterMark.set(sizeInBytes)
    debug("flush high water mark:" + highWaterMark)
  }
  /**
   * Close this message set (flushing first if mutable)
   */
  def close() = {
    if(mutable)
      flush()
    channel.close()
  }
  /**
   * Recover log up to the last complete entry. Truncate off any bytes from any incomplete messages written
   */
  def recover(): Long = {
    checkMutable()
    val len = channel.size
    val buffer = ByteBuffer.allocate(4)
    var validUpTo: Long = 0
    var next = 0L
    // Walk entry by entry until validation fails, then truncate at that point.
    do {
      next = validateMessage(channel, validUpTo, len, buffer)
      if(next >= 0)
        validUpTo = next
    } while(next >= 0)
    channel.truncate(validUpTo)
    setSize.set(validUpTo)
    setHighWaterMark.set(validUpTo)
    info("recover high water mark:" + highWaterMark)
    /* This should not be necessary, but fixes bug 6191269 on some OSs. */
    channel.position(validUpTo)
    needRecover.set(false)
    len - validUpTo
  }
  /**
   * Read, validate, and discard a single message, returning the next valid offset, and
   * the message being validated. Returns -1 on any truncated or invalid entry.
   */
  private def validateMessage(channel: FileChannel, start: Long, len: Long, buffer: ByteBuffer): Long = {
    buffer.rewind()
    var read = channel.read(buffer, start)
    if(read < 4)
      return -1
    // check that we have sufficient bytes left in the file
    val size = buffer.getInt(0)
    if (size < Message.MinHeaderSize)
      return -1
    val next = start + 4 + size
    if(next > len)
      return -1
    // read the message
    val messageBuffer = ByteBuffer.allocate(size)
    var curr = start + 4
    while(messageBuffer.hasRemaining) {
      read = channel.read(messageBuffer, curr)
      if(read < 0)
        throw new IllegalStateException("File size changed during recovery!")
      else
        curr += read
    }
    messageBuffer.rewind()
    val message = new Message(messageBuffer)
    if(!message.isValid)
      return -1
    else
      next
  }
}
/** JMX view of log-flush timing statistics. */
trait LogFlushStatsMBean {
  def getFlushesPerSecond: Double
  def getAvgFlushMs: Double
  def getTotalFlushMs: Long
  def getMaxFlushMs: Double
  def getNumFlushes: Long
}
@threadsafe
class LogFlushStats extends LogFlushStatsMBean {
  // Every MBean metric is derived from this single snapshot of flush timings.
  private val stats = new SnapshotStats

  /** Records one flush request that took `requestMs` milliseconds. */
  def recordFlushRequest(requestMs: Long) = stats.recordRequestMetric(requestMs)

  def getFlushesPerSecond: Double = stats.getRequestsPerSecond
  def getAvgFlushMs: Double = stats.getAvgMetric
  def getTotalFlushMs: Long = stats.getTotalMetric
  def getMaxFlushMs: Double = stats.getMaxMetric
  def getNumFlushes: Long = stats.getNumRequests
}
object LogFlushStats extends Logging {
  // Process-wide stats instance, published over JMX under this MBean name.
  private val mBeanName = "kafka:type=kafka.LogFlushStats"
  private val flushStats = new LogFlushStats
  Utils.registerMBean(flushStats, mBeanName)

  def recordFlushRequest(requestMs: Long) = flushStats.recordFlushRequest(requestMs)
}
| piavlo/operations-debs-kafka | core/src/main/scala/kafka/message/FileMessageSet.scala | Scala | apache-2.0 | 8,927 |
package net.mrkeks.clave.game.characters
import org.denigma.threejs.Vector3
import net.mrkeks.clave.game.PositionedObjectData
object MonsterData {
  /** Closed set of behavioural states a monster can be in. */
  abstract sealed class State
  // Waiting; the cooldown counts down until the next stroll begins.
  case class Idle(var strollCoolDown: Double = 3000.0) extends State
  // Walking towards the target position `tar`.
  case class MoveTo(tar: Vector3) extends State
  // Winding up a jump towards `tar`; `progress` grows during the charge.
  case class ChargeJumpTo(tar: Vector3, var progress: Double = 0.0) extends State
  // Mid-air from `from` towards `tar`; `ySpeed` is the vertical velocity.
  case class JumpTo(tar: Vector3, from: Vector3, var ySpeed: Double = 0.0) extends State
  // Being pushed towards `tar` by an external force.
  case class PushedTo(tar: Vector3, var ySpeed: Double = 0.0) extends State
}
/** Mutable state carried by every positioned monster; starts out idle. */
trait MonsterData
  extends PositionedObjectData {
  import MonsterData._
  // Current behavioural state; updated by the monster's game logic.
  var state: State = Idle()
} | benkeks/clave | src/main/scala/net/mrkeks/clave/game/characters/MonsterData.scala | Scala | gpl-3.0 | 658 |
package pro.civitaspo.embulk.filter.copy
import java.util.{Optional, List => JList}
import org.embulk.config.{
Config,
ConfigDefault,
ConfigDiff,
ConfigException,
ConfigSource,
TaskSource,
Task => EmbulkTask
}
import org.embulk.exec.TransactionStage.{
EXECUTOR_BEGIN,
EXECUTOR_COMMIT,
FILTER_BEGIN,
FILTER_COMMIT,
INPUT_BEGIN,
INPUT_COMMIT,
OUTPUT_BEGIN,
OUTPUT_COMMIT,
RUN
}
import org.embulk.plugin.PluginType
import org.embulk.spi.{
Exec,
ExecutorPlugin,
FileOutputRunner,
FilterPlugin,
InputPlugin,
OutputPlugin,
Page,
ProcessState,
ProcessTask,
Schema
}
import org.embulk.spi.util.Filters
import org.slf4j.{Logger, LoggerFactory}
import pro.civitaspo.embulk.filter.copy.plugin.{
ReuseInputExecutorPlugin,
PipeInputPlugin
}
import scala.util.chaining._
object BreakinBulkLoader {
  /** Embulk task configuration for one embedded ("break-in") bulk load. */
  trait Task extends EmbulkTask {
    // Optional human-readable name; falls back to a derived loader name.
    @Config("name")
    @ConfigDefault("null")
    def getName: Optional[String]
    // Executor plugin settings (e.g. max_threads).
    @Config("exec")
    @ConfigDefault("{}")
    def getExec: ConfigSource
    // Filter plugin chain applied before output.
    @Config("filters")
    @ConfigDefault("[]")
    def getFilters: JList[ConfigSource]
    // Output plugin configuration (required).
    @Config("out")
    def getOut: ConfigSource
    // NOTE: When embulk is run as a server or using an union plugin inside
    //       another union plugin, the bulk loads that have the same
    //       loaderName cannot run twice or more because LoaderState is shared.
    //       So, the transaction id is used to distinguish the bulk loads.
    def setTransactionId(execId: String): Unit
    def getTransactionId: String
  }
  /** Outcome of a load: the resulting config diff plus any ignored errors. */
  case class Result(
      configDiff: ConfigDiff,
      ignoredExceptions: Seq[Throwable]
  )
}
/**
 * Drives one embedded Embulk bulk load: input pages are pushed in from the
 * outside via [[sendPage]] and consumed through a [[PipeInputPlugin]], then
 * run through the configured filters and output plugin. All shared progress
 * lives in a [[LoaderState]] keyed by `loaderName`.
 */
case class BreakinBulkLoader(task: BreakinBulkLoader.Task, idx: Int) {
  import implicits._
  private val logger: Logger =
    LoggerFactory.getLogger(classOf[BreakinBulkLoader])
  private lazy val state: LoaderState = LoaderState.getOrInitialize(loaderName)
  // Unique per transaction + copy index so concurrent loads don't share state.
  private lazy val loaderName: String =
    s"transaction[${task.getTransactionId}]:copy[$idx]:" + task.getName
      .getOrElse {
        s"filters[${filterPluginTypes.map(_.getName).mkString(",")}]" +
          s".out[${outputPluginType.getName}]"
      }
  private lazy val inputTask: ConfigSource =
    Exec.newConfigSource().set("name", loaderName)
  private lazy val inputTaskCount: Int =
    executorTask.get(classOf[Int], "max_threads")
  private lazy val executorTask: ConfigSource = task.getExec.tap(c =>
    c.set(
      "max_threads",
      c.get(classOf[Int], "max_threads", Utils.getNumberOfCores * 2)
    )
  )
  private lazy val filterTasks: Seq[ConfigSource] = task.getFilters
  private lazy val filterPluginTypes: Seq[PluginType] =
    Filters.getPluginTypes(filterTasks)
  private lazy val filterPlugins: Seq[FilterPlugin] =
    Filters.newFilterPlugins(Exec.session(), filterPluginTypes)
  private lazy val outputTask: ConfigSource = task.getOut
  private lazy val outputPluginType: PluginType =
    outputTask.get(classOf[PluginType], "type")
  private lazy val outputPlugin: OutputPlugin =
    Exec.newPlugin(classOf[OutputPlugin], outputPluginType)
  /**
   * Runs the nested input → filters → executor → output transaction chain for
   * the given schema. Exceptions are tolerated when everything committed.
   */
  def run(schema: Schema): Unit = {
    ThreadNameContext.switch(loaderName) { _ =>
      val inputPlugin: InputPlugin =
        PipeInputPlugin(schema, inputTaskCount, state.consumePages)
      val executorPlugin: ExecutorPlugin = ReuseInputExecutorPlugin(inputPlugin)
      try {
        runInput(inputPlugin) {
          runFilters {
            runExecutor(executorPlugin) { executor =>
              runOutput {
                execute(executor)
              }
            }
          }
        }
      }
      catch {
        // A late failure after full commit is only worth a warning.
        case ex: Throwable
            if state.isAllTasksCommitted && state.isAllTransactionsCommitted =>
          logger.warn(
            s"Threw exception on the stage: ${state.getTransactionStage.getOrElse("None")}," +
              s" but all tasks and transactions are committed.",
            ex
          )
      }
    }
  }
  /** Feeds one page of input data into this loader's pipe. */
  def sendPage(page: Page): Unit = ThreadNameContext.switch(loaderName) { _ =>
    state.sendPage(page)
  }
  /**
   * Signals end-of-input, runs the output plugin's cleanup with the collected
   * task reports, and releases the shared loader state.
   */
  def cleanup(): Unit = {
    ThreadNameContext.switch(loaderName) { _ =>
      state.sendSentinels()
      // FileOutputRunner wraps its real task source; unwrap before cleanup.
      val outputTaskSource: TaskSource = outputPlugin match {
        case _: FileOutputRunner =>
          FileOutputRunner.getFileOutputTaskSource(
            state.getOutputTaskSource.get
          )
        case _ => state.getOutputTaskSource.get
      }
      outputPlugin.cleanup(
        outputTaskSource,
        state.getExecutorSchema.get,
        state.getOutputTaskCount.get,
        state.getOutputTaskReports.flatten
      )
      state.cleanup()
    }
  }
  /** Config diff and ignored exceptions accumulated during the load. */
  def getResult: BreakinBulkLoader.Result =
    ThreadNameContext.switch(loaderName) { _ => buildResult() }
  private def lastFilterSchema: Schema =
    state.getFilterSchemas.map(_.last).getOrElse {
      throw new ConfigException(
        "'filterSchemas' must be set. Call #runFilters before."
      )
    }
  private def buildResult(): BreakinBulkLoader.Result = {
    BreakinBulkLoader.Result(
      configDiff = Exec.newConfigDiff().tap { configDiff: ConfigDiff =>
        state.getInputConfigDiff.foreach(configDiff.setNested("in", _))
        // NOTE: BreakinBulkLoader does not support PipeOutputPlugin configuration.
        // state.getOutputConfigDiff.foreach(configDiff.setNested("out", _))
      },
      ignoredExceptions = state.getExceptions
    )
  }
  // Assembles the process task from everything recorded during the nested
  // transactions; requires that all run* stages completed first.
  private def newProcessTask: ProcessTask = {
    new ProcessTask(
      null,
      outputPluginType,
      filterPluginTypes,
      state.getInputTaskSource.get,
      state.getOutputTaskSource.get,
      state.getFilterTaskSources.get,
      state.getFilterSchemas.get,
      state.getExecutorSchema.get,
      Exec.newTaskSource()
    )
  }
  // scalafmt: { maxColumn = 130 }
  // Opens the input transaction, recording schema/task source/count in state.
  private def runInput(inputPlugin: InputPlugin)(f: => Unit): Unit = {
    state.setTransactionStage(INPUT_BEGIN)
    val inputControl: InputPlugin.Control =
      (inputTaskSource: TaskSource, inputSchema: Schema, inputTaskCount: Int) => {
        state.setInputSchema(inputSchema)
        state.setInputTaskSource(inputTaskSource)
        state.setInputTaskCount(inputTaskCount)
        f
        state.setTransactionStage(INPUT_COMMIT)
        state.getAllInputTaskReports
      }
    val inputConfigDiff: ConfigDiff = inputPlugin.transaction(inputTask, inputControl)
    state.setInputConfigDiff(inputConfigDiff)
  }
  // Opens the filter-chain transaction; needs the input schema from runInput.
  private def runFilters(f: => Unit): Unit = {
    val inputSchema: Schema = state.getInputSchema.getOrElse {
      throw new ConfigException("'inputSchema' must be set. Call #runInput before.")
    }
    state.setTransactionStage(FILTER_BEGIN)
    val filtersControl: Filters.Control =
      (filterTaskSources: JList[TaskSource], filterSchemas: JList[Schema]) => {
        state.setFilterTaskSources(filterTaskSources)
        state.setFilterSchemas(filterSchemas)
        f
        state.setTransactionStage(FILTER_COMMIT)
      }
    Filters.transaction(filterPlugins, filterTasks, inputSchema, filtersControl)
  }
  // Opens the executor transaction; needs the input task count from runInput.
  private def runExecutor(executorPlugin: ExecutorPlugin)(f: ExecutorPlugin.Executor => Unit): Unit = {
    val inputTaskCount: Int = state.getInputTaskCount.getOrElse {
      throw new ConfigException("'inputTaskCount' must be set. Call #runInput before.")
    }
    state.setTransactionStage(EXECUTOR_BEGIN)
    val executorControl: ExecutorPlugin.Control =
      (executorSchema: Schema, outputTaskCount: Int, executor: ExecutorPlugin.Executor) => {
        state.setExecutorSchema(executorSchema)
        state.setOutputTaskCount(outputTaskCount)
        f(executor)
        state.setTransactionStage(EXECUTOR_COMMIT)
      }
    executorPlugin.transaction(executorTask, lastFilterSchema, inputTaskCount, executorControl)
  }
  // Opens the output transaction; needs schema/count from runExecutor.
  private def runOutput(f: => Unit): Unit = {
    val executorSchema: Schema = state.getExecutorSchema.getOrElse {
      throw new ConfigException("'executorSchema' must be set. Call #runExecutor before.")
    }
    val outputTaskCount: Int = state.getOutputTaskCount.getOrElse {
      throw new ConfigException("'outputTaskCount' must be set. Call #runExecutor before.")
    }
    state.setTransactionStage(OUTPUT_BEGIN)
    val outputControl: OutputPlugin.Control =
      (outputTaskSource: TaskSource) => {
        state.setOutputTaskSource(outputTaskSource)
        f
        state.setTransactionStage(OUTPUT_COMMIT)
        state.getAllOutputTaskReports
      }
    val outputConfigDiff: ConfigDiff =
      outputPlugin.transaction(outputTask, executorSchema, outputTaskCount, outputControl)
    state.setOutputConfigDiff(outputConfigDiff)
  }
  // Actually runs the executor over the assembled process task.
  private def execute(executor: ExecutorPlugin.Executor): Unit = {
    val processState: ProcessState = state.newProcessState
    processState.initialize(state.getInputTaskCount.get, state.getOutputTaskCount.get)
    state.setTransactionStage(RUN)
    if (!state.isAllTasksCommitted) {
      executor.execute(newProcessTask, processState)
      if (!state.isAllTasksCommitted) throw state.buildRepresentativeException
    }
    // NOTE(review): this re-check duplicates the condition above and looks
    // unreachable when the inner throw fires — possibly it was meant to test
    // isAllTransactionsCommitted; confirm against upstream Embulk's BulkLoader.
    if (!state.isAllTasksCommitted) {
      throw new RuntimeException(
        s"${state.countUncommittedInputTasks} input tasks" +
          s" and ${state.countUncommittedOutputTasks} output tasks failed."
      )
    }
  }
  // scalafmt: { maxColumn = 80 }
}
| civitaspo/embulk-filter-copy | src/main/scala/pro/civitaspo/embulk/filter/copy/BreakinBulkLoader.scala | Scala | mit | 9,301 |
// Regression check: two separate `extension` blocks over the same receiver
// type must both be accepted, including the explicit `end extension` markers.
extension (name: String)
  def isWildcard = ???
end extension
extension (name: String)
  def f = ???
end extension
| dotty-staging/dotty | tests/pos/i8241.scala | Scala | apache-2.0 | 116 |
package org.plummtw.jinrou.data
import scala.xml._
import net.liftweb._
import net.liftweb.mapper._
import http._
import js._
import util._
import S._
import SHtml._
import Helpers._
import org.plummtw.jinrou.model._
import org.plummtw.jinrou.enum._
import org.plummtw.jinrou.util._
/** Mixin for items that present the player with a fixed set of choices. */
trait ItemOption {
  // Maps each option's submitted value to its display label.
  def option_map : scala.collection.immutable.Map[String,String]
}
/**
 * Base class for usable in-game items.
 *
 * @param action             message type recorded when the item is used
 * @param str                display name shown to players
 * @param name               command identifier used in URLs and forms
 * @param targetable_boolean whether using the item requires picking a target
 * @param weight_no          relative weight (presumably for item distribution
 *                           — TODO confirm against callers)
 */
class ItemData (action: MTypeEnum.Value, str: String, name: String, targetable_boolean: Boolean, weight_no: Int)
  extends ActionData(action, str, name, targetable_boolean) {
  def weight = weight_no
  override def toString(): String = "【" + tag_string + "】"
  // Extra description rendered next to the item; empty by default.
  def item_intro(room:Room, room_day:RoomDay, user: UserEntry, user_entrys: List[UserEntry]) : NodeSeq = Seq() //""
  def item_pic : NodeSeq = NodeSeq.Empty
  // Generate the item's action tag: a usage link for the current user, or an
  // empty span when the item cannot be used (game over, user dead, already
  // voted this day, or no item held).
  def generate_action_tag(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry], vote_list:List[ItemVote]) : NodeSeq = {
    if (room.status.is == RoomStatusEnum.ENDED.toString)
      return Seq(<span></span>)
    else if (!user.live.is)
      return Seq(<span></span>)
    val is_voted = (vote_list.filter(_.actioner_id.is == user.id.is).length != 0)
    if (is_voted)
      return Seq(<span></span>)
    val user_item = ItemEnum.get_item(user.item_flags.is)
    if (user_item == ItemNoItem)
      return Seq(<span></span>)
    // Targetable items go through a target-selection page; others submit directly.
    if (user_item.targetable)
      return Seq(<a href={"up_action.html?room_no=" + room.id.is.toString + "&command=" + user_item.command_name}>{this.toString}</a>)
    return Seq(<a href={"javascript:submit_action('" + user_item.command_name + "')"}>{this.toString}</a>)
  }
}
/** Sentinel meaning the player holds no item; never rendered as a link. */
object ItemNoItem extends ItemData(MTypeEnum.ITEM_NO_ITEM, "無道具", "item_no_item", false, 0) {
}
object ItemUnluckyPurse extends ItemData(MTypeEnum.ITEM_UNLUCKY_PURSE, "不運錢包", "item_unlucky_purse", true, 8) {
  override def item_pic = Seq(<img src="icon/UP.gif" />)

  /** Living players other than the user (and the dummy); players protected by
    * the pontiff aura are excluded when the user is religious or a sub-pontiff. */
  override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
    val candidates = user_entrys.filter { entry =>
      entry.uname.is != "dummy_boy" && entry.id.is != user.id.is && entry.live.is
    }
    val excludeAuraHolders =
      user.has_flag(UserEntryFlagEnum.RELIGION) ||
        user.subrole.is == SubroleEnum.SUBPONTIFF.toString
    if (excludeAuraHolders) candidates.filter(_.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
    else candidates
  }
}
object ItemBlessStaff extends ItemData(MTypeEnum.ITEM_BLESS_STAFF, "祝福之杖", "item_bless_staff", true, 7) {
  override def item_pic = Seq(<img src="icon/BS.gif" />)

  /** Any living player (including the user) except the dummy can be blessed. */
  override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
    user_entrys.filter { entry =>
      entry.uname.is != "dummy_boy" && entry.live.is
    }
  }
}
object ItemBlackFeather extends ItemData(MTypeEnum.ITEM_BLACK_FEATHER, "咒縛黑羽", "item_black_feather", true, 6) {
  override def item_pic = Seq(<img src="icon/BF.gif" />)

  /** Living players other than the user (and the dummy); pontiff-aura holders
    * are excluded when the user is religious or a sub-pontiff. */
  override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
    val candidates = user_entrys.filter { entry =>
      entry.uname.is != "dummy_boy" && entry.id.is != user.id.is && entry.live.is
    }
    val excludeAuraHolders =
      user.has_flag(UserEntryFlagEnum.RELIGION) ||
        user.subrole.is == SubroleEnum.SUBPONTIFF.toString
    if (excludeAuraHolders) candidates.filter(_.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
    else candidates
  }
}
object ItemThiefSecret extends ItemData(MTypeEnum.ITEM_THIEF_SECRET, "盜賊極意", "item_thief_secret", true, 5) {
  override def item_pic = Seq(<img src="icon/TS.gif" />)

  /** Players other than the user (and the dummy). Unlike most items there is
    * deliberately no `live` filter here; pontiff-aura holders are excluded
    * when the user is religious or a sub-pontiff. */
  override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
    val candidates = user_entrys.filter { entry =>
      entry.uname.is != "dummy_boy" && entry.id.is != user.id.is
    }
    val excludeAuraHolders =
      user.has_flag(UserEntryFlagEnum.RELIGION) ||
        user.subrole.is == SubroleEnum.SUBPONTIFF.toString
    if (excludeAuraHolders) candidates.filter(_.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
    else candidates
  }
}
/** Non-targetable item; only contributes its icon. */
object ItemVentriloquist extends ItemData(MTypeEnum.ITEM_VENTRILOQUIST, "腹語娃娃!", "item_ventriloquist", false, 5) {
  override def item_pic = Seq(<img src="icon/VE.gif" />)
}
object ItemDMessageSeal extends ItemData(MTypeEnum.ITEM_DMESSAGE_SEAL, "封印遺書", "item_dmessage_seal", true, 4) {
  override def item_pic = Seq(<img src="icon/DS.gif" />)

  /** Living players other than the user (and the dummy); pontiff-aura holders
    * are excluded when the user is religious or a sub-pontiff. */
  override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
    val candidates = user_entrys.filter { entry =>
      entry.uname.is != "dummy_boy" && entry.id.is != user.id.is && entry.live.is
    }
    val excludeAuraHolders =
      user.has_flag(UserEntryFlagEnum.RELIGION) ||
        user.subrole.is == SubroleEnum.SUBPONTIFF.toString
    if (excludeAuraHolders) candidates.filter(_.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
    else candidates
  }
}
/** Non-targetable item; only contributes its icon. */
object ItemMirrorShield extends ItemData(MTypeEnum.ITEM_MIRROR_SHIELD, "鏡盾捲軸!", "item_mirror_shield", false, 4) {
  override def item_pic = Seq(<img src="icon/MS.gif" />)
}
object ItemShamanCrown extends ItemData(MTypeEnum.ITEM_SHAMAN_CROWN, "薩滿冕冠", "item_shaman_crown", true, 3) {
  override def item_pic = Seq(<img src="icon/SC.gif" />)
  // Shows the crown's augury result for this day/user: the target's name plus
  // either their role icon (villager side) or a generic "inhuman side" icon.
  // Empty when no result message has been recorded yet.
  override def item_intro(room:Room, room_day:RoomDay, user: UserEntry, user_entrys: List[UserEntry]) = {
    val system_message = SystemMessage.findAll(By(SystemMessage.roomday_id, room_day.id.is),
      By(SystemMessage.actioner_id, user.id.is),
      By(SystemMessage.mtype, MTypeEnum.ITEM_SHAMAN_CROWN.toString))
    val result_augure : NodeSeq =
      if (system_message.length != 0) {
        // actionee_id of the recorded message points at the augured player.
        val actionee = user_entrys.filter(_.id.is == system_message(0).actionee_id.is)(0)
        val actionee_role = RoleEnum.get_role(actionee.role.is.substring(0,1))
        val actionee_role_node =
          if (actionee_role.role_side == RoomVictoryEnum.VILLAGER_WIN)
            actionee_role.role_pic //actionee_role.toString
          else
            <img src="images/role_result_inhuman.gif" /> // "非人側" ("inhuman side")
        // <img src="images/yes.gif" />
        //actionee.handle_name.is + "是" + actionee_role_str + "(" +
        //// SubroleEnum.get_subrole(actionee.subrole.is).toString + ")"
        Seq(<span>{actionee.handle_name.is}</span>,
          <img src="images/yes.gif" />,
          <img src="images/parenthesis_left.gif" />,
          actionee_role_node,
          <img src="images/parenthesis_right.gif" />)
      } else NodeSeq.Empty // ""
    result_augure
  }
}
object ItemWeatherRod extends ItemData(MTypeEnum.ITEM_WEATHER_ROD, "天候棒!", "item_weather_rod", false, 3) with ItemOption {
  override def item_pic = Seq(<img src="icon/WR.gif" />)
  // Selectable weathers: sunny / cloudy / rainy / snowy / misty.
  override def option_map = scala.collection.immutable.TreeMap[String,String](
    WeatherEnum.SUNNY.toString -> "晴",
    WeatherEnum.CLOUDY.toString -> "陰",
    WeatherEnum.RAINY.toString -> "雨",
    WeatherEnum.SNOWY.toString -> "雪",
    WeatherEnum.MISTY.toString -> "霧"
  )
}
object ItemDeathNote extends ItemData(MTypeEnum.ITEM_DEATH_NOTE, "死亡筆記", "item_death_note", true, 2) {
  override def item_pic = Seq(<img src="icon/DN.gif" />)

  /** Living players other than the user (and the dummy); pontiff-aura holders
    * are excluded when the user is religious or a sub-pontiff. */
  override def targetable_users(room:Room, room_day:RoomDay, user:UserEntry, user_entrys:List[UserEntry]) : List[UserEntry] = {
    val candidates = user_entrys.filter { entry =>
      entry.uname.is != "dummy_boy" && entry.id.is != user.id.is && entry.live.is
    }
    val excludeAuraHolders =
      user.has_flag(UserEntryFlagEnum.RELIGION) ||
        user.subrole.is == SubroleEnum.SUBPONTIFF.toString
    if (excludeAuraHolders) candidates.filter(_.hasnt_flag(UserEntryFlagEnum.PONTIFF_AURA))
    else candidates
  }
}
/** Non-targetable item; only contributes its icon. */
object ItemPandoraBox extends ItemData(MTypeEnum.ITEM_PANDORA_BOX, "潘朵拉箱!", "item_pandora_box", false, 2) {
  override def item_pic = Seq(<img src="icon/PB.gif" />)
}
// Targetable item; relies on the inherited targetable_users implementation
// (no override here — confirm the base default is the intended target set).
object ItemCubicArrow extends ItemData(MTypeEnum.ITEM_CUBIC_ARROW, "邱比特之箭", "item_cubic_arrow", true, 2) {
  override def item_pic = Seq(<img src="icon/CA.gif" />)
}
object ItemPopulationCensus extends ItemData(MTypeEnum.ITEM_POPULATION_CENSUS, "人口普查!", "item_population_census", false, 1) {
  override def item_pic = Seq(<img src="icon/PC.gif" />)

  /** When a census result message exists for this day/user, render each role
    * still alive as its icon followed by its head count; otherwise nothing. */
  override def item_intro(room:Room, room_day:RoomDay, user: UserEntry, user_entrys: List[UserEntry]) = {
    val censusMessages = SystemMessage.findAll(By(SystemMessage.roomday_id, room_day.id.is),
      By(SystemMessage.actioner_id, user.id.is),
      By(SystemMessage.mtype, MTypeEnum.ITEM_POPULATION_CENSUS.toString))
    if (censusMessages.isEmpty) NodeSeq.Empty
    else {
      val liveUsers = user_entrys.filter(_.live.is)
      val roles = RoleEnum.ROLE_MAP.keys.toList.filter(_ != RoleNone)
      val nodes : NodeSeq = roles.flatMap { role =>
        val headCount = liveUsers.filter(_.current_role == role).length
        if (headCount > 0)
          RoleEnum.get_role(role).role_pic ++ <span>{headCount.toString}</span>
        else
          NodeSeq.Empty
      }
      nodes
    }
  }
} | Plummtw/jinrou_Lift | src/main/scala/org/plummtw/jinrou/data/ItemData.scala | Scala | apache-2.0 | 9,703 |
/*
* Copyright (C) 2012 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.core.workflow.mole
import org.openmole.core.context.{ Val, Variable }
import org.openmole.core.workflow.dsl._
import org.openmole.core.workflow.sampling._
import org.openmole.core.workflow.task._
import org.openmole.core.workflow.test.TestTask
import org.scalatest._
/**
 * Tests for master capsules. A master capsule runs all of its executions on
 * the same node, which lets it keep state between executions and coordinate
 * a master/slave exploration.
 */
class MasterCapsuleSpec extends FlatSpec with Matchers {

  import org.openmole.core.workflow.test.Stubs._

  "A master capsule" should "execute tasks" in {
    // Flipped by the downstream task; @volatile because the task may run on
    // a different thread than the final assertion.
    @volatile var testExecuted = false

    val p = Val[String]("p")

    // t1 writes p, t2 reads it back and records that it actually ran.
    val t1 = TestTask { _ + (p → "Test") } set (outputs += p)

    val t2 = TestTask { context ⇒
      context(p) should equal("Test")
      testExecuted = true
      context
    } set (inputs += p)

    val ex = Master(t1) -- Master(t2)
    ex.run

    testExecuted should equal(true)
  }

  "A master capsule" should "keep value of a variable from on execution to another" in {
    val data = List("A", "A", "B", "C")
    val i = Val[String]("i")
    val n = Val[Int]("n")

    val emptyT = EmptyTask() set ((inputs, outputs) += i)

    // `select` increments n on every execution; since it runs in a master
    // capsule the counter value survives across executions.
    val select = TestTask { context ⇒
      val nVal = context(n)
      context + Variable(n, nVal + 1) + Variable(i, (nVal + 1).toString)
    } set (
      (inputs, outputs) += (n, i),
      n := 0
    )

    // Loop master -> slave until the persisted counter exceeds 100.
    val ex = ExplicitSampling(i, data) -< emptyT -- (Master(select, n) -- Slot(emptyT) when "n <= 100")
    ex.run
  }

  "A end of exploration transition" should "end the master slave process" in {
    @volatile var selectTaskExecuted = 0
    @volatile var endCapsExecuted = 0

    val i = Val[Int]("i")
    val archive = Val[Array[Int]]("archive")

    val sampling = ExplicitSampling(i, 0 until 10)
    val exploration = ExplorationTask(sampling)

    val model =
      EmptyTask() set (
        inputs += i,
        outputs += i
      )

    // The master task accumulates each i produced by the slaves into
    // `archive`, which the master capsule persists between executions.
    val select = TestTask { context ⇒
      assert(context.contains(archive))
      selectTaskExecuted += 1
      context + Variable(archive, (context(i) :: context(archive).toList) toArray)
    } set (
      (inputs, outputs) += (archive, i),
      archive := Array.empty[Int]
    )

    val finalTask = TestTask { context ⇒
      assert(context.contains(archive))
      // At least the 10 sampled values; the upper bound allows in-flight
      // slave results still being folded in when the transition fires.
      assert(context(archive).size >= 10 && context(archive).size < 21)
      endCapsExecuted += 1
      context
    } set (
      inputs += archive
    )

    // `>|` is the end-of-exploration transition: once the archive is full it
    // terminates the whole master/slave process, so finalTask runs once.
    val ex =
      exploration -< model -- Master(select, archive) &
        select -- Slot(model) &
        (select >| finalTask when "archive.size >= 10")

    ex.run
    endCapsExecuted should equal(1)
  }

  "A master capsule" should "work with mole tasks" in {
    val i = Val[Int]
    val t1 = EmptyTask() set ((inputs, outputs) += i)
    // Wrap the task in a MoleTask to check that master capsules compose with
    // whole-mole tasks as well.
    val mt = MoleTask(t1)
    val ex = ExplicitSampling(i, 0 to 100) -< Master(mt, i)
    ex.run
  }
}
| openmole/openmole | openmole/core/org.openmole.core.workflow/src/test/scala/org/openmole/core/workflow/mole/MasterCapsuleSpec.scala | Scala | agpl-3.0 | 3,475 |
package ru.listok.test
import ru.listok._
import org.scalatest.FunSuite
/**
 * Unit tests for the Listok reader/parser: each test feeds source text to
 * Parser.read and compares the resulting AST node against the expected
 * Lcommon value.
 */
class ParserTest extends FunSuite {

  import Util.toLlist

  /** Parses `text` and returns the single resulting form (Lnil for empty
    * input); fails the test on a parse error or when more than one top-level
    * form is produced. */
  def parse1(text: String) = {
    Parser.read(text) match {
      case Right(s) =>
        if (s.isEmpty)
          Lnil
        else {
          if (s.length != 1)
            fail("a list with more than one elements: " + s)
          s.head
        }
      case Left(m) => fail(m)
    }
  }

  /** Asserts that parsing `text` fails with a parse error. */
  def parsefail(text: String) = {
    Parser.read(text) match {
      case Right(s) => fail("parse should fail = " + Util.pp(s))
      case Left(m) =>
    }
  }

  test ("nil") {
    expect(Lnil){parse1("nil")}
  }

  test ("t") {
    expect(Ltrue)(parse1("t"))
  }

  // Character literals, including punctuation, unicode and the named
  // characters (Newline, Space, Return, Tab, Page).
  test ("char") {
    expect(Lchar('a')) {parse1("""#\\a""")}
    expect(Lchar('W')) {parse1("""#\\W""")}
    expect(Lchar('Ы')) {parse1("""#\\Ы""")}
    expect(Lchar('λ')) {parse1("""#\\λ""")}
    expect(Lchar('-')) {parse1("""#\\-""")}
    expect(Lchar('+')) {parse1("""#\\+""")}
    expect(Lchar('@')) {parse1("""#\\@""")}
    expect(Lchar('?')) {parse1("""#\\?""")}
    expect(Lchar('#')) {parse1("""#\\#""")}
    expect(Lchar('$')) {parse1("""#\\$""")}
    expect(Lchar('~')) {parse1("""#\\~""")}
    expect(Lchar('/')) {parse1("""#\\/""")}
    expect(Lchar('|')) {parse1("""#\\|""")}
    expect(Lchar(',')) {parse1("""#\\,""")}
    expect(Lchar('\\'')) {parse1("""#\\'""")}
    expect(Lchar('`')) {parse1("""#\\`""")}
    expect(Lchar('(')) {parse1("""#\\(""")}
    expect(Lchar(')')) {parse1("""#\\)""")}
    expect(Lchar('\\\\')) {parse1("""#\\\\""")}
    expect(Lchar('1')) {parse1("""#\\1""")}
    expect(Lchar('0')) {parse1("""#\\0""")}
    expect(Lchar('\\n')) {parse1("""#\\Newline""")}
    expect(Lchar(' ')) {parse1("""#\\Space""")}
    expect(Lchar('\\r')) {parse1("""#\\Return""")}
    expect(Lchar('\\t')) {parse1("""#\\Tab""")}
    expect(Lchar('\\f')) {parse1("""#\\Page""")}
  }

  test ("int") {
    expect(Lint(0)) {parse1("0")}
    expect(Lint(42)) {parse1("42")}
    expect(Lint(-42)) {parse1("-42")}
    expect(Lint(2147483647)) {parse1("2147483647")}
    expect(Lint(-2147483648)) {parse1("-2147483648")}
  }

  test ("float") {
    expect(Lfloat(0.0)) {parse1("0.0")}
    expect(Lfloat(42.42)) {parse1("42.42")}
    expect(Lfloat(-42.42)) {parse1("-42.42")}
    expect(Lfloat(42.0)) {parse1("42.0")}
    expect(Lfloat(-42.0)) {parse1("-42.0")}
    expect(Lfloat(4200.0)) {parse1("42e2")}
    expect(Lfloat(-4200.0)) {parse1("-42e2")}
  }

  // Values just outside Int range must become Llong.
  test ("long") {
    expect(Llong(2147483648l)) {parse1("2147483648")}
    expect(Llong(-2147483649l)) {parse1("-2147483649")}
    expect(Llong(Long.MaxValue)) {parse1("9223372036854775807")}
    expect(Llong(Long.MinValue)) {parse1("-9223372036854775808")}
  }

  // Values just outside Long range must become Lbignum.
  test ("bignum") {
    expect(Lbignum(BigInt("9223372036854775808"))) {parse1("9223372036854775808")}
    expect(Lbignum(BigInt("-9223372036854775809"))) {parse1("-9223372036854775809")}
  }

  test ("string") {
    expect(Lstring("")) {parse1("\\"\\"")}
    expect(Lstring("hello")) {parse1("\\"hello\\"")}
    expect(Lstring("http://google.com")) {parse1(""""http://google.com"""")}
  }

  test ("symbol") {
    expect(Lsymbol('foo1)){parse1("foo1")}
    expect(Lsymbol(Symbol("-"))){parse1("-")}
    expect(Lsymbol(Symbol("*"))){parse1("*")}
    expect(Lsymbol(Symbol("+"))){parse1("+")}
    expect(Lsymbol(Symbol("/"))){parse1("/")}
    expect(Lsymbol(Symbol("="))){parse1("=")}
    expect(Lsymbol(Symbol("<"))){parse1("<")}
    expect(Lsymbol(Symbol(">"))){parse1(">")}
    expect(Lsymbol(Symbol("?"))){parse1("?")}
    expect(Lsymbol(Symbol("_"))){parse1("_")}
    expect(Lsymbol(Symbol("~"))){parse1("~")}
    expect(Lsymbol(Symbol("%"))){parse1("%")}
    expect(Lsymbol(Symbol("!"))){parse1("!")}
    expect(Lsymbol(Symbol("^"))){parse1("^")}
    expect(Lsymbol(Symbol("&"))){parse1("&")}
    expect(Lsymbol(Symbol("1foo"))){parse1("1foo")}
    expect(Lsymbol(Symbol("1foo:bar"))){parse1("1foo:bar")}
    expect(Lsymbol(Symbol("+bar+"))){parse1("+bar+")}
    expect(Lsymbol(Symbol("*bar*"))){parse1("*bar*")}
    expect(Lsymbol(Symbol("foo#"))){parse1("foo#")}
  }

  test ("list") {
    expect(Llist(Nil)) {parse1("()") }
    expect(toLlist(Lsymbol('foo), "bar", 1, Nil, true, '\\n', Lkeyword('bar)) ) {
      parse1("(foo \\"bar\\" 1 nil t #\\\\Newline :bar)") }
    expect(toLlist(List(1, 2), List(3,4,List(true)))) {
      parse1("((1 2) (3 4 (t)))") }
  }

  // Both the (quote x) form and the 'x reader shorthand.
  test ("quote") {
    expect(Lquote(Lint(123))) { parse1("(quote 123)") }
    expect(Lquote(Lsymbol('abc))) { parse1("(quote abc)") }
    expect(Lquote(Lchar('a'))) { parse1("(quote #\\\\a)") }
    expect(Lquote(Ltrue)) { parse1("(quote t)") }
    expect(Lquote(Lint(123))) { parse1("'123") }
    expect(Lquote(Lsymbol('abc))) { parse1("'abc") }
    expect(Lquote(Lchar('a'))) { parse1("'#\\\\a") }
    expect(Lquote(Ltrue)) { parse1("'t") }
    expect(Lquote(LL(Lint(1), Lint(2)))) {parse1("'(1 2)") }
  }

  test ("lambda") {
    expect(
      Llambda(
        List(Lsymbol('x), Lsymbol('y)),
        List(LL(Lsymbol('+), Lint(1), Lsymbol('x), Lsymbol('y)))))
    { parse1("(lambda (x y) (+ 1 x y))") }
  }

  // The unicode λ is accepted as an alias for lambda.
  test ("greek lambda") {
    expect(
      Llambda(
        List(Lsymbol('x), Lsymbol('y)),
        List(LL(Lsymbol('+), Lint(1), Lsymbol('x), Lsymbol('y)))))
    { parse1("(λ (x y) (+ 1 x y))") }
  }

  test ("keyword") {
    expect(Lkeyword('abc)) {parse1(":abc")}
    expect(Lkeyword(Symbol("1"))) {parse1(":1")}
  }

  test("defmacro") {
    expect(Ldefmacro(
      'test,
      List(Lsymbol('b)),
      List(Lmacrobackquote(LL(Lsymbol('list), Lmacrocomma(Lsymbol('b)))))))
    {parse1("""(defmacro test (b) `(list ,b))""")}
  }

  // Special forms must be recognised only in head position and only when the
  // token is exactly the sform name.
  test ("sform") {
    // Local helper: checks that the parsed list starts with the named
    // special form and carries exactly the expected arguments.
    def expect(name: Symbol, args: List[Lcommon])(r: Lcommon) = r match {
      case Llist(l) =>
        l.head match {
          case sf: Lsform if sf.name == name =>
            if (l.tail != args)
              fail("fail parse sform (invalid args): " + args.toList)
          case _ => fail("fail parse sform (invalid sform): " + r.pp)
        }
      case _ => fail("fail parse sform: " + r.pp)
    }
    expect('def, List(Lsymbol('x))){parse1("(def x)")}
    expect('def, List(Lsymbol('x), Lint(1)))(parse1("(def x 1)"))
    expect('def, List(Lsymbol('def), Lint(1)))(parse1("(def def 1)"))
    expect('def, List(Lsymbol('defx), Lint(1)))(parse1("(def defx 1)"))
    expect('defconstant, List(Lsymbol('x)))(parse1("(defconstant x)"))
    expect('defconstant, List(Lsymbol('x), Lnil))(parse1("(defconstant x nil)"))
    expect('defun, List(Lsymbol('foo), LL(Lsymbol('x)), Lsymbol('x)))(parse1("(defun foo (x) x)"))
    expect('if, List(Lsymbol('x)))(parse1("(if x)"))
    expect('if, List(Lsymbol('x), Lnil))(parse1("(if x nil)"))
    expect('cond, List(Lsymbol('x)))(parse1("(cond x)"))
    expect('do, List(Lsymbol('x)))(parse1("(do x)"))
    expect('and, List(Lsymbol('x)))(parse1("(and x)"))
    expect('or, List(Lsymbol('x)))(parse1("(or x)"))
    expect('setf, List(Lsymbol('setf)))(parse1("(setf setf)"))
    expect('spawn, List(Lsymbol('worker), Llist(Nil)))(parse1("(spawn worker ())"))
    expect('match, List(Lsymbol('x)))(parse1("(match x)"))
  }

  // #/.../ literals; escaped slashes inside the pattern must be unescaped.
  test("regex") {
    expect("abc"){Parser.parseRegex("#/abc/").regex}
    expect("abc/"){Parser.parseRegex("""#/abc\\//""").regex}
    expect("///"){Parser.parseRegex("""#/\\/\\/\\//""").regex}
    expect("""(\\b[0-9]+\\.([0-9]+\\b)?|\\.[0-9]+\\b)""")
    {Parser.parseRegex("""#/(\\b[0-9]+\\.([0-9]+\\b)?|\\.[0-9]+\\b)/""").regex}
    expect("""\\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\\.[A-Z]{2,4}\\b""")
    {Parser.parseRegex("""#/\\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\\.[A-Z]{2,4}\\b/""").regex}
    expect(LL(
      Lsymbol('list), Lint(1), Lsymbol(Symbol("/")), Lkeyword(Symbol("x#")), Lstring("#//"), Lregex("(abc)")))
    {parse1("""(list 1 / :x# "#//" #/(abc)/)""")}
  }

  // ,@x (spliced) vs ,x (plain) inside a backquote; the boolean flag on
  // Lmacrocomma records whether the comma was spliced.
  test("spliced comma") {
    expect(Lmacrobackquote(LL(
      Lint(1),
      Lmacrocomma(Lsymbol('a), true),
      Lmacrocomma(Lsymbol('b), false),
      Lint(2))))
    {parse1("`(1 ,@a ,b 2)")}
  }

  test ("invalid char") {
    parsefail("#\\\\ab")
    parsefail("#\\\\ a")
  }

  test ("must fail") {
    parsefail(")")
  }

  test("escape") {
    expect(Lstring("\\n\\b\\f\\r\\t")) {parse1(""""\\n\\b\\f\\r\\t"""")}
    expect(Lstring("\\033")) {parse1(""""\\e"""")}
  }

  // Identifiers that merely contain an sform name ("do-1", "matchall", ...)
  // must still parse as plain symbols.
  test("symbols and sforms") {
    expect(Lsymbol(Symbol("match-all"))) {parse1("match-all")}
    expect(Lsymbol(Symbol("match/all"))) {parse1("match/all")}
    expect(Lsymbol(Symbol("matchall"))) {parse1("matchall")}
    expect(Lsymbol(Symbol("do-1"))) {parse1("do-1")}
    expect(Lsymbol(Symbol("do/1"))) {parse1("do/1")}
    expect(Lsymbol(Symbol("do1"))) {parse1("do1")}
    expect(LL(Lsymbol(Symbol("match-all")))) {(parse1("(match-all)"))}
    expect(LL(Lsymbol(Symbol("match/all")))) {parse1("(match/all)")}
    expect(LL(Lsymbol(Symbol("matchall")))) {parse1("(matchall)")}
    expect(LL(Lsymbol(Symbol("do-1")))) {parse1("(do-1)")}
    expect(LL(Lsymbol(Symbol("do/1")))) {parse1("(do/1)")}
    expect(LL(Lsymbol(Symbol("do1")))) {parse1("(do1)")}
    expect(LL(Lsymbol(Symbol("all-do")))) {parse1("(all-do)")}
    expect(LL(Lsymbol(Symbol("dodo")))) {parse1("(dodo)")}
    expect(LL(Lsymbol(Symbol("defmarco-all")))) {parse1("(defmarco-all)")}
    expect(LL(Lsymbol(Symbol("lambda-all")))) {parse1("(lambda-all)")}
    expect(LL(Lsymbol(Symbol("quote-all")))) {parse1("(quote-all)")}
  }

  // ; starts a line comment, except inside a string literal.
  test("comment") {
    //try {
    expect(Lnil)(parse1(";\\n"))
    expect(LL())(parse1("();\\n"))
    expect(Lnil)(parse1(";()\\n"))
    expect(LL(Lsymbol('+), Lint(1), Lint(2)))(parse1("(+ 1 2);()\\n"))
    expect(Lnil)(parse1(" ;()\\n"))
    expect(Lnil)(parse1("\\t;()\\n"))
    expect(LL())(parse1("\\t();\\n"))
    expect(Lnil)(parse1(";()\\n\\t"))
    expect(Lnil)(parse1("\\t;()\\n\\t"))
    expect(LL())(parse1("\\t()\\n;()\\t\\n\\t"))
    expect(Lnil)(parse1(
"""
;1
;2
"""))
    expect(Lstring(";"))(parse1("\\";\\"\\n"))
    expect(Lnil)(parse1(";\\";\\"\\n"))
    //}catch {case e => println(e)}
  }
}
| kolyvan/listok | src/test/scala/parser-test.scala | Scala | lgpl-3.0 | 10,075 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.schedulers
import minitest.TestSuite
import monix.execution.ExecutionModel.AlwaysAsyncExecution
import monix.execution.ExecutionModel.{Default => DefaultExecModel}
import monix.execution.Scheduler
import monix.execution.internal.Platform
import scala.concurrent.Promise
/**
 * Tests for TrampolineScheduler: tasks submitted from within a running task
 * execute synchronously (trampolined) on the current thread, while timed
 * scheduling and error reporting are delegated to the underlying scheduler —
 * a TestScheduler here, so simulated time advances only via `tick`.
 */
object TrampolineSchedulerSuite extends TestSuite[(Scheduler, TestScheduler)] {
  // Builds the (trampoline, underlying) pair used by every test.
  def setup(): (Scheduler, TestScheduler) = {
    val u = TestScheduler(DefaultExecModel)
    val t = TrampolineScheduler(u, DefaultExecModel)
    (t, u)
  }

  // Every test must leave the underlying scheduler with no pending tasks.
  def tearDown(env: (Scheduler, TestScheduler)): Unit = {
    assert(env._2.state.tasks.isEmpty, "tasks.isEmpty")
  }

  test("execute async should execute immediately") {
    case (s, _) =>
      var effect = 0
      val p = Promise[Int]()
      s.executeAsync { () =>
        effect += 1
        s.executeAsync { () =>
          effect += 2
          s.executeAsync { () =>
            effect += 3
            p.success(effect)
            ()
          }
        }
      }
      // Should already be executed
      assertEquals(effect, 1 + 2 + 3)
  }

  test("execute local should work") {
    case (s, _) =>
      var effect = 0
      // Trampolined runnables also run synchronously to completion.
      s.executeTrampolined { () =>
        effect += 1
        s.executeTrampolined { () =>
          effect += 2
          s.executeTrampolined { () =>
            effect += 3
          }
        }
      }
      assertEquals(effect, 1 + 2 + 3)
  }

  test("schedule for execution with delay") {
    case (s, u) =>
      import concurrent.duration._
      val p = Promise[Unit]()
      val startAt = s.clockRealTime(MILLISECONDS)
      s.scheduleOnce(100.millis) { p.success(()); () }
      // Delayed work only runs once simulated time is advanced.
      u.tick(100.millis)
      val duration = s.clockRealTime(MILLISECONDS) - startAt
      assert(duration >= 100, "duration >= 100")
      assert(p.future.isCompleted, "p.future.isCompleted")
  }

  test("report failure should work") {
    case (s, u) =>
      // Failures are forwarded to the underlying scheduler's reporter.
      val ex = new RuntimeException("dummy")
      s.reportFailure(ex)
      assertEquals(u.state.lastReportedError, ex)
  }

  test("scheduleWithFixedDelay") {
    case (s, u) =>
      import concurrent.duration._
      var effect = 0
      val task = s.scheduleWithFixedDelay(1.second, 1.second) { effect += 1 }
      u.tick()
      assertEquals(effect, 0)
      u.tick(1.second)
      assertEquals(effect, 1)
      u.tick(1.second)
      assertEquals(effect, 2)
      // After cancellation no further runs happen.
      task.cancel()
      u.tick(1.second)
      assertEquals(effect, 2)
  }

  test("scheduleAtFixedRate") {
    case (s, u) =>
      import concurrent.duration._
      var effect = 0
      val task = s.scheduleAtFixedRate(1.second, 1.second) { effect += 1 }
      u.tick()
      assertEquals(effect, 0)
      u.tick(1.second)
      assertEquals(effect, 1)
      u.tick(1.second)
      assertEquals(effect, 2)
      // After cancellation no further runs happen.
      task.cancel()
      u.tick(1.second)
      assertEquals(effect, 2)
  }

  test("withExecutionModel") {
    case (s, _) =>
      // Changing the execution model must preserve the trampoline wrapper.
      val em = AlwaysAsyncExecution
      val s2 = s.withExecutionModel(em)
      assert(s2.isInstanceOf[TrampolineScheduler], "s2.isInstanceOf[TrampolineScheduler]")
      assertEquals(s2.executionModel, em)
  }

  test("on blocking it should fork") {
    case (s, u) =>
      import concurrent.blocking
      if (!Platform.isJVM) ignore("test relevant only for the JVM")

      var effect = 0
      // Once a `blocking` section is entered, subsequently submitted tasks
      // are forked to the underlying scheduler instead of being trampolined.
      s.executeAsync { () =>
        s.executeAsync { () =>
          effect += 20
        }
        s.executeAsync { () =>
          effect += 20
        }
        effect += 3
        blocking { effect += 10 }
        effect += 3
      }

      assertEquals(effect, 16)
      u.tickOne()
      assertEquals(effect, 56)
  }
}
| alexandru/monifu | monix-execution/shared/src/test/scala/monix/execution/schedulers/TrampolineSchedulerSuite.scala | Scala | apache-2.0 | 4,318 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.amaterasu.integration
import org.apache.amaterasu.leader.dsl.GitUtil
import org.scalatest.{FlatSpec, Matchers}
import scala.reflect.io.Path
class GitTests extends FlatSpec with Matchers {

  // NOTE(review): integration test — requires network access to GitHub.
  "GitUtil.cloneRepo" should "clone the sample job git repo" in {
    // Start from a clean slate so a stale checkout cannot mask a failed clone.
    Path("repo").deleteRecursively()

    GitUtil.cloneRepo("https://github.com/shintoio/amaterasu-job-sample.git", "master")

    // A successful clone must materialise the job definition file.
    new java.io.File("repo/maki.yml").exists should be(true)
  }
}
| shintoio/amaterasu | leader/src/test/scala/org/apache/amaterasu/integration/GitTests.scala | Scala | apache-2.0 | 1,319 |
package monocle.bench
import monocle.bench.BenchModel.Nested0
import monocle.bench.input.Nested0Input
import scalaz.Maybe
/**
 * Common interface for the lens benchmarks: each implementation provides
 * get/set/modify/modifyF at nesting depths 0, 3 and 6 so the benchmarks can
 * compare libraries on identical inputs. The trailing digit in each method
 * name is the nesting depth inside the Nested0 structure.
 */
trait LensBench {

  // Read the Int at depth 0 / 3 / 6.
  def lensGet0(in: Nested0Input): Int
  def lensGet3(in: Nested0Input): Int
  def lensGet6(in: Nested0Input): Int

  // Replace the Int at depth 0 / 3 / 6, rebuilding the structure.
  def lensSet0(in: Nested0Input): Nested0
  def lensSet3(in: Nested0Input): Nested0
  def lensSet6(in: Nested0Input): Nested0

  // Apply a function to the Int at depth 0 / 3 / 6.
  def lensModify0(in: Nested0Input): Nested0
  def lensModify3(in: Nested0Input): Nested0
  def lensModify6(in: Nested0Input): Nested0

  // Effectful modification (result wrapped in Maybe) at depth 0 / 3 / 6.
  def lensModifyF0(in: Nested0Input): Maybe[Nested0]
  def lensModifyF3(in: Nested0Input): Maybe[Nested0]
  def lensModifyF6(in: Nested0Input): Maybe[Nested0]
}
| rperry/Monocle | bench/src/main/scala/monocle/bench/LensBench.scala | Scala | mit | 687 |
package mesosphere.marathon.core.history
import akka.actor.Props
import akka.event.EventStream
import mesosphere.marathon.core.history.impl.HistoryActor
import mesosphere.marathon.storage.repository.TaskFailureRepository
/**
 * Exposes the history actor, in charge of keeping track of the task failures.
 *
 * @param eventBus              event stream the actor subscribes to
 * @param taskFailureRepository persistent store for recorded task failures
 */
class HistoryModule(
    eventBus: EventStream,
    taskFailureRepository: TaskFailureRepository) {
  // Props only; the caller is responsible for actually spawning the actor.
  lazy val historyActorProps: Props = Props(new HistoryActor(eventBus, taskFailureRepository))
}
| timcharper/marathon | src/main/scala/mesosphere/marathon/core/history/HistoryModule.scala | Scala | apache-2.0 | 509 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.batch.sql
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.TypeExtractor
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.planner.plan.utils.MyPojo
import org.apache.flink.table.planner.utils.TableTestBase
import org.junit.Test
import java.sql.{Date, Time, Timestamp}
/**
 * Plan tests for batch SQL projection/filter (Calc) handling: each test
 * verifies the optimized execution plan produced for the given query against
 * the checked-in expected plan.
 */
class CalcTest extends TableTestBase {

  private val util = batchTestUtil()
  // Shared three-column source used by most tests below.
  util.addTableSource[(Long, Int, String)]("MyTable", 'a, 'b, 'c)

  @Test
  def testOnlyProject(): Unit = {
    util.verifyExecPlan("SELECT a, c FROM MyTable")
  }

  @Test
  def testProjectWithNaming(): Unit = {
    // Non-identifier characters in an alias must survive planning.
    util.verifyExecPlan("SELECT `1-_./Ü`, b, c FROM (SELECT a as `1-_./Ü`, b, c FROM MyTable)")
  }

  @Test
  def testMultiProjects(): Unit = {
    // Nested projections should be merged into a single Calc.
    util.verifyExecPlan("SELECT c FROM (SELECT a, c FROM MyTable)")
  }

  @Test
  def testOnlyFilter(): Unit = {
    util.verifyExecPlan("SELECT * FROM MyTable WHERE b > 0")
  }

  @Test
  def testDisjunctiveFilter(): Unit = {
    util.verifyExecPlan("SELECT * FROM MyTable WHERE a < 10 OR a > 20")
  }

  @Test
  def testConjunctiveFilter(): Unit = {
    util.verifyExecPlan("SELECT * FROM MyTable WHERE a < 10 AND b > 20")
  }

  @Test
  def testMultiFilters(): Unit = {
    // Stacked filters should be merged into a single Calc.
    util.verifyExecPlan("SELECT * FROM (SELECT * FROM MyTable WHERE b > 0) t WHERE a < 50")
  }

  @Test
  def testProjectAndFilter(): Unit = {
    util.verifyExecPlan("SELECT a, b + 1 FROM MyTable WHERE b > 2")
  }

  @Test
  def testIn(): Unit = {
    val sql = s"SELECT * FROM MyTable WHERE b IN (1, 3, 4, 5, 6) AND c = 'xx'"
    util.verifyExecPlan(sql)
  }

  @Test
  def testNotIn(): Unit = {
    val sql = s"SELECT * FROM MyTable WHERE b NOT IN (1, 3, 4, 5, 6) OR c = 'xx'"
    util.verifyExecPlan(sql)
  }

  @Test
  def testMultipleFlattening(): Unit = {
    // `.*` on composite (row-typed) columns expands them to fields.
    util.addTableSource[((Int, Long), (String, Boolean), String)]("MyTable2", 'a, 'b, 'c)
    util.verifyExecPlan("SELECT MyTable2.a.*, c, MyTable2.b.* FROM MyTable2")
  }

  @Test(expected = classOf[ValidationException])
  def testInvalidFields(): Unit = {
    // Referencing a non-existent column must fail validation.
    util.tableEnv.sqlQuery("SELECT a, foo FROM MyTable")
  }

  @Test
  def testPrimitiveMapType(): Unit = {
    util.verifyExecPlan("SELECT MAP[b, 30, 10, a] FROM MyTable")
  }

  @Test
  def testNonPrimitiveMapType(): Unit = {
    util.verifyExecPlan("SELECT MAP[a, c] FROM MyTable")
  }

  @Test
  def testRowType(): Unit = {
    util.verifyExecPlan("SELECT ROW(1, 'Hi', a) FROM MyTable")
  }

  @Test
  def testArrayType(): Unit = {
    util.verifyExecPlan("SELECT ARRAY['Hi', 'Hello', c] FROM MyTable")
  }

  @Test
  def testProjectWithDateType(): Unit = {
    // Temporal literals in the projection.
    val sql =
      """
        |SELECT a, b, c,
        | DATE '1984-07-12',
        | TIME '14:34:24',
        | TIMESTAMP '1984-07-12 14:34:24'
        |FROM MyTable
      """.stripMargin
    util.verifyExecPlan(sql)
  }

  @Test
  def testFilterWithDateType(): Unit = {
    // Temporal literals in the predicate.
    util.addTableSource[(Long, Date, Time, Timestamp)]("MyTable3", 'a, 'b, 'c, 'd)
    val sql =
      """
        |SELECT * FROM MyTable3
        |WHERE b = DATE '1984-07-12' AND c = TIME '14:34:24' AND d = TIMESTAMP '1984-07-12 14:34:24'
      """.stripMargin
    util.verifyExecPlan(sql)
  }

  @Test
  def testPojoType(): Unit = {
    // POJO-typed column projected as-is.
    util.addTableSource(
      "MyTable4",
      Array[TypeInformation[_]](TypeExtractor.createTypeInfo(classOf[MyPojo])),
      Array("a"))
    util.verifyExecPlan("SELECT a FROM MyTable4")
  }

  @Test
  def testMixedType(): Unit = {
    util.addTableSource[(String, Int, Timestamp)]("MyTable5", 'a, 'b, 'c)
    util.verifyExecPlan("SELECT ROW(a, b, c), ARRAY[12, b], MAP[a, c] FROM MyTable5 " +
      "WHERE (a, b, c) = ('foo', 12, TIMESTAMP '1984-07-12 14:34:24')")
  }

  @Test
  def testCollationDeriveOnCalc(): Unit = {
    util.verifyExecPlan("SELECT CAST(a AS INT), CAST(b AS VARCHAR) FROM (VALUES (3, 'c')) T(a,b)")
  }

  @Test
  def testOrWithIsNullPredicate(): Unit = {
    util.verifyExecPlan("SELECT * FROM MyTable WHERE a = 1 OR a = 10 OR a IS NULL")
  }

  @Test
  def testOrWithIsNullInIf(): Unit = {
    util.verifyExecPlan("SELECT IF(c = '' OR c IS NULL, 'a', 'b') FROM MyTable")
  }
}
| clarkyzl/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/CalcTest.scala | Scala | apache-2.0 | 5,042 |
package com.twitter.finagle.param
import com.twitter.finagle.service.StatsFilter
import com.twitter.finagle.{stats, tracing, util, Stack}
import com.twitter.util.JavaTimer
/**
 * A class eligible for configuring a label used to identify finagle
 * clients and servers.
 */
case class Label(label: String) {
  def mk(): (Label, Stack.Param[Label]) =
    (this, Label.param)
}
object Label {
  // Public implicits carry an explicit result type so the inferred type
  // cannot drift and implicit resolution stays stable across releases.
  implicit val param: Stack.Param[Label] = Stack.Param(Label(""))
}
/**
 * A class eligible for configuring a client library name used to identify
 * which client library a client is using.
 */
case class ProtocolLibrary(name: String) {
  def mk(): (ProtocolLibrary, Stack.Param[ProtocolLibrary]) =
    (this, ProtocolLibrary.param)
}
object ProtocolLibrary {
  // Explicit type on the public implicit; see note on Label.param.
  implicit val param: Stack.Param[ProtocolLibrary] = Stack.Param(ProtocolLibrary("not-specified"))
}
/**
 * A class eligible for configuring a [[com.twitter.util.Timer]] used
 * throughout finagle clients and servers.
 *
 * @see [[HighResTimer]] for a configuration that needs a more
 *      fine-grained timer as this is typically implemented via a
 *      "hashed wheel timer" which is optimized for approximated
 *      I/O timeout scheduling.
 */
case class Timer(timer: com.twitter.util.Timer) {
  def mk(): (Timer, Stack.Param[Timer]) =
    (this, Timer.param)
}
object Timer {
  // Explicit type on the public implicit; see note on Label.param.
  implicit val param: Stack.Param[Timer] = Stack.Param(Timer(util.DefaultTimer.twitter))
}
/**
 * A class eligible for configuring a high resolution [[com.twitter.util.Timer]]
 * such that tasks are run tighter to their schedule.
 *
 * @see [[Timer]] for a configuration that is appropriate for
 *      tasks that do not need fine-grained scheduling.
 *
 * @note it is expected that the resolution should be sub-10 milliseconds.
 */
case class HighResTimer(timer: com.twitter.util.Timer) {
  def mk(): (HighResTimer, Stack.Param[HighResTimer]) =
    (this, HighResTimer.param)
}
object HighResTimer {
  /**
   * The default Timer used for configuration.
   *
   * It is a shared resource and as such, `stop` is ignored.
   */
  val Default: com.twitter.util.Timer = util.DefaultTimer.twitter
  // todo: make JavaTimer the default instance again CSL-2208
  //    new JavaTimer(true, Some("HighResTimer")) {
  //      override def stop(): Unit = ()
  //    }

  // Explicit type on the public implicit; see note on Label.param.
  implicit val param: Stack.Param[HighResTimer] = Stack.Param(HighResTimer(Default))
}
/**
 * A class eligible for configuring a [[java.util.logging.Logger]]
 * used throughout finagle clients and servers.
 */
case class Logger(log: java.util.logging.Logger) {
  def mk(): (Logger, Stack.Param[Logger]) =
    (this, Logger.param)
}
object Logger {
  // Explicit type on the public implicit; see note on Label.param.
  implicit val param: Stack.Param[Logger] = Stack.Param(Logger(util.DefaultLogger))
}
/**
 * A class eligible for configuring a
 * [[com.twitter.finagle.stats.StatsReceiver]] throughout finagle
 * clients and servers.
 */
case class Stats(statsReceiver: stats.StatsReceiver) {
  def mk(): (Stats, Stack.Param[Stats]) =
    (this, Stats.param)
}
object Stats {
  // Explicit type on the public implicit; see note on Label.param.
  implicit val param: Stack.Param[Stats] = Stack.Param(Stats(stats.DefaultStatsReceiver))
}
/**
 * A class eligible for configuring a [[com.twitter.util.Monitor]]
 * throughout finagle servers and clients.
 */
case class Monitor(monitor: com.twitter.util.Monitor) {
  def mk(): (Monitor, Stack.Param[Monitor]) =
    (this, Monitor.param)
}
object Monitor {
  // Explicit type on the public implicit; see note on Label.param.
  implicit val param: Stack.Param[Monitor] = Stack.Param(Monitor(util.DefaultMonitor))
}
/**
 * A class eligible for configuring a
 * [[com.twitter.finagle.util.ReporterFactory]] throughout finagle servers and
 * clients.
 */
case class Reporter(reporter: util.ReporterFactory) {
  def mk(): (Reporter, Stack.Param[Reporter]) =
    (this, Reporter.param)
}
object Reporter {
  // Explicit type on the public implicit; see note on Label.param.
  implicit val param: Stack.Param[Reporter] = Stack.Param(Reporter(util.LoadedReporterFactory))
}
/**
 * A class eligible for configuring a
 * [[com.twitter.finagle.tracing.Tracer]] throughout finagle servers
 * and clients.
 */
case class Tracer(tracer: tracing.Tracer) {
  def mk(): (Tracer, Stack.Param[Tracer]) =
    (this, Tracer.param)
}
object Tracer {
  // Explicit type on the public implicit; see note on Label.param.
  implicit val param: Stack.Param[Tracer] = Stack.Param(Tracer(tracing.DefaultTracer))
}
/**
 * A class eligible for configuring a
 * [[com.twitter.finagle.stats.ExceptionStatsHandler]] throughout finagle servers
 * and clients.
 *
 * NB: Since the default for failures is to be scoped under "failures", if you
 * set the default to be in another scope, it may be difficult for engineers
 * unfamiliar with your stats to understand your service's key metrics.
 */
case class ExceptionStatsHandler(categorizer: stats.ExceptionStatsHandler)

object ExceptionStatsHandler {
  // Explicit type on the public implicit (see note on Label.param); the
  // anonymous subclass is deliberately widened to Stack.Param[...].
  implicit val param: Stack.Param[ExceptionStatsHandler] = new Stack.Param[ExceptionStatsHandler] {
    // Note, this is lazy to avoid potential failures during
    // static initialization.
    lazy val default = ExceptionStatsHandler(StatsFilter.DefaultExceptions)
  }
}
| zfy0701/finagle | finagle-core/src/main/scala/com/twitter/finagle/param/Params.scala | Scala | apache-2.0 | 4,694 |
/*
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.mikegagnon.decl
/** Thrown when a cycle is detected in a graph that is supposed to be acyclic. */
class CycleException(msg: String) extends IllegalArgumentException(msg)
/**
 * Directed Acyclic Graph.
 *
 * Subclasses should override:
 *  - nodes
 *  - edges
 * Provides the following public methods/fields:
 *  - sorted: a topological ordering of the nodes
 *  - ancestors: the transitive predecessors of a node
 *  - subgraph: the sub-DAG induced by a set of leaves plus their ancestors
 *
 * A [[CycleException]] is thrown whenever a cycle is encountered.
 */
abstract class Dag[Node] {

  // the set of all nodes in this graph
  val nodes: Set[Node]

  // set of (from, to) edges
  val edges: Set[(Node, Node)]

  /**
   * Given a set of (from, to) pairs, returns a map from each node to the set
   * of nodes it points to. Nodes with no outgoing pairs map to the empty set;
   * looking up a node outside `nodes` throws IllegalArgumentException.
   */
  private[decl] def edgesToMap(fromTo: Set[(Node, Node)]): Map[Node, Set[Node]] = fromTo
    // group the (from, to) pairs by their "from" node
    .groupBy { case (from, _) => from }
    // keep only the "to" side of each group. A strict .map is used instead of
    // the lazy .mapValues view (deprecated in 2.13) so the sets are computed
    // once instead of on every lookup via `outgoing`/`incoming`.
    .map { case (from, pairs) => from -> pairs.map(_._2) }
    .withDefault { node =>
      if (nodes.contains(node)) {
        Set[Node]()
      } else {
        throw new IllegalArgumentException("nodes %s is not in the set of nodes".format(node))
      }
    }

  // maps each node to the set of nodes it points to
  private[decl] final lazy val outgoing: Map[Node, Set[Node]] = edgesToMap(edges)

  // maps each node to the set of nodes that point to it
  private[decl] final lazy val incoming: Map[Node, Set[Node]] =
    edgesToMap(edges.map { case (from, to) => (to, from) })

  // set of all root nodes (nodes with no incoming edges)
  private[decl] final lazy val roots: Set[Node] = nodes
    .filter { node =>
      incoming(node).isEmpty
    }

  /**
   * The topologically sorted list of nodes.
   *
   * Algorithm (depth-first topological sort, from Wikipedia):
   *   soFar = empty list that will contain the sorted nodes
   *   for each root node n: visit(n, stack = empty)
   *   visit(n, stack):
   *     if n is on the stack: cycle detected
   *     if n not visited yet: mark visited, visit each child, prepend n
   */
  final lazy val sorted: List[Node] = {
    // A non-empty graph with no roots is entirely cyclic.
    if (roots.isEmpty && nodes.nonEmpty) {
      throw new CycleException("There are no root nodes")
    }
    val (sortedNodes, _) = roots.foldLeft((List[Node](), Set[Node]())) {
      case ((soFar, visited), rootNode) =>
        visit(rootNode, soFar, visited, Set())
    }
    // Every node is reachable from some root in an acyclic graph.
    assert(sortedNodes.toSet == nodes)
    sortedNodes
  }

  /**
   * Returns all (transitive) ancestors of `node`.
   *
   * @param stack nodes on the current recursion path, used for cycle detection
   * @throws CycleException if a cycle is reachable from `node`
   */
  final def ancestors(node: Node, stack: Set[Node] = Set[Node]()): Set[Node] = {
    if (stack.contains(node)) {
      throw new CycleException("Cycled detected at node %s".format(node))
    }
    val in = incoming(node)
    val newStack = stack + node
    in ++ in.flatMap { parentNode => ancestors(parentNode, newStack) }
  }

  /**
   * Returns the subgraph that contains leaves plus all of leaves's ancestors.
   */
  final def subgraph(leaves: Set[Node]): Dag[Node] = {
    val subnodes = leaves ++ leaves.flatMap { ancestors(_) }
    val subedges = edges.filter { case (from, to) =>
      subnodes.contains(from) && subnodes.contains(to)
    }
    new Dag[Node] {
      val nodes = subnodes
      val edges = subedges
    }
  }

  /**
   * One step of the depth-first topological sort (see `sorted`).
   *
   * @param node    the node currently being visited
   * @param soFar   the sorted list built so far; children are prepended first
   * @param visited all nodes visit has been called on so far
   * @param stack   nodes on the current call path (cycle detection)
   * @return the updated (sorted list, visited set)
   */
  private[decl] final def visit(node: Node, soFar: List[Node], visited: Set[Node],
    stack: Set[Node]): (List[Node], Set[Node]) = {
    if (stack.contains(node)) {
      throw new CycleException("Cycled detected at node %s".format(node))
    }
    if (visited.contains(node)) {
      (soFar, visited)
    } else {
      val newStack = stack + node
      // Visit every child first, threading the accumulator through, then
      // prepend this node so it precedes all of its descendants.
      val (newSoFar, newVisited) = outgoing(node).foldLeft((soFar, visited + node)) {
        case ((soFarAcc, visitedAcc), childNode) =>
          visit(childNode, soFarAcc, visitedAcc, newStack)
      }
      (node :: newSoFar, newVisited)
    }
  }
}
| mikegagnon/decl | src/main/scala/com/mikegagnon/decl/Dag.scala | Scala | apache-2.0 | 5,221 |
package frameless
import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.catalyst.expressions.{BoundReference, CreateNamedStruct, Literal}
import org.apache.spark.sql.types.StructType
object TypedExpressionEncoder {

  /** In Spark, a DataFrame always has a schema of StructType.
    *
    * DataFrames of primitive types become records with a single field called "_1".
    */
  def targetStructType[A](encoder: TypedEncoder[A]): StructType =
    encoder.targetDataType match {
      case struct: StructType if encoder.nullable =>
        // a nullable struct forces every field of the schema to be nullable
        StructType(struct.fields.map(_.copy(nullable = true)))
      case struct: StructType =>
        struct
      case primitive =>
        new StructType().add("_1", primitive, nullable = encoder.nullable)
    }

  /** Builds a Spark ExpressionEncoder from the frameless TypedEncoder for T. */
  def apply[T: TypedEncoder]: ExpressionEncoder[T] = {
    val encoder = TypedEncoder[T]
    val in = BoundReference(0, encoder.sourceDataType, encoder.nullable)

    val (out, toRowExpressions) = encoder.extractorFor(in) match {
      case struct: CreateNamedStruct =>
        // record-like types: read the whole struct back from ordinal 0
        (BoundReference(0, encoder.targetDataType, encoder.nullable), struct.flatten)
      case other =>
        // primitive-like types: wrap the single expression in a "_1" field
        (GetColumnByOrdinal(0, encoder.targetDataType),
          CreateNamedStruct(Literal("_1") :: other :: Nil).flatten)
    }

    new ExpressionEncoder[T](
      schema = targetStructType(encoder),
      flat = false,
      serializer = toRowExpressions,
      deserializer = encoder.constructorFor(out),
      clsTag = encoder.classTag
    )
  }
}
| bamine/frameless | dataset/src/main/scala/frameless/TypedExpressionEncoder.scala | Scala | apache-2.0 | 1,570 |
package lowerthirds
import java.io._
import io._
import scala.util.matching.Regex
/** One-off script: reads the "index" file, formats each command entry as LaTeX-like
  * markup, and writes the result to src/lowerthirds/resources/indexout.
  */
object CommandIndex extends App
{
  val out = new PrintWriter( "src/lowerthirds/resources/indexout" )
  // sort (line, originalIndex) pairs: output is ordered by line content
  val commands = Source.fromFile( "index" ).getLines.toList.zipWithIndex.sorted
  // entry syntax: command[/param,param,...]/short-description[rest]
  val line = """([^/]+)(?:/([\\w -]+(?:,[\\w -]+)*))?/(.*?(?:\\.\\ \\ |$))(.*)"""r
  // bare parameter-number references inside a description
  val numb = """(\\d+)"""r
  // commands whose name ends in "x{" get special open-brace rendering
  val brace = """.*[a-z]\\{"""r
  // a line that is only letters is a simple command with no params/description
  val simpleline = "[a-zA-Z]+"r
  for ((l, index) <- commands)
  {
    try
    {
      if (simpleline.pattern.matcher( l ).matches)
      {
        // simple command: emit name and a quoted backslash form
        out.println( """\\hang\\m\\tb{"\\"""" + l + '}' + " -- `\\\\" + l + "'.")
      }
      else
      {
        // destructure the entry via the `line` regex extractor
        val line( c, p, sd, rd ) = l
        val parms =
          if (p eq null)
            IndexedSeq.empty
          else
            p.split( "," ).toIndexedSeq
        // substitute numeric placeholders with the matching parameter name,
        // unless the digit is embedded in a word or quoted text
        val desc =
          numb replaceSomeIn (sd,
            m =>
            {
              val before = m.before.charAt( m.before.length - 1 )
              val after = m.after.charAt( 0 )
              if ((before.isLetter || before == '"') || (after.isLetter || after == '"'))
                None
              else
                Some( Regex.quoteReplacement("\\\\s{" + parms(m.group(1).toInt - 1) + "}") )
            }
          )
        // brace-commands split the trailing "{" out of the name and close differently
        val (com, ecom) =
          if (brace.pattern.matcher( c ).matches)
            (c.substring( 0, c.length - 1 ) + " \\\\{", """ \\tb"}"}""")
          else
            (c, "}")
        out.println( """\\hang\\m\\hb{\\tb{"\\"""" + com + '}' +
          (if (parms.isEmpty) "" else parms.mkString( " <", "> <", ">")) + ecom + " -- " + desc)
      }
      out.println
    }
    catch
    {
      // NOTE(review): sys.exit bypasses out.close below, so buffered output may be
      // lost when an entry fails to parse — TODO confirm this is intended
      case e: MatchError =>
        println( "match error on line " + (index + 1) + ": " + l )
        sys.exit
      case e: Exception =>
        println( "error on line " + (index + 1) + ": " + l )
        sys.exit
    }
  }
  out.close
} | edadma/lteditor | src/main/scala/CommandIndex.scala | Scala | gpl-3.0 | 1,661 |
package org.opencompare.api.scala.io
import org.opencompare.api.scala.{AbstractFeature, PCM}
import org.opencompare.api.scala.metadata.{Orientation, Positions, ProductsAsColumns}
/** Converts a PCM (product comparison matrix) into an ExportMatrix: feature headers
  * first (possibly spanning several rows for feature hierarchies), then one row per
  * product. The matrix is transposed when products are laid out as columns.
  */
class ExportMatrixExporter {

  /** Exports the whole PCM; returns the filled ExportMatrix. */
  def export(pcm: PCM with Positions with Orientation) : ExportMatrix = {
    val matrix = new ExportMatrix
    matrix.name = pcm.name

    // feature headers occupy the top rows; products start right below them
    val productsStartRow = exportFeatures(pcm, matrix)
    exportProducts(pcm, matrix, productsStartRow)

    // Transpose matrix if necessary
    pcm.orientation match {
      case ProductsAsColumns() => matrix.transpose()
      case _ =>
    }

    matrix
  }

  /** Writes the (possibly hierarchical) feature header cells into `matrix`.
    *
    * Walks the feature tree bottom-up: each pass groups adjacent identical features
    * (colspan) and promotes parent-less features to the next level with an increased
    * rowspan. Returns the number of header rows produced, i.e. the row index where
    * product rows begin.
    */
  def exportFeatures(pcm : PCM with Positions, matrix : ExportMatrix) : Int = {

    // (feature, rowspan, colspan) triples for the level currently being processed
    var currentFeatureLevel = List.empty[(AbstractFeature, Int, Int)]
    for (feature <- pcm.sortedFeatures()) {
      currentFeatureLevel = currentFeatureLevel :+ (feature, 1, 1)
    }

    var exportCellRows = List.empty[List[ExportCell]]

    var noParents = false

    while (currentFeatureLevel.nonEmpty && !noParents) {
      var nextFeatureLevel = List.empty[(AbstractFeature, Int, Int)]
      var row = List.empty[(AbstractFeature, Int, Int)]

      // Detect if current level of features has at least one parent
      noParents = currentFeatureLevel.forall(l => l._1.parent.isEmpty)

      // Analyze hierarchy of features
      var i = 0
      while (i < currentFeatureLevel.size) {
        val (feature, rowspan, colspan) = currentFeatureLevel(i)

        // Compute colspan: merge runs of adjacent identical features
        var newColspan = 1
        while ((i + 1) < currentFeatureLevel.size && (feature == currentFeatureLevel(i + 1)._1)) {
          i += 1
          newColspan += 1
        }

        // Compute rowspan and prepare for next iteration
        val parentGroup = feature.parent
        if (parentGroup.isEmpty) {
          // no parent: feature spans one more row; only emit it on the final pass
          val newRowspan = rowspan + 1
          nextFeatureLevel = nextFeatureLevel :+ (feature, newRowspan, newColspan)
          if (noParents) {
            row = row :+ (feature, rowspan, newColspan)
          }
        } else {
          // has a parent: emit the feature now, process the parent at the next level
          row = row :+ (feature, rowspan, newColspan)
          nextFeatureLevel = nextFeatureLevel :+ (parentGroup.get, 1, 1)
        }

        i += 1
      }

      // Create header cells for this level
      val exportCellRow = for ((feature, rowspan, colspan) <- row) yield {
        val exportCell = new ExportCell(feature.name, feature.name, rowspan, colspan)
        exportCell.feature = true
        exportCell.inProductsKeyColumn = false
        exportCell
      }

      exportCellRows = exportCellRows :+ exportCellRow
      currentFeatureLevel = nextFeatureLevel
    }

    // Add rows to table (reversed: levels were collected bottom-up)
    for ((exportCellRow, row) <- exportCellRows.reverse.zipWithIndex) {
      var column = 0
      while (matrix.isPositionOccupied(row, column)) {
        // skip columns already covered by a spanning cell from a previous row
        column += 1
      }
      for (exportCell <- exportCellRow) {
        matrix.setCell(exportCell, row, column)
        column += exportCell.colspan
      }
    }

    exportCellRows.size
  }

  /** Writes one row per product below the header rows, one cell per feature. */
  def exportProducts(pcm : PCM with Positions, matrix : ExportMatrix, productsStartRow : Int): Unit = {
    var row = productsStartRow
    for (product <- pcm.sortedProducts()) {
      var column = 0
      for (feature <- pcm.sortedFeatures()) {
        val cellOpt = product.findCell(feature)
        // missing cells simply leave a hole in the matrix
        cellOpt.foreach { cell =>
          val exportCell = new ExportCell(cell.content, cell.rawContent)
          exportCell.feature = false
          exportCell.inProductsKeyColumn = pcm.productsKey == feature
          matrix.setCell(exportCell, row, column)
        }
        column += 1
      }
      row += 1
    }
  }
}
| OpenCompare/OpenCompare | org.opencompare/api-scala/src/main/scala/org/opencompare/api/scala/io/ExportMatrixExporter.scala | Scala | apache-2.0 | 3,592 |
package sk.drunkenpanda.bot.plugins
import sk.drunkenpanda.bot.Message
import sk.drunkenpanda.bot.Ping
import sk.drunkenpanda.bot.Pong
import sk.drunkenpanda.bot.PrivateMessage
import sk.drunkenpanda.bot.Response
/** A bot plugin: maps an incoming IRC message to an optional reply. */
trait Plugin {
  /** Returns the reply for `message`, or None when this plugin does not handle it. */
  def respond(message: Message): Option[Message]
  /** Hook invoked when the bot shuts down; release any plugin resources here. */
  def onShutdown(): Unit
}
/** Replies to private "panda echo ..." messages by repeating the last word. */
class EchoPlugin extends Plugin {

  // matches "panda echo <text>" with an optional trailing run of punctuation
  private lazy val format = "panda echo (.+?)([\\\\.\\\\!\\\\?]+)?".r
  // how many times the last word is repeated in the reply
  private lazy val echoCount = 3

  /** Handles private messages matching the echo pattern; ignores everything else. */
  override def respond(message: Message): Option[Message] = message match {
    case PrivateMessage(sender, text) =>
      parseText(text).map { case (body, punctuation) =>
        prepareResponse(sender, body, punctuation)
      }
    case _ => None
  }

  /** Builds the reply: the original message followed by its last word echoed echoCount times. */
  def prepareResponse(to: String, message: String, suffix: String): Message = {
    val lastWord = message.split(" ").last
    val echo = List.fill(echoCount)(lastWord).mkString(" ")
    Response(to, s"$message $echo$suffix")
  }

  /** Splits raw text into the echo body and its (possibly empty) punctuation suffix. */
  def parseText(text: String): Option[(String, String)] = text match {
    case format(toEcho, null)   => Some((toEcho, ""))
    case format(toEcho, suffix) => Some((toEcho, suffix))
    case _                      => None
  }

  override def onShutdown(): Unit = ()
}
/** Answers IRC PING messages with the matching PONG. */
class PongPlugin extends Plugin {

  /** Returns a Pong carrying the same hash for a Ping; ignores other messages. */
  override def respond(message: Message): Option[Message] = message match {
    case Ping(hash) => Some(new Pong(hash))
    case _          => None
  }

  override def onShutdown(): Unit = ()
}
| DrunkenPandaFans/panda-bot | core/src/main/scala/sk/drunkenpanda/bot/plugins/Plugin.scala | Scala | mit | 1,322 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.classes.HeroicCharacterClass
import io.truthencode.ddo.support.requisite.{ClassRequisiteImpl, FeatRequisiteImpl, RequiresAllOfClass}
/**
 * Heavy Armor Champion (passive): while in heavy armor, grants +12 PRR and MRR.
 *
 * Requires Fighter level 14. Note: the heavy armor training feats all stack with each
 * other.
 */
trait HeavyArmorChampion
  extends FeatRequisiteImpl with ClassRequisiteImpl with Passive with RequiresAllOfClass
  with FighterBonusFeat {
  self: GeneralFeat =>

  /** Single class requirement: at least 14 levels of Fighter. */
  override def allOfClass: Seq[(HeroicCharacterClass, Int)] =
    Seq(HeroicCharacterClass.Fighter -> 14)
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/HeavyArmorChampion.scala | Scala | apache-2.0 | 1,337 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import java.net.URI
import java.util.Locale
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.QueryPlanningTracker
import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, InMemoryCatalog, SessionCatalog}
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.internal.SQLConf
/** Base trait for Catalyst analyzer tests: builds a fresh analyzer over an in-memory
  * catalog pre-populated with the TestRelations temp views, and provides helpers to
  * assert successful analysis, expected analysis errors, and parse errors.
  */
trait AnalysisTest extends PlanTest {

  // extra resolution rules a concrete test suite may inject into the analyzer
  protected def extendedAnalysisRules: Seq[Rule[LogicalPlan]] = Nil

  /** Creates a new analyzer with a default database and the standard test temp views. */
  protected def getAnalyzer: Analyzer = {
    val catalog = new SessionCatalog(new InMemoryCatalog, FunctionRegistry.builtin)
    catalog.createDatabase(
      CatalogDatabase("default", "", new URI("loc"), Map.empty),
      ignoreIfExists = false)
    catalog.createTempView("TaBlE", TestRelations.testRelation, overrideIfExists = true)
    catalog.createTempView("TaBlE2", TestRelations.testRelation2, overrideIfExists = true)
    catalog.createTempView("TaBlE3", TestRelations.testRelation3, overrideIfExists = true)
    catalog.createGlobalTempView("TaBlE4", TestRelations.testRelation4, overrideIfExists = true)
    catalog.createGlobalTempView("TaBlE5", TestRelations.testRelation5, overrideIfExists = true)
    new Analyzer(catalog) {
      // strip subquery aliases first so expected plans need not include them
      override val extendedResolutionRules = EliminateSubqueryAliases +: extendedAnalysisRules
    }
  }

  /** Analyzes `inputPlan` and compares the result against `expectedPlan`. */
  protected def checkAnalysis(
      inputPlan: LogicalPlan,
      expectedPlan: LogicalPlan,
      caseSensitive: Boolean = true): Unit = {
    withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
      val analyzer = getAnalyzer
      val actualPlan = analyzer.executeAndCheck(inputPlan, new QueryPlanningTracker)
      comparePlans(actualPlan, expectedPlan)
    }
  }

  protected override def comparePlans(
      plan1: LogicalPlan,
      plan2: LogicalPlan,
      checkAnalysis: Boolean = false): Unit = {
    // Analysis tests may have not been fully resolved, so skip checkAnalysis.
    super.comparePlans(plan1, plan2, checkAnalysis)
  }

  /** Asserts that `inputPlan` analyzes without error, failing with the partial plan otherwise. */
  protected def assertAnalysisSuccess(
      inputPlan: LogicalPlan,
      caseSensitive: Boolean = true): Unit = {
    withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
      val analyzer = getAnalyzer
      val analysisAttempt = analyzer.execute(inputPlan)
      try analyzer.checkAnalysis(analysisAttempt) catch {
        case a: AnalysisException =>
          // include the partially analyzed plan to ease debugging
          fail(
            s"""
               |Failed to Analyze Plan
               |$inputPlan
               |
               |Partial Analysis
               |$analysisAttempt
             """.stripMargin, a)
      }
    }
  }

  /** Asserts that analyzing `inputPlan` fails and the error message (case-insensitively)
    * contains every string in `expectedErrors`.
    */
  protected def assertAnalysisError(
      inputPlan: LogicalPlan,
      expectedErrors: Seq[String],
      caseSensitive: Boolean = true): Unit = {
    withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
      val analyzer = getAnalyzer
      val e = intercept[AnalysisException] {
        analyzer.checkAnalysis(analyzer.execute(inputPlan))
      }

      if (!expectedErrors.map(_.toLowerCase(Locale.ROOT)).forall(
          e.getMessage.toLowerCase(Locale.ROOT).contains)) {
        fail(
          s"""Exception message should contain the following substrings:
             |
             |  ${expectedErrors.mkString("\\n  ")}
             |
             |Actual exception message:
             |
             |  ${e.getMessage}
           """.stripMargin)
      }
    }
  }

  /** Asserts that `parser` rejects `sqlCommand` with a ParseException whose message
    * contains each of `messages`.
    */
  protected def interceptParseException(
      parser: String => Any)(sqlCommand: String, messages: String*): Unit = {
    val e = intercept[ParseException](parser(sqlCommand))
    messages.foreach { message =>
      assert(e.message.contains(message))
    }
  }
}
| witgo/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala | Scala | apache-2.0 | 4,633 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.recorder.http
import scala.util.{ Failure, Success, Try }
import io.gatling.recorder.util.HttpUtils
import io.netty.channel.{ Channel, ChannelFuture, ChannelFutureListener }
import io.netty.handler.codec.http._
import org.asynchttpclient.uri.Uri
/** Small Netty convenience wrappers used by the recorder's HTTP proxy. */
object Netty {

  implicit class PimpedChannelFuture(val cf: ChannelFuture) extends AnyVal {

    /** Registers `f`, invoked with Success(channel) or Failure(cause) once `cf` completes. */
    def addScalaListener(f: Try[Channel] => Unit): ChannelFuture =
      cf.addListener((future: ChannelFuture) => {
        if (future.isSuccess) {
          f(Success(future.channel))
        } else {
          f(Failure(future.cause))
        }
      })
  }

  implicit class PimpedChannel(val channel: Channel) extends AnyVal {

    /** Writes an empty 500 Internal Server Error response, then closes the channel. */
    def reply500AndClose(): Unit = {
      val error = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.INTERNAL_SERVER_ERROR)
      channel.writeAndFlush(error).addListener(ChannelFutureListener.CLOSE)
    }
  }

  implicit class PimpedFullHttpRequest(val request: FullHttpRequest) extends AnyVal {

    /** Copies the request, replacing its absolute URI with the equivalent relative URL. */
    def makeRelative: FullHttpRequest = {
      val relativeRequest = new DefaultFullHttpRequest(
        request.protocolVersion,
        request.method,
        Uri.create(request.uri).toRelativeUrl,
        request.content.retain()
      )
      relativeRequest.headers.add(request.headers)
      relativeRequest
    }

    /** Rewrites Accept-Encoding (when present) to keep only supported encodings. */
    def filterSupportedEncodings: FullHttpRequest = {
      Option(request.headers.get(HttpHeaderNames.ACCEPT_ENCODING)).foreach { acceptEncodingValue =>
        request.headers.set(HttpHeaderNames.ACCEPT_ENCODING, HttpUtils.filterSupportedEncodings(acceptEncodingValue))
      }
      request
    }
  }
}
| wiacekm/gatling | gatling-recorder/src/main/scala/io/gatling/recorder/http/Netty.scala | Scala | apache-2.0 | 2,305 |
/*
* Copyright (c) 2014-2015 by its authors. Some rights reserved.
* See the project homepage at: http://www.monifu.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monifu.reactive.internals.operators
import monifu.concurrent.Scheduler
import monifu.concurrent.extensions._
import monifu.reactive.Ack.Continue
import monifu.reactive.exceptions.DummyException
import monifu.reactive.{Observer, Observable}
import monifu.reactive.subjects.PublishSubject
import scala.concurrent.Future
import scala.concurrent.duration._
import monifu.reactive.Observable.{unit, empty}
/** Tests flatMap-into-single-element observables (concat semantics): elements are
  * emitted in order, streams are subscribed one after the other, and errors interrupt
  * the stream. Timing is driven by the test scheduler's tick().
  */
object ConcatOneSuite extends BaseOperatorSuite {
  def createObservable(sourceCount: Int) = Some {
    val o = Observable.range(0, sourceCount)
      .flatMap(i => Observable.unit(i))

    Sample(o, count(sourceCount), sum(sourceCount), waitFirst, waitNext)
  }

  def count(sourceCount: Int) =
    sourceCount

  def waitFirst = Duration.Zero
  def waitNext = Duration.Zero

  def observableInError(sourceCount: Int, ex: Throwable) = Some {
    val o = createObservableEndingInError(Observable.range(0, sourceCount), ex)
      .flatMap(i => Observable.unit(i))

    Sample(o, count(sourceCount), sum(sourceCount), waitFirst, waitNext)
  }

  // sum of 0 until sourceCount
  def sum(sourceCount: Int) = {
    sourceCount * (sourceCount - 1) / 2
  }

  def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) = Some {
    // throws from user code on the last element only
    val o = Observable.range(0, sourceCount).flatMap { i =>
      if (i == sourceCount-1)
        throw ex
      else
        Observable.unit(i)
    }

    Sample(o, count(sourceCount-1), sum(sourceCount-1), waitFirst, waitNext)
  }

  // collects all emitted elements into a Vector once the observable completes
  def toList[T](o: Observable[T])(implicit s: Scheduler) = {
    o.foldLeft(Vector.empty[T])(_ :+ _).asFuture
      .map(_.getOrElse(Vector.empty))
  }

  test("filter can be expressed in terms of flatMap") { implicit s =>
    val obs1 = Observable.range(0, 100).filter(_ % 2 == 0)
    val obs2 = Observable.range(0, 100).flatMap(x => if (x % 2 == 0) unit(x) else empty)

    val lst1 = toList(obs1)
    val lst2 = toList(obs2)
    s.tick()

    assert(lst1.isCompleted && lst2.isCompleted)
    assertEquals(lst1.value.get, lst2.value.get)
  }

  test("map can be expressed in terms of flatMap") { implicit s =>
    val obs1 = Observable.range(0, 100).map(_ + 10)
    val obs2 = Observable.range(0, 100).flatMap(x => unit(x + 10))

    val lst1 = toList(obs1)
    val lst2 = toList(obs2)
    s.tick()

    assert(lst1.isCompleted && lst2.isCompleted)
    assertEquals(lst1.value.get, lst2.value.get)
  }

  test("should wait the completion of the current, before subscribing to the next") { implicit s =>
    var obs2WasStarted = false
    var received = 0L
    var wasCompleted = false

    val obs1 = PublishSubject[Long]()
    val obs2 = Observable.range(1, 100).map { x => obs2WasStarted = true; x }

    Observable.from(obs1, obs2).flatten.onSubscribe(new Observer[Long] {
      def onNext(elem: Long) = {
        received += elem
        if (elem == 1000)
          // delay the ack so obs1's completion is deferred by one second
          Future.delayedResult(1.second)(Continue)
        else
          Continue
      }

      def onError(ex: Throwable) = ()
      def onComplete() = wasCompleted = true
    })

    s.tickOne()
    assertEquals(received, 0)
    obs1.onNext(10)
    assertEquals(received, 10)
    val f = obs1.onNext(1000)
    assertEquals(received, 1010)

    f.onComplete(_ => obs1.onComplete())
    s.tick()
    // obs2 must not start while obs1's last ack is still pending
    assert(!obs2WasStarted)

    s.tick(1.second)
    assert(obs2WasStarted)
    assertEquals(received, 1010 + 99 * 50)
    assert(wasCompleted)
  }

  test("should interrupt the streaming on error") { implicit s =>
    var obs1WasStarted = false
    var obs2WasStarted = false
    var wasThrown: Throwable = null

    val sub = PublishSubject[Long]()
    val obs1 = sub.doOnStart(_ => obs1WasStarted = true)
    val obs2 = Observable.range(1, 100).map { x => obs2WasStarted = true; x }

    Observable.from(obs1, obs2).flatten.onSubscribe(new Observer[Long] {
      def onNext(elem: Long) = Continue
      def onError(ex: Throwable) = wasThrown = ex
      def onComplete() = ()
    })

    s.tick()
    sub.onNext(1)
    assert(obs1WasStarted)

    sub.onError(DummyException("dummy"))
    s.tick()
    assertEquals(wasThrown, DummyException("dummy"))
    // an error in obs1 must prevent obs2 from ever being subscribed
    assert(!obs2WasStarted)
  }
}
| sergius/monifu | monifu/shared/src/test/scala/monifu/reactive/internals/operators/ConcatOneSuite.scala | Scala | apache-2.0 | 4,770 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.jsinterop
import scala.language.implicitConversions
import scala.scalajs.runtime.RuntimeLong
import org.junit.Assert._
import org.junit.Assume._
import org.junit.Test
import org.scalajs.testsuite.utils.AssertThrows._
import org.scalajs.testsuite.utils.Platform._
import scala.util.Try
/**
* test the runtime Long implementation directly
* does not depend on magic compiler Long rewriting
*/
class RuntimeLongTest {
  // Short builders
  // Builds a RuntimeLong from explicit low and high 32-bit words.
  def lg(lo: Int, hi: Int): RuntimeLong = new RuntimeLong(lo, hi)
  // Builds a RuntimeLong from a single Int.
  def lg(i: Int): RuntimeLong = new RuntimeLong(i)

  // Common values
  val MaxVal = lg(0xffffffff, 0x7fffffff)   // Long.MaxValue
  val MinVal = lg(0, 0x80000000)            // Long.MinValue
  val IntMaxVal = lg(Int.MaxValue)
  val IntMinVal = lg(Int.MinValue)
  val IntMaxValPlus1 = lg(0x80000000, 0)    // Int.MaxValue + 1
  val IntMinValMinus1 = lg(2147483647, -1)  // Int.MinValue - 1
  val MaxSafeDouble = lg(-1, 2097151)       // 2^53 - 1
  val TwoPow53 = lg(0, 2097152)             // 2^53
  val MinSafeDouble = lg(1, -2097152)       // -(2^53 - 1)
  val NegTwoPow53 = lg(0, -2097152)         // -2^53

  // scala.scalajs.runtime.RuntimeLong
  // Sanity checks: lg(lo, hi) round-trips to the expected Long value and the
  // equality used by the rest of this suite distinguishes lo/hi words correctly.
  @Test def sanity_of_equality_tests(): Unit = {
    assertEquals(123L + (456L << 32), lg(123, 456).toLong)
    assertEquals(lg(123, 456), lg(123, 456))
    assertEquals(lg(456, 123), lg(456, 123))

    // differing in either word (or comparing against only the low word) must not be equal
    assertNotEquals(123L + (4L << 32), lg(123, 456).toLong)
    assertNotEquals(lg(123, 4), lg(123, 456))
    assertNotEquals(1L + (456L << 32), lg(123, 456).toLong)
    assertNotEquals(lg(1, 456), lg(123, 456))
    assertNotEquals(123L, lg(123, 456).toLong)
  }
  // equals(Any) must be false for non-RuntimeLong arguments (including null) and
  // must compare both the low and the high word for RuntimeLong arguments.
  @Test def equals_Any(): Unit = {
    assertFalse(lg(0, 0).equals(0: Any))
    assertFalse(lg(0, 0).equals(null: Any))

    assertTrue(lg(0, 0).equals(lg(0, 0): Any))
    assertTrue(lg(123, 456).equals(lg(123, 456): Any))
    assertTrue(lg(-123, 456).equals(lg(-123, 456): Any))
    assertTrue(lg(-123, -456).equals(lg(-123, -456): Any))

    assertFalse(lg(123, 456).equals(lg(-123, 456): Any))
    assertFalse(lg(123, 456).equals(lg(123, -456): Any))
    assertFalse(lg(-123, -456).equals(lg(123, -456): Any))
    assertFalse(lg(-123, -456).equals(lg(-123, 456): Any))
  }
  // hashCode must follow java.lang.Long.hashCode, i.e. (lo ^ hi) on the two 32-bit
  // words (e.g. lg(-12) = lo -12, hi -1, and -12 ^ -1 == 11).
  // NOTE(review): the method name mentions "toFloat_strict", which looks like a
  // copy-paste from another test — confirm and consider renaming.
  @Test def hashCode_as_specified_in_j_l_toFloat_strict(): Unit = {
    assertEquals(0, lg(0).hashCode())
    assertEquals(0, lg(-1).hashCode())
    assertEquals(55, lg(55).hashCode())
    assertEquals(11, lg(-12).hashCode())
    assertEquals(10006548, lg(10006548).hashCode())
    assertEquals(1098747, lg(-1098748).hashCode())

    assertEquals(957662195, lg(579906195, 461662560).hashCode())
    assertEquals(-1075860794, lg(-1403218312, 327367870).hashCode())
    assertEquals(1425294575, lg(-1152051636, -274640221).hashCode())
    assertEquals(-1863811248, lg(1026519507, -1379463549).hashCode())
    assertEquals(-881942797, lg(363765329, -557842270).hashCode())
    assertEquals(548587254, lg(21652572, 569942698).hashCode())
    assertEquals(-1328999812, lg(55820229, -1281708615).hashCode())
    assertEquals(-1756412154, lg(-1843678104, 89453422).hashCode())
    assertEquals(-529144798, lg(-1928579430, 1836700344).hashCode())
    assertEquals(-1163319584, lg(-181377900, 1335444084).hashCode())
    assertEquals(2070477069, lg(1189983760, 1032146717).hashCode())
    assertEquals(-1718642695, lg(-1982789145, 274636318).hashCode())
    assertEquals(260982265, lg(-2087901827, -1945935740).hashCode())
    assertEquals(-385578983, lg(-1911332808, 1729620001).hashCode())
    assertEquals(-1362397169, lg(-1920965295, 592125278).hashCode())
    assertEquals(1419211160, lg(2017870028, 751907156).hashCode())
    assertEquals(-1851816270, lg(1506336851, -933796127).hashCode())
    assertEquals(112959880, lg(-1747722429, -1855422773).hashCode())
    assertEquals(1715333902, lg(-2139132623, -431847873).hashCode())
    assertEquals(-453690224, lg(739274932, -924496860).hashCode())
    assertEquals(-1503679197, lg(-1482800071, 29485338).hashCode())
    assertEquals(1950154296, lg(237609240, 2048220960).hashCode())
    assertEquals(2037562473, lg(-431092385, -1623412426).hashCode())
    assertEquals(220707473, lg(2144172772, 1927987317).hashCode())
    assertEquals(1902658020, lg(971459211, 1217334127).hashCode())
    assertEquals(840583449, lg(-530209544, -763367967).hashCode())
    assertEquals(2065572837, lg(-1322671605, -902331922).hashCode())
    assertEquals(407536450, lg(1361976000, 1231329666).hashCode())
    assertEquals(-1678479110, lg(-96547475, 1640676759).hashCode())
    assertEquals(-1558558486, lg(1799144078, -936998300).hashCode())
    assertEquals(-110470482, lg(221720683, -195204411).hashCode())
    assertEquals(992932874, lg(2080474705, 1194291803).hashCode())
    assertEquals(2035378556, lg(-1962255291, -228903623).hashCode())
    assertEquals(542449527, lg(-1961045404, -1421226733).hashCode())
    assertEquals(-1824846728, lg(1762001719, -96661681).hashCode())
    assertEquals(-985103709, lg(568630982, -458482587).hashCode())
    assertEquals(37361715, lg(-1237704639, -1275053966).hashCode())
    assertEquals(-1555729529, lg(936273516, -1802824213).hashCode())
    assertEquals(1534845437, lg(-870754516, -1755138351).hashCode())
    assertEquals(-715250396, lg(964079858, -332884522).hashCode())
    assertEquals(2003953821, lg(1769001167, 503396434).hashCode())
    assertEquals(1631287431, lg(811930233, 1365142270).hashCode())
    assertEquals(-1393125048, lg(-280291442, 1136496326).hashCode())
    assertEquals(926193137, lg(439731659, 755060794).hashCode())
    assertEquals(1141998463, lg(-561661919, -1701561506).hashCode())
    assertEquals(480895538, lg(1556104387, 1080665841).hashCode())
    assertEquals(-849143869, lg(1931061917, -1099252386).hashCode())
    assertEquals(-1840233445, lg(2086961898, -298531087).hashCode())
    assertEquals(47538111, lg(-1148008529, -1186490352).hashCode())
    assertEquals(540301593, lg(807317094, 271251327).hashCode())
    assertEquals(1903332829, lg(1077071399, 826295290).hashCode())
    assertEquals(-1325859168, lg(781949710, -1637653074).hashCode())
    assertEquals(-1476869146, lg(1778433204, -839352494).hashCode())
    assertEquals(84316181, lg(-2038023199, -2088719372).hashCode())
    assertEquals(524038724, lg(-1764916235, -1980649039).hashCode())
    assertEquals(-794988445, lg(-1796682086, 1148567289).hashCode())
    assertEquals(-1285356617, lg(-1606200144, 320886535).hashCode())
    assertEquals(1441713710, lg(755146140, 2028753842).hashCode())
    assertEquals(365800340, lg(-1851453861, -2073516593).hashCode())
    assertEquals(2130603708, lg(-543327214, -1587342674).hashCode())
    assertEquals(-1414171289, lg(506958308, -1249713021).hashCode())
    assertEquals(-262714124, lg(-2097389477, 1923820719).hashCode())
    assertEquals(158195454, lg(-374932306, -523558320).hashCode())
    assertEquals(50128093, lg(-902905695, -925752196).hashCode())
    assertEquals(-825145129, lg(-397013030, 646399757).hashCode())
    assertEquals(-1344834498, lg(1764398539, -956440075).hashCode())
    assertEquals(-103814738, lg(-1750710329, 1852419689).hashCode())
    assertEquals(-1354282241, lg(-1664538473, 864969320).hashCode())
    assertEquals(1408148925, lg(-500471847, -1312439708).hashCode())
    assertEquals(1910019874, lg(14748928, 1899600418).hashCode())
    assertEquals(1877620608, lg(-1985642880, -431011584).hashCode())
    assertEquals(-378358620, lg(494530531, -200582329).hashCode())
    assertEquals(492633155, lg(-2067225228, -1718331081).hashCode())
    assertEquals(-1581166836, lg(-1799546135, 897340901).hashCode())
    assertEquals(174532880, lg(25821759, 200092463).hashCode())
    assertEquals(-629188646, lg(403690141, -1032813241).hashCode())
    assertEquals(2139225425, lg(-1843541251, -308529236).hashCode())
    assertEquals(200043623, lg(1643311840, 1780391559).hashCode())
    assertEquals(1992690082, lg(1531597671, 764172997).hashCode())
    assertEquals(754072038, lg(638938496, 182932582).hashCode())
    assertEquals(-139359279, lg(309356043, -440275494).hashCode())
    assertEquals(-1669264515, lg(-541225182, 1128039519).hashCode())
    assertEquals(25583899, lg(-387355169, -378598204).hashCode())
    assertEquals(1822592670, lg(1787244135, 103129337).hashCode())
    assertEquals(1468680630, lg(-1654639624, -890602930).hashCode())
    assertEquals(2103231504, lg(-1867306675, -303043235).hashCode())
    assertEquals(1159389820, lg(1255224728, 265017316).hashCode())
    assertEquals(776506096, lg(119985367, 695098919).hashCode())
    assertEquals(-1303579924, lg(-332671386, 1583817866).hashCode())
    assertEquals(1108767081, lg(1610629865, 571880320).hashCode())
    assertEquals(-1101969936, lg(727577343, -1794328817).hashCode())
    assertEquals(-1022615009, lg(730759795, -394092436).hashCode())
    assertEquals(-1221218252, lg(-148400203, 1074931585).hashCode())
    assertEquals(410005178, lg(181091802, 314250080).hashCode())
    assertEquals(1180107886, lg(-1934827635, -889463837).hashCode())
    assertEquals(425308062, lg(-1067099255, -650316777).hashCode())
    assertEquals(1727927187, lg(1821917070, 174468125).hashCode())
    assertEquals(-759140792, lg(474121453, -830281051).hashCode())
    assertEquals(1698140938, lg(-402668999, -2100801229).hashCode())
    assertEquals(512144461, lg(-615008378, -976157749).hashCode())
  }
// Tests conversion to a decimal String. `lg(lo)` sign-extends a single word;
// `lg(lo, hi)` builds the Long from its two 32-bit halves. Expected strings
// are the JVM's `java.lang.Long.toString` renderings of the same values.
// NOTE: the extra empty parameter list keeps this test method from clashing
// with `java.lang.Object.toString` (a `toString(): Unit` would not compile
// as an override).
@Test def toString()(): Unit = {
// Single-word values and the classic boundaries around Int.MinValue/MaxValue.
assertEquals("0", lg(0).toString())
assertEquals("1", lg(1).toString())
assertEquals("-1", lg(-1).toString())
assertEquals(Int.MaxValue.toString(), IntMaxVal.toString())
assertEquals("2147483648", IntMaxValPlus1.toString())
assertEquals(Int.MinValue.toString(), IntMinVal.toString())
assertEquals("-2147483649", IntMinValMinus1.toString())
assertEquals("999999999", lg(999999999).toString())
assertEquals("1000000000", lg(1000000000).toString())
// Boundaries of the range exactly representable in a Double (2^53).
assertEquals("9007199254740991", MaxSafeDouble.toString())
assertEquals("9007199254740992", TwoPow53.toString())
assertEquals("-9007199254740991", MinSafeDouble.toString())
assertEquals("-9007199254740992", NegTwoPow53.toString())
// Randomly generated two-word values, positive and negative.
assertEquals("-86922", lg(-86922, -1).toString())
assertEquals("0", lg(0, 0).toString())
assertEquals("-21874015", lg(-21874015, -1).toString())
assertEquals("-2098921896914", lg(1317110830, -489).toString())
assertEquals("80985205273168", lg(-698060208, 18855).toString())
assertEquals("-12451732102972849", lg(858389071, -2899145).toString())
assertEquals("3350", lg(3350, 0).toString())
assertEquals("-92511590195450", lg(2005360390, -21540).toString())
assertEquals("-2", lg(-2, -1).toString())
assertEquals("446248293253325286", lg(1492984294, 103900277).toString())
assertEquals("499596119314678396", lg(116015740, 116321286).toString())
assertEquals("-3205893", lg(-3205893, -1).toString())
assertEquals("-88762100292970", lg(1988813462, -20667).toString())
assertEquals("-1278004", lg(-1278004, -1).toString())
assertEquals("-1", lg(-1, -1).toString())
assertEquals("-305393", lg(-305393, -1).toString())
assertEquals("-2", lg(-2, -1).toString())
assertEquals("80295210784300943", lg(-1678336113, 18695185).toString())
assertEquals("5", lg(5, 0).toString())
assertEquals("21", lg(21, 0).toString())
assertEquals("64", lg(64, 0).toString())
assertEquals("39146094", lg(39146094, 0).toString())
assertEquals("-1725731", lg(-1725731, -1).toString())
assertEquals("-768047304243556260", lg(-874655652, -178824949).toString())
assertEquals("-2726923242838", lg(380990122, -635).toString())
assertEquals("-1781092907033", lg(1318520807, -415).toString())
assertEquals("-213275", lg(-213275, -1).toString())
assertEquals("7662405832810", lg(184176746, 1784).toString())
assertEquals("-154157877107", lg(460945549, -36).toString())
assertEquals("-929963900939521435", lg(1586508389, -216524094).toString())
assertEquals("-6872", lg(-6872, -1).toString())
assertEquals("31842553544728", lg(-333987816, 7413).toString())
assertEquals("567569520305426", lg(-1817926382, 132147).toString())
assertEquals("19649016", lg(19649016, 0).toString())
assertEquals("-1349346", lg(-1349346, -1).toString())
assertEquals("9479824673588660", lg(-1372338764, 2207193).toString())
assertEquals("3521781", lg(3521781, 0).toString())
assertEquals("1740", lg(1740, 0).toString())
assertEquals("0", lg(0, 0).toString())
assertEquals("92834698468", lg(-1654582044, 21).toString())
assertEquals("-80139798970631138", lg(100400158, -18659001).toString())
assertEquals("30058", lg(30058, 0).toString())
assertEquals("-611022189550002", lg(1332815438, -142265).toString())
assertEquals("514941281681226", lg(472694602, 119894).toString())
assertEquals("2454759250363", lg(-1962042949, 571).toString())
assertEquals("14860137468144958", lg(1595551038, 3459895).toString())
assertEquals("-79255", lg(-79255, -1).toString())
assertEquals("2290122305310796", lg(-1501556660, 533210).toString())
assertEquals("-755641947927852310", lg(-463451414, -175936602).toString())
assertEquals("-2621852156570472370", lg(-771329970, -610447526).toString())
assertEquals("-37956135735", lg(698569929, -9).toString())
assertEquals("853219", lg(853219, 0).toString())
assertEquals("901", lg(901, 0).toString())
assertEquals("4385596303898", lg(434694682, 1021).toString())
assertEquals("-972597865", lg(-972597865, -1).toString())
assertEquals("-8057379", lg(-8057379, -1).toString())
assertEquals("-14968", lg(-14968, -1).toString())
assertEquals("-98204964", lg(-98204964, -1).toString())
assertEquals("335479", lg(335479, 0).toString())
assertEquals("-429441918886", lg(54810714, -100).toString())
assertEquals("9798741", lg(9798741, 0).toString())
assertEquals("135908509698671494", lg(-896875642, 31643665).toString())
assertEquals("-141095409221912371", lg(233027789, -32851335).toString())
assertEquals("-9040837797787104", lg(-359183840, -2104985).toString())
assertEquals("-889", lg(-889, -1).toString())
// A negative lo word with hi == 0 is a positive value > Int.MaxValue.
assertEquals("3222082994", lg(-1072884302, 0).toString())
assertEquals("-1454853", lg(-1454853, -1).toString())
assertEquals("547641844425", lg(-2113969463, 127).toString())
assertEquals("2528132853", lg(-1766834443, 0).toString())
assertEquals("242", lg(242, 0).toString())
assertEquals("-1655763891", lg(-1655763891, -1).toString())
assertEquals("82", lg(82, 0).toString())
assertEquals("-120254181", lg(-120254181, -1).toString())
assertEquals("-210088", lg(-210088, -1).toString())
assertEquals("-2", lg(-2, -1).toString())
assertEquals("250255458324299", lg(598888267, 58267).toString())
assertEquals("-100656997", lg(-100656997, -1).toString())
assertEquals("-24097181761", lg(1672622015, -6).toString())
assertEquals("206088", lg(206088, 0).toString())
assertEquals("-593", lg(-593, -1).toString())
assertEquals("-99542049", lg(-99542049, -1).toString())
assertEquals("421501", lg(421501, 0).toString())
assertEquals("-2", lg(-2, -1).toString())
assertEquals("-101", lg(-101, -1).toString())
assertEquals("3", lg(3, 0).toString())
assertEquals("14967492854", lg(2082590966, 3).toString())
assertEquals("-1528445803513883", lg(-86853659, -355870).toString())
assertEquals("26760588095306", lg(-1353126070, 6230).toString())
assertEquals("12452686330472", lg(1576139368, 2899).toString())
assertEquals("-130630407827875", lg(1022479965, -30415).toString())
assertEquals("-10281777615", lg(-1691843023, -3).toString())
assertEquals("-90497242609445", lg(2013284571, -21071).toString())
assertEquals("-13935178716929", lg(1990158591, -3245).toString())
assertEquals("-11308540", lg(-11308540, -1).toString())
assertEquals("545166", lg(545166, 0).toString())
assertEquals("-1043705339124703", lg(1778574369, -243007).toString())
assertEquals("510", lg(510, 0).toString())
assertEquals("-2485453027", lg(1809514269, -1).toString())
assertEquals("-15103", lg(-15103, -1).toString())
assertEquals("-168776672025670194", lg(-779514418, -39296382).toString())
}
/** Tests narrowing conversion to Byte: the result is the low 8 bits of the
 *  lo word; the hi word must never influence it.
 */
@Test def toByte(): Unit = {
  assertEquals(0, lg(0).toByte)
  assertEquals(-1, lg(-1).toByte)
  assertEquals(0x98.toByte, lg(0xfedcba98, 0x76543210).toByte)
  // (expected, lo, hi) triples for randomly generated two-word values.
  val cases = Seq(
    (102, -1755353242, -1245269156),
    (77, -359135667, 1391746928),
    (-47, -957203503, 1516742479),
    (-22, -1928741654, 1162703256),
    (-113, -1698228849, 1497186951),
    (-84, -68041812, -2115448390),
    (33, 1534301729, 1468418695),
    (113, 1101829489, -514588123),
    (12, -1437577204, 1896338488),
    (86, -857671082, -1304076936),
    (-36, -292818212, -1485650549),
    (88, 1044510040, 147719255),
    (107, -1166136469, 78076997),
    (61, 500131901, 248541787),
    (99, 1863435363, -1465266670),
    (-76, 136483252, 1662447178),
    (0, 1787939584, 1303926235),
    (-69, 2105657787, 845433223),
    (26, -1298285542, -1826340261),
    (64, -766959552, -326327606)
  )
  for ((expected, lo, hi) <- cases)
    assertEquals(expected, lg(lo, hi).toByte)
}
/** Tests narrowing conversion to Short: the result is the low 16 bits of the
 *  lo word; the hi word must never influence it.
 */
@Test def toShort(): Unit = {
  assertEquals(0, lg(0).toShort)
  assertEquals(-1, lg(-1).toShort)
  assertEquals(0xba98.toShort, lg(0xfedcba98, 0x76543210).toShort)
  // (expected, lo, hi) triples for randomly generated two-word values.
  val cases = Seq(
    (-670, 1925512546, -812328457),
    (-15861, 2028716555, -1639243756),
    (9963, -1970657557, -1904990267),
    (18394, -1012119590, -1704668195),
    (-7956, 848486636, -810351120),
    (21453, 2103989197, 955793808),
    (22979, -237938237, -703399620),
    (8452, 666247428, -1109641927),
    (-26563, 1824561213, -872828437),
    (-5754, -10950266, -1779965318),
    (11796, 1251814932, -491043391),
    (18020, -117750172, -366379322),
    (3768, -2095575368, 965048164),
    (-4579, -177410531, 1454361289),
    (-29102, -359035310, -790126871),
    (30020, 1486058820, 1675509542),
    (-13051, 268881157, -342358099),
    (-2720, -1089211040, 747294820),
    (4726, 1163661942, 1708185440),
    (-16878, -1363821038, -1952481751)
  )
  for ((expected, lo, hi) <- cases)
    assertEquals(expected, lg(lo, hi).toShort)
}
/** Tests narrowing conversion to Int: the result is exactly the lo word,
 *  regardless of the hi word.
 */
@Test def toInt(): Unit = {
  assertEquals(0, lg(0).toInt)
  assertEquals(-1, lg(-1).toInt)
  assertEquals(0xfedcba98, lg(0xfedcba98, 0x76543210).toInt)
  // (lo, hi) pairs: in every case the expected result is lo itself.
  val cases = Seq(
    (-1869423218, -5516698),
    (450655357, -521592408),
    (-596464514, 629510497),
    (1668957409, 1231040344),
    (-313016061, 283507721),
    (-406779255, 1389322213),
    (-1125423893, -436921025),
    (1491309031, 948401259),
    (360542935, -1033853853),
    (178673916, -2045867551),
    (-1167644863, 738699232),
    (-1852739075, 950841298),
    (-1965326912, 1694989583),
    (-141857741, -1197558189),
    (-938893686, 1763555645),
    (-1178638558, 299067184),
    (-1296424902, -1694453755),
    (204387309, -240738711),
    (-942136876, -527367452),
    (-1703892744, 240186844)
  )
  for ((lo, hi) <- cases)
    assertEquals(lo, lg(lo, hi).toInt)
}
/** Tests widening conversion to a primitive Long: lg(lo, hi) must map to the
 *  64-bit value whose high word is hi and low word is lo.
 */
@Test def toLong(): Unit = {
  assertEquals(0L, lg(0).toLong)
  assertEquals(-1L, lg(-1).toLong)
  assertEquals(0x76543210fedcba98L, lg(0xfedcba98, 0x76543210).toLong)
  // (expected, lo, hi) triples for randomly generated two-word values.
  val cases = Seq(
    (6907420169189163269L, -85753595, 1608259083),
    (-6558938415102325809L, 539593679, -1527121853),
    (-7633462319206780754L, -379998034, -1777303946),
    (-4051533910437546682L, -655641274, -943321249),
    (-3890339056676572253L, 1727460259, -905790147),
    (-3091543614186826784L, 1824805856, -719806090),
    (2806266116723834799L, 948567983, 653384746),
    (-1741184441450532748L, -957910924, -405401095),
    (3395924718030703835L, -433042213, 790675337),
    (-7712245542997911283L, 889526541, -1795647094),
    (-2751064647855401745L, 1316066543, -640532153),
    (5225909624054208018L, 1913378322, 1216751901),
    (1334025594846136121L, -434813127, 310602037),
    (-1574909139329823322L, 1689963942, -366687109),
    (-9142211941778525044L, 754250892, -2128587091),
    (-5517402195275269807L, -1817691823, -1284620305),
    (7612683537409046411L, -222627957, 1772466007),
    (-2955859733488660001L, -1282993697, -688214725),
    (462084382441397543L, 799857959, 107587404),
    (8801656334077465992L, 2076251528, 2049295309)
  )
  for ((expected, lo, hi) <- cases)
    assertEquals(expected, lg(lo, hi).toLong)
}
// Tests conversion to Float under strict-floats semantics (skipped otherwise
// via assumeTrue). Expected constants are the Float renderings produced by
// the JVM for the same Long values.
@Test def toFloat_strict(): Unit = {
assumeTrue("Assumed strict floats", hasStrictFloats)
assertEquals(0, lg(0).toFloat)
assertEquals(-1, lg(-1).toFloat)
// Long.MinValue/MaxValue round to +/-2^63 as Floats; under fullOpt a delta
// is tolerated because Closure rewrites the constant (see comment below).
if (!isInFullOpt) {
assertEquals(9.223372E18f, MaxVal.toFloat)
assertEquals(-9.223372E18f, MinVal.toFloat)
} else {
// Closure seems to incorrectly rewrite the constant on the right :-(
assertEquals(9.223372E18f, MaxVal.toFloat, 1E4f)
assertEquals(-9.223372E18f, MinVal.toFloat, 1E4f)
}
// Randomly generated two-word values.
assertEquals(4.7971489E18f, lg(-1026388143, 1116923232).toFloat)
assertEquals(-2.24047663E18f, lg(-1288678667, -521651607).toFloat)
assertEquals(4.59211416E18f, lg(1192262605, 1069184891).toFloat)
assertEquals(3.38942079E18f, lg(-180353617, 789161022).toFloat)
assertEquals(-6.8076878E18f, lg(-1158443188, -1585038363).toFloat)
assertEquals(7.4159717E18f, lg(906981906, 1726665521).toFloat)
assertEquals(-1.85275997E18f, lg(2042933575, -431379283).toFloat)
assertEquals(5.7344188E18f, lg(599900903, 1335148382).toFloat)
assertEquals(3.20410168E18f, lg(1458166084, 746013039).toFloat)
assertEquals(-7.2310311E18f, lg(1956524672, -1683605603).toFloat)
assertEquals(7.7151362E18f, lg(478583639, 1796320118).toFloat)
assertEquals(1.41365268E18f, lg(-1645816617, 329141676).toFloat)
assertEquals(-3.03197918E18f, lg(184187116, -705937657).toFloat)
assertEquals(-4.04287594E18f, lg(659513335, -941305424).toFloat)
assertEquals(-7.8204678E18f, lg(770505156, -1820844549).toFloat)
assertEquals(-5.9733025E18f, lg(929928858, -1390767911).toFloat)
assertEquals(1.1261721E18f, lg(-1475096259, 262207373).toFloat)
assertEquals(4.00884963E18f, lg(787691795, 933383012).toFloat)
assertEquals(-1.43511611E18f, lg(1189057493, -334139018).toFloat)
assertEquals(3.81415059E18f, lg(-618946450, 888051141).toFloat)
}
// Tests conversion to Double. Expected constants are the Double renderings
// produced by the JVM for the same Long values.
@Test def toDouble(): Unit = {
assertEquals(0, lg(0).toDouble)
assertEquals(-1, lg(-1).toDouble)
// Long.MinValue/MaxValue round to +/-2^63 as Doubles; under fullOpt a delta
// is tolerated because Closure rewrites the constant (see comment below).
if (!isInFullOpt) {
assertEquals(9.223372036854776E18, MaxVal.toDouble)
assertEquals(-9.223372036854776E18, MinVal.toDouble)
} else {
// Closure seems to incorrectly rewrite the constant on the right :-(
assertEquals(9.223372036854776E18, MaxVal.toDouble, 1E4)
assertEquals(-9.223372036854776E18, MinVal.toDouble, 1E4)
}
// Randomly generated two-word values.
assertEquals(3.4240179834317537E18, lg(-151011088, 797216310).toDouble)
assertEquals(8.5596043411285968E16, lg(-508205099, 19929381).toDouble)
assertEquals(-3.1630346897289943E18, lg(1249322201, -736451403).toDouble)
assertEquals(-4.4847682439933604E18, lg(483575860, -1044191477).toDouble)
assertEquals(-6.4014772289576371E17, lg(-1526343930, -149046007).toDouble)
assertEquals(-1.76968119148756736E18, lg(531728928, -412036011).toDouble)
assertEquals(-8.5606671350959739E18, lg(-734111585, -1993185640).toDouble)
assertEquals(-9.0403963253949932E18, lg(-1407864332, -2104881296).toDouble)
assertEquals(-6.4988752582247977E18, lg(-1712351423, -1513137310).toDouble)
assertEquals(-7.7788492399114394E17, lg(1969244733, -181115448).toDouble)
assertEquals(7.6357174849871442E18, lg(-907683842, 1777829016).toDouble)
assertEquals(1.25338659134517658E18, lg(-815927209, 291826806).toDouble)
assertEquals(-3.1910241505692349E18, lg(463523496, -742968207).toDouble)
assertEquals(7.4216510087652332E18, lg(1482622807, 1727987781).toDouble)
assertEquals(-8.189046896086654E18, lg(1170040143, -1906661060).toDouble)
assertEquals(6.8316272807487539E18, lg(-85609173, 1590612176).toDouble)
assertEquals(-8.0611115909320561E18, lg(-1212811257, -1876873801).toDouble)
assertEquals(1.7127521901359959E18, lg(-648802816, 398781194).toDouble)
assertEquals(-6.4442523492577423E18, lg(-1484519186, -1500419423).toDouble)
assertEquals(-1.71264450938175027E18, lg(-2016996893, -398756124).toDouble)
}
// Tests RuntimeLong.fromDouble. As the Specials/Corner cases below show:
// NaN, +/-0.0 and +/-MinPositiveValue map to 0, and values at or beyond the
// Long range (including the infinities) clamp to MaxVal/MinVal.
@Test def fromDouble(): Unit = {
import RuntimeLong.{fromDouble => fromD}
// 2^63 and its immediate Double neighbors, straddling Long.MaxValue.
val twoPow63 = 9.223372036854776E18
val twoPow63NextUp = 9.223372036854778E18
val twoPow63NextDown = 9.2233720368547748E18
// Specials
assertEquals(lg(0), fromD(0.0))
assertEquals(lg(0), fromD(-0.0))
assertEquals(lg(0), fromD(Double.NaN))
assertEquals(MaxVal, fromD(Double.PositiveInfinity))
assertEquals(MinVal, fromD(Double.NegativeInfinity))
// Corner cases
assertEquals(lg(0), fromD(Double.MinPositiveValue))
assertEquals(lg(0), fromD(-Double.MinPositiveValue))
assertEquals(MaxVal, fromD(twoPow63))
assertEquals(MaxVal, fromD(twoPow63NextUp))
if (!isInFullOpt) {
// GCC incorrectly rewrites the Double constants on the rhs
assertEquals(lg(-1024, 2147483647), fromD(twoPow63NextDown))
assertEquals(MinVal, fromD(-twoPow63))
}
assertEquals(MinVal, fromD(-twoPow63NextUp))
assertEquals(lg(1024, -2147483648), fromD(-twoPow63NextDown))
// Absolute value too big
assertEquals(MaxVal, fromD(1.5623101234432471E19))
assertEquals(MaxVal, fromD(1.0425697303244048E19))
assertEquals(MaxVal, fromD(1.500625248806836E19))
assertEquals(MinVal, fromD(-1.5623101234432471E19))
assertEquals(MinVal, fromD(-1.0425697303244048E19))
assertEquals(MinVal, fromD(-1.500625248806836E19))
// Normal cases
assertEquals(lg(-235867169, -1408375), fromD(-6.048920506403873E15))
assertEquals(lg(-69250108, 1979931), fromD(8.503743119053764E15))
assertEquals(lg(-305079043, 917242), fromD(3.939528382405885E15))
assertEquals(lg(687182505, -933310), fromD(-4.008535239847255E15))
assertEquals(lg(-268193171, -177333), fromD(-7.61635408727443E14))
assertEquals(lg(-1529111384, 564485), fromD(2.424447379938472E15))
assertEquals(lg(1128309745, -1082296), fromD(-4.648424796281871E15))
assertEquals(lg(-418524847, 1986827), fromD(8.533360864252241E15))
assertEquals(lg(615477490, -646039), fromD(-2.774715761463054E15))
assertEquals(lg(-1546293262, 815087), fromD(3.500774757068786E15))
assertEquals(lg(455797153, -1037726), fromD(-4.456998776411743E15))
assertEquals(lg(587409995, 1185272), fromD(5.090705064274507E15))
assertEquals(lg(-1405692887, -769407), fromD(-3.304575013039063E15))
assertEquals(lg(667130924, 412), fromD(1.770193656876E12))
assertEquals(lg(632602096, -506779), fromD(-2.176598598697488E15))
assertEquals(lg(1820137888, 955044), fromD(4.101884566378912E15))
assertEquals(lg(682339811, 951155), fromD(4.085180300766691E15))
assertEquals(lg(1394139649, -1084392), fromD(-4.657426781904383E15))
assertEquals(lg(-677499131, 663585), fromD(2.850079490584325E15))
assertEquals(lg(805667746, 1417318), fromD(6.087335263699874E15))
assertEquals(lg(990918920, -1563103), fromD(-6.713475274360568E15))
assertEquals(lg(-1427573595, 969167), fromD(4.162543436756133E15))
assertEquals(lg(-699306484, -1852353), fromD(-7.955791959986676E15))
assertEquals(lg(-1807820942, 1218020), fromD(5.231358553020274E15))
assertEquals(lg(1243383338, 349241), fromD(1.499979916805674E15))
assertEquals(lg(-479557118, 1183372), fromD(5.08254785441229E15))
assertEquals(lg(1413560577, 654135), fromD(2.809489845729537E15))
assertEquals(lg(-2047369879, 1135596), fromD(4.877349929065833E15))
assertEquals(lg(-741161617, -1594192), fromD(-6.846998949739153E15))
assertEquals(lg(-2115502919, 1443312), fromD(6.198980017388729E15))
assertEquals(lg(1015092168, 1152178), fromD(4.948567844262856E15))
assertEquals(lg(-1340352375, -863152), fromD(-3.707206656862071E15))
assertEquals(lg(1990353383, -2017544), fromD(-8.665283507887641E15))
assertEquals(lg(-1683508387, -666397), fromD(-2.862150709693603E15))
assertEquals(lg(2095665836, 369587), fromD(1.587366173692588E15))
assertEquals(lg(229204175, 77510), fromD(3.32903144317135E14))
assertEquals(lg(-1988104885, 1374301), fromD(5.902580156722507E15))
assertEquals(lg(-1032158224, -233238), fromD(-1.001746319375376E15))
assertEquals(lg(1321723055, -121058), fromD(-5.19938829196113E14))
assertEquals(lg(-1959869514, -1892991), fromD(-8.130332101524554E15))
assertEquals(lg(-1173650161, -412038), fromD(-1.769686613392113E15))
assertEquals(lg(-1692936735, -1697943), fromD(-7.292607053441567E15))
assertEquals(lg(-1368921565, 621023), fromD(2.667276401109539E15))
}
// Tests all comparison operators through one helper: `expected` is the sign
// (-1/0/1) of x.compareTo(y), and equals/notEquals and <, <=, >, >= are all
// checked for consistency with it (also against compareTo on java.lang.Long).
@Test def comparisons(): Unit = {
def test(x: RuntimeLong, y: RuntimeLong, expected: Int): Unit = {
assertEquals(expected, x.compareTo(y).signum)
assertEquals(expected, x.compareTo(y.toLong: java.lang.Long).signum)
assertEquals(expected == 0, x.equals(y))
assertEquals(expected != 0, x.notEquals(y))
assertEquals(expected < 0, x < y)
assertEquals(expected <= 0, x <= y)
assertEquals(expected > 0, x > y)
assertEquals(expected >= 0, x >= y)
}
test(lg(0), lg(0), 0)
test(lg(0), lg(1), -1)
test(lg(0), lg(-1), 1)
test(MaxVal, MinVal, 1)
test(MinVal, MaxVal, -1)
// Positive and negative numbers requiring lo to be compared via unsigned
test(lg(0x87654321, 0x654789ab), lg(0x12345678, 0x654789ab), 1)
test(lg(0x87654321, 0x89abcdef), lg(0x12345678, 0x89abcdef), 1)
// Whitebox corner cases
test(lg(-1, 0), lg(0, 0), 1)
test(lg(0, 0), lg(-1, 0), -1)
// Randomly generated equal and unequal pairs.
test(lg(173547161, -1884162399), lg(173547161, -1884162399), 0)
test(lg(-1131022787, -472928681), lg(-1131022787, -472928681), 0)
test(lg(-1426164191, 1230100202), lg(-1426164191, 1230100202), 0)
test(lg(-865774626, 1656835920), lg(-865774626, 1656835920), 0)
test(lg(323675568, -725625271), lg(323675568, -725625271), 0)
test(lg(-480943595, -1454872354), lg(-480943595, -1454872354), 0)
test(lg(-626788852, 1037229194), lg(-626788852, 1037229194), 0)
test(lg(-717389653, 232764759), lg(-717389653, 232764759), 0)
test(lg(-861190423, -1233377930), lg(-861190423, -1233377930), 0)
test(lg(-424759090, 2081288998), lg(-424759090, 2081288998), 0)
test(lg(-1092215366, 753517982), lg(349136582, -103427916), 1)
test(lg(363609757, -1151024787), lg(472951646, -1802702403), 1)
test(lg(604332601, 1869576376), lg(1642523661, 1083165388), 1)
test(lg(309732766, 1349689861), lg(1287300335, 1464464808), -1)
test(lg(-1309668929, -965374553), lg(-1952664258, 53355972), -1)
test(lg(1881957750, 388099413), lg(1843907319, -1819358211), 1)
test(lg(-969542710, 864289013), lg(-1025874755, 1102102911), -1)
test(lg(-1425636748, -220185411), lg(1184140796, 40447497), -1)
test(lg(242386079, 452246653), lg(435337552, -956883630), 1)
test(lg(-1007383056, 344856628), lg(-195994328, 635205577), -1)
test(lg(-1652098619, 2042392045), lg(819672742, -2139008380), 1)
test(lg(1423590080, 1919857862), lg(918443721, 1202178673), 1)
test(lg(-1726296442, 302493002), lg(314727886, 1583734481), -1)
test(lg(-2124336701, 769721099), lg(461146322, -591528218), 1)
test(lg(1544826993, -689540243), lg(-1107003972, -1622786326), 1)
test(lg(2050227802, 951848379), lg(-774454951, 1675192386), -1)
test(lg(251298779, -327163776), lg(767615943, 1531730165), -1)
test(lg(1890888425, 761833495), lg(1870917399, 2027251288), -1)
test(lg(594868313, 126374530), lg(-1567484882, -1199917303), 1)
test(lg(-914360997, -703435655), lg(2049249771, -1581791194), 1)
test(lg(-732484281, -738997306), lg(1445589646, 1910084021), -1)
test(lg(340771740, 1351224018), lg(459324247, 1301544548), 1)
test(lg(-940710332, 1344186742), lg(-1143672211, 1112189558), 1)
test(lg(-804347876, 364046111), lg(-4317439, -1733157379), 1)
test(lg(914214836, -1226397169), lg(-299522125, 1393423940), -1)
test(lg(1244546642, 1821771770), lg(44151604, -1398558064), 1)
test(lg(-2094640323, -1469168677), lg(-263524564, 88152070), -1)
test(lg(-124567753, -93039352), lg(-200449699, -30383890), -1)
test(lg(161119306, -1098626173), lg(-137189625, 1289988889), -1)
test(lg(-2052616761, 846341515), lg(-150583666, 1044666783), -1)
test(lg(-10359669, -1628837253), lg(165345114, 1529503183), -1)
test(lg(1717988228, 1622548180), lg(834798590, -1907713185), 1)
test(lg(-1416372109, -353311343), lg(-722195813, -2060788759), 1)
test(lg(980620531, -300588346), lg(-889348218, 1805452697), -1)
test(lg(-465681479, 556544868), lg(-684386776, 724207906), -1)
test(lg(1720493596, 1118244444), lg(2048914469, -789300492), 1)
test(lg(-1259678249, -1557339417), lg(-1908141376, -468055129), -1)
test(lg(1374750478, 1591281700), lg(1107931774, 1073828802), 1)
test(lg(1307860622, -1769647645), lg(-1521056504, 1476896409), -1)
test(lg(1870719065, -606069057), lg(1219817813, -1063559023), 1)
test(lg(-526519712, 1166848880), lg(-748095992, 59925642), 1)
test(lg(-1011429486, -2053277854), lg(537284118, 1714076830), -1)
test(lg(-669104363, -107157886), lg(1647426475, -1784147450), 1)
test(lg(-389860398, 693324889), lg(1047633230, -1757663140), 1)
test(lg(-200206281, 96771163), lg(613429570, -1206384633), 1)
test(lg(-1436571081, -2050819200), lg(-665572561, 644211697), -1)
test(lg(620796821, -567816428), lg(-109412350, -624638338), 1)
test(lg(858464866, -2104597302), lg(-987329519, 1189618105), -1)
test(lg(-1342634556, -1517778924), lg(-693373055, 142499537), -1)
test(lg(1839280888, -168388422), lg(-1645740821, -1967920957), 1)
}
// Tests bitwise NOT: ~lg(lo, hi) must equal lg(~lo, ~hi), i.e. each expected
// word is the two's-complement negation minus one (-w - 1) of the input word.
@Test def bitwise_not_~(): Unit = {
assertEquals(lg(1664374422, 327449892), ~lg(-1664374423, -327449893))
assertEquals(lg(-2033180390, -1179462631), ~lg(2033180389, 1179462630))
assertEquals(lg(-1134559214, 581653069), ~lg(1134559213, -581653070))
assertEquals(lg(-304074638, -795726117), ~lg(304074637, 795726116))
assertEquals(lg(-1711832787, 1153070599), ~lg(1711832786, -1153070600))
assertEquals(lg(-1526506637, 966114536), ~lg(1526506636, -966114537))
assertEquals(lg(4362923, 1155261397), ~lg(-4362924, -1155261398))
assertEquals(lg(-1976846289, -68873334), ~lg(1976846288, 68873333))
assertEquals(lg(-980717878, -1171857118), ~lg(980717877, 1171857117))
assertEquals(lg(1087568370, 543704246), ~lg(-1087568371, -543704247))
assertEquals(lg(466027718, 693030605), ~lg(-466027719, -693030606))
assertEquals(lg(457333958, 1344424074), ~lg(-457333959, -1344424075))
assertEquals(lg(-1195369388, -1211454825), ~lg(1195369387, 1211454824))
assertEquals(lg(1637646574, 618600148), ~lg(-1637646575, -618600149))
assertEquals(lg(1882417448, 81477816), ~lg(-1882417449, -81477817))
assertEquals(lg(-755550612, -520392566), ~lg(755550611, 520392565))
assertEquals(lg(-754282895, -1550447287), ~lg(754282894, 1550447286))
assertEquals(lg(949172349, -708028075), ~lg(-949172350, 708028074))
assertEquals(lg(1587810906, -1344614950), ~lg(-1587810907, 1344614949))
assertEquals(lg(-1761617639, -353615615), ~lg(1761617638, 353615614))
assertEquals(lg(-153730678, 249152220), ~lg(153730677, -249152221))
assertEquals(lg(-189227914, 2071190797), ~lg(189227913, -2071190798))
assertEquals(lg(-853867870, 445686068), ~lg(853867869, -445686069))
assertEquals(lg(-779434875, 417640992), ~lg(779434874, -417640993))
assertEquals(lg(1997707715, -1100729422), ~lg(-1997707716, 1100729421))
assertEquals(lg(1171311729, -1236578928), ~lg(-1171311730, 1236578927))
assertEquals(lg(-833922040, 1773972621), ~lg(833922039, -1773972622))
assertEquals(lg(1414648869, 1222586075), ~lg(-1414648870, -1222586076))
assertEquals(lg(1123832582, -1270176018), ~lg(-1123832583, 1270176017))
assertEquals(lg(1163066309, 237396271), ~lg(-1163066310, -237396272))
assertEquals(lg(-1826566063, 509270117), ~lg(1826566062, -509270118))
assertEquals(lg(-450318543, 1650640099), ~lg(450318542, -1650640100))
assertEquals(lg(1461907704, -27364749), ~lg(-1461907705, 27364748))
assertEquals(lg(1012261256, 1691289854), ~lg(-1012261257, -1691289855))
assertEquals(lg(-1929178874, 1804481536), ~lg(1929178873, -1804481537))
assertEquals(lg(-888719200, -1846455123), ~lg(888719199, 1846455122))
assertEquals(lg(984231682, -867292444), ~lg(-984231683, 867292443))
assertEquals(lg(2105026705, -16146223), ~lg(-2105026706, 16146222))
assertEquals(lg(1742028653, -1648876191), ~lg(-1742028654, 1648876190))
assertEquals(lg(1922039594, -60702355), ~lg(-1922039595, 60702354))
assertEquals(lg(264728648, 275960741), ~lg(-264728649, -275960742))
assertEquals(lg(1237639032, -1761272007), ~lg(-1237639033, 1761272006))
assertEquals(lg(1118919822, 901486922), ~lg(-1118919823, -901486923))
assertEquals(lg(18001220, -1121574637), ~lg(-18001221, 1121574636))
assertEquals(lg(2122002356, -1370943785), ~lg(-2122002357, 1370943784))
assertEquals(lg(2006182035, -1422441078), ~lg(-2006182036, 1422441077))
assertEquals(lg(1314896174, 460075839), ~lg(-1314896175, -460075840))
assertEquals(lg(1829402918, -1031934892), ~lg(-1829402919, 1031934891))
assertEquals(lg(-2138673173, -107590306), ~lg(2138673172, 107590305))
assertEquals(lg(1382443514, -56307753), ~lg(-1382443515, 56307752))
}
// Tests bitwise OR: each expected word is the word-wise `|` of the two
// operands' corresponding 32-bit halves.
@Test def bitwise_or_|(): Unit = {
assertEquals(lg(1467334397, -608514), lg(1198889513, -170491266) | lg(356560637, 1244673694))
assertEquals(lg(-1645778056, 796647391), lg(-1930990792, 627822941) | lg(-1849669008, 185716690))
assertEquals(lg(2121785322, -3735189), lg(711185578, -154795743) | lg(1446469570, -104529814))
assertEquals(lg(401988479, 1357601567), lg(356565628, 275405582) | lg(380967239, 1356925723))
assertEquals(lg(-167780425, -167778583), lg(1968397619, -447093015) | lg(-1242708043, 1353146913))
assertEquals(lg(-34603479, -565777), lg(-2121965024, -76133937) | lg(2104409609, -1365814226))
assertEquals(lg(-537280529, -10535202), lg(1496398822, -548061626) | lg(-556169301, -245689186))
assertEquals(lg(2132402169, -1093993487), lg(856203065, -1102382704) | lg(1276763344, 377524977))
assertEquals(lg(500957183, -5777537), lg(474066920, -215674305) | lg(366737695, 530830706))
assertEquals(lg(-1077937506, 1876426559), lg(-1543310820, 664058893) | lg(1002387606, 1826081595))
assertEquals(lg(-2121745, -302649859), lg(1606847457, -857707283) | lg(-82108753, 628476252))
assertEquals(lg(2113649662, -9748643), lg(703699686, -1218298019) | lg(1575693246, -565500071))
assertEquals(lg(1845274268, 1608495102), lg(1281663616, 1255777790) | lg(1708663964, 1604300502))
assertEquals(lg(-174066179, 1861146349), lg(-1315547660, 1726760037) | lg(-442781559, 235328140))
assertEquals(lg(2139059199, -40115785), lg(2014986997, -1130692301) | lg(124088654, 1637408903))
assertEquals(lg(-4195861, -679630869), lg(1653153899, 1412277603) | lg(-1615398494, -682581111))
assertEquals(lg(601802239, 1937620978), lg(551077237, 1349033186) | lg(597575118, 1662855120))
assertEquals(lg(-1383162189, -1107312899), lg(613289137, -1123701660) | lg(-1383294317, 369006329))
assertEquals(lg(-141299717, -576585865), lg(-418175046, -593383309) | lg(1468132939, 360734532))
assertEquals(lg(1998808831, -86066691), lg(1428236018, -1294026291) | lg(572735565, 1213340152))
assertEquals(lg(-1680360554, -738459673), lg(-1949058688, -1013245209) | lg(416580246, 300148007))
assertEquals(lg(-1073808964, -183288105), lg(-1746245220, 1427323605) | lg(-1185613404, -469621610))
assertEquals(lg(1475346349, 1845485055), lg(1445648649, 701317455) | lg(1407661733, 1287118327))
assertEquals(lg(-33566733, -268503975), lg(-1861500445, 764080137) | lg(-33812527, -411163560))
assertEquals(lg(-286605413, 1602191341), lg(-1408712806, 393166157) | lg(1323973395, 1580353248))
assertEquals(lg(-553947394, -2013546505), lg(-2072304578, -2142600249) | lg(-625840402, -2018265417))
assertEquals(lg(-553746946, -140321), lg(450125308, 1742298015) | lg(-999674466, -89794491))
assertEquals(lg(-16643, -68193313), lg(1239068904, -68194107) | lg(-1092247939, -639552609))
assertEquals(lg(-52733444, -1159005505), lg(-2075047684, -1706497393) | lg(-119858776, -1461536706))
assertEquals(lg(-121509406, 1048526839), lg(-1065293728, 1045575815) | lg(943802850, 4130803))
assertEquals(lg(1844952571, -1327497834), lg(1688647147, -1327540094) | lg(1767049400, -1609892586))
assertEquals(lg(-5046291, -1345721876), lg(-207425559, 231270892) | lg(515004644, -1349918716))
assertEquals(lg(-1075861506, -67698709), lg(781813534, 1274454635) | lg(-1814682890, -1182466103))
assertEquals(lg(2144796219, -17303617), lg(1792206347, -54265949) | lg(931436592, -625499620))
assertEquals(lg(-874545153, -1611301156), lg(-1957992337, 421859924) | lg(1138122674, -1896513908))
assertEquals(lg(-1218644010, -67141891), lg(-1220262128, 1790926509) | lg(-2107837994, -245286664))
assertEquals(lg(-2555905, 2146160604), lg(-485426246, 2122993116) | lg(-1077361187, 795578180))
assertEquals(lg(999978447, 2129346287), lg(713580935, 2059541733) | lg(957494730, 1688940106))
assertEquals(lg(-836113, 1983903423), lg(-181332639, 608154803) | lg(787627150, 1378378253))
assertEquals(lg(-273220891, -1242040457), lg(-944448827, -1528432780) | lg(-374967708, 364320051))
assertEquals(lg(-52433921, -1615929419), lg(1822361801, -1626992863) | lg(-1865553026, -1867721804))
assertEquals(lg(-1646593, -1583649), lg(-333036705, -39743141) | lg(-136127263, -404241201))
assertEquals(lg(-105959457, -50406273), lg(1342309595, 143297662) | lg(-1448137844, -50933699))
assertEquals(lg(-480707585, -87100434), lg(-514802766, 718197230) | lg(1113082335, -259890518))
assertEquals(lg(-73693249, -555903498), lg(-476348284, -1025699402) | lg(1518405435, 1545110880))
assertEquals(lg(-1646871041, -403194029), lg(-2058311589, 1135057747) | lg(-1664731675, -1535754941))
assertEquals(lg(-203423937, -34342961), lg(333362997, -34482226) | lg(-205173969, 1754490115))
assertEquals(lg(2083487743, -159909991), lg(2083354303, -2043490039) | lg(1344953817, -195725679))
assertEquals(lg(-134268937, -680984614), lg(-942983837, -683124136) | lg(909452980, -1021249590))
assertEquals(lg(-17107060, -35914117), lg(-402624124, -505696678) | lg(-688199800, 2110291577))
}
@Test def bitwise_and_&(): Unit = {
// Precomputed vectors for the 64-bit `&` operator: each line asserts that
// ANDing the two lg(...) operands yields the expected lg(...) result.
// NOTE(review): lg appears to build a Long from two 32-bit halves — confirm
// the (low, high) word order against the lg helper's definition.
assertEquals(lg(-2012982272, 17896961), lg(-1973652216, 353474049) & lg(-576365513, -1546420349))
assertEquals(lg(440467456, -805024688), lg(2054268182, -735220496) & lg(-1706223071, -653894309))
assertEquals(lg(-1073741824, -2144861952), lg(-761230816, -1888512251) & lg(-988806710, -256349768))
assertEquals(lg(-1977056222, -1878455803), lg(-834874333, -101893315) & lg(-1964333382, -1877225849))
assertEquals(lg(-1069166300, 304091682), lg(-767041747, 1403541430) & lg(-320482908, 442929698))
assertEquals(lg(193986570, 67633664), lg(1538292767, 67928849) & lg(261587146, 2097883842))
assertEquals(lg(167772308, 35669040), lg(448790964, 1852174074) & lg(-284620129, 35804464))
assertEquals(lg(540801, 554500096), lg(123267521, 1965916169) & lg(-401979731, 588194498))
assertEquals(lg(-1878826824, 268436097), lg(-1725202754, 324931273) & lg(-1240211271, 948007557))
assertEquals(lg(306780164, 8388625), lg(1044995460, -1447811559) & lg(1381579300, 378161591))
assertEquals(lg(29904144, 12096051), lg(1640550232, -1980050765) & lg(-1613988461, 381206391))
assertEquals(lg(-963297278, 537741320), lg(-810205145, 832395272) & lg(-153237294, -1368559681))
assertEquals(lg(-2138566639, -1881372656), lg(-2087037677, -539042218) & lg(-1930915595, -1879201391))
assertEquals(lg(348136448, 1461360), lg(936077102, 1888906741) & lg(-590306112, 153013360))
assertEquals(lg(-2147459072, 50628864), lg(-1520343420, -480326676) & lg(-1031638712, 463833361))
assertEquals(lg(-805279656, -972355264), lg(-603625122, -837874740) & lg(-266310439, -433325742))
assertEquals(lg(1763723264, 1095287337), lg(2101242821, 1363798717) & lg(-337523686, -1007893653))
assertEquals(lg(1296302405, 1947206722), lg(-849542331, 2084521938) & lg(1866786159, -179258269))
assertEquals(lg(1275593362, 814484868), lg(1283984114, 1922846117) & lg(-42342754, 948944324))
assertEquals(lg(1081520, 35397649), lg(18451376, 39592223) & lg(-300891980, 43819665))
assertEquals(lg(539714600, -1617688304), lg(1772840110, -1611388521) & lg(876572201, -1080057992))
assertEquals(lg(268660738, 1111507460), lg(-1792575438, 1131693597) & lg(2026108738, -691967420))
assertEquals(lg(-1977139054, 2393104), lg(-1977130853, 1105495064) & lg(-289941322, 37545108))
assertEquals(lg(-2145341308, -1333516032), lg(-1590955612, -1330697458) & lg(-924798828, -1177272879))
assertEquals(lg(-1503395487, -299827136), lg(-285931035, -293654078) & lg(-1486596765, -31342500))
assertEquals(lg(1233401994, 34091008), lg(1237743775, -1293389691) & lg(1803860874, 1175174664))
assertEquals(lg(-932558672, 270533826), lg(-839976008, 900736195) & lg(-362132238, -668577850))
assertEquals(lg(117477888, 473995424), lg(1202887172, 484547048) & lg(793351913, -1622877017))
assertEquals(lg(302600257, -2030040226), lg(1393155525, -2025583778) & lg(-1164217783, -416769026))
assertEquals(lg(145293649, 536871648), lg(-658787467, -1534848013) & lg(770509273, 861439716))
assertEquals(lg(1546608834, 302001248), lg(1550840002, 1588870758) & lg(2084528882, 302148833))
assertEquals(lg(201606209, -695465177), lg(481609689, -152204489) & lg(1279544421, -561242137))
assertEquals(lg(608207492, -2112820352), lg(-1529763097, -1978531900) & lg(641783708, -2039026814))
assertEquals(lg(270672860, -1476361723), lg(887514076, -129985897) & lg(423346174, -1364800691))
assertEquals(lg(606102544, -503185240), lg(1736270961, -223672071) & lg(748709016, -498985816))
assertEquals(lg(144970344, 74547586), lg(413438572, 628333003) & lg(-1964689415, -2039117914))
assertEquals(lg(0, 33646849), lg(-1441786846, -952014445) & lg(1364118108, 582220621))
assertEquals(lg(886489100, -1836576552), lg(-167845571, -610782244) & lg(920048140, -1832380167))
assertEquals(lg(181408260, 8425760), lg(1070668735, 1223734716) & lg(1255200260, 310500128))
assertEquals(lg(18633796, 1494253868), lg(565998918, 2102701486) & lg(1230790357, -651115716))
assertEquals(lg(1242169472, 1074954242), lg(1259021457, -988117846) & lg(-95497780, 2025257730))
assertEquals(lg(202639938, 134272082), lg(236334914, 210367602) & lg(-1388488109, 672191707))
assertEquals(lg(955253125, 1994661641), lg(2029259749, 2012495659) & lg(-1125022313, -17866867))
assertEquals(lg(134242336, 1377566768), lg(2078335024, -748696528) & lg(-1944488853, 1455161657))
assertEquals(lg(883214088, 536873986), lg(1962270604, 747650594) & lg(1051641707, -1606005365))
assertEquals(lg(203000132, 19923458), lg(504991188, 623990339) & lg(-1919047324, 331123498))
assertEquals(lg(274893395, 1881151488), lg(409659995, 1887189252) & lg(384277491, 1973591160))
assertEquals(lg(115235, 335685459), lg(872793907, 353626075) & lg(34859627, 1988247415))
assertEquals(lg(538493100, 441057288), lg(-1407266644, 441386073) & lg(1635378940, -548742904))
assertEquals(lg(839516176, 671232089), lg(844761371, 1022505085) & lg(1930384912, 688275291))
}
@Test def bitwise_xor_^(): Unit = {
// Precomputed vectors for the 64-bit `^` operator: each line asserts that
// XORing the two lg(...) operands yields the expected lg(...) result.
assertEquals(lg(1342248740, -313223199), lg(690404572, -1279287229) ^ lg(2032643064, 1592473506))
assertEquals(lg(-1691405730, 274213753), lg(1880634009, 1433776255) ^ lg(-348716857, 1160616710))
assertEquals(lg(882329013, -513228751), lg(-958227509, 287282926) ^ lg(-227156354, -260614433))
assertEquals(lg(1416185065, -1664302164), lg(-266860160, 1815641996) ^ lg(-1536078487, -252396512))
assertEquals(lg(-1268929640, 1388542260), lg(1278830943, 22194981) ^ lg(-127614265, 1402065425))
assertEquals(lg(2107251545, -1588280474), lg(-865349911, -84319450) ^ lg(-1309551184, 1538105408))
assertEquals(lg(-1128180942, 150893828), lg(-1973252863, -1969367363) ^ lg(916708915, -2107399239))
assertEquals(lg(-721878765, 35051090), lg(2098389933, -3394272) ^ lg(-1444158786, -35986574))
assertEquals(lg(-1863503396, 535478572), lg(533612062, -1712875225) ^ lg(-1893500990, -2045945845))
assertEquals(lg(1732708730, -1611595623), lg(799833325, 2072025633) ^ lg(1223390615, -462316872))
assertEquals(lg(-757432261, -1755342186), lg(570370215, 1665373667) ^ lg(-215635812, -199487627))
assertEquals(lg(755676969, 926086823), lg(-1440978805, 1756956707) ^ lg(-2028544094, 1603010180))
assertEquals(lg(1331057947, 1347408402), lg(-1788434031, -203193594) ^ lg(-634323830, -1548988140))
assertEquals(lg(596183682, -256181831), lg(-1101798994, 1399594232) ^ lg(-1646597332, -1546197695))
assertEquals(lg(1360009516, 182700672), lg(-1432962218, -1631098948) ^ lg(-75062662, -1809535684))
assertEquals(lg(594798246, -124892913), lg(699430210, 902448324) ^ lg(180589540, -851178037))
assertEquals(lg(-1331407219, 1819608371), lg(-1873118605, -20501824) ^ lg(553528574, -1833816077))
assertEquals(lg(1679931669, 470452622), lg(-693963147, 616673404) ^ lg(-1300017312, 952842738))
assertEquals(lg(1861159718, -1488989292), lg(1250421224, 1104113895) ^ lg(610853582, -420437133))
assertEquals(lg(1056597675, -102857583), lg(-611286212, -1550148499) ^ lg(-445979241, 1514412284))
assertEquals(lg(255992058, 1610836280), lg(1704771515, 1382796179) ^ lg(1792974657, 845718187))
assertEquals(lg(315376042, 566682776), lg(1042258124, 728098489) ^ lg(752081254, 178455073))
assertEquals(lg(-185728083, -2076881789), lg(-1887944331, 1039677246) ^ lg(2073445080, -1177715779))
assertEquals(lg(22829354, 1511361245), lg(1986213921, -1875380784) ^ lg(2000642315, -903708915))
assertEquals(lg(-1209040105, 1698106233), lg(365179043, -418125319) ^ lg(-1574194252, -2111511936))
assertEquals(lg(-2034371369, -364230501), lg(-376038790, 1936322298) ^ lg(1865150125, -1725716895))
assertEquals(lg(-324294323, -1435696355), lg(182372182, -1389399582) ^ lg(-428511717, 121795327))
assertEquals(lg(-1632322296, 110394084), lg(408417754, -547668779) ^ lg(-2031925038, -640727503))
assertEquals(lg(1545363539, -418308022), lg(1515701412, 860890032) ^ lg(105620727, -733936646))
assertEquals(lg(-2124553361, 1571601224), lg(144626057, 2121098703) ^ lg(-1983696154, 599907975))
assertEquals(lg(-508527758, 679546956), lg(1716685092, -647833300) ^ lg(-2015169962, -236730016))
assertEquals(lg(-703803607, -1904715404), lg(-2016515438, -1674300757) ^ lg(1371710907, 306998239))
assertEquals(lg(-1295788899, 1052686696), lg(-547404938, -860356684) ^ lg(1838979051, -234273060))
assertEquals(lg(-1416482745, -1744821078), lg(1034397763, 1158948099) ^ lg(-1774872572, -585891415))
assertEquals(lg(-420256974, -1759976200), lg(1755131065, -847055172) ^ lg(-1905373301, 1520046660))
assertEquals(lg(-1978435977, -1613559541), lg(755114159, 1707687361) ^ lg(-1492035880, -98945846))
assertEquals(lg(1517584033, -1108617107), lg(1110955283, -394871226) ^ lg(407088050, 1436378667))
assertEquals(lg(1706214170, -555203143), lg(729918767, -1047522396) ^ lg(1311993397, 527980061))
assertEquals(lg(-278231087, -1148948163), lg(-1533968339, 1826223468) ^ lg(1274742780, -681737135))
assertEquals(lg(-204001370, 1220298027), lg(230297309, -219465279) ^ lg(-26402437, -1168671510))
assertEquals(lg(-1169385448, -2039889677), lg(-1364422220, 1487677662) ^ lg(350226860, -557455315))
assertEquals(lg(791138554, 668046473), lg(-1049451753, 1883174397) ^ lg(-296389651, 1475305844))
assertEquals(lg(2103687665, 1121138741), lg(-895088167, 1303802204) ^ lg(-1211781080, 258296169))
assertEquals(lg(-387978954, 908804328), lg(1409034242, -1162000487) ^ lg(-1155284684, -1936324751))
assertEquals(lg(1265820840, 1142688859), lg(861082066, -475962819) ^ lg(2015491450, -1480757658))
assertEquals(lg(1490973918, -277478122), lg(-288714491, 1935424926) ^ lg(-1240144421, -1674954616))
assertEquals(lg(1839163014, 362842460), lg(-699164585, -731232280) ^ lg(-1144193327, -1043673420))
assertEquals(lg(634920094, -2001579101), lg(683993930, 248552821) ^ lg(220002260, -2040344874))
assertEquals(lg(-831642917, -817908795), lg(640417317, 298956382) ^ lg(-398074626, -554826341))
assertEquals(lg(857398449, 1711937081), lg(-1493347776, 1187436882) ^ lg(-1779986703, 550293355))
}
@Test def shift_left_<<(): Unit = {
// Precomputed vectors for the 64-bit `<<` operator. The shift counts are
// arbitrary (often negative or huge) Ints, so these vectors also exercise
// the JVM rule that only the low 6 bits of the count are used for Long
// shifts — NOTE(review): expected values presumably assume that masking;
// confirm against the Long shift semantics the class under test implements.
assertEquals(lg(1065353216, -691528727), lg(-1875389825, 1268606893) << -73329513)
assertEquals(lg(671088640, -1046568266), lg(869553861, -291578632) << -339545061)
assertEquals(lg(0, 0), lg(543726956, -1753066291) << -809014658)
assertEquals(lg(-754974720, -1479892363), lg(-895322669, 847749031) << 1030973528)
assertEquals(lg(0, 1696595968), lg(1598039634, 819660072) << 82069876)
assertEquals(lg(0, -763223040), lg(-151740279, -595601314) << 503039850)
assertEquals(lg(0, -1360527360), lg(-1702267427, 1115684531) << 1171866675)
assertEquals(lg(508125184, -784066052), lg(-807341493, 286689824) << -1938771891)
assertEquals(lg(-551288832, 439734876), lg(-382832750, -2134078182) << 1537970769)
assertEquals(lg(-1409069728, 1129787), lg(-580904341, 939559401) << 1856717061)
assertEquals(lg(1711276032, 1295846454), lg(-198125160, 663832884) << 1561097110)
assertEquals(lg(-1004724328, -940313723), lg(-1199332365, -1728151952) << 858801923)
assertEquals(lg(-1029298112, -1523092059), lg(773140802, -181814355) << 1110910853)
assertEquals(lg(536870912, 200145086), lg(1601160689, 869229832) << -338843811)
assertEquals(lg(0, -1735502848), lg(-1919381932, -201750119) << -813015128)
assertEquals(lg(-1727917056, 2104066035), lg(-52019067, -102802849) << -2122946486)
assertEquals(lg(0, 771751936), lg(-456947922, 1170727731) << 2126487160)
assertEquals(lg(0, -710836224), lg(1756719200, -1702547414) << -32425558)
assertEquals(lg(0, -1073741824), lg(97072750, 409070577) << 1222452733)
assertEquals(lg(0, -1182793728), lg(1177105779, 212324545) << -834196361)
assertEquals(lg(0, 1543503872), lg(1395605166, -1743726419) << -1762017159)
assertEquals(lg(0, -67108864), lg(703808254, 1939941481) << 1042647417)
assertEquals(lg(0, 1207959552), lg(-702184622, -618243162) << -753853766)
assertEquals(lg(-58458112, -1619174179), lg(-1368457662, 1747275710) << 1382741393)
assertEquals(lg(0, -299542812), lg(-74885703, 1342895995) << 1929734882)
assertEquals(lg(0, -1585446912), lg(-61401466, -496528012) << -129147274)
assertEquals(lg(1888485376, 630678170), lg(-660169692, 1479330149) << 289081298)
assertEquals(lg(0, -536870912), lg(-421237721, 1011668330) << 370873533)
assertEquals(lg(0, 102137856), lg(-821818323, -2029348763) << -916638609)
assertEquals(lg(0, -1073741824), lg(-1246065172, -1572087360) << 1493241980)
assertEquals(lg(1156516188, -1812425640), lg(578258094, -906212820) << 2074806145)
assertEquals(lg(0, 1370357760), lg(61151968, -1770168701) << -2062208020)
assertEquals(lg(-402653184, 1642287002), lg(1013576541, 460756940) << -902835237)
assertEquals(lg(-1744830464, 1690731362), lg(-1731171245, 771836652) << 868975579)
assertEquals(lg(-417260032, 563566725), lg(1123258511, 1049676716) << 575477257)
assertEquals(lg(411626816, -1915897795), lg(-779579692, 1222433667) << 1238257604)
assertEquals(lg(0, -2147483648), lg(-1102469156, -543766743) << 553354173)
assertEquals(lg(0, -1909156352), lg(843520587, -517185932) << 1899246569)
assertEquals(lg(0, -487976960), lg(-510775647, -896837143) << 1487779500)
assertEquals(lg(-1148788736, -847308273), lg(-1594115986, -186853391) << -119255604)
assertEquals(lg(0, 1940424228), lg(-588635767, 1047291343) << 2089738146)
assertEquals(lg(1726279680, 2137615428), lg(-1002017201, -986188138) << 800913356)
assertEquals(lg(0, 1650633728), lg(1813551275, -400674286) << -1609938966)
assertEquals(lg(-1207959552, 897838789), lg(-1333929801, 254558182) << -1518372133)
assertEquals(lg(0, -1104224256), lg(834127324, 878312672) << -923142549)
assertEquals(lg(-504160320, 305586753), lg(126340223, -2008491127) << -252023418)
assertEquals(lg(0, 0), lg(510931784, -1313923431) << 1174528765)
assertEquals(lg(-1449390900, -1602240664), lg(711394099, -400560166) << -967606846)
assertEquals(lg(0, 1162928128), lg(1319282800, -1994311032) << 1237159401)
assertEquals(lg(-1749421258, 1809275319), lg(-874710629, -1242845989) << 484063041)
}
@Test def shift_logical_right_>>>(): Unit = {
// Precomputed vectors for the 64-bit unsigned (logical) right shift `>>>`.
// Shift counts are arbitrary Ints (negative and out-of-range), so these
// vectors also exercise masking of the shift count to its low 6 bits.
assertEquals(lg(1982185809, 4856), lg(88517143, 1273092247) >>> 2099569298)
assertEquals(lg(40, 0), lg(-1987462914, 1361836721) >>> -2053535175)
assertEquals(lg(258, 0), lg(1513792977, 1085974656) >>> -303705162)
assertEquals(lg(-1589724844, 2), lg(-2071249600, 1411897130) >>> 1015183069)
assertEquals(lg(827423626, 419765), lg(-1560865755, 214919778) >>> 1191603401)
assertEquals(lg(376475826, 25773988), lg(944265510, -995896821) >>> 485744647)
assertEquals(lg(291969293, 528), lg(1131824263, -2080089658) >>> -386336938)
assertEquals(lg(185, 0), lg(-827478170, -1185129975) >>> 2048537528)
assertEquals(lg(45022, 0), lg(-916869993, -1344352401) >>> -791372688)
assertEquals(lg(587, 0), lg(588931659, -1830830904) >>> -1259543946)
assertEquals(lg(-684574597, 28915), lg(473794659, 947514265) >>> -1409717873)
assertEquals(lg(3, 0), lg(471518489, -940479957) >>> -847604034)
assertEquals(lg(11, 0), lg(-818287716, 1547586919) >>> -216455813)
assertEquals(lg(266, 0), lg(-2088976011, -2057680935) >>> 787633143)
assertEquals(lg(-800511856, 59336150), lg(306848777, -497453644) >>> 1584315654)
assertEquals(lg(25694, 0), lg(-1689341833, -927188015) >>> 1300572337)
assertEquals(lg(237982231, 3229829), lg(396954515, 413418119) >>> 1180537031)
assertEquals(lg(1319611409, 10188), lg(1478732342, 1335401807) >>> -1668840943)
assertEquals(lg(-530293557, 9), lg(-1326271298, -1643756084) >>> -2118687716)
assertEquals(lg(26, 0), lg(1205635051, 875594107) >>> 350453433)
assertEquals(lg(1698203097, 57089), lg(-2049358216, -553556680) >>> -1203541232)
assertEquals(lg(-308392901, 40188), lg(1278981121, -1661145698) >>> 254766480)
assertEquals(lg(-1667461656, 7259908), lg(1313272948, 929268302) >>> 1175504903)
assertEquals(lg(99018, 0), lg(1982277801, -1050318135) >>> 629735727)
assertEquals(lg(16237, 0), lg(-610510955, 1064153335) >>> 577897264)
assertEquals(lg(689994, 0), lg(1859860682, 1413109554) >>> 243415787)
assertEquals(lg(4088, 0), lg(1757351444, -7991214) >>> -1844808396)
assertEquals(lg(48441534, 0), lg(-1277568919, -1194709070) >>> -2102413146)
assertEquals(lg(42961906, 0), lg(-1768551066, 1342559) >>> 365466523)
assertEquals(lg(1946, 0), lg(1051996382, -213518283) >>> -717261067)
assertEquals(lg(-605712863, 10), lg(451444747, -1380034334) >>> -675522340)
assertEquals(lg(8, 0), lg(605006440, -1956088854) >>> 192236860)
assertEquals(lg(-152492078, 258), lg(-384174131, -2122615661) >>> -1278414057)
assertEquals(lg(-1650335224, 9146646), lg(-1579022332, -1953425763) >>> 2134440904)
assertEquals(lg(175996054, 0), lg(-433112808, -1479030417) >>> -1873327132)
assertEquals(lg(771890457, 0), lg(-1786180708, 385945228) >>> 1526047775)
// Shift count 93595840 has all low 6 bits clear, so the value is unchanged.
assertEquals(lg(868056695, -1200391723), lg(868056695, -1200391723) >>> 93595840)
assertEquals(lg(88233, 0), lg(1335240662, -1403745666) >>> 1625850351)
assertEquals(lg(21, 0), lg(-681452715, -1446696044) >>> -742234373)
assertEquals(lg(200097858, 0), lg(301750839, 1600782865) >>> 1678034787)
assertEquals(lg(1, 0), lg(-2077889650, 445749598) >>> 363036476)
assertEquals(lg(-1160719403, 3135), lg(-1633078438, 1644025478) >>> -1297864237)
assertEquals(lg(27660, 0), lg(1159483779, 906375175) >>> -1204888593)
assertEquals(lg(1096217739, 131290637), lg(179807326, 1050325098) >>> -1598422013)
assertEquals(lg(61, 0), lg(952383136, -193355640) >>> 415626042)
assertEquals(lg(12362394, 0), lg(972435428, -1130194211) >>> -1259042456)
assertEquals(lg(-924965860, 8483), lg(605823642, 555993310) >>> 1780437072)
assertEquals(lg(88, 0), lg(665774635, 184915839) >>> 1729784373)
assertEquals(lg(27109, 0), lg(-263808048, -741669613) >>> -204793551)
assertEquals(lg(-5828381, 10), lg(-954198224, 369053217) >>> 768150041)
}
@Test def shift_arithmetic_right_>>(): Unit = {
// Precomputed vectors for the 64-bit arithmetic (sign-extending) right
// shift `>>`. Negative inputs produce results padded with lg(..., -1)
// high words, verifying sign extension; shift counts are arbitrary Ints.
assertEquals(lg(144041519, 2813487), lg(-1780076655, 720252680) >> -1316031160)
assertEquals(lg(1519, 0), lg(234061537, 796729805) >> 1452874739)
assertEquals(lg(-935479627, 124), lg(1523206972, 1046748891) >> 1356453463)
assertEquals(lg(-15335, -1), lg(1866043067, -2009962307) >> 393061105)
assertEquals(lg(5, 0), lg(89507691, 183545611) >> -1980770119)
assertEquals(lg(-1283367734, 14309038), lg(-1062312593, 1831556953) >> 1545082311)
assertEquals(lg(523169438, 0), lg(-1568293714, 523169438) >> -2119005984)
assertEquals(lg(-1704853904, -731301), lg(-2013675422, -748851607) >> 511130378)
assertEquals(lg(345569760, -46), lg(-521585277, -770402055) >> -1176556648)
assertEquals(lg(1777038301, 61), lg(-145701849, 257587932) >> -1512809002)
assertEquals(lg(-51, -1), lg(-973180026, -1694110170) >> 2083093369)
assertEquals(lg(-5, -1), lg(1761120319, -539393529) >> -207994821)
assertEquals(lg(-587262921, -3246345), lg(-30904807, -1662128199) >> -638486135)
assertEquals(lg(-10706, -1), lg(1812122560, -701571284) >> 611632432)
assertEquals(lg(7484398, 100362842), lg(119750375, 1605805472) >> 244039684)
assertEquals(lg(1, 0), lg(269986751, 1459449758) >> -439796226)
assertEquals(lg(7, 0), lg(-1969890020, 2011804532) >> -652735044)
assertEquals(lg(-2130588861, 98), lg(-1582649974, 826310885) >> 613066583)
assertEquals(lg(-669931160, -697), lg(756433442, -1459944907) >> -775565931)
assertEquals(lg(933146972, -1), lg(1678061064, -1680910162) >> -531660641)
assertEquals(lg(1601141595, 1298147), lg(1870355258, 332325727) >> -434372344)
assertEquals(lg(-1047936567, -129548), lg(1886551280, -2122502046) >> -763866098)
assertEquals(lg(-72307, -1), lg(-1169141408, -592336405) >> -1841005139)
assertEquals(lg(72262, 0), lg(686282122, 295988927) >> 69079212)
assertEquals(lg(-1582088844, -23862710), lg(1825529126, -1527213400) >> 1371712838)
assertEquals(lg(70395261, 0), lg(633149491, 1126324183) >> 1948323684)
assertEquals(lg(-329, -1), lg(-363762029, -1377253181) >> -1243200330)
assertEquals(lg(1924403917, -21), lg(-1694234908, -689608667) >> 728732313)
assertEquals(lg(-62655, -1), lg(1319661865, -2053067582) >> -777879057)
// Shift count 373478400 has all low 6 bits clear, so the value is unchanged.
assertEquals(lg(-1472236443, 19900875), lg(-1472236443, 19900875) >> 373478400)
assertEquals(lg(-1, -1), lg(-1719111010, -1766452468) >> 942391743)
assertEquals(lg(5131, 0), lg(-624682758, 1345231635) >> -813574478)
assertEquals(lg(9, 0), lg(1316519660, 314590421) >> -641829383)
assertEquals(lg(-14492, -1), lg(-1380652891, -474856510) >> -920501329)
assertEquals(lg(40, 0), lg(-2084688189, 1352268039) >> -177471111)
assertEquals(lg(-868447412, 13901269), lg(507881044, 1779362534) >> -508943033)
assertEquals(lg(-37529, -1), lg(1742323077, -1229747072) >> 401183471)
assertEquals(lg(376386, 0), lg(346182810, 770838817) >> 797274667)
assertEquals(lg(-1822, -1), lg(828281422, -477411393) >> 1298272370)
assertEquals(lg(1021967080, -2560), lg(-341778503, -671026265) >> 532386578)
assertEquals(lg(-1683940185, 34921), lg(-1907127360, 1144311248) >> -2131012273)
assertEquals(lg(-121723, -1), lg(756366897, -1994294687) >> -1642432978)
assertEquals(lg(-644688038, 9473), lg(-1363894143, 1241756453) >> 1681307793)
assertEquals(lg(-278047, -1), lg(1708006412, -1138876437) >> 2010442220)
assertEquals(lg(872834, 0), lg(-664430929, 446891142) >> -1707024855)
assertEquals(lg(-1, -1), lg(-1904131429, -938887) >> -829231944)
assertEquals(lg(-2101780246, 11998), lg(-1043053889, 1572668786) >> 309495249)
assertEquals(lg(-11427, -1), lg(563683687, -1497656119) >> -176819791)
assertEquals(lg(201, 0), lg(-627312011, 421917318) >> 2056663541)
assertEquals(lg(-104838948, -3), lg(-904956287, -543423347) >> -617227620)
}
@Test def negate_-(): Unit = {
// Vectors for 64-bit unary negation. Starts with the algebraic corner
// cases (0, ±1, MaxVal, and MinVal — which is its own negation under
// two's complement wraparound), then precomputed random pairs.
assertEquals(lg(0), -lg(0))
assertEquals(lg(1), -lg(-1))
assertEquals(lg(-1), -lg(1))
assertEquals(lg(1, -2147483648), -MaxVal)
// Two's complement: -MinVal overflows back to MinVal.
assertEquals(MinVal, -MinVal)
assertEquals(lg(0, -1), -lg(0, 1))
assertEquals(lg(792771844, -1518464955), -lg(-792771844, 1518464954))
assertEquals(lg(1313283210, -1172119606), -lg(-1313283210, 1172119605))
assertEquals(lg(-1034897743, -341494686), -lg(1034897743, 341494685))
assertEquals(lg(-924881290, 1614058538), -lg(924881290, -1614058539))
assertEquals(lg(-1636891236, -1405401040), -lg(1636891236, 1405401039))
assertEquals(lg(2044349674, -477271433), -lg(-2044349674, 477271432))
assertEquals(lg(1426086684, -1493816436), -lg(-1426086684, 1493816435))
assertEquals(lg(-2125201680, 1667846199), -lg(2125201680, -1667846200))
assertEquals(lg(161054645, -1272528725), -lg(-161054645, 1272528724))
assertEquals(lg(-1013390126, -1323844683), -lg(1013390126, 1323844682))
assertEquals(lg(-1028806094, -691441881), -lg(1028806094, 691441880))
assertEquals(lg(1060422114, -11477649), -lg(-1060422114, 11477648))
assertEquals(lg(1366334123, -2046238761), -lg(-1366334123, 2046238760))
assertEquals(lg(1307711795, 940346049), -lg(-1307711795, -940346050))
assertEquals(lg(421687960, -250174762), -lg(-421687960, 250174761))
assertEquals(lg(379452754, -843386803), -lg(-379452754, 843386802))
assertEquals(lg(-1251296999, 1144268297), -lg(1251296999, -1144268298))
assertEquals(lg(-690359429, -1676679602), -lg(690359429, 1676679601))
assertEquals(lg(1952563749, -882544420), -lg(-1952563749, 882544419))
assertEquals(lg(-1420900897, -1865273591), -lg(1420900897, 1865273590))
assertEquals(lg(115947827, -832851217), -lg(-115947827, 832851216))
assertEquals(lg(-1834973959, -1423776005), -lg(1834973959, 1423776004))
assertEquals(lg(1376766876, 1519617584), -lg(-1376766876, -1519617585))
assertEquals(lg(-1845217535, 724725865), -lg(1845217535, -724725866))
assertEquals(lg(-1133294381, 699400553), -lg(1133294381, -699400554))
assertEquals(lg(113507585, 615978889), -lg(-113507585, -615978890))
assertEquals(lg(-1839784424, 1163726652), -lg(1839784424, -1163726653))
assertEquals(lg(1065777168, 1301742163), -lg(-1065777168, -1301742164))
assertEquals(lg(334075220, -1058529734), -lg(-334075220, 1058529733))
assertEquals(lg(1443112398, 1148167880), -lg(-1443112398, -1148167881))
assertEquals(lg(1647739462, 12310882), -lg(-1647739462, -12310883))
assertEquals(lg(1461318149, 518941731), -lg(-1461318149, -518941732))
assertEquals(lg(56833825, -162898592), -lg(-56833825, 162898591))
assertEquals(lg(-680096727, -1760413869), -lg(680096727, 1760413868))
assertEquals(lg(461541717, -1103626950), -lg(-461541717, 1103626949))
assertEquals(lg(1287248387, 1483137214), -lg(-1287248387, -1483137215))
assertEquals(lg(-1681467124, -1197977023), -lg(1681467124, 1197977022))
assertEquals(lg(-310946355, 885055747), -lg(310946355, -885055748))
assertEquals(lg(-717629012, -1299204708), -lg(717629012, 1299204707))
assertEquals(lg(800584851, 350245993), -lg(-800584851, -350245994))
assertEquals(lg(1911014238, -441020786), -lg(-1911014238, 441020785))
assertEquals(lg(-1647080824, -1197295589), -lg(1647080824, 1197295588))
assertEquals(lg(-925751968, -479541400), -lg(925751968, 479541399))
assertEquals(lg(-656919119, 1574890072), -lg(656919119, -1574890073))
assertEquals(lg(-1833364814, 432106462), -lg(1833364814, -432106463))
assertEquals(lg(-315730911, -1990201785), -lg(315730911, 1990201784))
assertEquals(lg(1218524771, -572482048), -lg(-1218524771, 572482047))
assertEquals(lg(276668811, 2002398729), -lg(-276668811, -2002398730))
assertEquals(lg(1489416833, 834462753), -lg(-1489416833, -834462754))
assertEquals(lg(2066446588, 688546120), -lg(-2066446588, -688546121))
}
@Test def plus_+(): Unit = {
// Precomputed vectors for 64-bit addition `+`. The second operand is
// always a 32-bit value sign-extended into a Long (high word 0 or -1),
// so these vectors exercise carry propagation between the two halves.
assertEquals(lg(802149732, -566689627), lg(-202981355, -566689628) + lg(1005131087, 0))
assertEquals(lg(902769101, 1674149440), lg(1153016325, 1674149440) + lg(-250247224, -1))
assertEquals(lg(1128646485, -1965159800), lg(1701699755, -1965159800) + lg(-573053270, -1))
assertEquals(lg(66936416, -973893589), lg(-1183294843, -973893590) + lg(1250231259, 0))
assertEquals(lg(-155818001, 449544496), lg(-2145882999, 449544496) + lg(1990064998, 0))
assertEquals(lg(-1244599644, -917980205), lg(-528276750, -917980205) + lg(-716322894, -1))
assertEquals(lg(580594010, 1794016499), lg(-1061043923, 1794016498) + lg(1641637933, 0))
assertEquals(lg(-1874551871, 1883156001), lg(-315483661, 1883156001) + lg(-1559068210, -1))
assertEquals(lg(-611587809, 95409025), lg(-1899047326, 95409025) + lg(1287459517, 0))
assertEquals(lg(-1393747885, 1167571449), lg(-705065818, 1167571449) + lg(-688682067, -1))
assertEquals(lg(1135734754, -607437553), lg(-192210545, -607437554) + lg(1327945299, 0))
assertEquals(lg(545472170, -2007097641), lg(11453726, -2007097641) + lg(534018444, 0))
assertEquals(lg(-1984029353, -1191350400), lg(1809973610, -1191350400) + lg(500964333, 0))
assertEquals(lg(1031291620, 108684756), lg(972641234, 108684756) + lg(58650386, 0))
assertEquals(lg(-1375760766, 127758048), lg(-1511325903, 127758048) + lg(135565137, 0))
assertEquals(lg(640679472, 429508922), lg(-942832491, 429508921) + lg(1583511963, 0))
assertEquals(lg(-820503583, -594798242), lg(1500842230, -594798242) + lg(1973621483, 0))
assertEquals(lg(1875301895, 910473912), lg(-1088230684, 910473912) + lg(-1331434717, -1))
assertEquals(lg(-1755864971, 378724963), lg(798219431, 378724963) + lg(1740882894, 0))
assertEquals(lg(468052904, -683558197), lg(-1763683665, -683558197) + lg(-2063230727, -1))
assertEquals(lg(-1488850347, -1636478025), lg(627629519, -1636478024) + lg(-2116479866, -1))
assertEquals(lg(915882407, -338305025), lg(-526665240, -338305026) + lg(1442547647, 0))
assertEquals(lg(-950882103, -466473801), lg(-1265295286, -466473801) + lg(314413183, 0))
assertEquals(lg(-673278223, -1417005301), lg(-1412852606, -1417005301) + lg(739574383, 0))
assertEquals(lg(-1565299836, -2035157269), lg(708993121, -2035157269) + lg(2020674339, 0))
assertEquals(lg(638729196, 1182702858), lg(847269791, 1182702858) + lg(-208540595, -1))
assertEquals(lg(-1453651445, -1902383955), lg(97084677, -1902383954) + lg(-1550736122, -1))
assertEquals(lg(1116569659, -606967004), lg(-267181534, -606967005) + lg(1383751193, 0))
assertEquals(lg(529048030, 1063184820), lg(-904322265, 1063184819) + lg(1433370295, 0))
assertEquals(lg(-499260224, 101142421), lg(1841727454, 101142421) + lg(1953979618, 0))
assertEquals(lg(1452864874, 1045175929), lg(-1716387490, 1045175929) + lg(-1125714932, -1))
assertEquals(lg(982736721, 1506316757), lg(-1020814821, 1506316756) + lg(2003551542, 0))
assertEquals(lg(-1478064805, 1107506955), lg(467820886, 1107506956) + lg(-1945885691, -1))
assertEquals(lg(1436947166, -57552832), lg(-103701719, -57552833) + lg(1540648885, 0))
assertEquals(lg(3887456, -414981457), lg(1280780483, -414981457) + lg(-1276893027, -1))
assertEquals(lg(939083871, 606376864), lg(-1505747919, 606376864) + lg(-1850135506, -1))
assertEquals(lg(-1161495325, -606274238), lg(-1797917239, -606274238) + lg(636421914, 0))
assertEquals(lg(2146013782, 52949338), lg(-551974000, 52949338) + lg(-1596979514, -1))
assertEquals(lg(-159062053, -623553409), lg(484182807, -623553408) + lg(-643244860, -1))
assertEquals(lg(1680160313, 371486519), lg(1170065239, 371486519) + lg(510095074, 0))
assertEquals(lg(-2071737549, -251530660), lg(553737773, -251530660) + lg(1669491974, 0))
assertEquals(lg(793877651, -324566030), lg(1363264202, -324566030) + lg(-569386551, -1))
assertEquals(lg(1897556965, 1255689015), lg(1461362302, 1255689015) + lg(436194663, 0))
assertEquals(lg(-540868058, 718534179), lg(-1463314706, 718534179) + lg(922446648, 0))
assertEquals(lg(2547531, -716998232), lg(-1684072850, -716998233) + lg(1686620381, 0))
assertEquals(lg(-1709813271, -2086072551), lg(-183257712, -2086072551) + lg(-1526555559, -1))
assertEquals(lg(-2134341942, -1223154956), lg(-485818523, -1223154956) + lg(-1648523419, -1))
assertEquals(lg(1634619686, -1934382665), lg(392330048, -1934382665) + lg(1242289638, 0))
assertEquals(lg(-1409927090, -75135322), lg(1907808353, -75135322) + lg(977231853, 0))
assertEquals(lg(-1393001322, 1362535802), lg(88305723, 1362535803) + lg(-1481307045, -1))
}
@Test def minus_-(): Unit = {
// Precomputed vectors for 64-bit subtraction `-`. As in plus_+, the
// second operand is a sign-extended 32-bit value (high word 0 or -1),
// exercising borrow propagation between the two halves.
// Whitebox corner case
assertEquals(lg(-1), lg(0) - lg(1))
assertEquals(lg(1318078695, 462416044), lg(406229717, 462416044) - lg(-911848978, -1))
assertEquals(lg(459412414, 466142261), lg(873646396, 466142261) - lg(414233982, 0))
assertEquals(lg(1749422706, -573388520), lg(-2077914189, -573388520) - lg(467630401, 0))
assertEquals(lg(855866353, -1980988131), lg(-789253983, -1980988132) - lg(-1645120336, -1))
assertEquals(lg(1858485462, 1825277273), lg(-482388232, 1825277273) - lg(1954093602, 0))
assertEquals(lg(1211608504, -1077757379), lg(-1616159373, -1077757379) - lg(1467199419, 0))
assertEquals(lg(-1391411781, -1825579414), lg(-105778670, -1825579414) - lg(1285633111, 0))
assertEquals(lg(1573921037, -2018677385), lg(1306759468, -2018677385) - lg(-267161569, -1))
assertEquals(lg(2075838974, -289291128), lg(618139116, -289291128) - lg(-1457699858, -1))
assertEquals(lg(600013127, -1980710784), lg(1736445522, -1980710784) - lg(1136432395, 0))
assertEquals(lg(-558434179, 21136449), lg(-1970971750, 21136449) - lg(-1412537571, -1))
assertEquals(lg(-343650116, 229693364), lg(-1491842755, 229693364) - lg(-1148192639, -1))
assertEquals(lg(1686071974, -2064363005), lg(2125082313, -2064363005) - lg(439010339, 0))
assertEquals(lg(-1587252411, -1887690341), lg(922634658, -1887690341) - lg(-1785080227, -1))
assertEquals(lg(-992416688, 1754335328), lg(478015362, 1754335329) - lg(1470432050, 0))
assertEquals(lg(1718268050, -845578935), lg(-1788952896, -845578935) - lg(787746350, 0))
assertEquals(lg(1316319511, -1479013672), lg(-1177368338, -1479013672) - lg(1801279447, 0))
assertEquals(lg(1568876561, -2147323821), lg(1761081661, -2147323821) - lg(192205100, 0))
assertEquals(lg(-1122491731, 1604940224), lg(261772552, 1604940225) - lg(1384264283, 0))
assertEquals(lg(1556996455, 1018615990), lg(-1441241840, 1018615990) - lg(1296729001, 0))
assertEquals(lg(-52258673, -155632234), lg(907527568, -155632233) - lg(959786241, 0))
assertEquals(lg(1911811399, 1534910973), lg(1509034771, 1534910973) - lg(-402776628, -1))
assertEquals(lg(1234505303, -718856464), lg(-344668006, -718856465) - lg(-1579173309, -1))
assertEquals(lg(1263823751, 1792314521), lg(-2096618226, 1792314521) - lg(934525319, 0))
assertEquals(lg(-1901870284, -977488448), lg(1861956484, -977488448) - lg(-531140528, -1))
assertEquals(lg(170060904, -1532994269), lg(-691455907, -1532994270) - lg(-861516811, -1))
assertEquals(lg(-417244722, -946809431), lg(-693769914, -946809431) - lg(-276525192, -1))
assertEquals(lg(1392505816, -834216711), lg(-1698674051, -834216711) - lg(1203787429, 0))
assertEquals(lg(339105023, -930632047), lg(1453492556, -930632047) - lg(1114387533, 0))
assertEquals(lg(1588670098, -422836102), lg(-516102112, -422836103) - lg(-2104772210, -1))
assertEquals(lg(-1793332542, 1839759286), lg(1194707556, 1839759286) - lg(-1306927198, -1))
assertEquals(lg(-1933743595, -1652840750), lg(1188016800, -1652840750) - lg(-1173206901, -1))
assertEquals(lg(1172675504, 1790839027), lg(-1268512415, 1790839027) - lg(1853779377, 0))
assertEquals(lg(-2038245078, 275932678), lg(-777434907, 275932678) - lg(1260810171, 0))
assertEquals(lg(-640120196, 658575618), lg(607917442, 658575619) - lg(1248037638, 0))
assertEquals(lg(-939204613, -2089057829), lg(-1490388970, -2089057829) - lg(-551184357, -1))
assertEquals(lg(-2089897031, 992436418), lg(-1342917439, 992436418) - lg(746979592, 0))
assertEquals(lg(-767046771, -1192540532), lg(-1045496394, -1192540532) - lg(-278449623, -1))
assertEquals(lg(735191894, -683257085), lg(1555450000, -683257085) - lg(820258106, 0))
assertEquals(lg(2026420598, 481753248), lg(1022728181, 481753248) - lg(-1003692417, -1))
assertEquals(lg(-2132649422, 1411964223), lg(2028304312, 1411964223) - lg(-134013562, -1))
assertEquals(lg(1346424260, -217374406), lg(704117341, -217374406) - lg(-642306919, -1))
assertEquals(lg(-692878557, 278237510), lg(313351245, 278237511) - lg(1006229802, 0))
assertEquals(lg(-1545280043, 2054685372), lg(2076724262, 2054685372) - lg(-672962991, -1))
assertEquals(lg(1156651977, 261806288), lg(1990098163, 261806288) - lg(833446186, 0))
assertEquals(lg(-244547539, 1626774417), lg(1425435353, 1626774418) - lg(1669982892, 0))
assertEquals(lg(-125857115, -1714068645), lg(2084724465, -1714068645) - lg(-2084385716, -1))
assertEquals(lg(-2124426763, -543675020), lg(-1799809279, -543675020) - lg(324617484, 0))
assertEquals(lg(-2145169231, -602489858), lg(1972622018, -602489858) - lg(-177176047, -1))
assertEquals(lg(408960051, 967789979), lg(883147297, 967789979) - lg(474187246, 0))
}
/** Exhaustive test vectors for `Long` multiplication (`*`).
 *
 *  Each case asserts `expected == a * b`. Operands are built with the
 *  `lg(lo, hi)` helper (defined earlier in this file), which — judging by the
 *  usage pattern `lg(x, 0)` for small non-negative values and `lg(x, -1)` for
 *  small negative values — presumably assembles a Long from its low and high
 *  32-bit words (NOTE(review): confirm against the `lg` definition).
 *
 *  The expected values are precomputed constants; results wrap modulo 2^64 as
 *  mandated by two's-complement 64-bit multiplication.
 */
@Test def times_*(): Unit = {
assertEquals(lg(-1056314208, 1039912134), lg(-1436299491, 1172705251) * lg(1721031968, 0))
assertEquals(lg(15417694, -1235494072), lg(-1754547158, 1592794750) * lg(-850659149, -1))
assertEquals(lg(-1312839754, -486483117), lg(-582562130, 1508550574) * lg(-2054981347, -1))
assertEquals(lg(-377676239, 1969822597), lg(-517256163, 1107889737) * lg(324089381, 0))
assertEquals(lg(-1426078720, -1379092277), lg(1862517504, -2146745095) * lg(2043533548, 0))
assertEquals(lg(-1611894400, 514550890), lg(-1341087062, 93674761) * lg(1272468928, 0))
assertEquals(lg(88803236, -172420721), lg(-1911825604, 1026411170) * lg(244738503, 0))
assertEquals(lg(1486387579, 668666773), lg(2102189793, 425022510) * lg(750432219, 0))
assertEquals(lg(913918418, 2124658288), lg(-1628887094, 2043879870) * lg(-1367964491, -1))
assertEquals(lg(-1067082241, 864193319), lg(454909009, -1096315634) * lg(-461844145, -1))
assertEquals(lg(949541055, 403324299), lg(-1346593793, -331776468) * lg(1495188289, 0))
assertEquals(lg(-232871624, -1943313306), lg(39946028, -363039140) * lg(-1134101206, -1))
assertEquals(lg(-528828160, -1884969955), lg(769959254, -432157368) * lg(-488368768, -1))
assertEquals(lg(913322937, -2105457977), lg(1975078475, 1181124823) * lg(-1852476533, -1))
assertEquals(lg(1594278208, 943829214), lg(-2118478876, -1521449422) * lg(-235907376, -1))
assertEquals(lg(-50678328, 2146883835), lg(-192590815, -1552754278) * lg(990887112, 0))
assertEquals(lg(1779498513, -1732099612), lg(-74714605, 386143916) * lg(1634792395, 0))
assertEquals(lg(982209626, 857499597), lg(1839773441, -590412588) * lg(799604314, 0))
assertEquals(lg(1806268816, -990479821), lg(1395571130, -1228992407) * lg(1440046952, 0))
assertEquals(lg(1683728223, -957382628), lg(-1094818235, 1759139279) * lg(-156634285, -1))
assertEquals(lg(-1590791694, 595489480), lg(853844787, 525523561) * lg(600761926, 0))
assertEquals(lg(1353714367, 146465211), lg(-903115469, 793487771) * lg(1986597957, 0))
assertEquals(lg(1421874569, -1462441210), lg(-830036223, 830164681) * lg(-1711884663, -1))
assertEquals(lg(-962035602, -2086325336), lg(1514898873, 1802395563) * lg(1763957470, 0))
assertEquals(lg(213232144, -1084932179), lg(-1931885288, 136587512) * lg(-241565738, -1))
assertEquals(lg(-915935202, 1495104097), lg(571274323, 1264898114) * lg(1823828906, 0))
assertEquals(lg(1116543789, -1473151538), lg(-15708939, -2105030313) * lg(48280153, 0))
assertEquals(lg(-1230228445, -570579388), lg(1792017337, -1626094957) * lg(301685947, 0))
assertEquals(lg(1335719116, 1447187791), lg(-1942632452, -691115342) * lg(-889918259, -1))
assertEquals(lg(1398640985, -1330552693), lg(-683458011, -1409200935) * lg(-996910555, -1))
assertEquals(lg(-402621042, 1775759707), lg(562125786, -1303526635) * lg(-1761056509, -1))
assertEquals(lg(129149596, -78429064), lg(2115902292, -1194658096) * lg(-1549721205, -1))
assertEquals(lg(1706925885, 1413499189), lg(1852083423, 330104035) * lg(1414822755, 0))
assertEquals(lg(-722178384, 1850552711), lg(-1623207532, 1442771787) * lg(-948878276, -1))
assertEquals(lg(545021767, -1389368834), lg(-898643831, 773279296) * lg(1294488911, 0))
assertEquals(lg(1541594150, 820379725), lg(421823854, 802578424) * lg(1394107269, 0))
assertEquals(lg(-279324848, 1175391379), lg(1589092022, 237831212) * lg(-763790472, -1))
assertEquals(lg(2089067814, 975727054), lg(-1247207721, -370556328) * lg(1449901386, 0))
assertEquals(lg(-1977714127, -377823390), lg(109386811, 368962517) * lg(1406834819, 0))
assertEquals(lg(1759713497, -312922364), lg(2135299059, -798752868) * lg(-1861488893, -1))
assertEquals(lg(1030024362, -795941843), lg(-695671854, 1917612060) * lg(2083344781, 0))
assertEquals(lg(-704748314, 388197332), lg(250669253, -442179349) * lg(-552836178, -1))
assertEquals(lg(758103782, -158300478), lg(1237744278, 206295616) * lg(-1547545223, -1))
assertEquals(lg(-629736326, 810097466), lg(492775518, 1691641907) * lg(1172634963, 0))
assertEquals(lg(610754048, 1997636055), lg(-1549380722, 49835026) * lg(-1645815552, -1))
assertEquals(lg(1696857284, 1549588995), lg(1850430325, -1942955614) * lg(-295254732, -1))
assertEquals(lg(-66011146, -376837532), lg(-1276671498, -1984743584) * lg(-1583554303, -1))
assertEquals(lg(2033040344, -167450557), lg(-2127158934, -2058421178) * lg(1620104636, 0))
assertEquals(lg(-1886196376, -31345953), lg(69958717, -772556465) * lg(21655944, 0))
assertEquals(lg(-38147573, -1269583268), lg(406538265, -107036516) * lg(2077087683, 0))
}
/** Exhaustive test vectors for `Long` division (`/`).
 *
 *  Coverage, in order:
 *    - division by zero must throw `ArithmeticException` (five variants);
 *    - boundary values: `Int.MinValue`, `Int.MaxValue + 1`, `Long.MaxValue`,
 *      `Long.MinValue` — including the overflow case `MinVal / -1 == MinVal`
 *      (two's-complement wrap, per JVM integer-division semantics);
 *    - bulk vectors grouped by operand magnitude (the `// a / b` section
 *      comments): int32 = fits in 32 bits, int53 = fits in a double's exact
 *      integer range, big = needs the full 64 bits. NOTE(review): the grouping
 *      presumably mirrors fast paths in the Long division implementation under
 *      test — confirm against that implementation.
 *
 *  Operands use the `lg(lo, hi)` helper defined earlier in this file.
 */
@Test def divide_/(): Unit = {
expectThrows(classOf[ArithmeticException], lg(0) / lg(0))
expectThrows(classOf[ArithmeticException], lg(5, 0) / lg(0))
expectThrows(classOf[ArithmeticException], lg(0, 5) / lg(0))
expectThrows(classOf[ArithmeticException], lg(-1) / lg(0))
expectThrows(classOf[ArithmeticException], lg(-1, 0) / lg(0))
assertEquals(IntMaxValPlus1, IntMinVal / lg(-1))
assertEquals(lg(-1), IntMinVal / IntMaxValPlus1)
assertEquals(IntMinVal, IntMaxValPlus1 / lg(-1))
assertEquals(lg(-1), IntMaxValPlus1 / IntMinVal)
assertEquals(lg(1, -2147483648), MaxVal / lg(-1))
assertEquals(MinVal, MinVal / lg(1))
// Long.MinValue / -1 overflows and wraps back to Long.MinValue
assertEquals(MinVal, MinVal / lg(-1))
// int32 / int32
assertEquals(lg(1, 0), lg(-10426835, -1) / lg(-6243356, -1))
assertEquals(lg(-291, -1), lg(49659080, 0) / lg(-170373, -1))
assertEquals(lg(3, 0), lg(97420, 0) / lg(27521, 0))
assertEquals(lg(26998, 0), lg(-9881291, -1) / lg(-366, -1))
assertEquals(lg(0, 0), lg(-40, -1) / lg(81, 0))
assertEquals(lg(0, 0), lg(-6007, -1) / lg(-326806, -1))
assertEquals(lg(-1, -1), lg(202, 0) / lg(-112, -1))
assertEquals(lg(0, 0), lg(0, 0) / lg(47, 0))
assertEquals(lg(323816, 0), lg(22667160, 0) / lg(70, 0))
assertEquals(lg(0, 0), lg(254, 0) / lg(-307349204, -1))
assertEquals(lg(0, 0), lg(-17, -1) / lg(-44648, -1))
assertEquals(lg(-40, -1), lg(39646, 0) / lg(-976, -1))
assertEquals(lg(0, 0), lg(9, 0) / lg(315779722, 0))
assertEquals(lg(0, 0), lg(-2674, -1) / lg(-3051991, -1))
assertEquals(lg(0, 0), lg(-37697, -1) / lg(2015928, 0))
assertEquals(lg(0, 0), lg(-13, -1) / lg(-31, -1))
assertEquals(lg(0, 0), lg(6, 0) / lg(-334, -1))
assertEquals(lg(8, 0), lg(-15989, -1) / lg(-1918, -1))
assertEquals(lg(8746, 0), lg(-113261535, -1) / lg(-12950, -1))
assertEquals(lg(55322, 0), lg(-6362112, -1) / lg(-115, -1))
assertEquals(lg(0, 0), lg(455, 0) / lg(13919, 0))
assertEquals(lg(36190, 0), lg(293468259, 0) / lg(8109, 0))
assertEquals(lg(1, 0), lg(-48287007, -1) / lg(-27531186, -1))
assertEquals(lg(349634, 0), lg(1048904, 0) / lg(3, 0))
assertEquals(lg(0, 0), lg(-34, -1) / lg(3949717, 0))
assertEquals(lg(-1, -1), lg(1449, 0) / lg(-983, -1))
assertEquals(lg(-18537151, -1), lg(18537151, 0) / lg(-1, -1))
assertEquals(lg(0, 0), lg(14037, 0) / lg(23645, 0))
assertEquals(lg(-4, -1), lg(1785, 0) / lg(-398, -1))
assertEquals(lg(0, 0), lg(346, 0) / lg(2198158, 0))
assertEquals(lg(-802, -1), lg(-3517419, -1) / lg(4381, 0))
assertEquals(lg(-6, -1), lg(6, 0) / lg(-1, -1))
assertEquals(lg(39, 0), lg(-822, -1) / lg(-21, -1))
assertEquals(lg(0, 0), lg(3629, 0) / lg(282734, 0))
assertEquals(lg(-92367, -1), lg(-278856469, -1) / lg(3019, 0))
assertEquals(lg(0, 0), lg(-13, -1) / lg(37, 0))
assertEquals(lg(0, 0), lg(-4, -1) / lg(47150459, 0))
assertEquals(lg(0, 0), lg(-26, -1) / lg(-210691, -1))
assertEquals(lg(0, 0), lg(-21294, -1) / lg(156839456, 0))
assertEquals(lg(0, 0), lg(-5, -1) / lg(-25644, -1))
assertEquals(lg(0, 0), lg(-1009, -1) / lg(28100, 0))
assertEquals(lg(-857, -1), lg(16282815, 0) / lg(-18989, -1))
assertEquals(lg(-7, -1), lg(-2201086, -1) / lg(276963, 0))
assertEquals(lg(-300, -1), lg(11412578, 0) / lg(-37989, -1))
assertEquals(lg(0, 0), lg(8406900, 0) / lg(239727371, 0))
assertEquals(lg(0, 0), lg(-1, -1) / lg(-479069, -1))
assertEquals(lg(0, 0), lg(4, 0) / lg(-21776, -1))
assertEquals(lg(-16812960, -1), lg(-16812960, -1) / lg(1, 0))
assertEquals(lg(0, 0), lg(10873, 0) / lg(57145, 0))
assertEquals(lg(0, 0), lg(-1, -1) / lg(-7, -1))
// int32 / int53
assertEquals(lg(0, 0), lg(-6975858, -1) / lg(42227636, 14))
assertEquals(lg(0, 0), lg(-1, -1) / lg(370644892, 82735))
assertEquals(lg(0, 0), lg(43, 0) / lg(-1602218381, 49))
assertEquals(lg(0, 0), lg(4063968, 0) / lg(973173538, 23810))
assertEquals(lg(0, 0), lg(-388987094, -1) / lg(-241988155, 1723))
assertEquals(lg(0, 0), lg(5939808, 0) / lg(-1882484681, 12))
assertEquals(lg(0, 0), lg(7, 0) / lg(-385609304, 1342))
assertEquals(lg(0, 0), lg(-1175803932, -1) / lg(297649103, 2408))
assertEquals(lg(0, 0), lg(464610492, 0) / lg(829919518, 2777))
assertEquals(lg(0, 0), lg(214483, 0) / lg(1502817270, 8078))
// int32 / big
assertEquals(lg(0, 0), lg(211494165, 0) / lg(1365318534, 14804989))
assertEquals(lg(0, 0), lg(5353, 0) / lg(-1032992082, -394605386))
assertEquals(lg(0, 0), lg(2926, 0) / lg(26982087, -226814570))
assertEquals(lg(0, 0), lg(-6, -1) / lg(-1339229562, -580578613))
assertEquals(lg(0, 0), lg(-8, -1) / lg(-108570365, 4920615))
assertEquals(lg(0, 0), lg(-585878041, -1) / lg(551925027, -1296114209))
assertEquals(lg(0, 0), lg(-4, -1) / lg(474545806, 64068407))
assertEquals(lg(0, 0), lg(34, 0) / lg(-137127086, -18652281))
assertEquals(lg(0, 0), lg(785315, 0) / lg(-881374655, 29722835))
assertEquals(lg(0, 0), lg(713146, 0) / lg(1442548271, 2727525))
// int53 / int32
assertEquals(lg(-578207, -1), lg(397755625, 53271) / lg(-395701427, -1))
assertEquals(lg(-560062154, 0), lg(-1680186460, 2) / lg(3, 0))
assertEquals(lg(-926675094, 18), lg(1514942014, 56) / lg(3, 0))
assertEquals(lg(-162400270, -1), lg(713597492, 1154) / lg(-30524, -1))
assertEquals(lg(-9, -1), lg(2028377478, 1) / lg(-691707459, -1))
assertEquals(lg(135006, 0), lg(1387175556, 73) / lg(2332622, 0))
assertEquals(lg(-200274428, -13), lg(1756997282, 1397) / lg(-116, -1))
assertEquals(lg(1125157, 0), lg(-1655346723, 0) / lg(2346, 0))
assertEquals(lg(997096, 0), lg(198249458, 5686) / lg(24492497, 0))
assertEquals(lg(1369365326, -302), lg(873090497, 11162) / lg(-37, -1))
assertEquals(lg(-2166511, -1), lg(360057887, 3519) / lg(-6976354, -1))
assertEquals(lg(1680790298, -2), lg(1115898639, 48) / lg(-30, -1))
assertEquals(lg(92036331, 1), lg(154624251, 955) / lg(935, 0))
assertEquals(lg(23215066, 0), lg(806830498, 1063) / lg(196698, 0))
assertEquals(lg(-13221428, -1), lg(-220365267, 21359) / lg(-6938757, -1))
assertEquals(lg(-973041595, -2009), lg(759822848, 648657) / lg(-323, -1))
assertEquals(lg(171873494, 1659), lg(-1180673754, 486098) / lg(293, 0))
assertEquals(lg(1583541189, 785), lg(1387172319, 769661) / lg(980, 0))
assertEquals(lg(-917576, -1), lg(-305851327, 2) / lg(-13709, -1))
assertEquals(lg(456092, 0), lg(577374631, 17) / lg(161353, 0))
assertEquals(lg(404991630, 376), lg(809983260, 752) / lg(2, 0))
assertEquals(lg(495082175, 39), lg(495082175, 39) / lg(1, 0))
assertEquals(lg(90893135, 0), lg(1455620681, 30929) / lg(1461502, 0))
assertEquals(lg(799104733, 0), lg(1388707384, 34362) / lg(184688, 0))
assertEquals(lg(1094556328, -70011), lg(2105854641, 140021) / lg(-2, -1))
assertEquals(lg(-1819673734, 1), lg(1310105355, 427420) / lg(271150, 0))
assertEquals(lg(-119338773, -6), lg(-236557650, 35455) / lg(-7052, -1))
assertEquals(lg(32825, 0), lg(-1127581476, 0) / lg(96492, 0))
assertEquals(lg(-57018115, -1), lg(2004387480, 7243) / lg(-545624, -1))
assertEquals(lg(-5950946, -1), lg(381447319, 2213) / lg(-1597249, -1))
assertEquals(lg(-811421531, -4249), lg(-1860702702, 12744) / lg(-3, -1))
assertEquals(lg(4741011, 0), lg(-548164065, 6487) / lg(5877480, 0))
assertEquals(lg(-1064193809, 45), lg(-476290317, 131491) / lg(2874, 0))
assertEquals(lg(228327608, 0), lg(499912484, 1) / lg(21, 0))
assertEquals(lg(99111506, 0), lg(-1509435894, 8467) / lg(366943, 0))
assertEquals(lg(-1209485521, -1), lg(-1580093356, 5) / lg(-20, -1))
assertEquals(lg(-319956618, -1), lg(1299112295, 55074) / lg(-739295, -1))
assertEquals(lg(-62197, -1), lg(-1405948570, 43) / lg(-3015755, -1))
assertEquals(lg(9087, 0), lg(1405130313, 57) / lg(27093454, 0))
assertEquals(lg(345582531, 0), lg(-1804200888, 1989226) / lg(24722497, 0))
assertEquals(lg(-1424974, -1), lg(-1642507127, 886) / lg(-2672324, -1))
assertEquals(lg(1991351, 0), lg(-1276796892, 35) / lg(77004, 0))
assertEquals(lg(1193137, 0), lg(-1200759296, 816) / lg(2939970, 0))
assertEquals(lg(573585390, 0), lg(399171813, 123795) / lg(926969, 0))
assertEquals(lg(1683063904, -942), lg(1649267984, 229752) / lg(-244, -1))
assertEquals(lg(-6019138, -1), lg(-387146187, 7364) / lg(-5255245, -1))
assertEquals(lg(-123416174, 28), lg(149703916, 19121) / lg(660, 0))
assertEquals(lg(-40732946, -1), lg(-1582312743, 7920) / lg(-835168, -1))
assertEquals(lg(715821610, 298), lg(1431643220, 596) / lg(2, 0))
assertEquals(lg(-570078780, -1), lg(-1717918737, 8458) / lg(-63727, -1))
// int53 / int53
assertEquals(lg(1, 0), lg(-1232398900, 28871) / lg(13989713, 22345))
assertEquals(lg(0, 0), lg(-916994839, 12266) / lg(1713571419, 15301))
assertEquals(lg(32, 0), lg(1133414946, 229) / lg(256531666, 7))
assertEquals(lg(368, 0), lg(134792921, 3907) / lg(-1656790262, 10))
assertEquals(lg(1, 0), lg(1532393452, 52260) / lg(-701373106, 31864))
assertEquals(lg(0, 0), lg(193990135, 1460) / lg(867607428, 6918))
assertEquals(lg(0, 0), lg(867672590, 1) / lg(-1315044816, 987593))
assertEquals(lg(0, 0), lg(-978844610, 2) / lg(720710523, 209))
assertEquals(lg(0, 0), lg(-297570329, 1) / lg(-2127979750, 195738))
assertEquals(lg(0, 0), lg(-1035330427, 5) / lg(-2091513925, 70))
assertEquals(lg(0, 0), lg(1037142987, 15) / lg(-485498951, 30819))
assertEquals(lg(0, 0), lg(744551901, 15) / lg(-604684037, 1587))
assertEquals(lg(67766, 0), lg(1341710951, 232724) / lg(1864827988, 3))
assertEquals(lg(694, 0), lg(-409318148, 157818) / lg(517165426, 227))
assertEquals(lg(1, 0), lg(1908192460, 110512) / lg(-61974596, 95795))
assertEquals(lg(0, 0), lg(946490654, 498) / lg(-1889366637, 1163))
assertEquals(lg(12, 0), lg(1765257877, 34422) / lg(728455544, 2851))
assertEquals(lg(0, 0), lg(-1725136864, 84) / lg(1122821677, 14720))
assertEquals(lg(1, 0), lg(1854803780, 2) / lg(-302860117, 1))
assertEquals(lg(131, 0), lg(380756581, 107) / lg(-806772264, 0))
assertEquals(lg(0, 0), lg(1868292481, 1134) / lg(691774521, 33775))
assertEquals(lg(0, 0), lg(-1515810361, 98) / lg(2038289788, 198))
assertEquals(lg(315, 0), lg(-1943767475, 31777) / lg(-1513506636, 100))
assertEquals(lg(0, 0), lg(1508904915, 18) / lg(1834666309, 976))
assertEquals(lg(1, 0), lg(1430753947, 3772) / lg(-1853122145, 3615))
assertEquals(lg(2340149, 0), lg(-1654852151, 1195820) / lg(-2100231332, 0))
assertEquals(lg(0, 0), lg(1011710080, 18) / lg(-616681449, 57))
assertEquals(lg(14, 0), lg(-495370429, 356832) / lg(-34555439, 25233))
assertEquals(lg(131, 0), lg(744211838, 511) / lg(-475809581, 3))
assertEquals(lg(0, 0), lg(1135128265, 67) / lg(163864249, 972))
assertEquals(lg(1, 0), lg(954856869, 5120) / lg(1474096435, 3606))
assertEquals(lg(0, 0), lg(1544045220, 1) / lg(85376495, 2353))
assertEquals(lg(8, 0), lg(1367437144, 53) / lg(2010850631, 6))
assertEquals(lg(0, 0), lg(-1398730804, 13) / lg(-2055007528, 52))
assertEquals(lg(0, 0), lg(1598156017, 13) / lg(-1006929331, 160))
assertEquals(lg(0, 0), lg(738323529, 41) / lg(-1508093984, 10361))
assertEquals(lg(0, 0), lg(-1788797806, 31) / lg(588557582, 575930))
assertEquals(lg(76, 0), lg(-913009845, 1002) / lg(204577043, 13))
assertEquals(lg(0, 0), lg(1908599465, 6) / lg(1058868127, 3383))
assertEquals(lg(0, 0), lg(-634312634, 75) / lg(-850292534, 332928))
assertEquals(lg(0, 0), lg(-1679695022, 148) / lg(-1395453213, 912))
assertEquals(lg(0, 0), lg(456310936, 71) / lg(487720864, 1590813))
assertEquals(lg(0, 0), lg(-1724925398, 0) / lg(-273170277, 38))
assertEquals(lg(0, 0), lg(-6742076, 15) / lg(192793866, 175))
assertEquals(lg(50, 0), lg(337939061, 2094205) / lg(880147944, 41142))
assertEquals(lg(0, 0), lg(-998413092, 0) / lg(-1758700885, 29))
assertEquals(lg(0, 0), lg(1986052307, 3) / lg(-2092246422, 47))
assertEquals(lg(0, 0), lg(-109615093, 1) / lg(-2066395387, 20016))
assertEquals(lg(127, 0), lg(-1147373454, 901) / lg(313439710, 7))
assertEquals(lg(0, 0), lg(-792716629, 66379) / lg(2017337246, 250513))
// int53 / big
assertEquals(lg(0, 0), lg(291278707, 13808) / lg(941639833, -14430466))
assertEquals(lg(0, 0), lg(-857819626, 204588) / lg(-1909684886, -709519130))
assertEquals(lg(0, 0), lg(-978105991, 7435) / lg(-306472275, 158306339))
assertEquals(lg(0, 0), lg(75049741, 248171) / lg(-1574105194, 64879257))
assertEquals(lg(0, 0), lg(136051120, 621) / lg(-1671784392, 102642869))
assertEquals(lg(0, 0), lg(-448460356, 2858) / lg(71740423, -16715717))
assertEquals(lg(0, 0), lg(-1266403435, 2) / lg(-1022999838, 25812014))
assertEquals(lg(0, 0), lg(552733494, 22) / lg(241731505, -33191170))
assertEquals(lg(0, 0), lg(1366167794, 115591) / lg(191854687, -2136953))
assertEquals(lg(0, 0), lg(1329114439, 80951) / lg(-51187101, 1471052997))
// big / int32
assertEquals(lg(422668131, 6), lg(-1495113094, 168518701) / lg(27633219, 0))
assertEquals(lg(932715295, 204683), lg(-1211847018, -609137255) / lg(-2976, -1))
assertEquals(lg(189814434, 0), lg(-457166837, -15040808) / lg(-340331202, -1))
assertEquals(lg(-1116045071, -1131771), lg(-104570473, -117704108) / lg(104, 0))
assertEquals(lg(-784306379, 14408), lg(453828098, -10187034) / lg(-707, -1))
assertEquals(lg(-284027201, 2002401), lg(1911518920, 168201762) / lg(84, 0))
assertEquals(lg(-862273257, -2), lg(610589058, 36481453) / lg(-30381877, -1))
assertEquals(lg(-761280647, -71), lg(410700182, 503953004) / lg(-7181145, -1))
assertEquals(lg(-1212582262, -2538), lg(194917334, -8806907) / lg(3471, 0))
assertEquals(lg(-1201233065, 4), lg(852311155, 9671380) / lg(2048884, 0))
assertEquals(lg(1324107666, 0), lg(-1028681544, 4163983) / lg(13506586, 0))
assertEquals(lg(-354367044, 6361111), lg(-708734088, 12722223) / lg(2, 0))
assertEquals(lg(-292170842, -76359), lg(1693696214, 18402294) / lg(-241, -1))
assertEquals(lg(2104544550, -41349584), lg(-1932788158, 206747917) / lg(-5, -1))
assertEquals(lg(-1928473941, -17816), lg(1427262980, -60732866) / lg(3409, 0))
assertEquals(lg(-1929237164, -681), lg(-677896940, 2512898) / lg(-3693, -1))
assertEquals(lg(1550060300, -35), lg(-926729663, -9677195) / lg(279372, 0))
assertEquals(lg(-1706875941, 0), lg(-405257725, -2271799) / lg(-3770075, -1))
assertEquals(lg(1540708852, 10909), lg(-1893733008, -6491069) / lg(-595, -1))
assertEquals(lg(-1563665409, -358), lg(-1343018634, -2584815) / lg(7233, 0))
assertEquals(lg(278715917, -374389), lg(-1224507547, 122799570) / lg(-328, -1))
assertEquals(lg(1421525100, 0), lg(-2082712791, -15998594) / lg(-48337828, -1))
assertEquals(lg(1574832373, -2193811), lg(-2147318181, -32907160) / lg(15, 0))
assertEquals(lg(-1260116915, -61610), lg(1074158039, 118905936) / lg(-1930, -1))
assertEquals(lg(130856059, -15612), lg(1270835097, -2201288) / lg(141, 0))
assertEquals(lg(-110248455, 2347), lg(320077861, -446108079) / lg(-189997, -1))
assertEquals(lg(-1659387265, 122), lg(1075676628, 54005547) / lg(440453, 0))
assertEquals(lg(-144903831, 18), lg(-1800001035, 54578889) / lg(2877683, 0))
assertEquals(lg(-1312994937, -23952), lg(-654120591, 33364168) / lg(-1393, -1))
assertEquals(lg(-178073210, -1), lg(302695822, -2432394) / lg(58667176, 0))
assertEquals(lg(1316938460, 142), lg(523451067, -54366538) / lg(-382038, -1))
assertEquals(lg(-1457978633, 17556853), lg(-78968601, 52670560) / lg(3, 0))
assertEquals(lg(-1760960552, 505129611), lg(-773046192, -1010259224) / lg(-2, -1))
assertEquals(lg(1210355204, 2314), lg(1515488136, -21874592) / lg(-9452, -1))
assertEquals(lg(-1625685934, 862807773), lg(-1043595428, -1725615548) / lg(-2, -1))
assertEquals(lg(184379181, 4), lg(-1217231978, 1516494005) / lg(375097846, 0))
assertEquals(lg(1243945230, 0), lg(-1873413508, -236381131) / lg(-816152673, -1))
assertEquals(lg(-1540093941, -876), lg(265593875, 26513736) / lg(-30289, -1))
assertEquals(lg(-1304692919, 543912), lg(106204837, -839801203) / lg(-1544, -1))
assertEquals(lg(-806250591, 23), lg(815576040, -55524975) / lg(-2331779, -1))
assertEquals(lg(-2106907248, -3), lg(-2053929476, -1795047022) / lg(720742474, 0))
assertEquals(lg(893100234, -124), lg(1552099699, 65024502) / lg(-525272, -1))
assertEquals(lg(-1109915706, 1255), lg(-194253417, -12405472) / lg(-9879, -1))
assertEquals(lg(-1177955013, 0), lg(412309016, 112344162) / lg(154800321, 0))
assertEquals(lg(-1975688052, -51023804), lg(343591192, -102047607) / lg(2, 0))
assertEquals(lg(-728332094, -309956), lg(1756765281, 8058834) / lg(-26, -1))
assertEquals(lg(10173004, 1227), lg(1762668787, -960735493) / lg(-782994, -1))
assertEquals(lg(1157067129, 5766), lg(1523935530, -109345767) / lg(-18963, -1))
assertEquals(lg(1226263794, 42306948), lg(-1256703941, 1438436241) / lg(34, 0))
assertEquals(lg(1502167534, -439314), lg(-444491016, -6150392) / lg(14, 0))
// big / int53
assertEquals(lg(88399, 0), lg(-1883357942, 360257606) / lg(1478768728, 4075))
assertEquals(lg(-45459, -1), lg(-1991900757, -48856999) / lg(-1087694619, 1074))
assertEquals(lg(4395497, 0), lg(518426119, 218946975) / lg(-808940852, 49))
assertEquals(lg(3198134, 0), lg(-946567777, 600381050) / lg(-1165957306, 187))
assertEquals(lg(470, 0), lg(257885254, 845979705) / lg(792779187, 1798424))
assertEquals(lg(92, 0), lg(1278680372, 6485140) / lg(1376461023, 70263))
assertEquals(lg(167728, 0), lg(1445602310, 420550818) / lg(1397186900, 2507))
assertEquals(lg(25700177, 0), lg(1822058703, 522114268) / lg(1355449555, 20))
assertEquals(lg(-35822646, -1), lg(532749659, -130990067) / lg(-1474774415, 3))
assertEquals(lg(-348, -1), lg(1329707986, -2121642) / lg(-63366094, 6086))
assertEquals(lg(-2179, -1), lg(1028585430, -118524228) / lg(1655878874, 54392))
assertEquals(lg(1187, 0), lg(203502475, 42252914) / lg(36519512, 35581))
assertEquals(lg(3223, 0), lg(341088508, 35053507) / lg(917391400, 10874))
assertEquals(lg(23608500, 0), lg(1454135412, 69933847) / lg(-162213744, 2))
assertEquals(lg(7286803, 0), lg(1674604578, 10565585) / lg(1932570831, 1))
assertEquals(lg(-137450, -1), lg(-1910257093, -16610962) / lg(-640594227, 120))
assertEquals(lg(114592, 0), lg(1080864951, 17606069) / lg(-1542196664, 153))
assertEquals(lg(61, 0), lg(-1419644278, 13937517) / lg(-919779905, 227700))
assertEquals(lg(-247360, -1), lg(-1958380469, -855713410) / lg(1631833189, 3459))
assertEquals(lg(-61725, -1), lg(1951473618, -4122677) / lg(-899615165, 66))
assertEquals(lg(2226, 0), lg(1521276132, 182952467) / lg(346742782, 82171))
assertEquals(lg(-997, -1), lg(-1003647481, -7808320) / lg(-228453385, 7826))
assertEquals(lg(36, 0), lg(-875689390, 4467236) / lg(-590010750, 120938))
assertEquals(lg(56005, 0), lg(1189085620, 611543209) / lg(1619962756, 10919))
assertEquals(lg(-90057, -1), lg(-1072173311, -18503031) / lg(1971480267, 205))
assertEquals(lg(-9, -1), lg(767303802, -3407362) / lg(-339044225, 352939))
assertEquals(lg(62240, 0), lg(427996893, 482974074) / lg(-736462105, 7759))
assertEquals(lg(-1774, -1), lg(842450255, -4396651) / lg(859272322, 2477))
assertEquals(lg(-153400, -1), lg(1640433988, -2618618) / lg(302672196, 17))
assertEquals(lg(2145, 0), lg(-361322518, 63967358) / lg(-1922353888, 29810))
assertEquals(lg(106042, 0), lg(-1774479550, 43276853) / lg(472456506, 408))
assertEquals(lg(-381407, -1), lg(-1756338345, -38928780) / lg(283612141, 102))
assertEquals(lg(1217514, 0), lg(-495049835, 37161263) / lg(-2052025512, 30))
assertEquals(lg(-17, -1), lg(1606509747, -10876159) / lg(1068727249, 635715))
assertEquals(lg(4880327, 0), lg(-1857686692, 1918485655) / lg(454913535, 393))
assertEquals(lg(-1023070, -1), lg(-502107392, -511268482) / lg(-1118977400, 499))
assertEquals(lg(439, 0), lg(-909192131, 45216813) / lg(1442986382, 102923))
assertEquals(lg(2171202, 0), lg(259184089, 14858724) / lg(-671961291, 6))
assertEquals(lg(-5332527, -1), lg(1737846340, -614952982) / lg(1379175047, 115))
assertEquals(lg(-435180, -1), lg(-406629212, -528407898) / lg(973577032, 1214))
assertEquals(lg(27837, 0), lg(-597461306, 538945619) / lg(-1867966522, 19360))
assertEquals(lg(-396, -1), lg(-1906945200, -371170760) / lg(151858506, 936902))
assertEquals(lg(-115583279, -1), lg(-1366510, -207691415) / lg(-872314548, 1))
assertEquals(lg(-6783543, -1), lg(-1280665444, -104856505) / lg(1964875665, 15))
assertEquals(lg(-1464006069, -1), lg(897601097, -1352132581) / lg(-328204224, 0))
assertEquals(lg(11599107, 0), lg(-496529216, 32992512) / lg(-668292521, 2))
assertEquals(lg(842, 0), lg(1819966537, 311969505) / lg(-879441284, 370147))
assertEquals(lg(43514, 0), lg(433235702, 408255734) / lg(573404298, 9382))
assertEquals(lg(-230, -1), lg(1693350453, -4127304) / lg(-1671879801, 17931))
assertEquals(lg(249094, 0), lg(-492682302, 64433722) / lg(-1408841594, 258))
// big / big
assertEquals(lg(-10, -1), lg(1450795502, -706709103) / lg(742056886, 64843937))
assertEquals(lg(0, 0), lg(-392893244, 72026637) / lg(1419676270, 875736789))
assertEquals(lg(-2, -1), lg(-1861146463, 8382761) / lg(-724412724, -3000735))
assertEquals(lg(0, 0), lg(1373482238, 23344691) / lg(1835527248, -294342355))
assertEquals(lg(-37, -1), lg(1956796392, 107480459) / lg(-560958184, -2839471))
assertEquals(lg(3, 0), lg(422228275, 30436377) / lg(-2023395425, 8226201))
assertEquals(lg(-3, -1), lg(1747624836, -215352612) / lg(-1349940168, 58723974))
assertEquals(lg(2, 0), lg(-583006891, 16111063) / lg(1853686630, 5479773))
assertEquals(lg(0, 0), lg(1498104050, 7322401) / lg(-407388940, 2141575618))
assertEquals(lg(5, 0), lg(1943726712, 869895175) / lg(-627430826, 169278540))
assertEquals(lg(0, 0), lg(1872895982, 98966340) / lg(1347573135, 529034148))
assertEquals(lg(-2, -1), lg(16010610, 187913494) / lg(-848952152, -81951424))
assertEquals(lg(0, 0), lg(830929771, -4393252) / lg(1829525088, 52659897))
assertEquals(lg(22, 0), lg(-2093526384, 133319293) / lg(-464927151, 6049576))
assertEquals(lg(0, 0), lg(1056318793, 13467735) / lg(1970348162, -672507521))
assertEquals(lg(0, 0), lg(-28853693, -169722715) / lg(-83877421, 770900857))
assertEquals(lg(-27, -1), lg(1743854071, -302158995) / lg(80117835, 11113120))
assertEquals(lg(-6, -1), lg(635796581, -146765250) / lg(441664676, 23716738))
assertEquals(lg(0, 0), lg(-1048312948, -37662905) / lg(1319664078, 208772026))
assertEquals(lg(0, 0), lg(-784292680, -14102823) / lg(2037268040, 744987722))
assertEquals(lg(176, 0), lg(-1116104092, -2073525743) / lg(1766685765, -11731135))
assertEquals(lg(0, 0), lg(-1991687284, 19448294) / lg(-1731357606, -202272807))
assertEquals(lg(6, 0), lg(-2042068328, -52956481) / lg(370482897, -7759903))
assertEquals(lg(1, 0), lg(334395247, 1906338595) / lg(342095090, 1248830168))
assertEquals(lg(0, 0), lg(-309616588, 44123460) / lg(2040055580, -476494291))
assertEquals(lg(0, 0), lg(137178123, 36336421) / lg(-360221107, -515689970))
assertEquals(lg(0, 0), lg(-422856762, -16760844) / lg(-334268074, -43984484))
assertEquals(lg(0, 0), lg(-24820293, 25823996) / lg(390711705, 288223876))
assertEquals(lg(0, 0), lg(1170265006, 2998984) / lg(-134995170, -2123267074))
assertEquals(lg(0, 0), lg(-1501380980, -6088910) / lg(-1175861016, -56027408))
assertEquals(lg(-56, -1), lg(307880183, 196786483) / lg(-1107761890, -3480429))
assertEquals(lg(0, 0), lg(-588606997, -37732967) / lg(-1124435958, -77404915))
assertEquals(lg(108, 0), lg(90560661, 990295925) / lg(731139348, 9165999))
assertEquals(lg(0, 0), lg(46312609, -28251908) / lg(1279863155, -519028300))
assertEquals(lg(0, 0), lg(1123427761, 55212863) / lg(-1081219733, 233090714))
assertEquals(lg(0, 0), lg(1447869812, -3646400) / lg(-1237950546, -27122943))
assertEquals(lg(-13, -1), lg(-1399920635, 110072031) / lg(-398678056, -8069387))
assertEquals(lg(0, 0), lg(513704441, 14319377) / lg(-796719013, 260081997))
assertEquals(lg(8, 0), lg(166886349, -190148673) / lg(68245235, -21656365))
assertEquals(lg(0, 0), lg(-1594024534, -144937584) / lg(177399758, 200473672))
assertEquals(lg(-1, -1), lg(447753993, -23591908) / lg(1399162166, 12505918))
assertEquals(lg(0, 0), lg(1500283330, 5361180) / lg(348398676, 156400271))
assertEquals(lg(-1, -1), lg(-216115001, 670826068) / lg(1759253954, -470062110))
assertEquals(lg(0, 0), lg(-1251659767, 18831569) / lg(-669341445, -34474821))
assertEquals(lg(31, 0), lg(817032953, 218701872) / lg(-176557210, 6899121))
assertEquals(lg(-19, -1), lg(1365998269, 613319842) / lg(319204438, -30758748))
assertEquals(lg(0, 0), lg(-428500325, 6610536) / lg(-46648893, -105360271))
assertEquals(lg(0, 0), lg(784528299, -6958267) / lg(1370662827, -774132635))
assertEquals(lg(-2, -1), lg(-769114167, 137614183) / lg(-929091402, -67103082))
assertEquals(lg(8, 0), lg(1810734914, 124115952) / lg(1149563530, 15197570))
}
@Test def modulo_%(): Unit = {
expectThrows(classOf[ArithmeticException], lg(0) % lg(0))
expectThrows(classOf[ArithmeticException], lg(5, 0) % lg(0))
expectThrows(classOf[ArithmeticException], lg(0, 5) % lg(0))
expectThrows(classOf[ArithmeticException], lg(-1) % lg(0))
expectThrows(classOf[ArithmeticException], lg(-1, 0) % lg(0))
assertEquals(lg(0), IntMinVal % lg(-1))
assertEquals(lg(0), IntMinVal % IntMaxValPlus1)
assertEquals(lg(0), IntMaxValPlus1 % lg(-1))
assertEquals(lg(0), IntMaxValPlus1 % IntMinVal)
assertEquals(lg(0), MaxVal % lg(-1))
assertEquals(lg(0), MinVal % lg(1))
assertEquals(lg(0), MinVal % lg(-1))
assertEquals(lg(-1, 2147483647), MaxVal % MinVal)
assertEquals(lg(0), MaxVal % MaxVal)
assertEquals(lg(0), MinVal % MinVal)
assertEquals(lg(-1), MinVal % MaxVal)
// int32 % int32
assertEquals(lg(880, 0), lg(880, 0) % lg(-219594, -1))
assertEquals(lg(-27, -1), lg(-49125, -1) % lg(98, 0))
assertEquals(lg(-1194, -1), lg(-1922504, -1) % lg(4195, 0))
assertEquals(lg(3, 0), lg(3, 0) % lg(7963, 0))
assertEquals(lg(-626, -1), lg(-626, -1) % lg(-484628621, -1))
assertEquals(lg(11315, 0), lg(11315, 0) % lg(-3914076, -1))
assertEquals(lg(26241, 0), lg(15712341, 0) % lg(-1045740, -1))
assertEquals(lg(-507, -1), lg(-855439, -1) % lg(5213, 0))
assertEquals(lg(-259, -1), lg(-101026259, -1) % lg(-500, -1))
assertEquals(lg(27720977, 0), lg(27720977, 0) % lg(-42317657, -1))
assertEquals(lg(1, 0), lg(25954, 0) % lg(-3, -1))
assertEquals(lg(6724180, 0), lg(338447650, 0) % lg(-8505730, -1))
assertEquals(lg(10488, 0), lg(23967, 0) % lg(-13479, -1))
assertEquals(lg(1, 0), lg(885202, 0) % lg(-3, -1))
assertEquals(lg(0, 0), lg(692795590, 0) % lg(-10, -1))
assertEquals(lg(-1, -1), lg(-1, -1) % lg(156, 0))
assertEquals(lg(388, 0), lg(388, 0) % lg(189523294, 0))
assertEquals(lg(352, 0), lg(352, 0) % lg(-3257, -1))
assertEquals(lg(-9, -1), lg(-9, -1) % lg(14653, 0))
assertEquals(lg(-1, -1), lg(-258745, -1) % lg(8, 0))
assertEquals(lg(-21023, -1), lg(-206976653, -1) % lg(34321, 0))
assertEquals(lg(-1, -1), lg(-1, -1) % lg(-971, -1))
assertEquals(lg(59, 0), lg(59, 0) % lg(388, 0))
assertEquals(lg(0, 0), lg(-7, -1) % lg(1, 0))
assertEquals(lg(12, 0), lg(77, 0) % lg(13, 0))
assertEquals(lg(224246, 0), lg(224246, 0) % lg(719055, 0))
assertEquals(lg(-61296, -1), lg(-61296, -1) % lg(-135723660, -1))
assertEquals(lg(549465, 0), lg(6897809, 0) % lg(793543, 0))
assertEquals(lg(45, 0), lg(45, 0) % lg(984210147, 0))
assertEquals(lg(0, 0), lg(-64, -1) % lg(1, 0))
assertEquals(lg(2, 0), lg(379611734, 0) % lg(4, 0))
assertEquals(lg(0, 0), lg(0, 0) % lg(-263, -1))
assertEquals(lg(29, 0), lg(29, 0) % lg(-117, -1))
assertEquals(lg(24, 0), lg(245094, 0) % lg(-70, -1))
assertEquals(lg(0, 0), lg(0, 0) % lg(5, 0))
assertEquals(lg(2, 0), lg(2, 0) % lg(47787927, 0))
assertEquals(lg(-124, -1), lg(-124, -1) % lg(-22714040, -1))
assertEquals(lg(412, 0), lg(412, 0) % lg(-17176, -1))
assertEquals(lg(-11860, -1), lg(-11860, -1) % lg(9506787, 0))
assertEquals(lg(-31, -1), lg(-31, -1) % lg(-1544676, -1))
assertEquals(lg(-3, -1), lg(-1990315281, -1) % lg(-7, -1))
assertEquals(lg(99, 0), lg(99, 0) % lg(-277, -1))
assertEquals(lg(-86, -1), lg(-29227, -1) % lg(-161, -1))
assertEquals(lg(106, 0), lg(106, 0) % lg(-47032956, -1))
assertEquals(lg(18, 0), lg(18, 0) % lg(510836179, 0))
assertEquals(lg(2, 0), lg(3543112, 0) % lg(10, 0))
assertEquals(lg(534271, 0), lg(3547603, 0) % lg(-1506666, -1))
assertEquals(lg(-16361, -1), lg(-16361, -1) % lg(10637613, 0))
assertEquals(lg(8, 0), lg(606879016, 0) % lg(-16, -1))
assertEquals(lg(-1, -1), lg(-1, -1) % lg(46424570, 0))
// int32 % int53
assertEquals(lg(-3, -1), lg(-3, -1) % lg(206801065, 1))
assertEquals(lg(-57756, -1), lg(-57756, -1) % lg(-1211050362, 13))
assertEquals(lg(0, 0), lg(0, 0) % lg(-475702596, 10040))
assertEquals(lg(423524, 0), lg(423524, 0) % lg(-2084961556, 16))
assertEquals(lg(38317, 0), lg(38317, 0) % lg(-1699004544, 24))
assertEquals(lg(60291, 0), lg(60291, 0) % lg(-458289291, 56))
assertEquals(lg(1, 0), lg(1, 0) % lg(-1247681936, 1229953))
assertEquals(lg(296788, 0), lg(296788, 0) % lg(183245860, 52))
assertEquals(lg(-2005515, -1), lg(-2005515, -1) % lg(331735459, 17))
assertEquals(lg(-179812, -1), lg(-179812, -1) % lg(-853047550, 5154))
assertEquals(lg(-3678, -1), lg(-3678, -1) % lg(1751271067, 243605))
assertEquals(lg(-93867, -1), lg(-93867, -1) % lg(-1925367590, 42))
assertEquals(lg(7600917, 0), lg(7600917, 0) % lg(-1807424604, 95574))
assertEquals(lg(300012, 0), lg(300012, 0) % lg(1951216728, 101))
assertEquals(lg(-6347, -1), lg(-6347, -1) % lg(-438713154, 23))
assertEquals(lg(-41, -1), lg(-41, -1) % lg(-1211982116, 459))
assertEquals(lg(3425, 0), lg(3425, 0) % lg(-1580976156, 2))
assertEquals(lg(-25, -1), lg(-25, -1) % lg(200240265, 25993))
assertEquals(lg(-8303, -1), lg(-8303, -1) % lg(1353761386, 1921))
assertEquals(lg(274032571, 0), lg(274032571, 0) % lg(1455543028, 255))
assertEquals(lg(-3, -1), lg(-3, -1) % lg(1143775281, 729))
assertEquals(lg(-1124428, -1), lg(-1124428, -1) % lg(-521284400, 339))
assertEquals(lg(-2, -1), lg(-2, -1) % lg(-303859962, 2524))
assertEquals(lg(1, 0), lg(1, 0) % lg(-402000545, 1))
assertEquals(lg(107013504, 0), lg(107013504, 0) % lg(157604607, 3))
assertEquals(lg(4976822, 0), lg(4976822, 0) % lg(-2046021074, 2230))
assertEquals(lg(-1, -1), lg(-1, -1) % lg(-306200858, 41))
assertEquals(lg(80396, 0), lg(80396, 0) % lg(-409002766, 13))
assertEquals(lg(937638, 0), lg(937638, 0) % lg(-697219650, 26))
assertEquals(lg(756, 0), lg(756, 0) % lg(-948806692, 1700920))
assertEquals(lg(5, 0), lg(5, 0) % lg(646021801, 21350))
assertEquals(lg(262831839, 0), lg(262831839, 0) % lg(1086270794, 10633))
assertEquals(lg(-2146273993, -1), lg(-2146273993, -1) % lg(-1539129401, 0))
assertEquals(lg(59799, 0), lg(59799, 0) % lg(1910837623, 102082))
assertEquals(lg(-5347, -1), lg(-5347, -1) % lg(1965292799, 18))
assertEquals(lg(926, 0), lg(926, 0) % lg(1939309159, 104206))
assertEquals(lg(1, 0), lg(1, 0) % lg(1651864405, 1233))
assertEquals(lg(334, 0), lg(334, 0) % lg(581635234, 20))
assertEquals(lg(-61747, -1), lg(-61747, -1) % lg(-842193425, 1497))
assertEquals(lg(-1, -1), lg(-1, -1) % lg(758739794, 79508))
assertEquals(lg(59605313, 0), lg(59605313, 0) % lg(-1162319751, 0))
assertEquals(lg(12267518, 0), lg(12267518, 0) % lg(1340161110, 568352))
assertEquals(lg(19230695, 0), lg(19230695, 0) % lg(1844291137, 21))
assertEquals(lg(3950296, 0), lg(3950296, 0) % lg(-848670202, 243))
assertEquals(lg(503276, 0), lg(503276, 0) % lg(-1756374670, 1))
assertEquals(lg(30880536, 0), lg(30880536, 0) % lg(-1380766565, 51064))
assertEquals(lg(5659804, 0), lg(5659804, 0) % lg(-725339057, 1))
assertEquals(lg(11882277, 0), lg(11882277, 0) % lg(243727355, 7))
assertEquals(lg(371783010, 0), lg(371783010, 0) % lg(630143580, 14001))
assertEquals(lg(840, 0), lg(840, 0) % lg(-1719362098, 109))
// int32 % big
assertEquals(lg(-267334310, -1), lg(-267334310, -1) % lg(1537718115, -134598983))
assertEquals(lg(57, 0), lg(57, 0) % lg(-1668867109, -10100325))
assertEquals(lg(30332, 0), lg(30332, 0) % lg(-615310153, -90004876))
assertEquals(lg(187, 0), lg(187, 0) % lg(-590535223, 8244144))
assertEquals(lg(-2, -1), lg(-2, -1) % lg(2125719729, 390762530))
assertEquals(lg(-4252915, -1), lg(-4252915, -1) % lg(2070489053, 23484863))
assertEquals(lg(-2, -1), lg(-2, -1) % lg(37507428, 96913792))
assertEquals(lg(10, 0), lg(10, 0) % lg(-533680689, -79923599))
assertEquals(lg(-14, -1), lg(-14, -1) % lg(-930313329, 2972085))
assertEquals(lg(-20155233, -1), lg(-20155233, -1) % lg(-49989774, -25498857))
assertEquals(lg(-406, -1), lg(-406, -1) % lg(2109762544, 126098611))
assertEquals(lg(43, 0), lg(43, 0) % lg(598811771, 154269509))
assertEquals(lg(-4830, -1), lg(-4830, -1) % lg(-1043650540, -2874494))
assertEquals(lg(-4271, -1), lg(-4271, -1) % lg(-950378080, -106126516))
assertEquals(lg(126, 0), lg(126, 0) % lg(-877412093, -90804729))
assertEquals(lg(40445345, 0), lg(40445345, 0) % lg(-1461218790, 6749169))
assertEquals(lg(-1, -1), lg(-1, -1) % lg(1776909778, 28425796))
assertEquals(lg(-2123811, -1), lg(-2123811, -1) % lg(-51805125, 44153129))
assertEquals(lg(-25650126, -1), lg(-25650126, -1) % lg(-1317209725, -16141386))
assertEquals(lg(30, 0), lg(30, 0) % lg(712479950, 158765535))
assertEquals(lg(2494211, 0), lg(2494211, 0) % lg(-432472367, 21859989))
assertEquals(lg(100937174, 0), lg(100937174, 0) % lg(212873269, -74778594))
assertEquals(lg(901687, 0), lg(901687, 0) % lg(-1225225931, -512562107))
assertEquals(lg(-422854, -1), lg(-422854, -1) % lg(-1361503923, -98826041))
assertEquals(lg(2, 0), lg(2, 0) % lg(386622050, -9945722))
assertEquals(lg(-465211, -1), lg(-465211, -1) % lg(-418132599, -160175963))
assertEquals(lg(63, 0), lg(63, 0) % lg(-1330189832, 180061391))
assertEquals(lg(47, 0), lg(47, 0) % lg(1439978282, -16520554))
assertEquals(lg(233450563, 0), lg(233450563, 0) % lg(-328511972, 377539644))
assertEquals(lg(-134912, -1), lg(-134912, -1) % lg(1349244684, -12612862))
assertEquals(lg(-95441, -1), lg(-95441, -1) % lg(511120357, 16112596))
assertEquals(lg(-1160726496, -1), lg(-1160726496, -1) % lg(-913371934, -9441145))
assertEquals(lg(-502, -1), lg(-502, -1) % lg(-1021329523, -377728463))
assertEquals(lg(3313324, 0), lg(3313324, 0) % lg(-67454848, 442297818))
assertEquals(lg(-145, -1), lg(-145, -1) % lg(-1010112762, 29724438))
assertEquals(lg(-19091, -1), lg(-19091, -1) % lg(-1944488998, -173788926))
assertEquals(lg(-3331910, -1), lg(-3331910, -1) % lg(2144172121, 73505274))
assertEquals(lg(56622, 0), lg(56622, 0) % lg(-1451372835, 5219178))
assertEquals(lg(0, 0), lg(0, 0) % lg(556032035, 32471322))
assertEquals(lg(800, 0), lg(800, 0) % lg(-1649243607, 2299368))
assertEquals(lg(86949, 0), lg(86949, 0) % lg(794150820, -1384562176))
assertEquals(lg(10, 0), lg(10, 0) % lg(-790693444, 1000869239))
assertEquals(lg(-333236, -1), lg(-333236, -1) % lg(-1020207444, 125043716))
assertEquals(lg(-598, -1), lg(-598, -1) % lg(-93061561, -329975227))
assertEquals(lg(-19, -1), lg(-19, -1) % lg(-1096862531, 163621631))
assertEquals(lg(465328283, 0), lg(465328283, 0) % lg(-21925149, -52057346))
assertEquals(lg(-25837, -1), lg(-25837, -1) % lg(677002620, 8643698))
assertEquals(lg(-383633650, -1), lg(-383633650, -1) % lg(1609519787, 8262009))
assertEquals(lg(-66, -1), lg(-66, -1) % lg(1917139359, 239618524))
assertEquals(lg(1676620, 0), lg(1676620, 0) % lg(910745834, 82765572))
// int53 / int32
assertEquals(lg(15827410, 0), lg(1244623439, 3) % lg(-231372097, -1))
assertEquals(lg(15118, 0), lg(-1392787378, 124) % lg(-20252, -1))
assertEquals(lg(11, 0), lg(578165055, 72) % lg(13, 0))
assertEquals(lg(42298679, 0), lg(-1836745385, 3) % lg(-95630157, -1))
assertEquals(lg(17447610, 0), lg(-1766124150, 29) % lg(-45315780, -1))
assertEquals(lg(0, 0), lg(540281958, 253606) % lg(-11, -1))
assertEquals(lg(51980, 0), lg(-442404110, 7696) % lg(1489246, 0))
assertEquals(lg(2, 0), lg(-631827526, 1455) % lg(8, 0))
assertEquals(lg(5125741, 0), lg(1266390909, 49) % lg(-34627848, -1))
assertEquals(lg(77691, 0), lg(-453014259, 21413) % lg(149449, 0))
assertEquals(lg(521867604, 0), lg(1573062436, 653) % lg(671211684, 0))
assertEquals(lg(14579368, 0), lg(-21113520, 0) % lg(177469767, 0))
assertEquals(lg(0, 0), lg(-262825676, 31) % lg(1, 0))
assertEquals(lg(24027362, 0), lg(-163968426, 1) % lg(33341027, 0))
assertEquals(lg(6792805, 0), lg(668741217, 14380) % lg(-11334498, -1))
assertEquals(lg(9, 0), lg(808041281, 1818) % lg(-10, -1))
assertEquals(lg(204, 0), lg(-1601247507, 25) % lg(-235, -1))
assertEquals(lg(61089, 0), lg(-1577206289, 0) % lg(1618642, 0))
assertEquals(lg(289305533, 0), lg(863396135, 503) % lg(-321808286, -1))
assertEquals(lg(7272892, 0), lg(-900149281, 55) % lg(15166197, 0))
assertEquals(lg(3, 0), lg(1802954050, 3593) % lg(7, 0))
assertEquals(lg(12036, 0), lg(800669146, 41901) % lg(-20591, -1))
assertEquals(lg(29, 0), lg(-1055636867, 39) % lg(48, 0))
assertEquals(lg(0, 0), lg(-491067123, 14) % lg(1, 0))
assertEquals(lg(260441364, 0), lg(1420289126, 67) % lg(1010219079, 0))
assertEquals(lg(3936541, 0), lg(1338756461, 32) % lg(-4427443, -1))
assertEquals(lg(183313645, 0), lg(-820843233, 778) % lg(-273780418, -1))
assertEquals(lg(91783, 0), lg(-1033566360, 561225) % lg(-156677, -1))
assertEquals(lg(5, 0), lg(-1567070603, 38) % lg(-8, -1))
assertEquals(lg(11214823, 0), lg(-1649343541, 185302) % lg(-19368267, -1))
assertEquals(lg(75719, 0), lg(-591434325, 76351) % lg(94212, 0))
assertEquals(lg(10941, 0), lg(235794528, 55) % lg(17599, 0))
assertEquals(lg(5331, 0), lg(-763589741, 116) % lg(-14942, -1))
assertEquals(lg(1, 0), lg(-1283158225, 237055) % lg(-2, -1))
assertEquals(lg(24400, 0), lg(1537105400, 29108) % lg(-37848, -1))
assertEquals(lg(95, 0), lg(-56778611, 994650) % lg(-170, -1))
assertEquals(lg(9836, 0), lg(-2057746932, 7) % lg(-10100, -1))
assertEquals(lg(30255783, 0), lg(1365793356, 12) % lg(-38454651, -1))
assertEquals(lg(417, 0), lg(-2128793438, 4) % lg(6825, 0))
assertEquals(lg(0, 0), lg(1667515072, 8) % lg(2, 0))
assertEquals(lg(257, 0), lg(420324337, 980) % lg(-845, -1))
assertEquals(lg(82991, 0), lg(-771084081, 8204) % lg(105392, 0))
assertEquals(lg(691256, 0), lg(-332377894, 1) % lg(882238, 0))
assertEquals(lg(0, 0), lg(1749263284, 11) % lg(-20, -1))
assertEquals(lg(4, 0), lg(347303218, 1234317) % lg(-13, -1))
assertEquals(lg(150, 0), lg(1199079324, 17271) % lg(11033, 0))
assertEquals(lg(14, 0), lg(1196217208, 13) % lg(-23, -1))
assertEquals(lg(256216433, 0), lg(-1078128939, 0) % lg(740155481, 0))
assertEquals(lg(45583, 0), lg(-1354463473, 3691) % lg(-63588, -1))
assertEquals(lg(459, 0), lg(-1255896801, 1469630) % lg(-502, -1))
// int53 % int53
assertEquals(lg(1805177178, 1), lg(1805177178, 1) % lg(-1293833696, 410))
assertEquals(lg(-583440651, 2), lg(647007072, 1811985) % lg(1091239449, 3))
assertEquals(lg(1346307032, 1), lg(1346307032, 1) % lg(-672335266, 33))
assertEquals(lg(858355422, 81), lg(858355422, 81) % lg(1490435172, 162402))
assertEquals(lg(744276027, 1), lg(-1299053281, 6330) % lg(1042770708, 1))
assertEquals(lg(29273105, 0), lg(-88774269, 25) % lg(775537355, 1))
assertEquals(lg(383200445, 2), lg(-962613261, 4309) % lg(-529185362, 5))
assertEquals(lg(-171009725, 445), lg(-171009725, 445) % lg(-1167557775, 307982))
assertEquals(lg(8166883, 15498), lg(1848497503, 78519) % lg(1533824479, 15755))
assertEquals(lg(-1752533311, 17), lg(-1752533311, 17) % lg(1904799096, 73566))
assertEquals(lg(-1641266817, 46), lg(-1641266817, 46) % lg(-31936789, 751199))
assertEquals(lg(-350685679, 656), lg(-637954451, 32352) % lg(-10259599, 1131))
assertEquals(lg(-1671876486, 0), lg(-1657673170, 122149) % lg(-534342412, 0))
assertEquals(lg(-660565679, 235), lg(-660565679, 235) % lg(-897090894, 14655))
assertEquals(lg(-1798560222, 612), lg(-1798560222, 612) % lg(-236039758, 2924))
assertEquals(lg(-28767936, 5704), lg(1010899296, 62798) % lg(-1974205776, 9515))
assertEquals(lg(-2004786867, 4), lg(1206965517, 91420) % lg(880030876, 7))
assertEquals(lg(712148070, 3), lg(712148070, 3) % lg(472319826, 2838))
assertEquals(lg(-1275175525, 44), lg(-1275175525, 44) % lg(162799342, 861329))
assertEquals(lg(1187224322, 14), lg(-516916094, 191396) % lg(-1920802608, 30))
assertEquals(lg(-1461747946, 0), lg(-1627551726, 4499) % lg(1200735793, 1))
assertEquals(lg(453535447, 39039), lg(453535447, 39039) % lg(520791957, 141909))
assertEquals(lg(216221627, 20), lg(216221627, 20) % lg(-781572865, 8131))
assertEquals(lg(1611884803, 23), lg(-1999221053, 528) % lg(1107934896, 25))
assertEquals(lg(1722095012, 0), lg(-701225584, 44) % lg(-1403297482, 0))
assertEquals(lg(-232837834, 5049), lg(-232837834, 5049) % lg(1000581509, 15836))
assertEquals(lg(-82376749, 239), lg(-82376749, 239) % lg(-163409376, 7688))
assertEquals(lg(2063025646, 2), lg(941363778, 110) % lg(336092572, 3))
assertEquals(lg(721574845, 383), lg(1004884706, 1133) % lg(283309861, 750))
assertEquals(lg(-2004547354, 47), lg(1436404594, 1595) % lg(1522987410, 70))
assertEquals(lg(1696970595, 8), lg(1696970595, 8) % lg(-1168832286, 4163))
assertEquals(lg(-2033329312, 6), lg(-1244970780, 32) % lg(394179266, 13))
assertEquals(lg(1864629418, 1), lg(1864629418, 1) % lg(528888491, 970677))
assertEquals(lg(1596298266, 43057), lg(-1763600443, 962032) % lg(1535552275, 102108))
assertEquals(lg(1181714932, 5), lg(1181714932, 5) % lg(1296434411, 26359))
assertEquals(lg(-2140209952, 7), lg(1535735456, 276446) % lg(-1930593680, 7))
assertEquals(lg(-1703068243, 11), lg(2079501385, 97596) % lg(-1803771626, 21))
assertEquals(lg(-1025858772, 33402), lg(286993796, 174379) % lg(656426284, 70488))
assertEquals(lg(-578045904, 11724), lg(221015334, 1635766) % lg(-2014306775, 270673))
assertEquals(lg(-2080784768, 56), lg(-2103734262, 977) % lg(-22949494, 920))
assertEquals(lg(-922083739, 29), lg(-922083739, 29) % lg(2040148267, 19160))
assertEquals(lg(-1728890579, 468), lg(-559850131, 11989) % lg(1366001936, 2880))
assertEquals(lg(1341547600, 13), lg(-1071198220, 2182) % lg(1526886260, 17))
assertEquals(lg(-896451936, 45), lg(-896451936, 45) % lg(2132477227, 164356))
assertEquals(lg(-1538011120, 53), lg(-561327714, 1420) % lg(-368698210, 151))
assertEquals(lg(1880884956, 621), lg(2112956103, 118429) % lg(-374507565, 859))
assertEquals(lg(902909663, 0), lg(380445410, 8) % lg(-1822479769, 1))
assertEquals(lg(-652149100, 56), lg(-1867274924, 105813) % lg(175641312, 79))
assertEquals(lg(-991170416, 37), lg(-991170416, 37) % lg(1740161397, 88122))
assertEquals(lg(-31602776, 1), lg(-31602776, 1) % lg(-503633567, 241909))
// int53 % big
assertEquals(lg(-930109303, 3), lg(-930109303, 3) % lg(1606982787, 925386547))
assertEquals(lg(-717668907, 16251), lg(-717668907, 16251) % lg(2079100937, 7825426))
assertEquals(lg(265990345, 3), lg(265990345, 3) % lg(-1140922127, -3108870))
assertEquals(lg(-1181318422, 1), lg(-1181318422, 1) % lg(1489652251, 75207246))
assertEquals(lg(380276439, 59), lg(380276439, 59) % lg(-1062351234, -3631372))
assertEquals(lg(1080382784, 7211), lg(1080382784, 7211) % lg(572850722, -139092025))
assertEquals(lg(2020323378, 316), lg(2020323378, 316) % lg(1716930349, -16333391))
assertEquals(lg(1302118364, 5), lg(1302118364, 5) % lg(-442067036, 1941456592))
assertEquals(lg(-641137972, 602), lg(-641137972, 602) % lg(1134212295, -135713760))
assertEquals(lg(-761172703, 499), lg(-761172703, 499) % lg(769981236, 12756336))
assertEquals(lg(1601268090, 610), lg(1601268090, 610) % lg(448513898, -160887452))
assertEquals(lg(-16483553, 0), lg(-16483553, 0) % lg(-1253549192, -1748027086))
assertEquals(lg(-1284021361, 241), lg(-1284021361, 241) % lg(13275221, -3818882))
assertEquals(lg(1499414278, 26), lg(1499414278, 26) % lg(570654893, -17498947))
assertEquals(lg(-368610421, 5074), lg(-368610421, 5074) % lg(685701351, 31070898))
assertEquals(lg(1200134796, 70), lg(1200134796, 70) % lg(1230376618, -2490370))
assertEquals(lg(1537764087, 64483), lg(1537764087, 64483) % lg(-1252591472, 66761881))
assertEquals(lg(-1981129198, 15), lg(-1981129198, 15) % lg(1937978150, 8201544))
assertEquals(lg(32422964, 200), lg(32422964, 200) % lg(2051327691, -20319622))
assertEquals(lg(1404616230, 30), lg(1404616230, 30) % lg(-748420073, -120320053))
assertEquals(lg(-1860381107, 38), lg(-1860381107, 38) % lg(392948122, 60098039))
assertEquals(lg(1050519262, 106431), lg(1050519262, 106431) % lg(361773491, -6329760))
assertEquals(lg(460136491, 1681770), lg(460136491, 1681770) % lg(1399049044, 759923035))
assertEquals(lg(2065599344, 11089), lg(2065599344, 11089) % lg(-465681057, 3484544))
assertEquals(lg(1849358428, 418531), lg(1849358428, 418531) % lg(1023666326, 3435570))
assertEquals(lg(1292603836, 80), lg(1292603836, 80) % lg(-1114872574, 250120091))
assertEquals(lg(1456627133, 194844), lg(1456627133, 194844) % lg(-1256385160, 59427917))
assertEquals(lg(-568179858, 160), lg(-568179858, 160) % lg(1142846538, 154324747))
assertEquals(lg(-2133580755, 203337), lg(-2133580755, 203337) % lg(111334842, 12695612))
assertEquals(lg(1961218705, 6687), lg(1961218705, 6687) % lg(-245612957, 134017780))
assertEquals(lg(335350966, 55096), lg(335350966, 55096) % lg(-1815119598, -120983980))
assertEquals(lg(-767561503, 211), lg(-767561503, 211) % lg(554589640, -7873602))
assertEquals(lg(1476687067, 3767), lg(1476687067, 3767) % lg(552659809, -753378142))
assertEquals(lg(-1107393223, 30), lg(-1107393223, 30) % lg(-78383575, -52663801))
assertEquals(lg(607313614, 2), lg(607313614, 2) % lg(-234099925, 59184919))
assertEquals(lg(-1542671184, 616882), lg(-1542671184, 616882) % lg(1370026838, -45628731))
assertEquals(lg(525616384, 1001), lg(525616384, 1001) % lg(1995646126, -11226360))
assertEquals(lg(2109958916, 21549), lg(2109958916, 21549) % lg(-419960245, -115959896))
assertEquals(lg(-450913111, 32140), lg(-450913111, 32140) % lg(-99267096, -3640047))
assertEquals(lg(1515870052, 198), lg(1515870052, 198) % lg(1415757861, -110282301))
assertEquals(lg(124639649, 865615), lg(124639649, 865615) % lg(-1354782388, 2569606))
assertEquals(lg(557119825, 7205), lg(557119825, 7205) % lg(683150209, -15864187))
assertEquals(lg(992846513, 1385110), lg(992846513, 1385110) % lg(1578961851, -8380578))
assertEquals(lg(1081385155, 4176), lg(1081385155, 4176) % lg(1892231070, 31130825))
assertEquals(lg(-738492748, 8), lg(-738492748, 8) % lg(-431212066, 687916944))
assertEquals(lg(-1448153936, 8101), lg(-1448153936, 8101) % lg(-584523654, -4814205))
assertEquals(lg(-713251055, 243), lg(-713251055, 243) % lg(261411225, 31444708))
assertEquals(lg(881178812, 47057), lg(881178812, 47057) % lg(823893049, -5940358))
assertEquals(lg(-506817388, 0), lg(-506817388, 0) % lg(-465610822, 10559551))
assertEquals(lg(-420315839, 112832), lg(-420315839, 112832) % lg(-686319219, -666166549))
// big % int32
assertEquals(lg(-3, -1), lg(-412174169, -319069709) % lg(-6, -1))
assertEquals(lg(464005, 0), lg(1634601702, 814446468) % lg(825883, 0))
assertEquals(lg(34559370, 0), lg(-1005992901, 2694218) % lg(108493743, 0))
assertEquals(lg(-286379, -1), lg(1534700309, -630528658) % lg(-506616, -1))
assertEquals(lg(-62, -1), lg(-456613426, -23298167) % lg(-206, -1))
assertEquals(lg(386945695, 0), lg(857770611, 2618490) % lg(1225551197, 0))
assertEquals(lg(270232, 0), lg(2127943654, 2768088) % lg(-291653, -1))
assertEquals(lg(277129, 0), lg(1085973072, 3470797) % lg(-29714535, -1))
assertEquals(lg(15, 0), lg(1536124828, 1268901218) % lg(-121, -1))
assertEquals(lg(1, 0), lg(371220141, 34588968) % lg(2, 0))
assertEquals(lg(46669, 0), lg(-1712997009, 187259899) % lg(129274, 0))
assertEquals(lg(-1508, -1), lg(586579000, -243530833) % lg(-31235, -1))
assertEquals(lg(0, 0), lg(1745775262, -400161972) % lg(-1, -1))
assertEquals(lg(-1680, -1), lg(-1564631310, -56487209) % lg(2626, 0))
assertEquals(lg(53, 0), lg(-1848745069, 11533547) % lg(59, 0))
assertEquals(lg(-1699972, -1), lg(-1415791920, -26215621) % lg(-2142359, -1))
assertEquals(lg(-200041, -1), lg(-481609933, -25891343) % lg(483607, 0))
assertEquals(lg(-13123232, -1), lg(-889674017, -4084771) % lg(428648085, 0))
assertEquals(lg(0, 0), lg(1587465684, -367383975) % lg(7, 0))
assertEquals(lg(-4528, -1), lg(811562260, -335104547) % lg(5502, 0))
assertEquals(lg(-71, -1), lg(2107357891, -10075787) % lg(110, 0))
assertEquals(lg(0, 0), lg(-1356326655, 5174156) % lg(-1, -1))
assertEquals(lg(7872112, 0), lg(-1794856776, 3059124) % lg(-29413816, -1))
assertEquals(lg(-37, -1), lg(-1118254374, -3629384) % lg(-85, -1))
assertEquals(lg(14227, 0), lg(288539563, 70814306) % lg(-14561, -1))
assertEquals(lg(-49, -1), lg(-719069745, -128562664) % lg(-256, -1))
assertEquals(lg(6101, 0), lg(1530955727, 15829469) % lg(195494, 0))
assertEquals(lg(-6, -1), lg(2144004402, -5408490) % lg(11, 0))
assertEquals(lg(-137624717, -1), lg(-1766192560, -17443468) % lg(-168087095, -1))
assertEquals(lg(-3592, -1), lg(-524619138, -371121095) % lg(4765, 0))
assertEquals(lg(4335, 0), lg(-1960083221, 176122524) % lg(-5564, -1))
assertEquals(lg(-271754, -1), lg(1528631102, -597885631) % lg(-413908, -1))
assertEquals(lg(-361112, -1), lg(-1513123614, -30582360) % lg(-496311, -1))
assertEquals(lg(-4, -1), lg(-1975522255, -46421733) % lg(29, 0))
assertEquals(lg(414436, 0), lg(-1715879325, 3072313) % lg(438221, 0))
assertEquals(lg(0, 0), lg(-1321015849, -300384564) % lg(1, 0))
assertEquals(lg(-454, -1), lg(-1088390706, -277354665) % lg(-1237, -1))
assertEquals(lg(586891857, 0), lg(-1012773943, 223943652) % lg(707359548, 0))
assertEquals(lg(2, 0), lg(1097288344, 26740237) % lg(-3, -1))
assertEquals(lg(-24053960, -1), lg(-1121404205, -87484234) % lg(80229261, 0))
assertEquals(lg(-79944815, -1), lg(-1503637931, -163703901) % lg(-983334452, -1))
assertEquals(lg(2600110, 0), lg(2012820970, 445991475) % lg(1035472980, 0))
assertEquals(lg(74, 0), lg(2015362538, 2985510) % lg(-148, -1))
assertEquals(lg(0, 0), lg(1764134228, 50881407) % lg(-1, -1))
assertEquals(lg(106, 0), lg(-523555853, 77167937) % lg(-563, -1))
assertEquals(lg(0, 0), lg(1531888651, -2389306) % lg(1, 0))
assertEquals(lg(659, 0), lg(-181277952, 32599207) % lg(-729, -1))
assertEquals(lg(968, 0), lg(223126732, 88838488) % lg(13378, 0))
assertEquals(lg(920991, 0), lg(670834629, 46037187) % lg(922370, 0))
assertEquals(lg(2462152, 0), lg(1098978850, 6541822) % lg(-8405198, -1))
// big % int53
assertEquals(lg(1057995305, 4748), lg(2008672965, 41566313) % lg(313991275, 18390))
assertEquals(lg(-1074209653, 18), lg(1922552561, 28139870) % lg(-2083633557, 19))
assertEquals(lg(1480601143, -11310), lg(843627074, -173776705) % lg(1451117493, 14364))
assertEquals(lg(-691687452, -38), lg(204865470, -6692402) % lg(-645190286, 413))
assertEquals(lg(-1218791457, -31), lg(952830559, -214594684) % lg(-1778162360, 378))
assertEquals(lg(-281609960, -1292), lg(1673740333, -69274846) % lg(-1549261605, 2390))
assertEquals(lg(-860426348, 1), lg(-1276804811, 367022678) % lg(-678111623, 11))
assertEquals(lg(-1244563205, -1264), lg(-1331527548, -33013551) % lg(-1975438267, 2961))
assertEquals(lg(-935830326, 135167), lg(1067523314, 72606174) % lg(-1716982106, 255179))
assertEquals(lg(-2025081444, -42140), lg(-937134490, -32649070) % lg(-804857990, 57507))
assertEquals(lg(85696931, 194), lg(108363299, 1224097478) % lg(1137551776, 281))
assertEquals(lg(-385517902, -5258), lg(-1965834834, -11053948) % lg(-942300324, 6487))
assertEquals(lg(-755355475, 2268), lg(-3151939, 171473802) % lg(-2071379940, 3914))
assertEquals(lg(-676865399, -663), lg(1465781759, -970108425) % lg(-1251607207, 3003))
assertEquals(lg(2042443783, -22321), lg(919308511, -1689158617) % lg(658566728, 36406))
assertEquals(lg(-903837593, 31415), lg(-418485001, 1000432592) % lg(-1653953022, 31957))
assertEquals(lg(496274972, -48207), lg(-880302655, -14116770) % lg(913871933, 118223))
assertEquals(lg(1210119082, -104892), lg(-525597278, -3790314) % lg(2133284776, 127083))
assertEquals(lg(473810731, -5), lg(-393124913, -28106221) % lg(958070140, 159))
assertEquals(lg(-1912903061, 25777), lg(6929245, 2749730) % lg(1462129294, 43237))
assertEquals(lg(1099532724, -19), lg(708024745, -15568245) % lg(1288198049, 56))
assertEquals(lg(920504149, 6836), lg(487601139, 13603229) % lg(723875593, 45021))
assertEquals(lg(1778080723, 29), lg(-2070321133, 115478389) % lg(-1799479616, 75))
assertEquals(lg(-720480381, 2735), lg(-307180735, 3049800) % lg(1043781053, 3319))
assertEquals(lg(1473972065, -1), lg(-1073877839, -6538577) % lg(-1408649838, 0))
assertEquals(lg(-1389255096, -200), lg(-1892822171, -1698321438) % lg(96164237, 514))
assertEquals(lg(857386403, 29656), lg(-674980011, 2764943) % lg(-445529419, 65125))
assertEquals(lg(-419043446, -22164), lg(2003347800, -46928389) % lg(368897711, 128159))
assertEquals(lg(-1599543668, -6569), lg(-1929871429, -241628283) % lg(202358381, 7645))
assertEquals(lg(581185953, 1), lg(419719197, 661188517) % lg(2112360098, 1))
assertEquals(lg(-1880704128, 171407), lg(1092830824, 1600823129) % lg(-1827462760, 172800))
assertEquals(lg(1210159480, -13), lg(-836779994, -27475595) % lg(-417527207, 16))
assertEquals(lg(807846066, 1), lg(-1759597755, 9157722) % lg(-987185779, 1))
assertEquals(lg(949995673, 1), lg(-1097231525, 20092165) % lg(1106421078, 1))
assertEquals(lg(-712450167, 7), lg(390678483, 3835040) % lg(1221250555, 14))
assertEquals(lg(1129531033, -4), lg(-284334384, -18425278) % lg(-1111448031, 6))
assertEquals(lg(2094997010, 3022), lg(-233961390, 53260849) % lg(-613558136, 3663))
assertEquals(lg(-496446555, 540290), lg(-3383211, 8039036) % lg(-1668680584, 749874))
assertEquals(lg(1280740603, -9472), lg(804358887, -189240235) % lg(179665302, 12347))
assertEquals(lg(2127427912, 6), lg(208769744, 280071599) % lg(-325433064, 14))
assertEquals(lg(-722136158, -1), lg(-1527711901, -51564742) % lg(-1019145455, 0))
assertEquals(lg(-1603688570, -2), lg(-159182038, -2145592347) % lg(-483720705, 15))
assertEquals(lg(-256578646, 177817), lg(1059926378, 477886379) % lg(924988992, 543468))
assertEquals(lg(1286157765, 80885), lg(-1800046387, 119696078) % lg(436524799, 94037))
assertEquals(lg(251450065, 19154), lg(-822280387, 44882065) % lg(-940828508, 22947))
assertEquals(lg(1310986115, 209), lg(1465101985, 269803551) % lg(-1953360551, 334))
assertEquals(lg(1436855439, -5), lg(-567675197, -8838663) % lg(1903221047, 6))
assertEquals(lg(296887390, -17), lg(689376065, -22622471) % lg(1534988921, 63))
assertEquals(lg(1577958450, -39), lg(-2017356377, -57717216) % lg(-1390284125, 42))
assertEquals(lg(661387374, 344542), lg(-128715878, 982583003) % lg(2004099318, 988167))
// big % big
assertEquals(lg(-320078007, 205603273), lg(-320078007, 205603273) % lg(2020227799, -360928021))
assertEquals(lg(408769930, -2221999), lg(-800732960, -371808530) % lg(744251542, -11199592))
assertEquals(lg(1575977183, -2441606), lg(-56774921, -32434115) % lg(1413374280, -2726592))
assertEquals(lg(-1897285736, 18894093), lg(1667937500, 228622683) % lg(-243248020, 69909529))
assertEquals(lg(-1333815518, 2097776), lg(-1333815518, 2097776) % lg(-1750106076, 18608702))
assertEquals(lg(-789967161, -4640836), lg(-162800691, -117885498) % lg(-709007774, 8711127))
assertEquals(lg(-1909427145, -2824029), lg(-1909427145, -2824029) % lg(2028036056, -660713154))
assertEquals(lg(14077923, 63046905), lg(14077923, 63046905) % lg(-688765214, 375445962))
assertEquals(lg(272760540, 19525127), lg(272760540, 19525127) % lg(-396955631, 848435537))
assertEquals(lg(-600396362, 406643261), lg(-600396362, 406643261) % lg(-1533973181, 491661310))
assertEquals(lg(1801834226, 200420454), lg(1801834226, 200420454) % lg(-1889418050, -328758068))
assertEquals(lg(361053022, 54544094), lg(1170836790, 510289402) % lg(202445942, 113936327))
assertEquals(lg(1369752396, -3152427), lg(-378923036, -1036580478) % lg(905093048, 5526353))
assertEquals(lg(1458911735, 21273958), lg(-2137034353, 1455139814) % lg(1665353214, 27574343))
assertEquals(lg(-1350216191, -3821167), lg(-1350216191, -3821167) % lg(-1333339390, -4746360))
assertEquals(lg(1166542449, -1370750), lg(-1289646201, -5193401) % lg(1838778646, -3822651))
assertEquals(lg(301867174, 5185218), lg(301867174, 5185218) % lg(157012848, -15464466))
assertEquals(lg(512572633, 48335882), lg(467711834, 155069651) % lg(-44860799, 106733768))
assertEquals(lg(1624269582, 11007763), lg(1624269582, 11007763) % lg(-158694824, -491219717))
assertEquals(lg(-1015519521, -163989350), lg(-1015519521, -163989350) % lg(1652525166, 530116116))
assertEquals(lg(-2127450406, -89864400), lg(2001612518, -452587333) % lg(1115217917, 90680733))
assertEquals(lg(-761803769, -6085789), lg(1039524645, -86121932) % lg(1131434363, 13339357))
assertEquals(lg(-1922291990, 6439098), lg(-1922291990, 6439098) % lg(-1083372307, -20634200))
assertEquals(lg(1508171882, 126457), lg(1408756974, 235847122) % lg(-1813277898, -9066180))
assertEquals(lg(-496706473, -2657930), lg(1121009342, -1533788016) % lg(-1724900447, -5821788))
assertEquals(lg(-1626361260, -113469353), lg(-1626361260, -113469353) % lg(1216987736, -817139415))
assertEquals(lg(-433139577, -182483493), lg(-433139577, -182483493) % lg(1019490766, -595625160))
assertEquals(lg(-1118452074, 1653764), lg(793542905, 198273616) % lg(-82759497, -2621599))
assertEquals(lg(-1199275184, 1262327), lg(425605214, 249789222) % lg(392156278, 6716943))
assertEquals(lg(213473729, 11660532), lg(213473729, 11660532) % lg(-547058106, 894811834))
assertEquals(lg(-1550227391, 2847368), lg(-1550227391, 2847368) % lg(-1996700003, 689370771))
assertEquals(lg(-1014778289, -3747071), lg(-144234222, -54239417) % lg(-1102770075, -7213193))
assertEquals(lg(524484467, 15124083), lg(524484467, 15124083) % lg(-1101379967, -39968226))
assertEquals(lg(-919997306, 2085072), lg(314758022, 5390195) % lg(-1234755328, -3305123))
assertEquals(lg(580679232, -10426812), lg(580679232, -10426812) % lg(-1964013803, -1738507605))
assertEquals(lg(225658926, -4189255), lg(1670083752, -254253193) % lg(722212413, -125031969))
assertEquals(lg(-495749254, -1833207), lg(-1744001445, -5443198) % lg(1248252191, 3609991))
assertEquals(lg(-1481543825, 608612), lg(-1786439869, 137339199) % lg(1821158508, 2909161))
assertEquals(lg(1026706952, -6267613), lg(1273422584, -284542935) % lg(1626032463, -17392208))
assertEquals(lg(-855876173, -4928311), lg(-513801887, -32580141) % lg(-342074286, 27651829))
assertEquals(lg(-1027906958, 55543678), lg(-1027906958, 55543678) % lg(-1936394792, 928937151))
assertEquals(lg(-1793811005, -17787029), lg(251585986, -50474191) % lg(-2045396991, 32687162))
assertEquals(lg(-356034186, -2235041), lg(66679938, -917589429) % lg(2124767660, -3454168))
assertEquals(lg(-924611099, -76507846), lg(-599564184, -209788131) % lg(-325046915, 133280284))
assertEquals(lg(838338995, -12983151), lg(838338995, -12983151) % lg(-842402530, 19411056))
assertEquals(lg(747658762, 18528439), lg(1444498155, 520850879) % lg(851271837, 23920116))
assertEquals(lg(-2028924578, -3124146), lg(2096765386, -117024114) % lg(-1726450785, -5694999))
assertEquals(lg(2056903464, -4954201), lg(-425905039, -180148939) % lg(-1397064581, -15926795))
assertEquals(lg(-2055992988, 596420), lg(-920215872, 219325473) % lg(1357686103, 54682263))
assertEquals(lg(1279110660, -10784541), lg(1279110660, -10784541) % lg(278869448, 758126792))
}
}
class RuntimeLongOldTest {
import RuntimeLong.fromDouble
/** Asserts that the hexadecimal rendering of `l` equals `expected`. */
def assertHexEquals(expected: String, l: RuntimeLong): Unit = {
  // Compare via the hex string so a failure shows the full 64-bit pattern.
  val actualHex = l.toHexString
  assertEquals(expected, actualHex)
}
def fromInt(x: Int): RuntimeLong = new RuntimeLong(x)
// Shared operands for the tests below.
//
// The 3-argument RuntimeLong constructor takes three limbs (low, mid, high):
// value = low + mid * 2^22 + high * 2^44, i.e. 22 + 22 + 20 significant bits.
// This is confirmed by the literal names: 14 + 0*2^22 + 256*2^44 ==
// 4503599627370510, matching `4503599627370510L` below.
val maxInt = fromInt(Int.MaxValue)
val minInt = fromInt(Int.MinValue)
val one = fromInt(1)
val billion = fromInt(1000000000)
val `4503599627370510L` = new RuntimeLong(14, 0, 256)
val `613354684553L` = new RuntimeLong(639113, 146235, 0)
val `9863155567412L` = new RuntimeLong(2247476, 2351559, 0)
val `3632147899696541255L` = new RuntimeLong(1568327, 2954580, 206463)
val `7632147899696541255L` = new RuntimeLong(2616903, 1593290, 433837)
// Long.MinValue: only bit 63 set, which is bit 19 of the high limb (2^19 = 524288).
val minValue = new RuntimeLong(0, 0, 524288)
// Small negatives in two's complement: all-ones limbs (2^22-1, 2^22-1, 2^20-1)
// represent -1; the low limb decreases by one for each step below it.
val minus1 = new RuntimeLong(4194303, 4194303, 1048575)
val minus2 = new RuntimeLong(4194302, 4194303, 1048575)
val minus3 = new RuntimeLong(4194301, 4194303, 1048575)
val minus4 = new RuntimeLong(4194300, 4194303, 1048575)
val minus15 = new RuntimeLong(4194289, 4194303, 1048575)
val minus16 = new RuntimeLong(4194288, 4194303, 1048575)
/** Checks unary negation, including -0 and the Int.MinValue edge case. */
@Test def should_correctly_implement_negation(): Unit = {
  // Each case: (expected hex rendering, operand to negate).
  val cases = Seq(
    ("fffffffffffffffb", fromInt(5)),
    ("0", fromInt(0)),
    // -Int.MinValue == 2^31, which fits in a long as 0x80000000.
    ("80000000", minInt)
  )
  for ((expected, operand) <- cases)
    assertHexEquals(expected, -operand)
}
/** Checks the four ordering operators (<, <=, >, >=) on small values. */
@Test def should_correctly_implement_comparison(): Unit = {
  // Each case: (expected boolean, actual comparison result).
  val cases = Seq(
    (true, fromInt(7) < fromInt(15)),
    (false, fromInt(15) < fromInt(15)),
    (true, fromInt(15) <= fromInt(15)),
    (true, fromInt(14) <= fromInt(15)),
    (false, fromInt(15) > fromInt(15)),
    (false, fromInt(14) > fromInt(15)),
    (true, fromInt(16) > fromInt(15)),
    (true, fromInt(15) >= fromInt(15)),
    (false, fromInt(14) >= fromInt(15)),
    (true, fromInt(16) >= fromInt(15))
  )
  for ((expected, actual) <- cases)
    assertEquals(expected, actual)
}
@Test def should_correctly_implement_addition(): Unit = {
assertHexEquals("16", fromInt(7) + fromInt(15))
assertHexEquals("fffffffe", maxInt + maxInt)
assertHexEquals("80000000", maxInt + one)
}
@Test def should_correctly_implement_subtraction(): Unit = {
assertHexEquals("fffffffffffffff8", fromInt(7) - fromInt(15))
assertHexEquals("0", maxInt - maxInt)
}
@Test def should_correctly_implement_multiplication(): Unit = {
assertHexEquals("69", fromInt(7) * fromInt(15))
assertHexEquals("ffffffffffffff97", fromInt(-7) * fromInt(15))
assertHexEquals("3fffffff00000001", maxInt * maxInt)
assertHexEquals("ffbfffffffffffc8", `4503599627370510L` * fromInt(-4))
}
@Test def should_correctly_implement_division(): Unit = {
assertHexEquals("0", fromInt(7) / fromInt(15))
assertHexEquals("4", fromInt(24) / fromInt(5))
assertHexEquals("fffffffffffffffc", fromInt(24) / fromInt(-5))
assertHexEquals("ffffffffe6666667", maxInt / fromInt(-5))
assertHexEquals("2", maxInt / billion)
assertHexEquals("2", (maxInt+one) / billion)
assertHexEquals("1", minValue / minValue)
assertHexEquals("8000000000000000", minValue / minus1)
assertHexEquals("4000000000000000", minValue / minus2)
assertHexEquals("2aaaaaaaaaaaaaaa", minValue / minus3)
assertHexEquals("2000000000000000", minValue / minus4)
assertHexEquals("888888888888888", minValue / minus15)
assertHexEquals("800000000000000", minValue / minus16)
assertHexEquals("0", `7632147899696541255L` / minValue)
assertHexEquals("961529ec0d5811b9", `7632147899696541255L` / minus1)
assertHexEquals("cb0a94f606ac08dd", `7632147899696541255L` / minus2)
assertHexEquals("dcb1b8a40472b093", `7632147899696541255L` / minus3)
assertHexEquals("e5854a7b0356046f", `7632147899696541255L` / minus4)
assertHexEquals("f8f05820cdb089b7", `7632147899696541255L` / minus15)
assertHexEquals("f961529ec0d5811c", `7632147899696541255L` / minus16)
}
@Test def should_correctly_implement_modulus(): Unit = {
assertHexEquals("7", fromInt(7) % fromInt(15))
assertHexEquals("4", fromInt(24) % fromInt(5))
assertHexEquals("4", fromInt(24) % fromInt(-5))
assertHexEquals("8ca6bff", maxInt % billion)
assertHexEquals("8ca6c00", (maxInt+one) % billion)
assertHexEquals("2", maxInt % fromInt(-5))
assertHexEquals("0", minValue % minValue)
assertHexEquals("0", minValue % minus1)
assertHexEquals("0", minValue % minus2)
assertHexEquals("fffffffffffffffe", minValue % minus3)
assertHexEquals("0", minValue % minus4)
assertHexEquals("fffffffffffffff8", minValue % minus15)
assertHexEquals("0", minValue % minus16)
assertHexEquals("69ead613f2a7ee47", `7632147899696541255L` % minValue)
assertHexEquals("0", `7632147899696541255L` % minus1)
assertHexEquals("1", `7632147899696541255L` % minus2)
assertHexEquals("0", `7632147899696541255L` % minus3)
assertHexEquals("3", `7632147899696541255L` % minus4)
assertHexEquals("0", `7632147899696541255L` % minus15)
assertHexEquals("7", `7632147899696541255L` % minus16)
}
@Test def should_correctly_implement_toString(): Unit = {
assertEquals("2147483647", maxInt.toString)
assertEquals("-50", fromInt(-50).toString)
assertEquals("-1000000000", fromInt(-1000000000).toString)
assertEquals("2147483648", (maxInt+one).toString)
assertEquals("-2147483648", minInt.toString)
}
@Test def should_correctly_implement_fromDouble(): Unit = {
assertHexEquals("4", fromDouble(4.5))
assertHexEquals("fffffffffffffffc", fromDouble(-4.5))
}
@Test def should_correctly_implement_toDouble(): Unit = {
assertEquals(5.0, fromInt(5).toDouble)
assertEquals(2147483648.0, (maxInt+one).toDouble)
}
@Test def should_correctly_implement_numberOfLeadingZeros(): Unit = {
assertEquals(64, fromInt(0).numberOfLeadingZeros)
assertEquals(63, fromInt(1).numberOfLeadingZeros)
assertEquals(0, fromInt(-1).numberOfLeadingZeros)
assertEquals(62, fromInt(2).numberOfLeadingZeros)
}
@Test def should_implement_hashCode_according_to_spec_in_j_l_Long(): Unit = {
assertEquals(0, fromInt(0).hashCode())
assertEquals(55, fromInt(55).hashCode())
assertEquals(11, fromInt(-12).hashCode())
assertEquals(10006548, fromInt(10006548).hashCode())
assertEquals(1098747, fromInt(-1098748).hashCode())
assertEquals(-825638905, `613354684553L`.hashCode())
assertEquals(1910653900, `9863155567412L`.hashCode())
assertEquals(1735398658, `3632147899696541255L`.hashCode())
assertEquals(-1689438124, `7632147899696541255L`.hashCode())
}
}
| lrytz/scala-js | test-suite/js/src/test/scala/org/scalajs/testsuite/jsinterop/RuntimeLongTest.scala | Scala | bsd-3-clause | 152,876 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.sql
import slamdata.Predef._
import argonaut._
import scalaz._, Scalaz._
/**
 * A case-insensitive name: two `CIName`s are equal when their values match
 * ignoring character case, and they hash alike.
 *
 * Lowercasing uses `Locale.ROOT` so that equality and hashing are stable
 * regardless of the JVM's default locale. The previous locale-sensitive
 * `toLowerCase` broke under e.g. the Turkish locale, where "I" lowercases
 * to a dotless ı, making `CIName("ID") != CIName("id")`.
 */
final case class CIName(value: String) {
  override def equals(other: Any) = other match {
    // Compare the ROOT-lowercased forms of both values.
    case CIName(otherValue) =>
      otherValue.toLowerCase(java.util.Locale.ROOT) == value.toLowerCase(java.util.Locale.ROOT)
    case _ => false
  }

  // Must stay consistent with equals: hash the ROOT-lowercased value so that
  // equal names always produce the same hash.
  override def hashCode: Int = value.toLowerCase(java.util.Locale.ROOT).hashCode
}
/** Type-class instances for [[CIName]]. */
object CIName {
  // Equal delegates to CIName's (case-insensitive) equals via equalA.
  implicit val equal: Equal[CIName] = Equal.equalA
  // Show renders the original, case-preserving value.
  implicit val shows: Show[CIName] = Show.shows(s => s.value)
  // JSON encoding of the raw value. NOTE(review): named `codec` but it is
  // encode-only (EncodeJson) — no matching DecodeJson is defined here.
  implicit val codec: EncodeJson[CIName] = EncodeJson.jencode1(_.value)
}
| drostron/quasar | frontend/src/main/scala/quasar/sql/CIName.scala | Scala | apache-2.0 | 1,169 |
package concrete.runner
import concrete.BuildInfo
import cspom.StatisticsManager
/**
 * Sink for the results of a Concrete solver run.
 *
 * Implementations decide how solutions, statistics and errors are emitted
 * (console, file, socket, ...).
 */
trait ConcreteWriter {
  // Final status of the run; starts as Unfinished() and is expected to be
  // set by the runner before disconnect() is called.
  var end: RunnerResult = Unfinished()

  // Most recent solution recorded by solution(), if any.
  // NOTE(review): presumably read back by implementations on disconnect —
  // confirm against concrete subclasses.
  var lastSolution: Option[String] = None

  /** Statistics collected during the run. */
  def stats: StatisticsManager

  /**
   * Records `solution` as the latest one, then forwards it together with
   * the objective values `obj` to [[printSolution]].
   */
  def solution(solution: String, obj: Seq[(String, Any)]): Unit = {
    lastSolution = Some(solution)
    printSolution(solution, obj)
  }

  /** Emits one solution; implemented by concrete writers. */
  def printSolution(solution: String, obj: Seq[(String, Any)]): Unit

  /** Reports a failure of the run. */
  def error(e: Throwable): Unit

  /** Closes the writer using the currently recorded end status. */
  def disconnect(): Unit = disconnect(end)

  /** Closes the writer, reporting `status` as the final run result. */
  def disconnect(status: RunnerResult): Unit

  /** Version string: BuildInfo version suffixed with the build timestamp. */
  def version: String = s"${BuildInfo.version}.${BuildInfo.buildTime}"
}
| concrete-cp/concrete | src/main/scala/concrete/runner/ConcreteWriter.scala | Scala | lgpl-2.1 | 627 |
package net.magik6k.lxcadmin.panel
import net.magik6k.jliblxc.Lxc
import net.magik6k.jwwf.enums.Type
import net.magik6k.jwwf.util.Tab
import net.magik6k.jwwf.widgets.basic.panel.{TabbedPanel, NamedPanel, Panel, Row}
/**
 * Top-level web panel listing the local LXC containers, one tab per
 * container.
 */
class LxcPanel extends Panel(12, 1) {
  // Containers reported by the local LXC installation (via jliblxc).
  val filelist = Lxc.getContainers
  // NOTE(review): debug output left in — consider removing or using a logger.
  filelist.foreach(println(_))
  val systems = filelist

  // NOTE(review): `row` and `containers` are never reassigned here; they
  // could be vals unless external code relies on the setters.
  var row = new Row(2)
  // One Container widget per LXC container, kept for refresh().
  val containerList = systems.map(file => new Container(file))
  // Wrap each container widget in a named, tabbed panel.
  var containers = new TabbedPanel(systems.length,
    containerList.map(container => new Tab(new NamedPanel(container, "Container <b>"+container.name+"</b>", Type.INFO), container.name, Type.SUCCESS)).toArray:_*)
  row.put(containers.asPanel(12))
  put(row)

  /** Refreshes every container tab. */
  def refresh() {
    containerList.foreach(_.refresh())
  }
}
| magik6k/LxcAdmin | src/main/scala/net/magik6k/lxcadmin/panel/LxcPanel.scala | Scala | mit | 748 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import scala.collection.GenSeq
import scala.collection.GenMap
import scala.collection.GenSet
import scala.collection.GenIterable
import scala.collection.GenTraversable
import scala.collection.GenTraversableOnce
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import org.scalactic.Equality
import org.scalactic.TripleEquals._
import Matchers._
/**
 * Verifies that `should ===` / `should !==` select the appropriate implicit
 * [[Equality]] instance for each kind of operand.
 *
 * The recurring pattern: first check default equality; then install a
 * deliberately *inverted* Equality (`areEqual` returns `a != b`) and check
 * that it is (or is not) picked up; finally check that an exact-type
 * instance wins over a more generic or inferred one.
 */
class ShouldTripleEqualsEqualitySpec extends Spec with NonImplicitAssertions {
  object `the should === operator should use the appropriate Equality type class` {

    // An inverted Equality[Unit] flips the outcome of === and !==.
    def `for Any` {
      () should === (())
      () should !== (7)
      implicit val e = new Equality[Unit] {
        def areEqual(a: Unit, b: Any): Boolean = a != b
      }
      () should !== (())
      () should === (7)
    }

    def `for String` {
      "hi" should === ("hi")
      "hi" should !== ("ho")
      implicit val e = new Equality[String] {
        def areEqual(a: String, b: Any): Boolean = a != b
      }
      "hi" should !== ("hi")
      "hi" should === ("ho")
    }

    def `for Numeric` {
      3 should === (3)
      3 should !== (4)
      implicit val e = new Equality[Int] {
        def areEqual(a: Int, b: Any): Boolean = a != b
      }
      3 should !== (3)
      3 should === (4)
    }

    // Resolution rules for Map operands: a plain Equality[GenMap] is NOT
    // selected for a Map, but an inferred (type-parameterized) one or an
    // exact Equality[Map] is; the most specific instance always wins.
    object `for Map` {
      def `with default equality` {
        Map("I" -> 1, "II" -> 2) should === (Map("I" -> 1, "II" -> 2))
        Map("I" -> 1, "II" -> 2) should !== (Map("one" -> 1, "two" -> 2))
        implicit val e = new Equality[GenMap[String, Int]] {
          def areEqual(a: GenMap[String, Int], b: Any): Boolean = a != b
        }
        // The GenMap instance is not applied to Map operands: unchanged.
        Map("I" -> 1, "II" -> 2) should === (Map("I" -> 1, "II" -> 2))
        Map("I" -> 1, "II" -> 2) should !== (Map("one" -> 1, "two" -> 2))
      }
      def `with inferred GenMap equality` {
        implicit def travEq[T <: GenMap[String, Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        Map("I" -> 1, "II" -> 2) should !== (Map("I" -> 1, "II" -> 2))
        Map("I" -> 1, "II" -> 2) should === (Map("one" -> 1, "two" -> 2))
      }
      def `with specific Map equality` {
        implicit val e = new Equality[Map[String, Int]] {
          def areEqual(a: Map[String, Int], b: Any): Boolean = a != b
        }
        Map("I" -> 1, "II" -> 2) should !== (Map("I" -> 1, "II" -> 2))
        Map("I" -> 1, "II" -> 2) should === (Map("one" -> 1, "two" -> 2))
      }
      def `with both GenMap and specific Map equality, though I don't know why this compiles` {
        implicit val e = new Equality[GenMap[String, Int]] {
          def areEqual(a: GenMap[String, Int], b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[Map[String, Int]] { // Should pick the most specific one
          def areEqual(a: Map[String, Int], b: Any): Boolean = a != b
        }
        Map("I" -> 1, "II" -> 2) should !== (Map("I" -> 1, "II" -> 2))
        Map("I" -> 1, "II" -> 2) should === (Map("one" -> 1, "two" -> 2))
      }
      def `with both inferred GenMap and specific Map equality` {
        implicit def travEq[T <: GenMap[String, Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        implicit val e2 = new Equality[Map[String, Int]] { // Should pick the most specific one
          def areEqual(a: Map[String, Int], b: Any): Boolean = a != b
        }
        Map("I" -> 1, "II" -> 2) should !== (Map("I" -> 1, "II" -> 2))
        Map("I" -> 1, "II" -> 2) should === (Map("one" -> 1, "two" -> 2))
      }
    }

    // Same resolution matrix as `for Map`, for the mutable variant.
    object `for mutable.Map` {
      def `with default equality` {
        mutable.Map("I" -> 1, "II" -> 2) should === (mutable.Map("I" -> 1, "II" -> 2))
        mutable.Map("I" -> 1, "II" -> 2) should !== (mutable.Map("one" -> 1, "two" -> 2))
        implicit val e = new Equality[GenMap[String, Int]] {
          def areEqual(a: GenMap[String, Int], b: Any): Boolean = a != b
        }
        mutable.Map("I" -> 1, "II" -> 2) should === (mutable.Map("I" -> 1, "II" -> 2))
        mutable.Map("I" -> 1, "II" -> 2) should !== (mutable.Map("one" -> 1, "two" -> 2))
      }
      def `with inferred GenMap equality` {
        implicit def travEq[T <: GenMap[String, Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        mutable.Map("I" -> 1, "II" -> 2) should !== (mutable.Map("I" -> 1, "II" -> 2))
        mutable.Map("I" -> 1, "II" -> 2) should === (mutable.Map("one" -> 1, "two" -> 2))
      }
      def `with specific mutable.Map equality` {
        implicit val e = new Equality[mutable.Map[String, Int]] {
          def areEqual(a: mutable.Map[String, Int], b: Any): Boolean = a != b
        }
        mutable.Map("I" -> 1, "II" -> 2) should !== (mutable.Map("I" -> 1, "II" -> 2))
        mutable.Map("I" -> 1, "II" -> 2) should === (mutable.Map("one" -> 1, "two" -> 2))
      }
      def `with both GenMap and specific mutable.Map equality, though I don't know why this compiles` {
        implicit val e = new Equality[GenMap[String, Int]] {
          def areEqual(a: GenMap[String, Int], b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[mutable.Map[String, Int]] { // Should pick the most specific one
          def areEqual(a: mutable.Map[String, Int], b: Any): Boolean = a != b
        }
        mutable.Map("I" -> 1, "II" -> 2) should !== (mutable.Map("I" -> 1, "II" -> 2))
        mutable.Map("I" -> 1, "II" -> 2) should === (mutable.Map("one" -> 1, "two" -> 2))
      }
      def `with both inferred GenMap and specific mutable.Map equality` {
        implicit def travEq[T <: GenMap[String, Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        implicit val e2 = new Equality[mutable.Map[String, Int]] { // Should pick the most specific one
          def areEqual(a: mutable.Map[String, Int], b: Any): Boolean = a != b
        }
        mutable.Map("I" -> 1, "II" -> 2) should !== (mutable.Map("I" -> 1, "II" -> 2))
        mutable.Map("I" -> 1, "II" -> 2) should === (mutable.Map("one" -> 1, "two" -> 2))
      }
    }

    def `for AnyRef` {
      case class Person(name: String)
      Person("Joe") should === (Person("Joe"))
      Person("Joe") should !== (Person("Sally"))
      implicit val e = new Equality[Person] {
        def areEqual(a: Person, b: Any): Boolean = a != b
      }
      Person("Joe") should !== (Person("Joe"))
      Person("Joe") should === (Person("Sally"))
    }

    // Resolution rules for Traversable (Set) operands, analogous to Map.
    object `for Traversable` {
      def `with default equality` {
        Set(1, 2, 3) should === (Set(1, 2, 3))
        Set(1, 2, 3) should !== (Set(1, 2, 4))
        implicit val e = new Equality[GenTraversable[Int]] {
          def areEqual(a: GenTraversable[Int], b: Any): Boolean = a != b
        }
        Set(1, 2, 3) should === (Set(1, 2, 3))
        Set(1, 2, 3) should !== (Set(1, 2, 4))
      }
      def `with inferred GenTraversable equality` {
        implicit def travEq[T <: GenTraversable[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        Set(1, 2, 3) should !== (Set(1, 2, 3))
        Set(1, 2, 3) should === (Set(1, 2, 4))
      }
      def `with specific Traversable equality` {
        implicit val e = new Equality[Set[Int]] {
          def areEqual(a: Set[Int], b: Any): Boolean = a != b
        }
        Set(1, 2, 3) should !== (Set(1, 2, 3))
        Set(1, 2, 3) should === (Set(1, 2, 4))
      }
      def `with both GenTraversable and specific Traversable equality` {
        implicit val e = new Equality[GenTraversable[Int]] {
          def areEqual(a: GenTraversable[Int], b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[Set[Int]] { // Should pick the most specific one
          def areEqual(a: Set[Int], b: Any): Boolean = a != b
        }
        Set(1, 2, 3) should !== (Set(1, 2, 3))
        Set(1, 2, 3) should === (Set(1, 2, 4))
      }
      def `with both inferred GenTraversable and specific Traversable equality` {
        implicit def travEq[T <: GenTraversable[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        implicit val e2 = new Equality[Set[Int]] { // Should pick the most specific one
          def areEqual(a: Set[Int], b: Any): Boolean = a != b
        }
        Set(1, 2, 3) should !== (Set(1, 2, 3))
        Set(1, 2, 3) should === (Set(1, 2, 4))
      }
    }

    object `for mutable.Traversable` {
      def `with default equality` {
        mutable.Set(1, 2, 3) should === (mutable.Set(1, 2, 3))
        mutable.Set(1, 2, 3) should !== (mutable.Set(1, 2, 4))
        implicit val e = new Equality[GenTraversable[Int]] {
          def areEqual(a: GenTraversable[Int], b: Any): Boolean = a != b
        }
        mutable.Set(1, 2, 3) should === (mutable.Set(1, 2, 3))
        mutable.Set(1, 2, 3) should !== (mutable.Set(1, 2, 4))
      }
      def `with inferred GenTraversable equality` {
        implicit def travEq[T <: GenTraversable[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        mutable.Set(1, 2, 3) should !== (mutable.Set(1, 2, 3))
        mutable.Set(1, 2, 3) should === (mutable.Set(1, 2, 4))
      }
      def `with specific mutable.Traversable equality` {
        implicit val e = new Equality[mutable.Set[Int]] {
          def areEqual(a: mutable.Set[Int], b: Any): Boolean = a != b
        }
        mutable.Set(1, 2, 3) should !== (mutable.Set(1, 2, 3))
        mutable.Set(1, 2, 3) should === (mutable.Set(1, 2, 4))
      }
      def `with both GenTraversable and specific Traversable equality` {
        implicit val e = new Equality[GenTraversable[Int]] {
          def areEqual(a: GenTraversable[Int], b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[mutable.Set[Int]] { // Should pick the most specific one
          def areEqual(a: mutable.Set[Int], b: Any): Boolean = a != b
        }
        mutable.Set(1, 2, 3) should !== (mutable.Set(1, 2, 3))
        mutable.Set(1, 2, 3) should === (mutable.Set(1, 2, 4))
      }
      def `with both inferred GenTraversable and specific Traversable equality` {
        implicit def travEq[T <: GenTraversable[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        implicit val e2 = new Equality[mutable.Set[Int]] { // Should pick the most specific one
          def areEqual(a: mutable.Set[Int], b: Any): Boolean = a != b
        }
        mutable.Set(1, 2, 3) should !== (mutable.Set(1, 2, 3))
        mutable.Set(1, 2, 3) should === (mutable.Set(1, 2, 4))
      }
    }

    // Same matrix for Java collections (java.util.Set via Collection).
    object `for Java Collection` {
      val javaSet123: java.util.Set[Int] = new java.util.HashSet
      javaSet123.add(1)
      javaSet123.add(2)
      javaSet123.add(3)
      val javaSet124: java.util.Set[Int] = new java.util.HashSet
      javaSet124.add(1)
      javaSet124.add(2)
      javaSet124.add(4)
      def `with default equality` {
        javaSet123 should === (javaSet123)
        javaSet123 should !== (javaSet124)
        implicit val e = new Equality[java.util.Collection[Int]] {
          def areEqual(a: java.util.Collection[Int], b: Any): Boolean = a != b
        }
        javaSet123 should === (javaSet123)
        javaSet123 should !== (javaSet124)
      }
      def `with inferred Collection equality` {
        // implicit val e = new Equality[GenTraversable[Int]] { ... does not and should not compile
        implicit def travEq[T <: java.util.Collection[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        javaSet123 should !== (javaSet123)
        javaSet123 should === (javaSet124)
      }
      def `with specific Collection equality` {
        implicit val e = new Equality[java.util.Set[Int]] {
          def areEqual(a: java.util.Set[Int], b: Any): Boolean = a != b
        }
        javaSet123 should !== (javaSet123)
        javaSet123 should === (javaSet124)
      }
      def `with both Collection and specific Collection equality` {
        implicit val e = new Equality[java.util.Collection[Int]] {
          def areEqual(a: java.util.Collection[Int], b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[java.util.Set[Int]] { // Should pick the most specific one
          def areEqual(a: java.util.Set[Int], b: Any): Boolean = a != b
        }
        javaSet123 should !== (javaSet123)
        javaSet123 should === (javaSet124)
      }
      def `with both inferred Collection and specific Collection equality` {
        implicit def travEq[T <: java.util.Collection[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        implicit val e2 = new Equality[java.util.Set[Int]] { // Should pick the most specific one
          def areEqual(a: java.util.Set[Int], b: Any): Boolean = a != b
        }
        javaSet123 should !== (javaSet123)
        javaSet123 should === (javaSet124)
      }
    }

    object `for Java Map` {
      val javaMap123: java.util.HashMap[String, Int] = new java.util.HashMap
      javaMap123.put("one",1)
      javaMap123.put("two", 2)
      javaMap123.put("three", 3)
      val javaMap124: java.util.HashMap[String, Int] = new java.util.HashMap
      javaMap124.put("one",1)
      javaMap124.put("two", 2)
      javaMap124.put("four", 4)
      def `with default equality` {
        javaMap123 should === (javaMap123)
        javaMap123 should !== (javaMap124)
        implicit val e = new Equality[java.util.Map[String, Int]] {
          def areEqual(a: java.util.Map[String, Int], b: Any): Boolean = a != b
        }
        javaMap123 should === (javaMap123)
        javaMap123 should !== (javaMap124)
      }
      def `with inferred Map equality` {
        implicit def travEq[T <: java.util.Map[String, Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        javaMap123 should !== (javaMap123)
        javaMap123 should === (javaMap124)
      }
      def `with specific HashMap equality` {
        implicit val e = new Equality[java.util.HashMap[String, Int]] {
          def areEqual(a: java.util.HashMap[String, Int], b: Any): Boolean = a != b
        }
        javaMap123 should !== (javaMap123)
        javaMap123 should === (javaMap124)
      }
      def `with both Map and specific HashMap equality` {
        implicit val e = new Equality[java.util.Map[String, Int]] {
          def areEqual(a: java.util.Map[String, Int], b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[java.util.HashMap[String, Int]] { // Should pick this because it is an exact match
          def areEqual(a: java.util.HashMap[String, Int], b: Any): Boolean = a != b
        }
        javaMap123 should !== (javaMap123)
        javaMap123 should === (javaMap124)
      }
      def `with both inferred Map and specific HashMap equality` {
        implicit def travEq[T <: java.util.Map[String, Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        implicit val e2 = new Equality[java.util.HashMap[String, Int]] { // Should pick the most specific one
          def areEqual(a: java.util.HashMap[String, Int], b: Any): Boolean = a != b
        }
        javaMap123 should !== (javaMap123)
        javaMap123 should === (javaMap124)
      }
    }

    object `for Seq` {
      def `with default equality` {
        Vector(1, 2, 3) should === (Vector(1, 2, 3))
        Vector(1, 2, 3) should !== (Vector(1, 2, 4))
      }
      def `with inferred GenSeq equality` {
        implicit def travEq[T <: GenSeq[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        Vector(1, 2, 3) should !== (Vector(1, 2, 3))
        Vector(1, 2, 3) should === (Vector(1, 2, 4))
      }
      def `with specific Seq equality` {
        implicit val e = new Equality[Vector[Int]] {
          def areEqual(a: Vector[Int], b: Any): Boolean = a != b
        }
        Vector(1, 2, 3) should !== (Vector(1, 2, 3))
        Vector(1, 2, 3) should === (Vector(1, 2, 4))
      }
      def `with both GenSeq and specific Seq equality` {
        implicit val e = new Equality[GenSeq[Int]] {
          def areEqual(a: GenSeq[Int], b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[Vector[Int]] { // Should pick the exact one
          def areEqual(a: Vector[Int], b: Any): Boolean = a != b
        }
        Vector(1, 2, 3) should !== (Vector(1, 2, 3))
        Vector(1, 2, 3) should === (Vector(1, 2, 4))
      }
      def `with both inferred GenSeq and specific Seq equality` {
        implicit def travEq[T <: GenSeq[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[Vector[Int]] { // Should pick the exact one
          def areEqual(a: Vector[Int], b: Any): Boolean = a != b
        }
        Vector(1, 2, 3) should !== (Vector(1, 2, 3))
        Vector(1, 2, 3) should === (Vector(1, 2, 4))
      }
    }

    object `for mutable.Seq` {
      def `with default equality` {
        ListBuffer(1, 2, 3) should === (ListBuffer(1, 2, 3))
        ListBuffer(1, 2, 3) should !== (ListBuffer(1, 2, 4))
      }
      def `with inferred GenSeq equality` {
        implicit def travEq[T <: GenSeq[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        ListBuffer(1, 2, 3) should !== (ListBuffer(1, 2, 3))
        ListBuffer(1, 2, 3) should === (ListBuffer(1, 2, 4))
      }
      def `with specific Seq equality` {
        implicit val e = new Equality[ListBuffer[Int]] {
          def areEqual(a: ListBuffer[Int], b: Any): Boolean = a != b
        }
        ListBuffer(1, 2, 3) should !== (ListBuffer(1, 2, 3))
        ListBuffer(1, 2, 3) should === (ListBuffer(1, 2, 4))
      }
      def `with both GenSeq and specific Seq equality` {
        implicit val e = new Equality[GenSeq[Int]] {
          def areEqual(a: GenSeq[Int], b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[ListBuffer[Int]] { // Should pick the exact one
          def areEqual(a: ListBuffer[Int], b: Any): Boolean = a != b
        }
        ListBuffer(1, 2, 3) should !== (ListBuffer(1, 2, 3))
        ListBuffer(1, 2, 3) should === (ListBuffer(1, 2, 4))
      }
      def `with both inferred GenSeq and specific Seq equality` {
        implicit def travEq[T <: GenSeq[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[ListBuffer[Int]] { // Should pick the exact one
          def areEqual(a: ListBuffer[Int], b: Any): Boolean = a != b
        }
        ListBuffer(1, 2, 3) should !== (ListBuffer(1, 2, 3))
        ListBuffer(1, 2, 3) should === (ListBuffer(1, 2, 4))
      }
    }

    // Arrays compare structurally by default; the custom Equality must
    // compare `.deep` since Array's own equals is reference equality.
    def `for Array` {
      Array(1, 2, 3) should === (Array(1, 2, 3))
      Array(1, 2, 3) should !== (Array(1, 2, 4))
      implicit val e = new Equality[Array[Int]] {
        def areEqual(a: Array[Int], b: Any): Boolean = a.deep != b.asInstanceOf[Array[Int]].deep
      }
      Array(1, 2, 3) should !== (Array(1, 2, 3))
      Array(1, 2, 3) should === (Array(1, 2, 4))
    }

    object `for Java List` {
      val javaList123: java.util.List[Int] = new java.util.ArrayList
      javaList123.add(1)
      javaList123.add(2)
      javaList123.add(3)
      val javaList124: java.util.List[Int] = new java.util.ArrayList
      javaList124.add(1)
      javaList124.add(2)
      javaList124.add(4)
      def `with default equality` {
        javaList123 should === (javaList123)
        javaList123 should !== (javaList124)
        implicit val e = new Equality[java.util.Collection[Int]] {
          def areEqual(a: java.util.Collection[Int], b: Any): Boolean = a != b
        }
        javaList123 should === (javaList123)
        javaList123 should !== (javaList124)
      }
      def `with inferred java.util.Collection equality` {
        implicit def travEq[T <: java.util.Collection[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a != b
        }
        javaList123 should !== (javaList123)
        javaList123 should === (javaList124)
      }
      def `with specific java.util.List equality` {
        implicit val e = new Equality[java.util.List[Int]] {
          def areEqual(a: java.util.List[Int], b: Any): Boolean = a != b
        }
        javaList123 should !== (javaList123)
        javaList123 should === (javaList124)
      }
      def `with both java.util.Collection and java.util.List equality` {
        implicit val e = new Equality[java.util.Collection[Int]] {
          def areEqual(a: java.util.Collection[Int], b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[java.util.List[Int]] { // Should pick the exact one
          def areEqual(a: java.util.List[Int], b: Any): Boolean = a != b
        }
        javaList123 should !== (javaList123)
        javaList123 should === (javaList124)
      }
      def `with both inferred java.util.List and specific java.util.List equality` {
        implicit def travEq[T <: java.util.List[Int]] = new Equality[T] {
          def areEqual(a: T, b: Any): Boolean = a == b
        }
        implicit val e2 = new Equality[java.util.List[Int]] { // Should pick the exact one
          def areEqual(a: java.util.List[Int], b: Any): Boolean = a != b
        }
        javaList123 should !== (javaList123)
        javaList123 should === (javaList124)
      }
    }
  }
}
| travisbrown/scalatest | src/test/scala/org/scalatest/ShouldTripleEqualsEqualitySpec.scala | Scala | apache-2.0 | 22,111 |
package reactivemongo.api.collections
import scala.util.{ Failure, Success, Try }
import scala.concurrent.{ ExecutionContext, Future }
import reactivemongo.core.protocol.MongoWireVersion
import reactivemongo.core.errors.GenericDriverException
import reactivemongo.api.SerializationPack
import reactivemongo.api.commands.{
CommandCodecs,
LastError,
MultiBulkWriteResult,
ResolvedCollectionCommand,
WriteConcern,
WriteResult
}
/**
* @define writeConcernParam the [[https://docs.mongodb.com/manual/reference/write-concern/ writer concern]] to be used
* @define orderedParam the [[https://docs.mongodb.com/manual/reference/method/db.collection.insert/#perform-an-unordered-insert ordered]] behaviour
* @define bypassDocumentValidationParam the flag to bypass document validation during the operation
*/
trait InsertOps[P <: SerializationPack with Singleton] {
collection: GenericCollection[P] =>
private object InsertCommand
extends reactivemongo.api.commands.InsertCommand[collection.pack.type] {
val pack: collection.pack.type = collection.pack
}
/**
* @param ordered $orderedParam
* @param writeConcern $writeConcernParam
* @param bypassDocumentValidation $bypassDocumentValidationParam
*/
private[reactivemongo] final def prepareInsert(
ordered: Boolean,
writeConcern: WriteConcern,
bypassDocumentValidation: Boolean): InsertBuilder = {
if (ordered) {
new OrderedInsert(writeConcern, bypassDocumentValidation)
} else {
new UnorderedInsert(writeConcern, bypassDocumentValidation)
}
}
private type InsertCmd = ResolvedCollectionCommand[InsertCommand.Insert]
implicit private lazy val insertWriter: pack.Writer[InsertCmd] = {
val builder = pack.newBuilder
val writeWriteConcern = CommandCodecs.writeWriteConcern(pack)
val writeSession = CommandCodecs.writeSession(builder)
val session = collection.db.session
import builder.{ elementProducer => element }
pack.writer[InsertCmd] { insert =>
import insert.command
val documents = builder.array(command.head, command.tail)
val ordered = builder.boolean(command.ordered)
val elements = Seq.newBuilder[pack.ElementProducer]
elements ++= Seq[pack.ElementProducer](
element("insert", builder.string(insert.collection)),
element("ordered", ordered),
element("documents", documents),
element(
"bypassDocumentValidation",
builder.boolean(command.bypassDocumentValidation)))
session.foreach { s =>
elements ++= writeSession(s)
}
if (!session.exists(_.transaction.isSuccess)) {
// writeConcern is not allowed within a multi-statement transaction
// code=72
elements += element(
"writeConcern", writeWriteConcern(command.writeConcern))
}
builder.document(elements.result())
}
}
/** Builder for insert operations. */
sealed trait InsertBuilder {
//implicit protected def writer: pack.Writer[T]
@inline private def metadata = db.connectionState.metadata
/** The max BSON size, including the size of command envelope */
private lazy val maxBsonSize: Int = {
// Command envelope to compute accurate BSON size limit
val emptyDoc: pack.Document = pack.newBuilder.document(Seq.empty)
val emptyCmd = ResolvedCollectionCommand(
collection.name,
InsertCommand.Insert(
emptyDoc, Seq.empty[pack.Document], ordered, writeConcern, false))
val doc = pack.serialize(emptyCmd, insertWriter)
metadata.maxBsonSize - pack.bsonSize(doc) + pack.bsonSize(emptyDoc)
}
/** $orderedParam */
def ordered: Boolean
/** $writeConcernParam */
def writeConcern: WriteConcern
/** $bypassDocumentValidationParam (default: `false`) */
def bypassDocumentValidation: Boolean
protected def bulkRecover: Option[Exception => Future[WriteResult]]
/**
* Inserts a single document.
*
* {{{
* import scala.concurrent.ExecutionContext.Implicits.global
*
* import reactivemongo.api.bson.BSONDocument
* import reactivemongo.api.bson.collection.BSONCollection
*
* def insertOne(coll: BSONCollection, doc: BSONDocument) = {
* val insert = coll.insert(ordered = true)
*
* insert.one(doc)
* }
* }}}
*/
final def one[T](document: T)(implicit ec: ExecutionContext, writer: pack.Writer[T]): Future[WriteResult] = Future(pack.serialize(document, writer)).flatMap { single =>
execute(Seq(single))
}
/** Inserts many documents, according the ordered behaviour. */
/**
* [[https://docs.mongodb.com/manual/reference/method/db.collection.insertMany/ Inserts many documents]], according the ordered behaviour.
*
* {{{
* import scala.concurrent.ExecutionContext.Implicits.global
*
* import reactivemongo.api.bson.BSONDocument
* import reactivemongo.api.bson.collection.BSONCollection
*
* def insertMany(coll: BSONCollection, docs: Iterable[BSONDocument]) = {
* val insert = coll.insert(ordered = true)
*
* insert.many(docs) // Future[MultiBulkWriteResult]
* }
* }}}
*/
final def many[T](documents: Iterable[T])(implicit ec: ExecutionContext, writer: pack.Writer[T]): Future[MultiBulkWriteResult] = {
val bulkSz = metadata.maxBulkSize
val maxSz = maxBsonSize
for {
docs <- serialize(documents)
res <- {
val bulkProducer = BulkOps.bulks(
docs, maxSz, bulkSz) { pack.bsonSize(_) }
BulkOps.bulkApply[pack.Document, WriteResult](bulkProducer)({ bulk =>
execute(bulk.toSeq)
}, bulkRecover)
}
} yield MultiBulkWriteResult(res)
}
// ---
private def serialize[T](input: Iterable[T])(implicit ec: ExecutionContext, writer: pack.Writer[T]): Future[Iterable[pack.Document]] =
Future.sequence(input.map { v =>
Try(pack.serialize(v, writer)) match {
case Success(v) => Future.successful(v)
case Failure(e) => Future.failed[pack.Document](e)
}
})
// Decodes the server reply of the insert command into a WriteResult.
implicit private val resultReader: pack.Reader[InsertCommand.InsertResult] =
  CommandCodecs.defaultWriteResultReader(pack)

/**
 * Runs one insert command for the given (already serialized) documents.
 * Fails the future when the flattened result is not ok, or when the
 * server is older than MongoDB 2.6 (no insert command support there).
 */
private final def execute(documents: Seq[pack.Document])(implicit ec: ExecutionContext): Future[WriteResult] = documents.headOption match {
  case Some(head) => {
    if (metadata.maxWireVersion >= MongoWireVersion.V26) {
      val cmd = InsertCommand.Insert(
        head, documents.tail, ordered, writeConcern,
        bypassDocumentValidation)

      runCommand(cmd, writePreference).flatMap { wr =>
        val flattened = wr.flatten

        if (!flattened.ok) {
          // was ordered, with one doc => fail if has an error
          Future.failed(WriteResult.lastError(flattened).
            getOrElse[Exception](GenericDriverException(
              s"fails to insert: $documents")))
        } else Future.successful(wr)
      }
    } else { // Mongo < 2.6
      Future.failed[WriteResult](GenericDriverException(
        s"unsupported MongoDB version: $metadata"))
    }
  }

  case _ => Future.successful(WriteResult.empty) // No doc to insert
}
}
// ---
// Recovery for ordered bulks: none — the first failing bulk aborts the batch.
private val orderedRecover = Option.empty[Exception => Future[WriteResult]]

/** Ordered insert builder: bulks are applied sequentially and fail fast. */
private final class OrderedInsert(
  val writeConcern: WriteConcern,
  val bypassDocumentValidation: Boolean) extends InsertBuilder {

  val ordered = true
  val bulkRecover = orderedRecover
}

// Recovery for unordered bulks: a failure is turned into a successful future
// carrying the error result, so the remaining bulks still run.
private val unorderedRecover: Option[Exception => Future[WriteResult]] =
  Some[Exception => Future[WriteResult]] {
    case lastError: WriteResult =>
      Future.successful(lastError)

    // Wrap any other exception into a synthetic LastError result.
    case cause => Future.successful(LastError(
      ok = false,
      errmsg = Option(cause.getMessage),
      code = Option.empty,
      lastOp = Some(2002), // InsertOp
      n = 0,
      singleShard = Option.empty[String],
      updatedExisting = false,
      upserted = Option.empty,
      wnote = Option.empty[WriteConcern.W],
      wtimeout = false,
      waited = Option.empty[Int],
      wtime = Option.empty[Int]))
  }

/** Unordered insert builder: a bulk failure doesn't abort the other bulks. */
private final class UnorderedInsert(
  val writeConcern: WriteConcern,
  val bypassDocumentValidation: Boolean) extends InsertBuilder {

  val ordered = false
  val bulkRecover = unorderedRecover
}
}
| cchantep/ReactiveMongo | driver/src/main/scala/api/collections/InsertOps.scala | Scala | apache-2.0 | 8,513 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import java.util
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.nn.tf._
import com.intel.analytics.bigdl.serialization.Bigdl.{AttrValue, BigDLModule}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils._
import com.intel.analytics.bigdl.utils.serializer._
import com.intel.analytics.bigdl.utils.serializer.converters.DataConverter
import com.intel.analytics.bigdl.utils.tf.Tensorflow
import com.intel.analytics.bigdl.visualization.tensorboard.{FileWriter => TFFileWriter}
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
import scala.reflect.runtime.universe
import scala.language.existentials
import scala.collection.JavaConverters._
import org.tensorflow.framework.GraphDef
/**
* A graph container. The modules in the container are connected as a directed Graph. Each module
* can output one tensor or multiple tensors(as table). The edges between modules in the graph
* define how these tensors are passed. For example, if a module outputs two tensors, you can
* pass these two tensors together to its following module, or pass only one of them
* to its following module. If a tensor in the module output is connected to multiple modules, in
* the back propagation, the gradients from multiple connection will be accumulated. If multiple
* edges point to one module, the tensors from these edges will be stack as a table, then pass to
* that module. In the back propagation, the gradients will be splited based on how the input
* tensors stack.
*
* The graph container has multiple inputs and multiple outputs. The order of the input tensors
* should be same with the order of the input nodes when you construct the graph container. In the
* back propagation, the order of the gradients tensors should be the same with the order of the
* output nodes.
*
* If there's one output, the module output is a tensor. If there're multiple outputs, the module
* output is a table, which is actually an sequence of tensor. The order of the output tensors is
* same with the order of the output modules.
*
* All inputs should be able to connect to outputs through some paths in the graph. It is
* allowed that some successors of the inputs node are not connect to outputs. If so, these nodes
* will be excluded in the computation.
*
* @param inputs input nodes
* @param outputs output nodes
* @param variables an Array of tensor containing all the weights and biases of this graph,
* used when different nodes of this graph may share the same weight or bias.
* @tparam T Numeric type. Only support float/double now
*/
@SerialVersionUID(- 2896121321564992779L)
abstract class Graph[T: ClassTag](
val inputs : Seq[ModuleNode[T]],
private[bigdl] val outputs : Seq[ModuleNode[T]],
private[bigdl] val variables: Option[(Array[Tensor[T]], Array[Tensor[T]])] = None
)(implicit ev: TensorNumeric[T]) extends Container[Activity, Activity, T]{
/**
 * For a multi-tensor output module, some output tensors may not contribute
 * to the final forward result, so their gradient slot is absent in the back
 * propagation. Fill every missing slot of `gradOutput` with a zero tensor
 * shaped like the corresponding output tensor.
 *
 * @param output the module's forward output table
 * @param gradOutput the (possibly sparse) gradient table, mutated in place
 */
protected def addZeroTensorToMissingGradOutput(output: Table, gradOutput: Table): Unit = {
  for (i <- 0 until output.length()) {
    // Table keys are 1-based.
    if (!gradOutput.contains(i + 1)) {
      val shapeSource = output[Tensor[T]](i + 1)
      gradOutput(i + 1) = Tensor(shapeSource.size())
    }
  }
}
// Sums the forward (._2) and backward (._3) components of the per-node
// timing triples into a single (forwardTotal, backwardTotal) pair.
private def calcSumTimesOfAllNodes(
  timesOfAllNodes: Array[(AbstractModule[_ <: Activity, _ <: Activity, T], Long, Long)])
: (Long, Long) = {
  timesOfAllNodes.foldLeft((0L, 0L)) {
    case ((forwardSum, backwardSum), (_, fwd, bwd)) =>
      (forwardSum + fwd, backwardSum + bwd)
  }
}
// Reports the forward/backward time of every sub-node, plus an extra entry
// for this container's own overhead (its total time minus the time spent
// inside sub-modules).
override def getTimes():
Array[(AbstractModule[_ <: Activity, _ <: Activity, T], Long, Long)] = {
  val timesOfAllNodes = this.modules.flatMap(_.getTimes()).toArray
  val (sumForward, sumBackward) = calcSumTimesOfAllNodes(timesOfAllNodes)
  timesOfAllNodes ++ Array((this, this.forwardTime - sumForward, this.backwardTime - sumBackward))
}
/**
 * Weights and gradients of this graph: the shared `variables` pair when one
 * was supplied at construction, otherwise the parameters collected from the
 * sub-modules by the parent container.
 */
override def parameters(): (Array[Tensor[T]], Array[Tensor[T]]) =
  variables.getOrElse(super.parameters())
// todo: expand the graph
override def toGraph(startNodes: ModuleNode[T]*): Graph[T] = this
/**
 * Return the node with the given name. If the name doesn't match any node,
 * a NoSuchElementException is thrown.
 *
 * @param name name of the node to look up
 * @return the matching node
 */
def node(name: String): ModuleNode[T] = {
  // `find` stops at the first match instead of materializing the whole
  // filtered array, and the non-local `return` is gone.
  forwardNodes.find(_.element.getName() == name).getOrElse(
    throw new NoSuchElementException(s"Can not find node with name $name"))
}
// Add a dummy output node, to get a single-ended forward graph, so the nodes
// that the outputs do not depend on are excluded from the computation.
protected val dummyOutput = new ModuleNode[T](new Identity[T]())
outputs.foreach(_ -> dummyOutput)
protected val forwardGraph = dummyOutput.graph(reverse = true)
protected val forwardNodes = forwardGraph.DFS.toArray

populateModules()

// Check all inputs of the graph should be passed in
checkRoots

// Implemented by subclasses: fills `modules` with the nodes actually taking
// part in the computation.
protected def populateModules(): Unit
// Check if the graph is correct
private def checkRoots: Unit = {
  // Collects the names that occur more than once (used in the error message).
  def duplicatedNames(names: Seq[String]): mutable.Set[String] = {
    // BUG FIX: sortWith returns a NEW sequence; the original discarded it
    // and scanned the unsorted input, so only accidentally-adjacent
    // duplicates were reported. Scan the sorted copy instead.
    val sorted = names.sortWith(_ < _)
    val buffer = new mutable.HashSet[String]()
    var i = 1
    while (i < sorted.length) {
      if (sorted(i) == sorted(i - 1)) buffer.add(sorted(i))
      i += 1
    }
    buffer
  }

  require(forwardNodes.map(_.element.getName()).distinct.length == forwardNodes.length,
    s"the name of node in the graph should be unique, but find duplicated name " +
    s"${duplicatedNames(forwardNodes.map(_.element.getName())).mkString(", ")}")

  // Roots are nodes without predecessors (excluding self-feeding and
  // control-dependency nodes); they must match the declared inputs exactly.
  val roots = forwardNodes.filter(_.prevNodes.size == 0)
    .filterNot(_.element.isInstanceOf[WithoutInput])
    .filterNot(_.element.isInstanceOf[ControlDependency[_]])

  val realInputs = inputs.filterNot(_.element.isInstanceOf[WithoutInput])

  require(roots.size == realInputs.length, s"There're ${realInputs.length} inputs, " +
    s"but graph has ${roots.size} roots")

  realInputs.foreach(n =>
    require(roots.contains(n), "inputs and graph roots are not match")
  )
}
// Source of the reversed (gradient) graph; assigned by buildBackwardGraph.
protected var dummyOutputGrad: ModuleNode[T] = _
protected var backwardGraph: DirectedGraph[AbstractModule[Activity, Activity, T]] = _
protected var backwardNodes: Array[Node[AbstractModule[Activity, Activity, T]]] = _

// If the graph will generate gradInput for the input
private var isGradInputAvailable: Array[Boolean] = _

/**
 * Generate backward graph and apply the stopGrad
 */
private[bigdl] def buildBackwardGraph(): this.type = {
  // Clone the forward graph and reverse the edge
  val gradGraph = forwardGraph.cloneGraph(reverseEdge = true)
  dummyOutputGrad = gradGraph.source
  // Prune every subgraph behind a stop-gradient layer.
  gradGraph.DFS.filter(x => isStopGradient(x.element)).foreach(removeStopNodes(_))
  backwardNodes = gradGraph.DFS
    .filterNot(_.eq(dummyOutputGrad))
    .filterNot(_.element.isInstanceOf[ControlDependency[_]]).toArray

  val inputNames = inputs.map(_.element.getName()).toSet
  val dummyBackwardEnd = Identity().inputs()
  // Backward targets: nodes with trainable parameters, plus the graph inputs.
  val backwardTargets = backwardNodes
    .filter(n => (n.element.parameters() != null && n.element.parameters()._1.length != 0)
      || inputNames.contains(n.element.getName()))

  backwardTargets.foreach(_ -> dummyBackwardEnd)
  backwardGraph = dummyBackwardEnd.graph(true)

  // Check if gradInput is empty for each input: an input only receives a
  // gradient when it is reachable in the backward graph and not stopped.
  isGradInputAvailable = inputs.map(_ => false).toArray
  backwardGraph.DFS.foreach(curNode => {
    inputs.zipWithIndex.map { case (n, i) =>
      if (curNode.element.getName() == n.element.getName() && !isStopGradient(n.element)) {
        isGradInputAvailable(i) = true
      }
    }
  })

  clearState()
  this
}
// Names of layers whose input gradient is not propagated back
// (null until stopGradient is first called).
private var stopGradientLayers: util.HashSet[String] = _

/** Returns the stop-gradient layer names; may be null when none were set. */
def getStopGradientLayers(): util.HashSet[String] = stopGradientLayers

/**
 * Whether to stop propagating gradInput back through the given module.
 * @return true when the module's name was registered via stopGradient
 */
protected def isStopGradient(module: AbstractModule[_ <: Activity, _ <: Activity, T]): Boolean = {
  null != stopGradientLayers && stopGradientLayers.contains(module.getName())
}
/**
 * Stop the input gradient of the layers matching the given `names`:
 * their input gradient is not computed, and they no longer contribute
 * to the input-gradient computation of layers depending on them.
 *
 * @param names an array of layer names
 * @return current graph model
 */
def stopGradient(names: Array[String]): this.type = {
  // Lazily allocate the set on first use.
  if (stopGradientLayers == null) stopGradientLayers = new util.HashSet[String]()

  names.foreach(name => {
    val layer = this (name)
    require(layer.isDefined, s"cannot find layer match ${name}")
    stopGradientLayers.add(layer.get.getName())
  })

  // Rebuild the backward graph so the stopped paths are pruned.
  buildBackwardGraph()
  this
}
/**
 * Set the layers matching the given `names` to be "freezed": their
 * parameters (weight/bias, if any) are not changed during training.
 * This is done by forcing both learning-rate scales to zero.
 *
 * @param names an array of layer names
 * @return current graph model
 */
def freeze(names: Array[String]): this.type = {
  for (name <- names) {
    val found = this (name)
    require(found.isDefined, s"cannot find layer match ${name}")
    val layer = found.get
    layer.setScaleW(0)
    layer.setScaleB(0)
  }
  this
}
// Detaches `n` from the graph: drops all its outgoing edges, then recursively
// removes any successor that became unreachable (no remaining predecessors).
private[bigdl] def removeStopNodes(n: Node[_]): Unit = {
  val nodes = n.nextNodes
  n.removeNextEdges()
  nodes.filter(_.prevNodes.length == 0).foreach(removeStopNodes(_))
}
// Extracts this node's share of the graph input: the whole activity for a
// single-input graph, otherwise the table entry matching the node's position
// in the declared `inputs` (1-based table index).
protected def getInput(
  node: Node[AbstractModule[Activity, Activity, T]],
  input: Activity
): Activity = {
  if (inputs.length == 1) {
    require(inputs(0).eq(node), "input node is not in the input list")
    input
  } else {
    val i = inputs.indexOf(node)
    require(i != -1, "input node is not in the input list")
    input.toTable[Tensor[T]](i + 1)
  }
}
// Resolves the input activity for `node`: the graph input for root nodes,
// otherwise the (possibly indexed) outputs of its predecessors, packed into
// a table when there is more than one. Returns null for WithoutInput nodes.
protected def findInput(node: ModuleNode[T], input: Activity): Activity = {
  if (node.element.isInstanceOf[WithoutInput]) return null

  val nodeInput = if (node.prevNodes.isEmpty) {
    getInput(node, input)
  } else {
    val prevActivities = node.prevNodesAndEdges
      .filterNot(n => n._1.element.isInstanceOf[ControlDependency[T]])
      .map(n => {
        n._2.fromIndex match {
          case Some(i) =>
            // An edge may select a single slot of a table output; index 1 of
            // a tensor output is the tensor itself.
            if (n._1.element.output == null || (i == 1 && n._1.element.output.isTensor)) {
              n._1.element.output
            } else {
              n._1.element.output.toTable.apply[Activity](i)
            }
          case None => n._1.element.output
        }
      })
    if (prevActivities.length == 1) {
      prevActivities.head
    } else {
      T.seq(prevActivities)
    }
  }
  nodeInput
}
// Accumulates the gradOutput for `curNode` from the gradInputs of the nodes
// it feeds. Note: edges are reversed in the gradient graph, so consumers show
// up as prevNodes here. Missing table slots are zero-filled at the end.
protected def findGradOutput(curNode: ModuleNode[T], gradOutput: Activity): Activity = {
  var curGradOutput : Activity = if (curNode.eq(dummyOutputGrad)) gradOutput else null

  curNode.prevNodesAndEdges.filterNot(n => n._1.element.isInstanceOf[ControlDependency[T]])
    .foreach(n => {
      // Gradient from one consumer, possibly a single slot of its table.
      val otherActivity = if (n._1.element.gradInput.isTensor || n._1.nextEdges.length == 1) {
        n._1.element.gradInput
      } else {
        val index = n._1.nextEdges.indexOf(n._2) + 1
        n._1.element.gradInput.toTable.apply[Activity](index)
      }

      n._2.fromIndex match {
        case Some(i) =>
          if (i == 1 && curNode.element.output.isTensor) {
            curGradOutput = accActivity(curGradOutput, otherActivity)
          } else {
            // Gradient targets one slot of a table output: accumulate there.
            if (curNode.element.output.isTable && curGradOutput == null) {
              curGradOutput = T()
            }
            val curActivity = curGradOutput.toTable.getOrElse[Activity](i, null)
            curGradOutput.toTable(i) = accActivity(curActivity, otherActivity)
          }
        case None =>
          curGradOutput = accActivity(curGradOutput, otherActivity)
      }
    })

  if (curNode.element.output.isTable) {
    addZeroTensorToMissingGradOutput(curNode.element.output.toTable, curGradOutput.toTable)
  }

  curGradOutput
}
/**
 * Collects the gradInput of the whole graph from its input nodes.
 * Inputs whose gradient is unavailable (e.g. behind a stopGradient)
 * contribute an empty gradInput placeholder instead.
 */
protected def fetchModelGradInput(): Activity = {
  if (inputs.length == 1) {
    if (isGradInputAvailable.head) {
      inputs.head.element.gradInput
    } else {
      Activity.emptyGradInput(this.getName())
    }
  } else {
    // The dead `var i = 0` of the original is removed: it was shadowed by
    // the pattern variable `i` below and never read.
    T.seq(inputs.zipWithIndex.map{ case(n, i) =>
      if (isGradInputAvailable(i)) {
        n.element.gradInput
      } else {
        Activity.emptyGradInput(this.getName())
      }
    })
  }
}
// Clears the stop-gradient set, unfreezes all layers and rebuilds the
// backward graph so the pruned paths are restored.
override def reset(): Unit = {
  if (null != stopGradientLayers) stopGradientLayers.clear()
  unFreeze()
  buildBackwardGraph()
}
/**
 * Get forward executions; the dummy output node is filtered out.
 *
 * This method outputs an UNSORTED list of executions.
 * @return the forward nodes excluding the dummy output
 */
def getForwardExecutions(): Array[Node[AbstractModule[Activity, Activity, T]]] = {
  forwardNodes.filterNot(_.eq(dummyOutput))
}
/**
 * Get forward executions; the dummy node and control-dependency nodes
 * are filtered out.
 *
 * This method outputs a topologically SORTED list of executions. If the
 * graph contains a loop, it will throw an exception.
 * @return sorted forward nodes, inputs first
 */
def getSortedForwardExecutions(): Array[ModuleNode[T]] = {
  forwardGraph.topologySort
    // todo: convert control dep node to edge
    .filterNot(_.element.isInstanceOf[ControlDependency[T]]).reverse
    .filter(n => !n.eq(dummyOutput))
}
/**
 * Accumulates `other` into `activity`: element-wise add for tensors,
 * recursive merge for tables. Returns the merged activity (which is
 * `other` itself when `activity` was null).
 */
@inline
protected def accActivity(activity: Activity, other: Activity): Activity = {
  if (activity == null) {
    other
  } else {
    if (other.isTensor) {
      require(activity.isTensor, "Cannot add a table to a tensor")
      activity.toTensor[T].add(other.toTensor[T])
    } else {
      // if 'activity' and 'other' are both table, we need to merge 'other' to 'activity'
      // if 'other' and 'activity' both contains the index, update 'activity' by sum
      // if 'other' contains the index while 'activity' does not,
      // just insert the corresponding tensor of 'other' to 'activity'
      val actTable = activity.toTable
      val otherTable = other.toTable
      otherTable.keySet.foreach(index => {
        if (actTable.contains(index)) {
          accActivity(actTable[Activity](index), otherTable[Activity](index))
        } else {
          actTable.insert(index.asInstanceOf[Int], otherTable(index))
        }
      })
      actTable
    }
  }
}
/**
 * Save current model graph to a folder, which can be displayed in
 * tensorboard by running: tensorboard --logdir logPath
 *
 * @param logPath log directory consumed by tensorboard
 * @param backward Draw backward graph instead of forward
 *        (NOTE(review): presumably only valid after buildBackwardGraph()
 *        has run, since backwardNodes is populated there — confirm)
 * @return this graph, for chaining
 */
def saveGraphTopology(logPath: String, backward: Boolean = false): this.type = {
  val writer = new TFFileWriter(logPath)
  val graphBuilder = GraphDef.newBuilder()
  // Dump every real node; the dummy end-point of each direction is skipped.
  val nodes = if (backward) {
    backwardNodes.filter(n => !n.eq(dummyOutputGrad))
  } else {
    forwardNodes.filter(n => !n.eq(dummyOutput))
  }
  nodes.map(m => {
    val nodeDef = Tensorflow.bigdlModule(m.element, m.prevNodes.map(_.element.getName()).asJava)
    graphBuilder.addNode(nodeDef)
  })

  writer.addGraphDef(graphBuilder.build())
  writer.close()
  this
}
/**
 * Clear the original module list and reset it with the modules in the graph.
 */
def resetModules(): Unit = {
  modules.clear()
  modules.appendAll(forwardGraph.DFS.toArray
    .filterNot(_.element.isInstanceOf[ControlDependency[T]])
    .filter(n => !n.eq(dummyOutput)).map(_.element)
    // Some tests compare the parameters between sequential and graph; adding
    // a reverse makes it easier to compare
    .reverse
  )
}
}
object Graph extends GraphSerializable {
/**
* Node for graph container. The module should have a tensor/table input while a tensor output
* @tparam T
*/
type ModuleNode[T] = Node[AbstractModule[Activity, Activity, T]]
/**
* Build multiple inputs, multiple outputs graph container.
* @param input input node
* @param output output node
* @return a graph container
*/
def apply[T: ClassTag](
input: Array[ModuleNode[T]],
output: Array[ModuleNode[T]],
variables: Option[(Array[Tensor[T]], Array[Tensor[T]])] = None
)(implicit ev: TensorNumeric[T]): Graph[T] = {
new StaticGraph[T](input, output, variables)
}
def apply[T: ClassTag](preprocessor: Module[T], trainable: Module[T])
(implicit ev: TensorNumeric[T]): Graph[T] = {
val preprocessorNode = preprocessor.inputs()
val stopGradients = Identity[T]().inputs(preprocessorNode)
val trainableNode = trainable.inputs(stopGradients)
val graph = apply[T](preprocessorNode, trainableNode)
graph.stopGradient(Array(stopGradients.element.getName()))
graph
}
private[bigdl] def dynamic[T: ClassTag](
input : Array[ModuleNode[T]],
output : Array[ModuleNode[T]],
variables: Option[(Array[Tensor[T]], Array[Tensor[T]])] = None,
generateBackward: Boolean = true)(implicit ev: TensorNumeric[T]): Graph[T] = {
new DynamicGraph[T](input, output, variables, generateBackward)
}
/**
* Build a single input, multiple outputs graph container
* @param input input node
* @param output output nodes
* @return a graph container
*/
def apply[T: ClassTag](input: ModuleNode[T], output: Array[ModuleNode[T]])
(implicit ev: TensorNumeric[T]): Graph[T] = {
new StaticGraph[T](Seq(input), output)
}
private[bigdl] def dynamic[T: ClassTag](input : ModuleNode[T], output : Array[ModuleNode[T]])
(implicit ev: TensorNumeric[T]) : Graph[T] = {
new DynamicGraph[T](Array(input), output, None, true)
}
/**
* Build a multiple inputs, single output graph container
* @param input input nodes
* @param output output node
* @return a graph container
*/
def apply[T: ClassTag](input: Array[ModuleNode[T]], output: ModuleNode[T])
(implicit ev: TensorNumeric[T]): Graph[T] = {
new StaticGraph[T](input, Seq(output))
}
private[bigdl] def dynamic[T: ClassTag](input : Array[ModuleNode[T]], output : ModuleNode[T])
(implicit ev: TensorNumeric[T]) : Graph[T] = {
new DynamicGraph[T](input, Array(output), None, true)
}
/**
* Build a single input, single output graph container
* @param input input nodes
* @param output output nodes
* @return a graph container
*/
def apply[T: ClassTag](input: ModuleNode[T], output: ModuleNode[T])
(implicit ev: TensorNumeric[T]): Graph[T] = {
new StaticGraph[T](Seq(input), Seq(output))
}
private[bigdl] def dynamic[T: ClassTag](input : ModuleNode[T], output : ModuleNode[T])
(implicit ev: TensorNumeric[T]) : Graph[T] = {
new DynamicGraph[T](Array(input), Array(output), None, true)
}
}
trait GraphSerializable extends ContainerSerializable {
private[bigdl] def prepareLoadModule[T: ClassTag](context: DeserializeContext)
(implicit ev: TensorNumeric[T]) = {
val module = context.bigdlModule
val subModules = module.getSubModulesList.asScala
val attributes = module.getAttrMap
val inputNames = new ArrayBuffer[String]
val outputNames = new ArrayBuffer[String]
DataConverter.getAttributeValue(context, attributes.get("inputNames"))
.asInstanceOf[Array[String]].map(name => inputNames.append(name))
DataConverter.getAttributeValue(context, attributes.get("outputNames"))
.asInstanceOf[Array[String]].map(name => outputNames.append(name))
val inputs = new ArrayBuffer[ModuleNode[T]]
val outputs = new ArrayBuffer[ModuleNode[T]]
// layer name to layer node mapping
val layerMap = new mutable.HashMap[String, (ModuleNode[T], Seq[String])]()
subModules.foreach(subModule => {
val bigDLModule = ModuleSerializer.load(DeserializeContext(subModule,
context.storages, context.storageType))
val moduleNode = bigDLModule.module match {
case controlOps: ControlOps[T] => createControlNode(controlOps)
case _ => new ModuleNode[T](bigDLModule.module)
}
val preNodes = bigDLModule.pre
layerMap(bigDLModule.module.getName) = (moduleNode, preNodes)
})
layerMap.values.foreach(moduleNode => {
val edges = DataConverter.getAttributeValue(context,
attributes.get(s"${moduleNode._1.element.getName}_edges")).
asInstanceOf[mutable.HashMap[String, mutable.HashMap[String, Int]]]
val edgeMap = edges.get(moduleNode._1.element.getName).get
moduleNode._2.foreach(pre => {
if (layerMap.contains(pre)) {
val edge: Edge = edgeMap.get(pre).get match {
case -1 => Edge()
case index: Int => Edge(index)
}
layerMap(pre)._1.add(moduleNode._1, edge)
}
})
})
inputNames.foreach(inputName => inputs.append(layerMap(inputName)._1))
outputNames.foreach(outputName => outputs.append(layerMap(outputName)._1))
var sharedVariables: Option[(Array[Tensor[T]], Array[Tensor[T]])] = None
if (attributes.containsKey("sharedWeight") && attributes.containsKey("sharedBias")) {
val weights = attributes.get("sharedWeight")
val biases = attributes.get("sharedBias")
val weightArray = DataConverter.getAttributeValue(context, weights)
.asInstanceOf[Array[Tensor[T]]]
val biasArray = DataConverter.getAttributeValue(context, biases)
.asInstanceOf[Array[Tensor[T]]]
sharedVariables = Some(weightArray, biasArray)
}
val generateBackwardValue = attributes.get("generateBackward")
(module, inputs, outputs, generateBackwardValue, sharedVariables)
}
override def doLoadModule[T: ClassTag](context: DeserializeContext)
(implicit ev: TensorNumeric[T]): AbstractModule[Activity, Activity, T] = {
val (module, inputs, outputs, generateBackwardValue, sharedVariables) =
prepareLoadModule(context)
val attributes = module.getAttrMap
val graph = if (generateBackwardValue != null) {
val generateBackward = DataConverter.getAttributeValue(context, generateBackwardValue)
.asInstanceOf[Boolean]
Graph.dynamic[T](inputs.toArray, outputs.toArray, sharedVariables, generateBackward)
} else {
new StaticGraph[T](inputs, outputs, sharedVariables, false)
}
var serializedStopGradientLayers : Array[String] = null
// this is to keep backward compatible
if (attributes.containsKey("stopGradientLayers")) {
val stopGradientLayers = attributes.get("stopGradientLayers")
serializedStopGradientLayers = DataConverter.
getAttributeValue(context, stopGradientLayers).asInstanceOf[Array[String]]
}
if (serializedStopGradientLayers != null) {
graph.stopGradient(serializedStopGradientLayers)
}
graph
}
private def createControlNode[T: ClassTag](controlOps: ControlOps[T]): ModuleNode[T] = {
controlOps match {
case switchOps: SwitchOps[T] => new SwitchControlNode[Module[T]](switchOps)
case mergeOps: MergeOps[T] => new MergeControlNode[Module[T]](mergeOps)
case _ => new Node[Module[T]](controlOps)
}
}
override def doSerializeModule[T: ClassTag](context: SerializeContext[T],
graphBuilder: BigDLModule.Builder)
(implicit ev: TensorNumeric[T]): Unit = {
val module = context.moduleData
module.next.foreach(_ => graphBuilder.addAllPreModules(_))
module.pre.foreach(_ => graphBuilder.addAllNextModules(_))
val graph = module.module.asInstanceOf[Graph[T]]
val inputsNames = graph.inputs.map(_.element.getName).toArray
val outputsNames = graph.outputs.map(_.element.getName).toArray
graph.getForwardExecutions.foreach(execution => {
val edgeMap = new mutable.HashMap[String, mutable.Map[String, Int]]
val preNodesAndEdges = execution.prevNodesAndEdges
val preNodes = preNodesAndEdges.map(_._1.element.getName)
val nextNodes = preNodesAndEdges.map(_._1.element.getName)
val currNode = execution.element
.asInstanceOf[AbstractModule[Activity, Activity, T]]
val subModel = ModuleSerializer.serialize(SerializeContext(
ModuleData(currNode, preNodes, nextNodes), context.storages, context.storageType))
// add edges
val preNodeEdges = new mutable.HashMap[String, Int]()
preNodesAndEdges.foreach(pre => {
val preNodeName = pre._1.element.getName
val preEdgeIndex = pre._2.fromIndex match {
case Some(i) => i
case None => -1
}
preNodeEdges(preNodeName) = preEdgeIndex
})
edgeMap(execution.element.getName) = preNodeEdges
val attriBulder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, attriBulder, edgeMap)
graphBuilder.putAttr(s"${execution.element.getName}_edges", attriBulder.build)
graphBuilder.addSubModules(subModel.bigDLModule)
})
if (graph.variables.isDefined) {
val (weights, bias) = graph.variables.get
val weightAttrBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, weightAttrBuilder, weights,
universe.typeOf[Array[Tensor[_ <: scala.Any]]])
graphBuilder.putAttr("sharedWeight", weightAttrBuilder.build)
val biasAttrBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, biasAttrBuilder, bias,
universe.typeOf[Array[Tensor[_ <: scala.Any]]])
graphBuilder.putAttr("sharedBias", biasAttrBuilder.build)
}
val inputNamesAttrBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, inputNamesAttrBuilder,
inputsNames, universe.typeOf[Array[String]])
graphBuilder.putAttr("inputNames", inputNamesAttrBuilder.build)
val outputNamesBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, outputNamesBuilder,
outputsNames, universe.typeOf[Array[String]])
graphBuilder.putAttr("outputNames", outputNamesBuilder.build)
if (graph.isInstanceOf[DynamicGraph[_]]) {
val generateBackwardBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, generateBackwardBuilder,
graph.asInstanceOf[DynamicGraph[_]].generateBackward, universe.typeOf[Boolean])
graphBuilder.putAttr("generateBackward", generateBackwardBuilder.build)
}
val stopGradientLayers = graph.getStopGradientLayers
if (stopGradientLayers != null && stopGradientLayers.size > 0) {
val stopGradientLayersBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, stopGradientLayersBuilder,
stopGradientLayers.toArray(new Array[String](stopGradientLayers.size)),
universe.typeOf[Array[String]])
graphBuilder.putAttr("stopGradientLayers", stopGradientLayersBuilder.build)
}
}
}
| yiheng/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Graph.scala | Scala | apache-2.0 | 28,254 |
package test.ch.bsisa.hyperbird.util
import org.specs2.mutable._
/**
 * Base parent test class to have some test constants
 * or utilities defined in a single place.
 *
 * @author Patrick Refondini
 */
class BaseSerialisationSpec extends Specification {
  // Directory holding read-only test fixtures.
  val TestResourcesDir = "./test/resources/"
  // Directory where tests write their output.
  val TestResultsDir = "./target/test/results/"
  // Check whether TestResultsDir exists and creates it otherwise
  val testResultDir = new java.io.File(TestResultsDir)
  if (!testResultDir.exists()) { testResultDir.mkdirs() }
}
| bsisa/hb-api | test/test/ch/bsisa/hyperbird/util/BaseSerialisationSpec.scala | Scala | gpl-2.0 | 534 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.tensorflow.syntax
trait Syntax extends ScioContextSyntax with SCollectionSyntax with FileStorageSyntax
| spotify/scio | scio-tensorflow/src/main/scala/com/spotify/scio/tensorflow/syntax/Syntax.scala | Scala | apache-2.0 | 729 |
package frdomain.ch3
package repository
import util.Try
/**
 * Minimal repository abstraction: look up by id and store, both returning
 * Try so failures are captured as values instead of thrown exceptions.
 *
 * @tparam A the entity type
 * @tparam IdType the identifier type
 */
trait Repository[A, IdType] {
  // Success(None) means "not found"; Failure carries the lookup error.
  def query(id: IdType): Try[Option[A]]
  // Persists the entity, echoing it back on success.
  def store(a: A): Try[A]
}
| debasishg/frdomain | src/main/scala/frdomain/ch3/repository/Repository.scala | Scala | apache-2.0 | 156 |
package lv.ddgatve.screenscrapers.web
import scala.io.Source
import scala.xml.XML
import scala.util.matching.Regex
import java.security.MessageDigest
import java.io.File
import org.apache.commons.io.FileUtils
/**
* uniqueExtractors - names, their regexes and group numbers that represent single fields in the document
* tableExtractor - regex to extract the main data table - it matches the whole expression
* tableFields - all the column names (i.e. names for TDs in their natural order)
* tidyTable - regexes, how many times to replace and replacement strings.
*
*/
case class Downloader(cachePath: String) {
/**
 * Computes the MD5 digest of `arg` and renders it as an upper-case
 * 32-character hexadecimal string.
 */
def getMD5String(arg: String): String = {
  val hex = Array('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F')
  val bytes = MessageDigest.getInstance("MD5").digest(arg.getBytes)
  // `x & 240` and `x & 15` are already non-negative for negative bytes, so
  // the original's unused `posBytes` sign-correction is removed as dead code.
  val result = bytes map (x => List(hex((x & 240) / 16), hex(x & 15)))
  new String(result.flatten)
}
/**
 * Fetches the content stored at `url`.
 *
 * @param url which URL to download
 * @param fromWeb if true, fetch over the network (throttled by a 1 second
 *        sleep) and cache the body under an MD5-derived file name in
 *        `cachePath`; if false, read the previously cached copy instead.
 * @return the full string stored at the URL
 */
def download(url: String, fromWeb: Boolean): String = {
  if (fromWeb) {
    println("Downloader.download url=" + url)
    Thread.sleep(1000)
    // Close the Source after reading: the original leaked the underlying
    // stream by never calling close().
    val html = Source.fromURL(url)
    val s = try html.mkString finally html.close()
    val fName = getMD5String(url) + ".html"
    FileUtils.writeStringToFile(new File(cachePath, fName), s, "UTF-8")
    s
  } else {
    val file = new File(cachePath, getMD5String(url) + ".html")
    FileUtils.readFileToString(file)
  }
}

/** Downloads from the web only when no cached copy exists yet. */
def download(url: String): String = {
  val file = new File(cachePath, getMD5String(url) + ".html")
  download(url, !file.exists())
}
} | kapsitis/ddgatve-screenscrapers | src/main/scala/lv/ddgatve/screenscrapers/web/Downloader.scala | Scala | cc0-1.0 | 1,721 |
package scalanlp.optimize
import scalanlp.stats.distributions.Rand
/**
* A diff function that supports subsets of the data. By default it evaluates on all the data
*/
trait BatchDiffFunction[T] extends DiffFunction[T] with ((T,IndexedSeq[Int])=>Double) { outer =>
/**
 * Calculates the gradient of the function on a subset of the data
 */
def gradientAt(x:T, batch: IndexedSeq[Int]) : T = calculate(x,batch)._2;
/**
 * Calculates the value of the function on a subset of the data
 */
def valueAt(x:T, batch: IndexedSeq[Int]) : Double = calculate(x,batch)._1
/**
 * Calculates the value and gradient of the function on a subset of the data;
 */
def calculate(x:T, batch: IndexedSeq[Int]): (Double,T)

// Full-data overloads delegate to the batch versions over the whole range.
override def calculate(x:T):(Double,T) = calculate(x,fullRange);
override def valueAt(x:T):Double = valueAt(x,fullRange)
override def gradientAt(x:T):T = gradientAt(x,fullRange)

// Applying the function to (point, batch) yields the batch value.
def apply(x:T, batch:IndexedSeq[Int]) = valueAt(x,batch);

/**
 * The full size of the data
 */
def fullRange: IndexedSeq[Int];

/** Stochastic view that draws a fresh random subset of `size` per call. */
def withRandomBatches(size: Int):StochasticDiffFunction[T] = new StochasticDiffFunction[T] {
  val rand = Rand.subsetsOfSize(fullRange,size)
  def calculate(x: T) = outer.calculate(x, rand.get)
}

/** Stochastic view scanning the data in fixed-size windows, wrapping around. */
def withScanningBatches(size: Int):StochasticDiffFunction[T] = new StochasticDiffFunction[T] {
  var lastStop = 0
  def nextBatch = synchronized {
    val start = lastStop
    lastStop += size
    lastStop %= fullRange.size
    // Indices wrap modulo the data size, so a window may span the end/start.
    Array.tabulate(size)(i => fullRange((i+start)%fullRange.size))
  }
  def calculate(x: T) = outer.calculate(x, nextBatch)
}
} | MLnick/scalanlp-core | learn/src/main/scala/scalanlp/optimize/BatchDiffFunction.scala | Scala | apache-2.0 | 1,620 |
/**
* Copyright 2015 Devon Miller
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package im
package vdom
import scala.concurrent.duration._
import scala.scalajs.js._
import scala.scalajs.js.JSApp
import scala.scalajs.js.timers._
import scala.concurrent.Future
import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
import annotation.JSExportDescendentObjects
import scala.scalajs.js.UndefOr.any2undefOrA
import scala.scalajs.js.UndefOr.undefOr2ops
import scala.language._
import backend.dom.DOMBackend._;
import SVGAttributes._
import UIEvents._
import VNode._
import HTML5Attributes._
import Styles._
import events._
import Handler.Implicits._
import _root_.org.scalajs.{ dom => d }
import d.document
object Test extends JSApp {

  /**
   * Entry point. Each test suite runs only when its anchor element is
   * present in the host page, so a page can opt into any subset of tests.
   */
  def main(): Unit = {
    println("test of scala-vdom library")
    val vdom = document.getElementById("vdomtest")
    if (vdom != null)
      vdomTest()
    val delegate = document.getElementById("delegatetest")
    if (delegate != null)
      delegateTest()
    val todo = document.getElementById("todo")
    if (todo != null)
      todotest(todo)
  }

  // Id source for ToDo items; starts at -1 so the first generated id is 0.
  var _idcounter: Int = -1

  /** Returns the next unique todo id (mutates `_idcounter`). */
  def idcounter = {
    _idcounter += 1
    _idcounter
  }

  /** A single todo entry. */
  case class ToDo(id: Int, content: String)

  // Mutable in-memory "database" of todos, appended to by the keyup handler.
  val todoDb: collection.mutable.ListBuffer[ToDo] = collection.mutable.ListBuffer()

  // Previously rendered virtual tree; each re-render is diffed against it.
  var todoNode: VNode = empty

  /** Render todo vnode */
  def renderTodo(root: d.Node): VNode = {
    tag("div",
      tag("input", Seq(keyup ~~> { (e: d.Event, t: d.EventTarget) =>
        {
          val kevent = e.asInstanceOf[d.KeyboardEvent]
          // Commit the todo only when Enter is pressed with non-empty input.
          if (kevent.keyCode == d.ext.KeyCode.Enter) {
            println(s"$e, $t")
            val todoText = t.asInstanceOf[d.raw.HTMLInputElement].value
            if (todoText.length > 0) {
              todoDb += ToDo(idcounter, todoText)
              val previousTodoNode = todoNode
              todoNode = renderTodo(root)
              println("rendering new list")
              println(s"previous: $previousTodoNode\\nnew: $todoNode")
              // Diff old vs new virtual trees, then patch the real DOM at `root`.
              val p = diff(previousTodoNode, todoNode)
              //println(s"$p")
              val ioaction: IOAction[_] = p(root)
              //println(s"$ioaction\\n")
              run(ioaction)
            }
          }
        }
        true
      })),
      tag("ul",
        todoDb.map(i =>
          tag("li", text(i.content))): _*))
  }

  /**
   * Ubiquitous TODO application.
   */
  def todotest(root: d.Node) = {
    println("todo test")
    Seq(ToDo(idcounter, "Get up in the morning"), ToDo(idcounter, "Eat breakfast")).foreach(todoDb += _)
    todoNode = renderTodo(root)
    // Initial mount: diffing against the empty tree yields an insert patch.
    run(diff(empty, todoNode)(root))
  }

  /** Exercises the event Delegate machinery against several fixture elements. */
  def delegateTest() = {
    println("starting delegate test")
    val test1: UndefOr[d.Element] = document.getElementById("test1target")
    test1.foreach { el =>
      println("attaching test1target click handler")
      var test1counter = 0
      val del = Delegate()
      // add a handler
      del.on("click", (event, node) =>
        {
          println(s"test1target clicked $test1counter")
          test1counter += 1
          true
        })
      // attach it
      del.root(Some(el))
    }
    val test2: UndefOr[d.Element] = document.getElementById("test2target")
    var test2counter = 0
    test2.foreach { el =>
      println("attaching test2target mouse over handler")
      val del = Delegate(Some(el))
      del.on("mouseover", (event, node) => {
        println(s"test2target mouseover $test2counter")
        test2counter += 1
        true
      })
    }
    val test3a: UndefOr[d.Element] = document.getElementById("test3targeta")
    val test3b: UndefOr[d.Element] = document.getElementById("test3targetb")
    test3a.foreach { el =>
      println("attaching test3target resetting root")
      var test3counter = 0
      val del = Delegate()
      del.on("click", (event, node) => {
        println(s"test3target clicked $test3counter")
        test3counter += 1
        true
      })
      // Re-homing the delegate: attach to a, detach, then attach to b.
      del.root(Some(el))
      del.root(None)
      test3b.foreach { e => del.root(Some(e)) }
    }
    val test4child: UndefOr[d.Element] = document.getElementById("test4child")
    val test4parent: UndefOr[d.Element] = document.getElementById("test4parent")
    test4child.foreach { el =>
      require(test4parent.isDefined)
      var test4counter = 0
      println("attaching test4target - upward bubbling but with child id matcher")
      val parentd = new Delegate()
      parentd.on("mouseover", (event, node) => {
        println(s"event fired on test4parent due to match on child id $test4counter")
        test4counter += 1
        false
      }, Matcher.MatchId("test4child"))
      test4parent.foreach { p => parentd.root(Some(p)) }
    }
    val test5target: UndefOr[d.Element] = document.getElementById("test5target")
    val test5input: UndefOr[d.Element] = document.getElementById("test5input")
    test5target.foreach { el =>
      // NOTE(review): `test5input` is an UndefOr, which is never `null` even
      // when the element is missing; `test5input.isDefined` (as in test4) is
      // probably what was intended -- confirm.
      require(test5input != null)
      val vnodeB = tag("div", tag("p", text("Clicked!")))
      val vnodeA = tag("div", tag("button",
        Seq(click ~~> Handler { (e: d.Event) =>
          {
            // Create a replacement for this node when clicked, should force
            // cleanup actions to be called.
            val node = e.target
            test5input.foreach { pnode =>
              // cheat, run a remove patch for convenience
              run(RemovePatch.applyTo(Seq(0))(pnode))
              // then just add this by hand not using patch, just to confuse you!
              run(InsertPatch(vnodeB)(pnode))
            }
            true
          }
        }), text("Click Me!")))
      // Create vnodeA and append it to test5input
      // Add an artificial cleanup action to test cleanup actions being attached and called.
      test5input.foreach { pnode =>
        val newNode = run(InsertPatch(vnodeA)(pnode))
        newNode.foreach(addDetachAction(_, Action.lift { println("Cleanup occurred!") }))
      }
    }
  }

  /** Runs the patch/diff/render smoke tests against the test1..test11 fixtures. */
  def vdomTest() = {
    println("vdom test")
    val p1 = TextPatch("test1 succeeded: add text to existing element by replacing text")
    val target = document.getElementById("test1")
    require(target != null)
    run(p1(target))
    val p1_5 = TextPatch("test1.5 succeeded: add text to existing text element adding it directly")
    val target1_5 = document.getElementById("test1_5")
    require(target1_5 != null)
    run(p1_5(target1_5))
    val target2 = document.getElementById("test2")
    val p2 = InsertPatch(tag("p", text("test2 succeeded: a new child was inserted")))
    run(p2(target2))
    val target3 = document.getElementById("test3")
    val p3 = ReplacePatch(tag("div", tag("p", text("test3 succeeded: replace a child"))))
    run(p3(target3))
    val target4 = document.getElementById("test4")
    val p4 = InsertPatch(tag("div", Seq(cls := Some("surroundme2")),
      tag("div",
        tag("p", text("line 1")),
        tag("p", text("line 2")))))
    run(p4(target4))
    val target5 = document.getElementById("test5")
    val vdom5_a = tag("div", Seq(cls := Some("surroundme2")),
      tag("p", text("test 5 - if you see this the test failed")),
      tag("p", text("test 5 - if you see the test failed")))
    val vdom5_b = tag("div", Seq(cls := Some("surroundme2")),
      tag("p", text("success! test 5 new line 1")),
      tag("p", text("success! test 5 new line 2")))
    // Diff vdom5_a with the empty node, this should create an insert patch
    val patch5 = diff(empty, vdom5_a)
    val result5 = for {
      newChild <- run(patch5(target5))
      patch5_1 = diff(vdom5_a, vdom5_b)
      newChild2 <- run(patch5_1(newChild))
    } yield newChild2
    // Test patching via a one level path
    val target5aa = document.getElementById("test5a")
    val vdom5aa = tag("div", Seq(cls := Some("surroundme")), text("test5a success on path!"))
    val patch5aa = PathPatch(InsertPatch(vdom5aa), Nil)
    run(patch5aa(target5aa))
    // Test patching a child while using PathPatches.
    val target5ab = document.getElementById("test5b_parent")
    val vdom5ab = tag("div", Seq(cls := Some("surroundme")), text("test5b success on path!"))
    // Adds a path two different ways
    val patch5ab = RemovePatch.applyTo(Seq(0)) andThen PathPatch(InsertPatch(vdom5ab))
    run(patch5ab(target5ab))
    val target6 = document.getElementById("test6")
    val vdom6a = tag("div", "key1", Seq(id := "willbedropped", cls := "surroundme"),
      tag("p", text("test 6 line 1")),
      tag("p", text("test 6 line 2")))
    val vdom6b = tag("div", "key1", Seq(cls := Some("surroundme2")),
      tag("p", text("success! test 6 line 1")),
      tag("span", text("***")),
      tag("p", text("success! test 6 line 2")))
    // patch between two vdom trees
    val patch6b = diff(vdom6a, vdom6b)
    // create an IOAction to install the first vdom into the DOM
    val p6 = ReplacePatch(vdom6a)(target6)
    // Run the IOACtion to perform the DOM update
    // new6 holds the new node that was created in the ReplacePatch
    val new6 = run(p6)
    // Wait till the patch has run - we should always run in the "callback"
    // We could also sequence the runs (outputing futures) using a for comprehension
    // new6result now holds the DOM nodes for connected to vdom6b virtual nodes
    val new6result = new6.flatMap { newNode =>
      // don't worry about the future here, we are done with test6
      val action = patch6b(newNode)
      run(action)
    }
    //
    // Expanding box
    //
    val target7 = document.getElementById("test7")
    // Declarative description of the box at tick `count`: dimensions and
    // line-height grow with the counter.
    def box(count: Int) = {
      val s = Seq(textAlign := "center",
        lineHeight := s"${100 + count * 2}px",
        border := "1px solid red",
        width := s"${100 + 5 * count}px",
        height := s"${100 + 2 * count}px")
      tag("div", Some("box"), None, s, text(count.toString))
    }
    var count = 0
    var tree = box(count)
    var rootNode = run(render(tree))
    rootNode.foreach(target7.appendChild(_)) // manual append
    // Every second: re-describe the box, diff against the previous tree, patch.
    val cancel = timers.setInterval(1000) {
      count += 1
      val newTree = box(count)
      val patch = diff(tree, newTree)
      rootNode.foreach(n => run(patch(n)))
      tree = newTree
    }
    // Stop the animation after ten seconds.
    timers.setTimeout(10 seconds)(timers.clearInterval(cancel))
    //
    // Compare Test7 to the javascript version from virtual-dom.
    //
    // 1: Create a function that declares what the DOM should look like
    //function render(count) {
    //  return h('div', {
    //    style: {
    //      textAlign: 'center',
    //      lineHeight: (100 + count) + 'px',
    //      border: '1px solid red',
    //      width: (100 + count) + 'px',
    //      height: (100 + count) + 'px'
    //    }
    //  }, [String(count)]);
    //}
    //
    // 2: Initialise the document
    //var count = 0; // We need some app data. Here we just store a count.
    //
    //var tree = render(count); // We need an initial tree
    //var rootNode = createElement(tree); // Create an initial root DOM node ...
    //document.body.appendChild(rootNode); // ... and it should be in the document
    //
    // 3: Wire up the update logic
    //setInterval(function () {
    //  count++;
    //
    //  var newTree = render(count);
    //  var patches = diff(tree, newTree);
    //  rootNode = patch(rootNode, patches);
    //  tree = newTree;
    //}, 1000);
    //
    // Test 8 - SVG example
    //
    val target8 = document.getElementById("test8")
    val svg8 = VNode.svg(Seq(), tag("rect", None, Some(Constants.NS.SVG),
      Seq(x := "30", width := "40", height := "10",
        Styles.stroke := "#00cc00", Styles.fill := "#006600", width := "100", height := "20")))
    val new8 = run(InsertPatch(svg8)(target8))
    //
    // Test 9 - test Delegate.
    //
    val target9 = document.getElementById("test9button")
    val del = Delegate()
    del.on("click", (e: d.Event, t: d.EventTarget) => {
      println(s"click event capture: $e, $t")
      true
    }).delegate.root(Some(target9))
    //
    // Test 10 - button that has a callback which updates the vdom
    //
    // The flow looks messy because we are operating at the vdom layer
    // instead of a nice react-like layer. Just imagine that the vars are
    // part of a cool, higher-level component.
    //
    val target10 = document.getElementById("test10button")
    var count10: Int = 0
    var button10: Future[d.Node] = Future(null)
    var tree10: VNode = null
    // lazy so the handler can refer to itself when re-rendering.
    lazy val clickHandler: Handler = (e: d.Event) => {
      count10 += 1
      // re-create the virtual tree
      val newTree = create10(count10, clickHandler)
      // calculate patch with new virtual tree
      val patch = diff(tree10, newTree)
      // apply the patch against the current DOM button and update the real DOM button with the result
      button10 = button10.flatMap(b => run(patch(b)))
      // update the previous virtual tree with the new virtual tree
      tree10 = newTree
      true
    }
    def create10(count: Int, handler: Handler): VNode = tag("div", tag("button",
      // When clicked, re-render...
      Seq(click ~~> handler),
      text(s"Click Me - $count")), text(s"You have clicked the button $count10 times!"))
    // create the initial virtual button
    tree10 = create10(count10, clickHandler)
    // create the DOM button from the virtual button
    button10 = run(render(tree10))
    // add the new DOM button to the DOM tree so it displays
    button10.foreach(target10.appendChild(_))
    val target11 = document.getElementById("test11")
    val data = Seq("one", "two", "three", "four")
    def renderList(data: Seq[String]) = data.map(c => tag("li", text(c)))
    run(InsertPatch(tag("ul", renderList(data): _*))(target11))
    val rendertest1 = document.getElementById("rendertest1")
    val rt1vnode = tag("div",
      tag("p", text("Test creating markup from vnodes - you should see an unnumbered list with 3 items")),
      tag("ul",
        tag("li", tag("i", text("bullet 1"))),
        tag("li", tag("em", text("bullet 2"))),
        tag("li", tag("b", text("bullet 3")))))
    run(InsertPatch(rt1vnode)(rendertest1))
  }
}
| aappddeevv/scala-vdom | js/src/main/scala/im/vdom/Test.scala | Scala | apache-2.0 | 14,716 |
/*
* Copyright (C) 2016 University of Basel, Graphics and Vision Research Group
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package scalismo.ui.rendering.actor
import scalismo.ui.model.capabilities.Transformable
import scalismo.ui.model.properties.{ColorProperty, LineWidthProperty, NodeProperty, OpacityProperty}
import scalismo.ui.model.{BoundingBox, PointCloudNode}
import scalismo.ui.rendering.actor.mixin.{ActorColor, ActorLineWidth, ActorOpacity, ActorSceneNode}
import scalismo.ui.rendering.util.VtkUtil
import scalismo.ui.view.{ViewportPanel, ViewportPanel2D, ViewportPanel3D}
import vtk.{vtkGlyph3D, vtkPoints, vtkPolyData, vtkSphereSource}
object PointCloudActor extends SimpleActorsFactory[PointCloudNode] {

  /**
   * Builds the viewport-appropriate actor for a point cloud node: a plain 3D
   * actor for 3D viewports, a slicing actor for 2D viewports.
   */
  override def actorsFor(renderable: PointCloudNode, viewport: ViewportPanel): Option[Actors] =
    viewport match {
      case _: ViewportPanel3D => Some(new PointCloudActor3D(renderable))
      case panel2D: ViewportPanel2D => Some(new PointCloudActor2D(renderable, panel2D))
    }
}
trait PointCloudActor extends SingleDataSetActor with ActorOpacity with ActorColor with ActorSceneNode {
  override def sceneNode: PointCloudNode

  override def opacity: OpacityProperty = sceneNode.opacity

  override def color: ColorProperty = sceneNode.color

  // Hook for subclasses to wire `glyph` into their mapper pipeline; invoked
  // once at the end of this trait's initializer, before the first rerender.
  protected def onInstantiated(): Unit

  // Single sphere template, instanced at every point by the glyph filter.
  lazy val sphere = new vtkSphereSource

  // Rebuilds the vtk point set from the node's transformed source points.
  private def transformedPoints: vtkPoints = new vtkPoints {
    sceneNode.transformedSource.foreach { point =>
      InsertNextPoint(point(0), point(1), point(2))
    }
  }

  lazy val polydata = new vtkPolyData

  // Glyph pipeline: place a copy of `sphere` at every point of `polydata`.
  protected lazy val glyph: vtkGlyph3D = new vtkGlyph3D {
    SetSourceConnection(sphere.GetOutputPort)
    SetInputData(polydata)
  }

  /**
   * Refreshes the vtk pipeline. The point set is recomputed only when the
   * geometry actually changed; the sphere radius is re-applied every time.
   */
  def rerender(geometryChanged: Boolean): Unit = {
    if (geometryChanged) {
      polydata.SetPoints(transformedPoints)
    }
    sphere.SetRadius(sceneNode.radius.value)
    mapper.Modified()
    actorChanged(geometryChanged)
  }

  // Re-render whenever the node's geometry or its radius property changes.
  listenTo(sceneNode, sceneNode.radius)

  reactions += {
    case Transformable.event.GeometryChanged(_) => rerender(true)
    case NodeProperty.event.PropertyChanged(p) if p eq sceneNode.radius => rerender(true)
  }

  onInstantiated()

  rerender(true)
}
class PointCloudActor2D(override val sceneNode: PointCloudNode, viewport: ViewportPanel2D)
    extends SlicingActor(viewport)
    with PointCloudActor
    with ActorLineWidth {
  override def lineWidth: LineWidthProperty = sceneNode.lineWidth

  // Moving the slicing plane does not change the underlying geometry.
  override protected def onSlicingPositionChanged(): Unit = rerender(false)

  // Route the glyph output through the viewport's cutting plane.
  override protected def onInstantiated(): Unit = {
    planeCutter.SetInputConnection(glyph.GetOutputPort())
  }

  override protected def sourceBoundingBox: BoundingBox = VtkUtil.bounds2BoundingBox(polydata.GetBounds())
}
class PointCloudActor3D(override val sceneNode: PointCloudNode) extends PointCloudActor {
  // Render the glyph output directly; no slicing in 3D viewports.
  override protected def onInstantiated(): Unit = {
    mapper.SetInputConnection(glyph.GetOutputPort)
  }
}
| unibas-gravis/scalismo-ui | src/main/scala/scalismo/ui/rendering/actor/PointCloudActor.scala | Scala | gpl-3.0 | 3,579 |
package ohnosequences.metapasta.instructions
import ohnosequences.nisperon._
import org.clapper.avsl.Logger
import java.io.{File, PrintWriter}
import scala.collection.mutable.ListBuffer
import ohnosequences.nisperon.logging.S3Logger
import ohnosequences.metapasta._
import ohnosequences.metapasta.databases.{DatabaseFactory, InMemoryGIMapperFactory, BlastDatabase16S}
import ohnosequences.nisperon.bundles.NisperonMetadataBuilder
import ohnosequences.metapasta.MergedSampleChunk
import ohnosequences.parsers.S3ChunksReader
import ohnosequences.formats.RawHeader
import ohnosequences.formats.FASTQ
import ohnosequences.metapasta.AssignTable
import ohnosequences.awstools.s3.ObjectAddress
/**
 * Nisperon map step that runs BLAST (megablast) over merged FASTQ sample
 * chunks and turns the tabular hits into taxonomic assignment tables plus
 * per-sample read statistics.
 */
class BlastInstructions(
  aws: AWS,
  metadataBuilder: NisperonMetadataBuilder, //for bio4j
  assignmentConfiguration: AssignmentConfiguration,
  databaseFactory: DatabaseFactory[BlastDatabase16S],
  blastCommandTemplate: String = """blastn -task megablast -db $db$ -query $input$ -out $output$ -max_target_seqs 1 -num_threads 1 -outfmt 6 -show_gis""",
  useXML: Boolean,
  logging: Boolean,
  resultDirectory: ObjectAddress,
  readsDirectory: ObjectAddress
  ) extends
  MapInstructions[List[MergedSampleChunk], (AssignTable, Map[(String, AssignmentType), ReadsStats])] {

  // Per-worker state built once in prepare() and reused for every chunk.
  case class BlastContext(nodeRetriever: NodeRetriever, database: BlastDatabase16S, blast: Blast, assigner: Assigner)

  override type Context = BlastContext

  val logger = Logger(this.getClass)

  //todo think about this space
  // Strips the FASTQ '@' marker and keeps the id up to the first whitespace.
  def extractHeader(s: String) = s.replace("@", "").split("\\\\s")(0)

  /** Downloads/builds the BLAST database, GI mapper, and assigner once per worker. */
  override def prepare() = {
    val lm = aws.s3.createLoadingManager()
    val nodeRetreiver = new BundleNodeRetrieverFactory().build(metadataBuilder)
    val blastDatabase = databaseFactory.build(lm)
    val blast = new BlastFactory().build(lm)
    val giMapper = new InMemoryGIMapperFactory().build(lm)
    val fastasWriter = new FastasWriter(aws, readsDirectory, nodeRetreiver)
    val assigner = new Assigner(new Bio4JTaxonomyTree(nodeRetreiver), blastDatabase, giMapper, assignmentConfiguration, extractHeader, Some(fastasWriter))
    BlastContext(nodeRetreiver, blastDatabase, blast, assigner)
  }

  /**
   * Processes one chunk: parses the FASTQ range from S3, writes it as FASTA,
   * shells out to BLAST, parses the tabular hits, and delegates taxonomic
   * assignment to the context's assigner.
   *
   * @throws Error when the BLAST process exits with a non-zero code
   */
  def apply(input: List[MergedSampleChunk], s3logger: S3Logger, context: BlastContext): (AssignTable, Map[(String, AssignmentType), ReadsStats]) = {
    import context._

    //todo fix this head
    val chunk = input.head

    //parsing
    val reader = S3ChunksReader(aws.s3, chunk.fastq)
    val parsed: List[FASTQ[RawHeader]] = reader.parseChunk[RawHeader](chunk.range._1, chunk.range._2)._1

    val inputFile = new File("reads.fasta")
    val outputFile = new File("out.blast")

    logger.info("saving reads to " + inputFile.getPath)
    val writer = new PrintWriter(inputFile)
    // Track whether the chunk contained at least one non-blank FASTA record,
    // so that BLAST is not invoked on an empty query file.
    var emptyInput = true
    parsed.foreach { fastq =>
      val s = fastq.toFasta
      if(emptyInput && !s.trim.isEmpty) {
        emptyInput = false
      }
      writer.println(s)
    }
    writer.close()

    val startTime = System.currentTimeMillis()
    val code = if(emptyInput) {
      logger.warn("empty chunk.. skipping mapping")
      // Produce an empty result file and pretend BLAST exited cleanly.
      val pw = new PrintWriter(outputFile)
      pw.println("")
      pw.close()
      0
    } else {
      context.blast.launch(blastCommandTemplate, context.database, inputFile, outputFile, useXML)
    }
    val endTime = System.currentTimeMillis()
    logger.info("blast: " + (endTime - startTime + 0.0) / parsed.size + " ms per read")

    if(code != 0) {
      //todo to do something with error message
      throw new Error("BLAST finished with error code " + code)
    }

    if(logging) {
      // Keep the raw BLAST output around for debugging.
      aws.s3.putObject(S3Paths.blastOut(resultDirectory, ChunkId(chunk)), outputFile)
      // s3logger.uploadFile(outputFile)
    }

    logger.info("reading BLAST result")
    //todo add xml parser
    val resultRaw = if (useXML) "" else Utils.readFile(outputFile)

    val t1 = System.currentTimeMillis()

    //blast 12 fields
    //25 gi|339283447|gb|JF799642.1| 100.00 399 0 0 1 399 558 956 0.0 737
    //M02255:17:000000000-A8J9J:1:2104:18025:8547 gi|291331518|gb|GU958050.1| 88.31 77 3 6 1 74 506 579 1e-15 87.9
    //val blastHit = """\\s*([^\\s]+)\\s*([^\\s]+)\\s*([^\\s]+)\\s*([^\\s]+)\\s*([^\\s]+)\\s*([^\\s]+)\\s*([^\\s]+)\\s*([^\\s]+)\\s*([^\\s]+)\\s*([^\\s]+)\\s*([^\\s]+)\\s*(\\d+)$""".r
    // Loose parse of a tabular hit line: first field = query id,
    // second field = subject reference, last field = bit score.
    val blastHit = """^\\s*([^\\s]+)\\s+([^\\s]+).*?([^\\s]+)\\s*$""".r
    val comment = """#(.*)""".r
    val hits = new ListBuffer[Hit]()
    resultRaw.linesIterator.foreach {
      case comment(c) => //logger.info("skipping comment: " + c)
      case blastHit(header, refId, _score) => {
        val readId = extractHeader(header)
        val score = Utils.parseDouble(_score)
        hits += Hit(ReadId(readId), RefId(refId), score)
      }
      case l => logger.error("can't parse: " + l)
    }
    val t2 = System.currentTimeMillis()
    logger.info("parsed " + hits.size + " hits " + (t2 - t1) + " ms")
    assigner.assign(s3logger, ChunkId(chunk), parsed, hits.toList)
    //result.toList
  }
}
| INTERCROSSING/metapasta | src/main/scala/ohnosequences/metapasta/instructions/BlastInstructions.scala | Scala | agpl-3.0 | 5,252 |
package org.sisioh.aws4s.cfn.model
import com.amazonaws.services.cloudformation.model.{ DescribeStackResourcesResult, StackResource }
import org.sisioh.aws4s.PimpedType
import scala.collection.JavaConverters._
object DescribeStackResourcesResultFactory {

  /** Creates an empty [[DescribeStackResourcesResult]]. */
  def create(): DescribeStackResourcesResult =
    new DescribeStackResourcesResult()
}
/**
 * Scala-friendly wrapper around the AWS SDK's [[DescribeStackResourcesResult]],
 * exposing the stack-resource list as an immutable Scala `Seq`.
 */
class RichDescribeStackResourcesResult(val underlying: DescribeStackResourcesResult)
    extends AnyVal
    with PimpedType[DescribeStackResourcesResult] {

  /** Returns the wrapped result's stack resources as an immutable sequence. */
  def stackResources: Seq[StackResource] = {
    val javaResources = underlying.getStackResources
    javaResources.asScala.toVector
  }

  /** Replaces the wrapped result's stack resources (mutates `underlying`). */
  def stackResources_=(value: Seq[StackResource]): Unit = {
    underlying.setStackResources(value.asJava)
  }

  /** Fluent variant: sets the resources and returns the underlying result. */
  def withStackResources(value: Seq[StackResource]): DescribeStackResourcesResult = {
    underlying.withStackResources(value.asJava)
  }
}
| sisioh/aws4s | aws4s-cfn/src/main/scala/org/sisioh/aws4s/cfn/model/RichDescribeStackResourcesResult.scala | Scala | mit | 841 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.workflow
import org.apache.predictionio.controller.EngineParams
import org.apache.predictionio.controller.Evaluation
import org.apache.predictionio.core.BaseEngine
import org.apache.predictionio.core.BaseEvaluator
import org.apache.predictionio.core.BaseEvaluatorResult
import org.apache.predictionio.data.storage.EngineInstance
import org.apache.predictionio.data.storage.EvaluationInstance
import org.apache.predictionio.data.storage.Model
import org.apache.predictionio.data.storage.Storage
import com.github.nscala_time.time.Imports.DateTime
import grizzled.slf4j.Logger
import scala.language.existentials
/** CoreWorkflow handles PredictionIO metadata and environment variables of
* training and evaluation.
*/
object CoreWorkflow {
  @transient lazy val logger = Logger[this.type]
  @transient lazy val engineInstances = Storage.getMetaDataEngineInstances
  @transient lazy val evaluationInstances =
    Storage.getMetaDataEvaluationInstances()

  /** Trains an engine with the given parameters, persists the resulting
    * models, and marks the engine instance COMPLETED in the metadata store.
    * The SparkContext is always stopped, even when training is interrupted.
    *
    * @param engine the engine to train
    * @param engineParams parameters controlling this training run
    * @param engineInstance metadata record for this training run
    * @param env environment variables forwarded to the workflow context
    * @param params workflow-level options (batch label, Spark settings, ...)
    */
  def runTrain[EI, Q, P, A](
    engine: BaseEngine[EI, Q, P, A],
    engineParams: EngineParams,
    engineInstance: EngineInstance,
    env: Map[String, String] = WorkflowUtils.pioEnvVars,
    params: WorkflowParams = WorkflowParams()) {
    logger.debug("Starting SparkContext")
    val mode = "training"

    // BUG FIX: was s"{engineInstance.engineFactory} (${params.batch}})" --
    // the first interpolation was missing its '$' (so the literal text
    // "{engineInstance.engineFactory}" appeared in the batch label) and a
    // stray '}' followed the batch name.
    val batch = if (params.batch.nonEmpty) {
      s"${engineInstance.engineFactory} (${params.batch})"
    } else {
      engineInstance.engineFactory
    }
    val sc = WorkflowContext(
      batch,
      env,
      params.sparkEnv,
      mode.capitalize)

    try {
      val models: Seq[Any] = engine.train(
        sc = sc,
        engineParams = engineParams,
        engineInstanceId = engineInstance.id,
        params = params
      )
      val kryo = KryoInstantiator.newKryoInjection

      logger.info("Inserting persistent model")
      Storage.getModelDataModels.insert(Model(
        id = engineInstance.id,
        models = kryo(models)))

      logger.info("Updating engine instance")
      // Use the object-level lazy accessor; the original fetched the DAO
      // twice here (once into an unused `instanceId` local, once into a
      // shadowing `engineInstances` local).
      engineInstances.update(engineInstance.copy(
        status = "COMPLETED",
        endTime = DateTime.now
      ))

      logger.info("Training completed successfully.")
    } catch {
      case e @(
          _: StopAfterReadInterruption |
          _: StopAfterPrepareInterruption) => {
        logger.info(s"Training interrupted by $e.")
      }
    } finally {
      logger.debug("Stopping SparkContext")
      CleanupFunctions.run()
      sc.stop()
    }
  }

  /** Evaluates an engine over a list of engine parameters and stores the
    * evaluator result in the metadata store, unless the result opts out via
    * `noSave`. The SparkContext is always stopped on exit.
    */
  def runEvaluation[EI, Q, P, A, R <: BaseEvaluatorResult](
    evaluation: Evaluation,
    engine: BaseEngine[EI, Q, P, A],
    engineParamsList: Seq[EngineParams],
    evaluationInstance: EvaluationInstance,
    evaluator: BaseEvaluator[EI, Q, P, A, R],
    env: Map[String, String] = WorkflowUtils.pioEnvVars,
    params: WorkflowParams = WorkflowParams()) {
    logger.info("runEvaluation started")
    logger.debug("Start SparkContext")
    val mode = "evaluation"

    // BUG FIX: same interpolation defect as in runTrain (missing '$' on the
    // first interpolation and a stray '}').
    val batch = if (params.batch.nonEmpty) {
      s"${evaluation.getClass.getName} (${params.batch})"
    } else {
      evaluation.getClass.getName
    }
    val sc = WorkflowContext(
      batch,
      env,
      params.sparkEnv,
      mode.capitalize)
    try {
      val evaluationInstanceId = evaluationInstances.insert(evaluationInstance)
      logger.info(s"Starting evaluation instance ID: $evaluationInstanceId")
      val evaluatorResult: BaseEvaluatorResult = EvaluationWorkflow.runEvaluation(
        sc,
        evaluation,
        engine,
        engineParamsList,
        evaluator,
        params)

      if (evaluatorResult.noSave) {
        logger.info(s"This evaluation result is not inserted into database: $evaluatorResult")
      } else {
        val evaluatedEvaluationInstance = evaluationInstance.copy(
          status = "EVALCOMPLETED",
          id = evaluationInstanceId,
          endTime = DateTime.now,
          evaluatorResults = evaluatorResult.toOneLiner,
          evaluatorResultsHTML = evaluatorResult.toHTML,
          evaluatorResultsJSON = evaluatorResult.toJSON
        )
        logger.info(s"Updating evaluation instance with result: $evaluatorResult")
        evaluationInstances.update(evaluatedEvaluationInstance)
      }
      logger.info("runEvaluation completed")
    } finally {
      logger.debug("Stop SparkContext")
      CleanupFunctions.run()
      sc.stop()
    }
  }
}
| PredictionIO/PredictionIO | core/src/main/scala/org/apache/predictionio/workflow/CoreWorkflow.scala | Scala | apache-2.0 | 5,335 |
package scala.offheap
package internal
package macros
import scala.collection.mutable
import scala.reflect.macros.blackbox
/* This class essentially defines the opt(allocator, block) macro.
* That will optimised the operations with DenseMatrices present in block.
*
* The optimisation is done in several stages:
* 1. Common subexpression elimination
* 2. Strength reduction
* 3. Conversion to static single assignment form
* 4. Scheduling (producing a scala tree)
*/
class NumericMethod(val c: blackbox.Context) extends Common {
import c.universe._
/* There are multiple Op classes, one for each stage of the optimisation.
* While this slightly increase the verbosity of the code it has the huge
* benefit that every match statement bellow as a meaningful way to handle
* every case's. Thus "case _ =>" is avoided entirely and the compiler will
* always produce usefull warnings.
*
* Thus if someone wants to add support for another operation, he simply add
* the relevent subclasses of Op, compile and then add the code to support
* the new operation wherever the compiler emmits warning.
* */
object Unoptimized {
  /* Operations recognized in the block passed to the opt() macro. */
  sealed abstract class Op
  // A scalar (non-matrix) expression, kept as an opaque tree.
  final case class Const(tree: Tree) extends Op
  // A reference to an existing DenseMatrix value.
  final case class Mat(tree: Tree) extends Op
  // Matrix-matrix product.
  final case class Mat_*(left: Op, right: Op) extends Op
  // Matrix-matrix sum.
  final case class Mat_+(left: Op, right: Op) extends Op
  // Matrix transpose.
  final case class Mat_T(matrix: Op) extends Op
  // Scalar-matrix product; the scalar operand is always a Const.
  final case class Scalar_*(left: Const, right: Op) extends Op
  // Construction of a fresh matrix from a literal tree.
  final case class NewMat(tree: Tree) extends Op
}
object CSE {
  /* After common subexpression elimination these operations are obtained. */
  sealed abstract class Op
  // Reference to a shared subexpression bound under `name` in Result.named.
  final case class Ref(name: TermName) extends Op
  final case class Const(tree: Tree) extends Op
  final case class Mat(tree: Tree) extends Op
  final case class Mat_*(left: Op, right: Op) extends Op
  final case class Mat_+(left: Op, right: Op) extends Op
  final case class Mat_T(matrix: Op) extends Op
  final case class Scalar_*(left: Const, right: Op) extends Op
  final case class NewMat(tree: Tree) extends Op
  // Root expression plus the named shared subexpressions it refers to.
  case class Result(root: Op, named: Map[TermName, Op])
}
object SR {
  /* After strength reduction these operations are obtained. */
  sealed abstract class Op
  final case class Ref(name: TermName) extends Op
  final case class Const(tree: Tree) extends Op
  final case class Mat(tree: Tree) extends Op
  final case class Mat_*(left: Op, right: Op) extends Op
  final case class Mat_+(left: Op, right: Op) extends Op
  final case class Mat_T(matrix: Op) extends Op
  final case class Scalar_*(left: Const, right: Op) extends Op
  // C = alpha * A? * B? + beta * C
  // Fused multiply-accumulate; `at`/`bt` are the transpose flags for A and B.
  final case class DGEMM(alpha: Const, beta: Const, a: Op, at: Const, b: Op, bt: Const, c: Op) extends Op
  // DGEMM variant with no accumulation target.
  final case class DGEMM0(alpha: Const, a: Op, at: Const, b: Op, bt: Const) extends Op
  final case class NewMat(tree: Tree) extends Op
  case class Result(root: Op, named: Map[TermName, Op])

  // Ref and Const survive strength reduction unchanged, so allow implicit
  // lifting from the CSE representation.
  implicit def refFromCSE(r: CSE.Ref): SR.Ref = {
    r match {
      case CSE.Ref(name) => Ref(name)
    }
  }
  implicit def constFromCSE(c: CSE.Const): SR.Const = {
    c match {
      case CSE.Const(tree) => Const(tree)
    }
  }
}
object SSA {
  /* The operations in SSA form. */
  sealed abstract class Op
  // Binds the result of `op` to a fresh name; operands below refer to names.
  final case class Assign(name: TermName, op: Op) extends Op
  final case class Const(tree: Tree) extends Op
  final case class Mat(tree: Tree) extends Op
  final case class Mat_*(left: TermName, right: TermName) extends Op
  final case class Mat_+(left: TermName, right: TermName) extends Op
  final case class Mat_T(matrix: TermName) extends Op
  final case class Scalar_*(left: TermName, right: TermName) extends Op
  final case class DGEMM(alpha: Const, beta: Const, a: TermName, at: Const, b: TermName, bt: Const, c: TermName) extends Op
  final case class DGEMM0(alpha: Const, a: TermName, at: Const, b: TermName, bt: Const) extends Op
  final case class NewMat(tree: Tree) extends Op
  // Ordered list of operations produced by the SSA conversion.
  final case class Result(ops: List[Op])
}
// Handle to the DenseMatrix class, resolved from the macro universe's mirror.
val DenseMatrixClass = rootMirror.staticClass("scala.offheap.numeric.DenseMatrix")
// True when `tpe` is (a subtype of) DenseMatrix.
def isMatrix(tpe: Type): Boolean = tpe <:< DenseMatrixClass.toType
// True when `tpe` is (a subtype of) Allocator.
def isAllocator(tpe: Type): Boolean = tpe <:< AllocatorClass.toType
/**
 * Narrows `op` to a scalar constant.
 *
 * @throws RuntimeException when `op` is any other kind of operation
 */
def ensureConst(op: Unoptimized.Op): Unoptimized.Const =
  op match {
    case const: Unoptimized.Const => const
    case other => throw new RuntimeException("Const expected but got " + other.getClass)
  }
/**
 * Translates the block passed to opt() into the Unoptimized.Op tree.
 * Local `val`s in the block are inlined by following their bindings, so the
 * resulting Op describes only the final expression of the block.
 *
 * NOTE(review): the two debug printlns in `loop` fire for every subtree at
 * compile time; consider removing them before release.
 */
def toOp(tree: Tree): Unoptimized.Op = {
  import Unoptimized._
  // Split the block into its leading statements and final expression.
  val q"{ ..$init; $last }" = tree
  // Map each local val's symbol to its right-hand side so references can be inlined.
  val bindings: Map[Symbol, Tree] = init.map {
    //case vd @ q"val $_: $_ = $rhs" =>
    case vd @ ValDef(_, _, _, rhs) =>
      (vd.symbol, rhs)
  }.toMap
  def loop(expr: Tree): Op = {
    println("loop: " + showCode(expr))
    println("loop: " + showRaw(expr))
    // FIXME: Quasiquotes in pattern appears to be unrelliable: sometimes they don't
    // match as expected. Since I'm unable to reproduce the problem consistently I
    // replaced them by the equivalent scala tree.
    expr match {
      // Reference to a val defined in the block: inline its definition.
      case id: RefTree if bindings.contains(id.symbol) =>
        loop(bindings(id.symbol))
      case id: RefTree =>
        if (isMatrix(id.tpe)) Mat(id)
        else throw new Exception("Reference to type unsuported by opt() macro")
      //case q"scala.offheap.numeric.`package`.Double2DenseMatrixRichDouble($v)" => {
      case Apply(Select(Select(Select(Select(Ident(TermName("scala")), TermName("offheap")), TermName("numeric")), termNames.PACKAGE), TermName("Double2DenseMatrixRichDouble")), List(v)) => {
        Const(v)
      }
      //case q"$a.+($b)(alloc)" => {
      case Apply(Apply(Select(a, TermName("$plus")), List(b)), List(alloc)) => {
        (isMatrix(a.tpe), isMatrix(b.tpe)) match {
          case (true, true) => Mat_+(loop(a), loop(b))
          case _ => throw new Exception("Unsupported types for + in opt() block")
        }
      }
      //case q"$a.*($b)(alloc)" => {
      case Apply(Apply(Select(a, TermName("$times")), List(b)), List(alloc)) => {
        // Dispatch on which operands are matrices: matrix*matrix,
        // scalar*matrix (either order), or a purely scalar product.
        (isMatrix(a.tpe), isMatrix(b.tpe)) match {
          case (true, true) => Mat_*(loop(a), loop(b))
          case (false, true) =>
            Scalar_*(ensureConst(loop(a)), loop(b))
          case (true, false) =>
            Scalar_*(ensureConst(loop(b)), loop(a))
          case (false, false) => Const(expr)
        }
      }
      //case q"$a.t()(alloc)" => {
      case Apply(Apply(Select(a, TermName("t")), List()), List(alloc)) => {
        Mat_T(loop(a))
      }
      // case q"scala.offheap.numeric.DenseMatrix.apply(tree)(alloc)"
      case Apply(Apply(Select(Select(Select(Select(Ident(TermName("scala")), TermName("offheap")), TermName("numeric")), TermName("DenseMatrix")), TermName("apply")), List(tree)), List(alloc)) =>
        // FIXME: It should be an error to use a val defined in the opt block
        // in tree.
        NewMat(tree)
      case c: Literal =>
        Const(c)
      case e =>
        throw new Exception("unknown expr: " + showRaw(e))
    }
  }
  loop(last)
}
// " // <- fixes syntax highlighting issues with quasiquotes
/* Performs common subexpression elimination */
/** Common subexpression elimination: repeated sub-expressions of `op` are
  * hoisted into fresh names; the result is the rewritten primary expression
  * plus a map of name -> hoisted expression.
  * NOTE(review): only the primary expression is re-scanned each round, so
  * repeats that exist solely inside already-hoisted expressions are not
  * further factored — confirm this is intended. */
def cse(op: Unoptimized.Op): CSE.Result = {
import CSE._
// Structural translation Unoptimized -> CSE (isomorphic at this stage;
// CSE merely adds the Ref node introduced by hoisting).
def toCSEops(op: Unoptimized.Op): Op = {
op match {
case Unoptimized.Const(tree) => Const(tree)
case Unoptimized.Mat(tree) => Mat(tree)
case Unoptimized.NewMat(tree) => NewMat(tree)
case Unoptimized.Mat_*(left, right) => Mat_*(toCSEops(left), toCSEops(right))
case Unoptimized.Mat_+(left, right) => Mat_+(toCSEops(left), toCSEops(right))
case Unoptimized.Mat_T(matrix) => Mat_T(toCSEops(matrix))
case Unoptimized.Scalar_*(Unoptimized.Const(left), right) =>
Scalar_*(Const(left), toCSEops(right))
}
}
/** Return the largest common subexpression */
def find(op: Op): Option[Op] = {
val seenOp: mutable.Set[Op] = mutable.Set.empty[Op]
// Leaves (refs, matrices, constants) are cheap and never worth hoisting,
// so they are deliberately not recorded as "seen".
def markSeen(op: Op): Unit = {
op match {
case _: Ref => {}
case _: Mat => {}
case _: Const => {}
case _ => seenOp += op
}
}
// Pre-order walk: parents are visited before children, so the first node
// encountered twice is a largest repeated subexpression.
def loop(op: Op): Option[Op] = {
if (seenOp.contains(op)) Some(op)
else {
markSeen(op)
op match {
case o: Ref => None
case c: Const => None
case m: Mat => None
case m: NewMat => None
case Mat_*(a, b) => loop(a).orElse(loop(b))
case Mat_+(a, b) => loop(a).orElse(loop(b))
case Mat_T(a) => loop(a)
case Scalar_*(a, b) => loop(a).orElse(loop(b))
}
}
}
loop(op)
}
/** Replace all occurrences of op in tree by a reference to name.
* @return the modified tree. */
def replace(tree: Op, op: Op, name: TermName): Op = {
def loop(tree: Op): Op = {
if (tree == op) Ref(name)
else tree match {
case r: Ref => r
case c: Const => c
case m: Mat => m
case m: NewMat => m
case Mat_*(left, right) => Mat_*(loop(left), loop(right))
case Mat_+(left, right) => Mat_+(loop(left), loop(right))
case Mat_T(matrix) => Mat_T(loop(matrix))
case Scalar_*(left, right) => Scalar_*(left, loop(right))
}
}
loop(tree)
}
// Fixed point: each round hoists one repeated subexpression into a fresh
// name and rewrites every pending expression to reference it.
def loop(primary: Op, others: Map[TermName, Op]): Result = {
val cs = find(primary)
cs match {
case None => Result(primary, others)
case Some(cs) =>
println("cs found: " + cs)
val name = fresh("cse")
val newPrimary = replace(primary, cs, name)
val newOthers = others.mapValues(v => replace(v, cs, name))
loop(newPrimary, newOthers + ((name, cs)))
}
}
loop(toCSEops(op), Map.empty[TermName, Op])
}
/** Performs strength reduction */
/** Strength reduction: rewrites expression shapes of the form
  * alpha * op(A) * op(B) + beta * C (and its many orderings/sub-cases)
  * into single DGEMM/DGEMM0 nodes. Patterns are tried most-specific first;
  * anything unmatched falls through to a node-by-node translation at the
  * bottom of the match.
  * NOTE(review): with `import CSE._` in scope, the unqualified `Const(...)`
  * arguments below build CSE.Const values that only become SR.Const through
  * the implicit `constFromCSE` conversion; the explicit `SR.Const(...)` in
  * other cases is equivalent — consider normalising for readability. */
def sr(cseResult: CSE.Result): SR.Result = {
import CSE._
def sr0(op: Op): SR.Op = {
op match {
// (v * A') * B' + w * C
case Mat_+(Mat_*(Scalar_*(v, Mat_T(a)), Mat_T(b)), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"true"), sr0(b), SR.Const(q"true"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(Scalar_*(v, Mat_T(a)), Mat_T(b))) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"true"), sr0(b), SR.Const(q"true"), sr0(c))
// (v * A') * B + w * C
case Mat_+(Mat_*(Scalar_*(v, Mat_T(a)), b), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"true"), sr0(b), SR.Const(q"false"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(Scalar_*(v, Mat_T(a)), b)) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"true"), sr0(b), SR.Const(q"false"), sr0(c))
// (v * A) * B' + w * C
case Mat_+(Mat_*(Scalar_*(v, a), Mat_T(b)), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"false"), sr0(b), SR.Const(q"true"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(Scalar_*(v, a), Mat_T(b))) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"false"), sr0(b), SR.Const(q"true"), sr0(c))
// (v * A) * B + w * C
case Mat_+(Mat_*(Scalar_*(v, a), b), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"false"), sr0(b), SR.Const(q"false"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(Scalar_*(v, a), b)) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"false"), sr0(b), SR.Const(q"false"), sr0(c))
// A' * (v * B') + w * C
case Mat_+(Mat_*(Mat_T(a), Scalar_*(v, Mat_T(b))), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"true"), sr0(b), SR.Const(q"true"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(Mat_T(a), Scalar_*(v, Mat_T(b)))) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"true"), sr0(b), SR.Const(q"true"), sr0(c))
// A' * (v * B) + w * C
case Mat_+(Mat_*(Mat_T(a), Scalar_*(v, b)), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), Const(q"true"), sr0(b), Const(q"false"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(Mat_T(a), Scalar_*(v, b))) =>
SR.DGEMM(v, w, sr0(a), Const(q"true"), sr0(b), Const(q"false"), sr0(c))
// A * (v * B') + w * C
case Mat_+(Mat_*(a, Scalar_*(v, Mat_T(b))), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), Const(q"false"), sr0(b), Const(q"true"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(a, Scalar_*(v, Mat_T(b)))) =>
SR.DGEMM(v, w, sr0(a), Const(q"false"), sr0(b), Const(q"true"), sr0(c))
// A * (v * B) + w * C
case Mat_+(Mat_*(a, Scalar_*(v, b)), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), Const(q"false"), sr0(b), Const(q"false"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(a, Scalar_*(v, b))) =>
SR.DGEMM(v, w, sr0(a), Const(q"false"), sr0(b), Const(q"false"), sr0(c))
// v * (A' * B') + w * C
case Mat_+(Scalar_*(v, Mat_*(Mat_T(a), Mat_T(b))), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"true"), sr0(b), SR.Const(q"true"), sr0(c))
case Mat_+(Scalar_*(w, c), Scalar_*(v, Mat_*(Mat_T(a), Mat_T(b)))) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"true"), sr0(b), SR.Const(q"true"), sr0(c))
// v * (A' * B) + w * C
case Mat_+(Scalar_*(v, Mat_*(Mat_T(a), b)), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"true"), sr0(b), SR.Const(q"false"), sr0(c))
case Mat_+(Scalar_*(w, c), Scalar_*(v, Mat_*(Mat_T(a), b))) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"true"), sr0(b), SR.Const(q"false"), sr0(c))
// v * (A * B') + w * C)
case Mat_+(Scalar_*(v, Mat_*(a, Mat_T(b))), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"false"), sr0(b), SR.Const(q"true"), sr0(c))
case Mat_+(Scalar_*(w, c), Scalar_*(v, Mat_*(a, Mat_T(b)))) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"false"), sr0(b), SR.Const(q"true"), sr0(c))
// v * (A * B) + w * C
case Mat_+(Scalar_*(v, Mat_*(a, b)), Scalar_*(w, c)) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"false"), sr0(b), SR.Const(q"false"), sr0(c))
case Mat_+(Scalar_*(w, c), Scalar_*(v, Mat_*(a, b))) =>
SR.DGEMM(v, w, sr0(a), SR.Const(q"false"), sr0(b), SR.Const(q"false"), sr0(c))
// (v * A') * B' + C
case Mat_+(Mat_*(Scalar_*(v, Mat_T(a)), Mat_T(b)), c) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"true"), sr0(c))
case Mat_+(c, Mat_*(Scalar_*(v, Mat_T(a)), Mat_T(b))) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"true"), sr0(c))
// (v * A') * B + C
case Mat_+(Mat_*(Scalar_*(v, Mat_T(a)), b), c) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"false"), sr0(c))
case Mat_+(c, Mat_*(Scalar_*(v, Mat_T(a)), b)) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"false"), sr0(c))
// (v * A) * B' + C
case Mat_+(Mat_*(Scalar_*(v, a), Mat_T(b)), c) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"true"), sr0(c))
case Mat_+(c, Mat_*(Scalar_*(v, a), Mat_T(b))) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"true"), sr0(c))
// (v * A) * B + C
case Mat_+(Mat_*(Scalar_*(v, a), b), c) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"false"), sr0(c))
case Mat_+(c, Mat_*(Scalar_*(v, a), b)) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"false"), sr0(c))
// A' * (v * B') + C
case Mat_+(Mat_*(Mat_T(a), Scalar_*(v, Mat_T(b))), c) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"true"), sr0(c))
case Mat_+(c, Mat_*(Mat_T(a), Scalar_*(v, Mat_T(b)))) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"true"), sr0(c))
// A' * (v * B) + C
case Mat_+(Mat_*(Mat_T(a), Scalar_*(v, b)), c) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"false"), sr0(c))
case Mat_+(c, Mat_*(Mat_T(a), Scalar_*(v, b))) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"false"), sr0(c))
// A * (v * B') + C
case Mat_+(Mat_*(a, Scalar_*(v, Mat_T(b))), c) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"true"), sr0(c))
case Mat_+(c, Mat_*(a, Scalar_*(v, Mat_T(b)))) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"true"), sr0(c))
// A * (v * B) + C
case Mat_+(Mat_*(a, Scalar_*(v, b)), c) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"false"), sr0(c))
case Mat_+(c, Mat_*(a, Scalar_*(v, b))) =>
SR.DGEMM(v, Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"false"), sr0(c))
// (v * A') * B'
case Mat_*(Scalar_*(v, Mat_T(a)), Mat_T(b)) =>
SR.DGEMM0(v, sr0(a), Const(q"true"), sr0(b), Const(q"true"))
// (v * A') * B
case Mat_*(Scalar_*(v, Mat_T(a)), b) =>
SR.DGEMM0(v, sr0(a), Const(q"true"), sr0(b), Const(q"false"))
// (v * A) * B'
case Mat_*(Scalar_*(v, a), Mat_T(b)) =>
SR.DGEMM0(v, sr0(a), Const(q"false"), sr0(b), Const(q"true"))
// (v * A) * B
case Mat_*(Scalar_*(v, a), b) =>
SR.DGEMM0(v, sr0(a), Const(q"false"), sr0(b), Const(q"false"))
// A' * (v * B')
case Mat_*(Mat_T(a), Scalar_*(v, Mat_T(b))) =>
SR.DGEMM0(v, sr0(a), Const(q"true"), sr0(b), Const(q"true"))
// A' * (v * B)
case Mat_*(Mat_T(a), Scalar_*(v, b)) =>
SR.DGEMM0(v, sr0(a), Const(q"true"), sr0(b), Const(q"false"))
// A * (v * B')
case Mat_*(a, Scalar_*(v, Mat_T(b))) =>
SR.DGEMM0(v, sr0(a), Const(q"false"), sr0(b), Const(q"true"))
// A * (v * B)
case Mat_*(a, Scalar_*(v, b)) =>
SR.DGEMM0(v, sr0(a), Const(q"false"), sr0(b), Const(q"false"))
// A' * B' + w * C
case Mat_+(Mat_*(Mat_T(a), Mat_T(b)), Scalar_*(w, c)) =>
SR.DGEMM(Const(q"1.0d"), w, sr0(a), Const(q"true"), sr0(b), Const(q"true"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(Mat_T(a), Mat_T(b))) =>
SR.DGEMM(Const(q"1.0d"), w, sr0(a), Const(q"true"), sr0(b), Const(q"true"), sr0(c))
// A' * B + w * C
case Mat_+(Mat_*(Mat_T(a), b), Scalar_*(w, c)) =>
SR.DGEMM(Const(q"1.0d"), w, sr0(a), Const(q"true"), sr0(b), Const(q"false"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(Mat_T(a), b)) =>
SR.DGEMM(Const(q"1.0d"), w, sr0(a), Const(q"true"), sr0(b), Const(q"false"), sr0(c))
// A * B' + w * C
case Mat_+(Mat_*(a, Mat_T(b)), Scalar_*(w, c)) =>
SR.DGEMM(Const(q"1.0d"), w, sr0(a), Const(q"false"), sr0(b), Const(q"true"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(a, Mat_T(b))) =>
SR.DGEMM(Const(q"1.0d"), w, sr0(a), Const(q"false"), sr0(b), Const(q"true"), sr0(c))
// A * B + w * C
case Mat_+(Mat_*(a, b), Scalar_*(w, c)) =>
SR.DGEMM(Const(q"1.0d"), w, sr0(a), Const(q"false"), sr0(b), Const(q"false"), sr0(c))
case Mat_+(Scalar_*(w, c), Mat_*(a, b)) =>
SR.DGEMM(Const(q"1.0d"), w, sr0(a), Const(q"false"), sr0(b), Const(q"false"), sr0(c))
// A' * B' + C
case Mat_+(Mat_*(Mat_T(a), Mat_T(b)), c) =>
SR.DGEMM(Const(q"1.0d"), Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"true"), sr0(c))
case Mat_+(c, Mat_*(Mat_T(a), Mat_T(b))) =>
SR.DGEMM(Const(q"1.0d"), Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"true"), sr0(c))
// A' * B + C
case Mat_+(Mat_*(Mat_T(a), b), c) =>
SR.DGEMM(Const(q"1.0d"), Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"false"), sr0(c))
case Mat_+(c, Mat_*(Mat_T(a), b)) =>
SR.DGEMM(Const(q"1.0d"), Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"false"), sr0(c))
// A * B' + C
case Mat_+(Mat_*(a, Mat_T(b)), c) =>
SR.DGEMM(Const(q"1.0d"), Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"true"), sr0(c))
case Mat_+(c, Mat_*(a, Mat_T(b))) =>
SR.DGEMM(Const(q"1.0d"), Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"true"), sr0(c))
// A * B + C
case Mat_+(Mat_*(a, b), c) =>
SR.DGEMM(Const(q"1.0d"), Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"false"), sr0(c))
case Mat_+(c, Mat_*(a, b)) =>
SR.DGEMM(Const(q"1.0d"), Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"false"), sr0(c))
// A' * B'
case Mat_*(Mat_T(a), Mat_T(b)) =>
SR.DGEMM0(Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"true"))
// A' * B
case Mat_*(Mat_T(a), b) =>
SR.DGEMM0(Const(q"1.0d"), sr0(a), Const(q"true"), sr0(b), Const(q"false"))
// A * B'
case Mat_*(a, Mat_T(b)) =>
SR.DGEMM0(Const(q"1.0d"), sr0(a), Const(q"false"), sr0(b), Const(q"true"))
// Fallback: node-by-node translation for anything not strength-reduced.
// NOTE(review): no plain "A * B" DGEMM0 case exists above, so an
// unadorned product falls through to SR.Mat_* here — confirm intended.
case Ref(name) => SR.Ref(name)
case Const(tree) => SR.Const(tree)
case Mat(tree) => SR.Mat(tree)
case NewMat(tree) => SR.NewMat(tree)
case Mat_*(left, right) => SR.Mat_*(sr0(left), sr0(right))
case Mat_+(left, right) => SR.Mat_+(sr0(left), sr0(right))
case Mat_T(matrix) => SR.Mat_T(sr0(matrix))
case Scalar_*(left, right) => SR.Scalar_*(left, sr0(right))
}
}
SR.Result(sr0(cseResult.root), cseResult.named.mapValues(sr0))
}
/** Transform the SR result to SSA form in which each operation takes only
* names as arguments and each result is assigned a new name. */
def toSSA(srResult: SR.Result): SSA.Result = {
import SSA._
/* Return the names that must be available before this Op can be
* computed.*/
def dependencies(op: SR.Op): Set[TermName] = {
def loop(op: SR.Op): Set[TermName] = {
import SR._
op match {
case Ref(name) => Set(name)
case Const(_) => Set()
case Mat(_) => Set()
case NewMat(_) => Set()
case Mat_*(left, right) => loop(left) ++ loop(right)
case Mat_+(left, right) => loop(left) ++ loop(right)
case Mat_T(matrix) => loop(matrix)
case Scalar_*(_, right) => loop(right)
case DGEMM(_, _, a, _, b, _, c) => loop(a) ++ loop(b) ++ loop(c)
case DGEMM0(_, a, _, b, _) => loop(a) ++ loop(b)
}
}
loop(op)
}
/* Returns the namedOps in executable order. */
// Simple topological sort: repeatedly emit an op whose dependency set is
// empty and erase its name from the remaining dependency sets.
def order(namedOps: Map[TermName, SR.Op]): List[(TermName, SR.Op)] = {
def loop (deps: Map[TermName, Set[TermName]]): List[(TermName, SR.Op)] = {
if (deps.isEmpty) List()
else {
deps.find{ case (name, depNames) => depNames.isEmpty } match {
case None => throw new RuntimeException("Cyclic dependencies")
case Some((name, _)) =>
val rest = (deps - name).mapValues(s => s - name)
(name, namedOps(name)) :: loop(rest)
}
}
}
if (namedOps.isEmpty) List()
else {
val deps: Map[TermName, Set[TermName]] = namedOps.mapValues(dependencies)
println("DEPS: " + deps)
loop(deps)
}
}
/* Convert op to SSA form, resulting list is in reverse order */
def convert(op: SR.Op): List[Op] = {
// Returns the name bound to op's result together with the (reversed)
// list of assignments required to compute it.
def loop(op: SR.Op): (TermName, List[Op]) = {
op match {
case SR.Ref(name) => (name, List())
case SR.Const(tree) =>
val name = fresh("ssa_const")
(name, List(Assign(name, Const(tree))))
case SR.Mat(tree) =>
val name = fresh("ssa_mat")
(name, List(Assign(name, Mat(tree))))
case SR.NewMat(tree) =>
val name = fresh("ssa_new_mat")
(name, List(Assign(name, NewMat(tree))))
case SR.Mat_*(left, right) =>
val name = fresh("ssa_mat_*")
val (leftName, leftOps) = loop(left)
val (rightName, rightOps) = loop(right)
(name, Assign(name, Mat_*(leftName, rightName))
:: rightOps ::: leftOps)
case SR.Mat_+(left, right) =>
val name = fresh("ssa_mat_+")
val (leftName, leftOps) = loop(left)
val (rightName, rightOps) = loop(right)
(name, Assign(name, Mat_+(leftName, rightName))
:: rightOps ::: leftOps)
case SR.Mat_T(matrix) =>
val name = fresh("ssa_matT")
val (mName, mOps) = loop(matrix)
(name, Assign(name, Mat_T(mName)) :: mOps)
case SR.Scalar_*(left, right) =>
val name = fresh("ssa_scalar*")
val (leftName, leftOps) = loop(left)
val (rightName, rightOps) = loop(right)
(name, Assign(name, Scalar_*(leftName, rightName))
:: rightOps ::: leftOps)
case SR.DGEMM(SR.Const(alpha), SR.Const(beta), a, SR.Const(at), b, SR.Const(bt), c) =>
val name = fresh("ssa_dgemm")
val (aName, aOps) = loop(a)
val (bName, bOps) = loop(b)
val (cName, cOps) = loop(c)
(name, Assign(name, DGEMM(Const(alpha), Const(beta), aName, Const(at), bName, Const(bt), cName))
:: aOps ::: bOps ::: cOps)
case SR.DGEMM0(SR.Const(alpha), a, SR.Const(at), b, SR.Const(bt)) =>
val name = fresh("ssa_dgemm0")
val (aName, aOps) = loop(a)
val (bName, bOps) = loop(b)
(name, Assign(name, DGEMM0(Const(alpha), aName, Const(at), bName, Const(bt)))
:: aOps ::: bOps)
}
}
val (_, ops) = loop(op)
// Strip the outermost Assign so the caller can bind the result under its
// own name (the CSE name, or leave it bare as the final expression).
ops match {
case List() => ops
case Assign(_, res) :: rest => (res :: rest)
case _ => throw new RuntimeException("Conversion error")
}
}
// Convert returns list in reverse order so build the program from
// the end and then reverse it
val end = convert(srResult.root)
val start = order(srResult.named).reverse.flatMap{case (name, cseOp) => {
convert(cseOp) match {
case List() => List()
case op :: ops => Assign(name, op) :: ops
}
} }
Result((end ::: start).reverse)
}
// TODO: preserve original names?
/** Legacy scheduler: translates an Unoptimized expression directly into a
  * block of `val`s, memoizing structurally identical sub-expressions in
  * `scheduleMap` so each one is computed only once.
  *
  * Superseded by `toSSA` + `schedule` (see `opt`). The DGEMM case is left
  * commented out, so the inner match is intentionally partial and would
  * throw a MatchError on DGEMM/DGEMM0 nodes.
  *
  * @param op    root of the expression to translate
  * @param alloc tree evaluating to the off-heap allocator to thread through
  * @return a block tree `{ <allocator val>; <scheduled vals>; <result name> }`
  */
def toTree(op: Unoptimized.Op, alloc: Tree): Tree = {
  import Unoptimized._
  val allocName = fresh("allocator")
  // Scheduled statements, accumulated in reverse order and reversed at the end.
  var schedule = q"val $allocName = $alloc" :: List.empty[Tree]
  // Memoization table: op -> name of the val that already computes it.
  val scheduleMap = mutable.Map.empty[Op, TermName]
  def loop(op: Op): TermName =
    if (scheduleMap.contains(op))
      scheduleMap(op)
    else
      op match {
        case Mat(tree) =>
          val name = fresh("matrix_leaf")
          schedule = q"val $name = $tree" :: schedule
          scheduleMap += ((op, name))
          name
        case NewMat(tree) =>
          val name = fresh("new_matrix_leaf")
          schedule = q"val $name = $tree" :: schedule
          scheduleMap += ((op, name))
          name
        case Const(tree) =>
          val name = fresh("constant_leaf")
          schedule = q"val $name = $tree" :: schedule
          scheduleMap += ((op, name))
          name
        case Mat_*(left, right) =>
          val leftname = loop(left)
          val rightname = loop(right)
          val name = fresh("matrix_multiply")
          schedule = q"val $name = $leftname.*($rightname)($allocName)" :: schedule
          scheduleMap += ((op, name))
          name
        case Mat_+(left, right) =>
          val leftname = loop(left)
          val rightname = loop(right)
          // Fixed: this prefix previously read "matrix_multiply" (copied from
          // the Mat_* case above), which made generated code confusing to read.
          val name = fresh("matrix_add")
          schedule = q"val $name = $leftname.+($rightname)($allocName)" :: schedule
          scheduleMap += ((op, name))
          name
        case Scalar_*(left, right) =>
          val leftname = loop(left)
          val rightname = loop(right)
          val name = fresh("matrix_scalar_multiply")
          schedule = q"val $name = $leftname.*($rightname)($allocName)" :: schedule
          scheduleMap += ((op, name))
          name
        case Mat_T(m) =>
          val transposed = loop(m)
          val name = fresh("matrix_transpose")
          schedule = q"val $name = $transposed.t()($allocName)" :: schedule
          scheduleMap += ((op, name))
          name
        /*
        case DGEMM(alpha, beta, a, at, b, bt, c) =>
          val alphaName = loop(alpha)
          val betaName = loop(beta)
          val aName = loop(a)
          val atName = loop(at)
          val bName = loop(b)
          val btName = loop(bt)
          val cName = loop(c)
          schedule = q"$cName.dgemm($alphaName, $betaName, $aName, $atName, $bName, $btName)" :: schedule
          scheduleMap --= scheduleMap.filter(_._2 == cName).keys
          scheduleMap += ((op, cName))
          cName
        */
      }
  val last = loop(op)
  q"{ ..${schedule.reverse}; $last }"
}
/** Return a block executing each tree of the given list in order.
  * An empty list yields the empty block `{}`. */
def makeBlock(ts: List[Tree]): Tree =
  // Simplified: the previous match bound `t :: rest` without using either
  // binding; an emptiness test expresses the same two cases directly.
  if (ts.isEmpty) q"{}"
  else q"{..$ts}"
/** Convert the SSA form into a scala tree: one `val` per SSA assignment,
  * with no buffer reuse. Simpler (but more allocation-heavy) alternative to
  * `schedule`.
  *
  * @param ssaResult SSA program to translate
  * @param alloc     tree evaluating to the off-heap allocator
  */
def simpleSchedule(ssaResult: SSA.Result, alloc: Tree): Tree = {
  import SSA._
  def toTree(op: Op): Tree = {
    op match {
      case Assign(name, op) =>
        val t = toTree(op)
        q"val $name = $t"
      case Const(tree) => tree
      case Mat(tree) => tree
      case NewMat(tree) => q"scala.offheap.numeric.DenseMatrix($tree)"
      case Mat_*(left, right) => q"$left.*($right)($alloc)"
      case Mat_+(left, right) => q"$left.+($right)($alloc)"
      case Scalar_*(left, right) => q"$left.*($right)($alloc)"
      // Bug fix: this previously emitted q"$matrix.t()(alloc)" — a reference
      // to a literal identifier `alloc`, unbound at the macro expansion
      // site — instead of splicing the allocator tree like every other case.
      case Mat_T(matrix) => q"$matrix.t()($alloc)"
      case DGEMM(Const(alpha), Const(beta), a, Const(at), b, Const(bt), c) =>
        q"($c.copy()).dgemm($alpha, $beta, $a, $at, $b, $bt)"
      case DGEMM0(Const(alpha), a, Const(at), b, Const(bt)) =>
        q"(DenseMatrix.uninit($a.rows, $b.columns)).dgemm($alpha, 0.0, $a, $at, $b, $bt)"
    }
  }
  makeBlock(ssaResult.ops.map(toTree))
}
/** Convert the SSA form into a scala tree reusing existing matrices. */
/** Convert the SSA program into a scala tree, reusing the buffers of
  * matrices whose names are no longer referenced by later operations
  * (liveness-driven in-place updates). Stateful: `nameMap` and
  * `existingNames` are shared across the per-op `toTree` calls, which must
  * therefore run in program order. */
def schedule(ssaResult: SSA.Result, alloc: Tree): Tree = {
import SSA._
/** Return the names used by the given Op. */
def usedNames(op: Op): Set[TermName] = {
op match {
case Assign(name, op) => usedNames(op)
case Const(tree) => Set()
case Mat(tree) => Set()
case NewMat(tree) => Set()
case Mat_*(left, right) => Set(left, right)
case Mat_+(left, right) => Set(left, right)
case Scalar_*(left, right) => Set(left, right)
case Mat_T(matrix) => Set(matrix)
case DGEMM(_, _, a, _, b, _, c) => Set(a, b, c)
case DGEMM0(_, a, _, b, _) => Set(a, b)
}
}
/** For each Op return the Set of name used at this Op and the ones following. */
// Expects `ops` in REVERSE program order; each element of the returned list
// is the union of the names used by the corresponding op and all later ops.
def nameUsage(ops: List[Op], prev: List[Set[TermName]]): List[Set[TermName]] = {
var prevNames = prev match {
case List() => Set()
case p :: _ => p
}
ops match {
case List() => prev
case op :: rest => nameUsage(rest, (prevNames ++ usedNames(op)) :: prev)
}
}
/* Those keep state between calls to toTree. */
// nameMap: SSA name -> name it was aliased to when its buffer was reused.
// existingNames: materialised matrices currently eligible for reuse.
val nameMap: mutable.Map[TermName, TermName] = mutable.Map.empty[TermName, TermName]
val existingNames: mutable.Set[TermName] = mutable.Set.empty[TermName]
/** Convert an Op to a scala tree, due to being stateful it must be called
* for each Op in program order. */
def toTree(opsWithNextUsed: (Op, Set[TermName])): Tree = {
println("toTree: " + opsWithNextUsed._1)
println("nxtUse: " + opsWithNextUsed._2)
def rename(name: TermName, into: TermName): Unit = {
nameMap += ((name, into))
}
def getName(name: TermName): TermName = {
nameMap.getOrElse(name, name)
}
// Pick a materialised matrix not referenced by any later op, preferring
// the given candidates (typically the current operation's own operands).
def findUnused(nextUsed: Set[TermName], prefer: Set[TermName]): Option[TermName] = {
println(" findUnused(" + nextUsed + ", " + prefer + ")")
println(" existing: " + existingNames)
println(" nextused: " + nextUsed.map(getName))
val available = existingNames -- (nextUsed.map(getName))
val prefered = available & prefer
val res = prefered.headOption.orElse(available.headOption)
println(" -> " + res)
res
}
val res = opsWithNextUsed._1 match {
case Assign(name, Const(tree)) => q"val $name = $tree"
case Assign(name, Mat(tree)) => q"val $name = $tree"
case Assign(name, NewMat(tree)) => q"val $name = scala.offheap.numeric.DenseMatrix($tree)"
case Assign(name, Mat_+(left0, right0)) =>
val left = getName(left0)
val right = getName(right0)
findUnused(opsWithNextUsed._2, Set(left, right)) match {
case None =>
existingNames += name
q"val $name = $left.+($right)($alloc)"
case Some(newName) =>
// If one of the operands' buffers is dead, add in place into it.
if (newName == left) {
rename(name, newName)
q"$left.+=($right)"
} else if (newName == right) {
rename(name, newName)
q"$right.+=($left)"
}
else {
// Try to update if size match at runtime?
existingNames += name
q"val $name = $left.+($right)($alloc)"
}
}
case Assign(name, Mat_*(left0, right0)) =>
val left = getName(left0)
val right = getName(right0)
findUnused(opsWithNextUsed._2, Set(left, right)) match {
case None =>
existingNames += name
q"val $name = $left.*($right)($alloc)"
case Some(newName) =>
if (newName == left || newName == right) {
// No inplace matrix multiplication
existingNames += name
q"val $name = $left.*($right)($alloc)"
}
else {
/* Either a new matrix or the one with newName will be returned
* at runtime. Since we don't know at compile time, a new name
* for that result is created. */
val newName2 = fresh("scheduleMat_*")
rename(name, newName2)
existingNames += newName2
/* Make the old name unavailable since newName2 might alias to it. */
existingNames -= newName
q"val $newName2 = $newName._maybeUpdateToMulRes($left, false, $right, false)($alloc)"
}
}
case Assign(name, Scalar_*(left0, right0)) =>
val left = getName(left0)
val right = getName(right0)
findUnused(opsWithNextUsed._2, Set(right)) match {
case None =>
existingNames += name
q"val $name = $left.*($right)($alloc)"
case Some(newName) =>
if (newName == right) {
rename(name, newName)
q"$right.:*=($left)"
}
else {
/* Same as above: maybe will end up allocating a new matrix at
* runtime so newName becomes unusable. */
val newName2 = fresh("scheduleScalar_*")
rename(name, newName2)
existingNames += newName2
existingNames -= newName
q"val $newName2 = $newName._maybeUpdateToScalarMulRes($left, $right)($alloc)"
}
}
case Assign(name, Mat_T(matrix0)) =>
val matrix = getName(matrix0)
findUnused(opsWithNextUsed._2, Set(matrix)) match {
case None =>
existingNames += name
q"val $name = $matrix.t()($alloc)"
case Some(newName) =>
// Inplace transposition not supported (yet?)
if (newName == matrix) {
existingNames += name
q"val $name = $matrix.t()($alloc)"
}
else {
// Try to update if size match at runtime?
existingNames += name
q"val $name = $matrix.t()($alloc)"
}
}
case Assign(name, DGEMM(Const(alpha), Const(beta), a0, Const(at), b0, Const(bt), c0)) =>
val a = getName(a0)
val b = getName(b0)
val c = getName(c0)
findUnused(opsWithNextUsed._2, Set(c)) match {
// dgemm naturally accumulates into c, so reuse it when it is dead.
case Some(newName) if newName == c =>
rename(name, newName)
q"$c.dgemm($alpha, $beta, $a, $at, $b, $bt)"
case _ =>
existingNames += name
q"val $name = ($c.copy()).dgemm($alpha, $beta, $a, $at, $b, $bt)"
}
case Assign(name, DGEMM0(Const(alpha), a0, Const(at), b0, Const(bt))) =>
val a = getName(a0)
val b = getName(b0)
findUnused(opsWithNextUsed._2, Set()) match {
case None =>
existingNames += name
q"val $name = (DenseMatrix.uninit($a.rows, $b.columns)).dgemm($alpha, 0.0, $a, $at, $b, $bt)"
case Some(newName) =>
/* Either a new matrix or the one with newName will be returned
* at runtime. Since we don't know at compile time, a new name
* for that result is created. */
val newName2 = fresh("scheduleDGEMM0")
rename(name, newName2)
existingNames += newName2
existingNames -= newName
q"val $newName2 = $newName._maybeUpdateToDGEMM0($alpha, $a, $at, $b, $bt)($alloc)"
}
case Assign(name, _) =>
// Necessary because scalac doesn't emit warnings about unhandled cases.
throw new RuntimeException("Unhandled case, please add case Assign(name, ...) above")
/* Those are for the final expression. */
case Const(tree) => tree
case Mat(tree) => tree
case op =>
// Any other final expression: schedule it under a fresh name, then
// unwrap the resulting ValDef so the block ends in the bare value.
val name = fresh("return")
val tree = toTree((Assign(name, op), opsWithNextUsed._2))
val newName = getName(name)
tree match {
case ValDef(_, _, _, rhs) =>
rhs
case _ =>
q"$tree; $newName"
}
}
println(" -> " + showCode(res))
res
}
// NOTE(review): `used` and `usedNext` are never reassigned and could be
// vals. Also `used.tail` throws on an empty SSA program — confirm
// ssaResult.ops is guaranteed nonempty here.
var used = nameUsage(ssaResult.ops.reverse, List())
var usedNext: List[Set[TermName]] = used.tail ::: List(Set.empty[TermName])
if (usedNext.size != ssaResult.ops.size)
throw new RuntimeException("Programming error")
makeBlock(ssaResult.ops.zip(usedNext).map(toTree))
}
/** Entry point of the optimisation pipeline:
  * parse -> CSE -> strength reduction -> SSA -> buffer-reusing scheduler.
  * NOTE(review): the printlns below run at compile time on every expansion;
  * consider gating them behind a verbose flag. */
def opt(alloc: Tree, t: Tree): Tree = {
val op = toOp(t)
println(s"parsed op: $op")
val opCSE = cse(op)
println(s"after CSE: $opCSE")
val opSR = sr(opCSE)
println(s"after SR: $opSR")
//val optOp = optimise(op)
//println(s"optimised op: $optOp")
val optSSA = toSSA(opSR)
println(s"SSA form: $optSSA")
//val res = toTree(optOp, alloc)
//println(s"macro result: ${showCode(res)}")
//res
//val res = simpleSchedule(optSSA, alloc)
val res = schedule(optSSA, alloc)
println("macro result: " + showCode(res))
res
}
}
| florv/scala-offheap | macros/src/main/scala/offheap/internal/macros/NumericMethod.scala | Scala | bsd-3-clause | 39,735 |
package fpinscala.laziness
import scala.annotation.tailrec
import Stream._
/** A lazily evaluated list (fpinscala chapter 5). Head and tail of each
  * cell are thunks; the `Stream.cons` smart constructor memoizes both so
  * they are evaluated at most once. */
trait Stream[+A] {

  /** Right fold. `f` takes its second argument by name, so it may choose not
    * to evaluate the rest of the stream, allowing early termination. */
  def foldRight[B](z: => B)(f: (A, => B) => B): B =
    this match {
      case Cons(h, t) => f(h(), t().foldRight(z)(f))
      case _ => z
    }

  /** True if any element satisfies `p`; stops at the first match. */
  def exists(p: A => Boolean): Boolean =
    foldRight(false)((a, b) => p(a) || b)

  /** First element satisfying `f`, if any. */
  @annotation.tailrec
  final def find(f: A => Boolean): Option[A] = this match {
    case Empty => None
    case Cons(h, t) => if (f(h())) Some(h()) else t().find(f)
  }

  // Exercise 5.1
  /** Forces the whole stream into a strict List. */
  def toList: List[A] = {
    @annotation.tailrec
    def go(stream: Stream[A], acc: List[A]): List[A] = stream match {
      case Cons(h, t) => go(t(), h() :: acc)
      case Empty => acc
    }
    go(this, List()).reverse
  }

  /** Skips the first `n` elements (all of them if `n` exceeds the length). */
  def drop(n: Int): Stream[A] =
    this match {
      case Cons(_, t) if n > 0 => t().drop(n - 1)
      case _ => this
    }

  // Exercise 5.2
  /** Keeps at most the first `n` elements, lazily. */
  def take(n: Int): Stream[A] =
    this match {
      case Cons(h, t) if n > 0 => Stream.cons(h(), t().take(n - 1))
      case _ => Stream.empty
    }

  // Exercise 5.13
  /** `take` expressed via `unfold`, threading the remaining count as state. */
  def takeViaUnfold(n: Int): Stream[A] =
    Stream.unfold((this, n)) {
      case (Cons(h, t), m) if m > 0 => Some((h(), (t(), m - 1)))
      case _ => None
    }

  // Exercise 5.4
  /** True if every element satisfies `p`; stops at the first failure. */
  def forAll(p: A => Boolean): Boolean =
    foldRight(true)((a, b) => p(a) && b)

  // Exercise 5.5
  /** Longest prefix whose elements all satisfy `p`, via foldRight. */
  def takeWhile(p: A => Boolean): Stream[A] =
    foldRight(Stream.empty[A])((a, b) => if (p(a)) Stream.cons(a, b) else Stream.empty)

  // Exercise 5.13
  def takeWhileViaUnfold(p: A => Boolean): Stream[A] =
    Stream.unfold(this) {
      case Cons(h, t) if p(h()) => Some((h(), t()))
      case _ => None
    }

  // Exercise 5.6
  /** Head as an Option, without forcing the tail. */
  def headOption: Option[A] =
    foldRight(Option.empty[A])((a, _) => Option(a))

  // Exercise 5.13
  def mapViaUnfold[B](f: A => B): Stream[B] =
    Stream.unfold(this) {
      case Cons(h, t) => Some((f(h()), t()))
      case Empty => None
    }

  def mapViaUnfold2[B](f: A => B): Stream[B] =
    Stream.unfold(this)(s => for (h <- s.headOption) yield (f(h), s.drop(1)))

  // Exercise 5.7
  def map[B](f: A => B): Stream[B] =
    foldRight(Stream.empty[B])((a, b) => Stream.cons(f(a), b))

  def filter(f: A => Boolean): Stream[A] =
    foldRight(Stream.empty[A])((a, b) => if (f(a)) Stream.cons(a, b) else b)

  /** Lazily appends `s` after this stream. */
  def append[B >: A](s: => Stream[B]): Stream[B] =
    foldRight(s)((a, b) => Stream.cons(a, b))

  def flatMap[B](f: A => Stream[B]): Stream[B] =
    foldRight(Stream.empty[B])((a, b) => f(a) append b)

  // Exercise 5.13
  /** Pairwise combination; stops at the shorter of the two streams. */
  def zipWith[B, C](other: Stream[B])(f: (A, B) => C): Stream[C] =
    Stream.unfold((this, other)) {
      case (Cons(h1, t1), Cons(h2, t2)) => Some((f(h1(), h2()), (t1(), t2())))
      case _ => None
    }

  /** Zips to the length of the LONGER stream, padding with None. */
  def zipAll[B](other: Stream[B]): Stream[(Option[A], Option[B])] =
    Stream.unfold((this, other)) {
      case (Cons(h1, t1), Cons(h2, t2)) => Some(((Some(h1()), Some(h2())), (t1(), t2())))
      case (Cons(h1, t1), Empty) => Some(((Some(h1()), None), (t1(), Empty)))
      case (Empty, Cons(h2, t2)) => Some(((None, Some(h2())), (Empty, t2())))
      case _ => None
    }

  /** True if `other` appears as a contiguous subsequence of this stream.
    * (Type parameter fixed: it used to shadow the trait's `A`.) */
  def hasSubsequence[B >: A](other: Stream[B]): Boolean =
    tails.exists(_.startsWith(other))

  // Exercise 5.14
  /** True if `other` is a prefix of this stream. The type parameter used to
    * shadow the trait's `A` (silently disabling type checking between the
    * two streams); `B >: A` keeps existing call sites compiling because
    * Stream is covariant. */
  def startsWith[B >: A](other: Stream[B]): Boolean =
    zipAll(other).takeWhile {
      case (_, b) => b.nonEmpty
    } forAll {
      case (a, b) => a == b
    }

  // Exercise 5.15
  /** All suffixes of this stream, ending with the empty stream. */
  def tails: Stream[Stream[A]] =
    Stream.unfold(this) {
      case s @ Cons(_, t) => Some((s, t()))
      case Empty => None
    } append Stream(Stream.empty)

  // Exercise 5.16
  /** Like foldRight but returns the stream of all intermediate results,
    * e.g. Stream(1,2,3).scanRight(0)(_ + _) == Stream(6,5,3,0).
    * Linear-time: the previous tails-based version recomputed each suffix's
    * fold from scratch (quadratic) and was flagged as "not quite right". */
  def scanRight[B](z: => B)(f: (A, => B) => B): Stream[B] =
    foldRight((z, Stream(z))) { (a, acc0) =>
      // acc0 is by-name; cache it so the pair is computed at most once.
      lazy val acc = acc0
      val b = f(a, acc._1)
      (b, Stream.cons(b, acc._2))
    }._2
}

/** The empty stream. */
case object Empty extends Stream[Nothing]

/** A non-empty cell; `h`/`t` are explicit thunks (memoized by `Stream.cons`). */
case class Cons[+A](h: () => A, t: () => Stream[A]) extends Stream[A]

object Stream {
  /** Smart constructor: memoizes head and tail so each is forced only once. */
  def cons[A](hd: => A, tl: => Stream[A]): Stream[A] = {
    lazy val head = hd
    lazy val tail = tl
    Cons(() => head, () => tail)
  }

  def empty[A]: Stream[A] = Empty

  /** Builds a stream from the given (strict) elements. */
  def apply[A](as: A*): Stream[A] =
    if (as.isEmpty) empty
    else cons(as.head, apply(as.tail: _*))

  // Exercise 5.11
  /** Corecursive generator: repeatedly applies `f` to the state `z`,
    * emitting elements until it returns None. */
  def unfold[A, S](z: S)(f: S => Option[(A, S)]): Stream[A] =
    f(z) match {
      case Some((a, s)) => cons(a, unfold(s)(f))
      case None => empty
    }

  // Exercise 5.12
  /** Infinite stream of 1s. */
  val ones: Stream[Int] = constant(1)

  /** Infinite stream repeating `a`. */
  def constant[A](a: A): Stream[A] =
    unfold(a)(x => Some((x, x)))

  /** Infinite stream counting up from `n`. */
  def from(n: Int): Stream[Int] =
    unfold(n)(m => Some((m, m + 1)))

  /** Infinite Fibonacci stream: 0, 1, 1, 2, 3, 5, ... */
  def fibs(): Stream[Int] =
    unfold((0, 1)) { case (f1, f2) => Some((f1, (f2, f1 + f2))) }
}
} | fpinscala-muc/fpinscala-g-fresh | exercises/src/main/scala/fpinscala/laziness/Stream.scala | Scala | mit | 4,838 |
package io.cumulus.views.email
import io.cumulus.Settings
import io.cumulus.utils.Base16
import io.cumulus.models.user.User
import play.api.i18n.Messages
import scalatags.Text.all._
/** Email asking a user to validate their address. Renders the shared
  * Cumulus mail template with a greeting, an explanation, a validation
  * link and a disclaimer, all localized through `messages`. */
case class CumulusEmailValidationEmail(
  user: User,
)(implicit
  val settings: Settings,
  val messages: Messages
) extends CumulusEmailTemplate {

  /** Localized title shown at the top of the mail body. */
  override protected def mailContentTitle: String =
    messages("email.email-validation.content-title")

  /** Mail body fragments, built with scalatags. */
  override protected def mailContent: Seq[Tag] = {
    // Validation URL pointing back at this server; the user's validation
    // code travels hex-encoded in the query string.
    val link = s"${settings.host.url}/validateEmail?userLogin=${user.login}&emailCode=${Base16.encode(user.security.validationCode)}"

    Seq(
      span(
        messages("email.email-validation.greetings", user.login),
        br, br,
        messages("email.email-validation.content", user.email),
        a(
          href := link,
          style := "color: #3dc7be;",
          messages("email.email-validation.link")
        ),
        messages("email.email-validation.content-next")
      ),
      span(
        messages("email.email-validation.disclaimer")
      )
    )
  }
}
// ---- end of file: server/cumulus-core/src/main/scala/io/cumulus/views/email/CumulusEmailValidationEmail.scala (repo Cumulus-Cloud/cumulus, MIT) ----
package com.joescii.typed
/** A linked list of Int whose length is tracked at the type level, so that
  * e.g. element-wise `+` only accepts a list of exactly the same size. */
sealed trait IntList[Size <: SizeType] {
  /** Prepends an element; the result type records size + 1. */
  def ::(head: Int): IntList[SizeN[Size]] = IntListImpl(head, this)
  /** Element-wise sum; equal lengths are enforced by the type system. */
  def +(that: IntList[Size]): IntList[Size]
  /** Concatenation; the result size is the type-level sum of both sizes. */
  def ++[ThatSize <: SizeType](that: IntList[ThatSize]): IntList[Size#plus[ThatSize]]
  /** Runtime length, mirroring the type-level size. */
  def size: Int
}
/** The empty list: size 0 both at the type level (Size0) and at runtime. */
case object IntNil extends IntList[Size0] {
  // The only IntList[Size0] is IntNil itself, so addition is a no-op.
  override def +(that: IntList[Size0]) = this
  override val size = 0
  // Concatenating onto the empty list yields the other list unchanged.
  override def ++[ThatSize <: SizeType](that: IntList[ThatSize]) = that
}
/** A non-empty list cell; its type-level size is the tail's size + 1. */
private[typed] case class IntListImpl[TailSize <: SizeType](head: Int, tail: IntList[TailSize]) extends IntList[SizeN[TailSize]] {
  private type Size = SizeN[TailSize] // defined for clarity
  /** Element-wise sum. `that` has the same (non-zero) type-level size, so
    * it must be an IntListImpl and the single-case match cannot fail. */
  override def +(that: IntList[Size]) = that match {
    case IntListImpl(h, t) => (head + h) :: (tail + t)
  }
  override val size = 1 + tail.size
  /** Keeps this head and concatenates `that` onto the tail. */
  override def ++[ThatSize <: SizeType](that: IntList[ThatSize]) = IntListImpl(head, tail++that)
}
package breeze.linalg
import breeze.benchmark.{MyRunner, BreezeBenchmark}
import breeze.linalg.operators.DenseVectorSupportMethods
import spire.syntax.cfor._
/**
 * Created by dlwh on 8/14/15.
 *
 * Microbenchmarks comparing the library `axpy` kernel against hand-written
 * loops on tiny (length-5) dense vectors. Each `timeXxx(reps)` method runs
 * its update `reps` times and returns one of the vectors so the JIT cannot
 * eliminate the work as dead code.
 */
class DenseAxpyBenchmark extends BreezeBenchmark {
  assert(usingNatives) // numbers are only meaningful with native BLAS loaded

  val dv, dv2 = DenseVector.rand(5)

  /** Library call: dv2 += 0.042 * dv. (Removed a stale `var sum = 0.0`
    * accumulator that was declared but never read or returned.) */
  def timeSmallDVAxpy(reps: Int) = {
    cforRange(0 until reps) { rep =>
      axpy(0.042, dv, dv2)
    }
    dv2
  }

  /** The same update written as a manual indexed loop over the raw backing
    * arrays. */
  def timeSmallDVInlineRange(reps: Int) = {
    cforRange(0 until reps) { rep =>
      val ad = dv.data
      val bd = dv2.data
      cforRange(0 until dv.length) { i =>
        bd(i) += 0.042 * ad(i)
      }
    }
    dv2
  }

  /** Fully unrolled variant. Note it updates dv from dv2 — the opposite
    * direction of the two methods above — and returns dv accordingly. */
  def timeSmallDVScaleAddInline(reps: Int) = {
    cforRange(0 until reps) { rep =>
      dv(0) += dv2(0) * 0.042
      dv(1) += dv2(1) * 0.042
      dv(2) += dv2(2) * 0.042
      dv(3) += dv2(3) * 0.042
      dv(4) += dv2(4) * 0.042
    }
    dv
  }
}
/** Caliper entry point: runs the benchmarks defined in the class above. */
object DenseAxpyBenchmark extends MyRunner(classOf[DenseAxpyBenchmark])
// ---- end of file: benchmark/src/main/scala/breeze/linalg/DenseAxpyBenchmark.scala (repo chen0031/breeze, Apache-2.0) ----
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
/**
* Add your spec here.
* You can mock out a whole application including requests, plugins etc.
* For more information, consult the wiki.
*/
@RunWith(classOf[JUnitRunner])
class ApplicationSpec extends Specification {

  "Application" should {

    // Unknown routes must be unhandled: route(...) returns None.
    "send 404 on a bad request" in new WithApplication {
      route(FakeRequest(GET, "/boum")) must beNone
    }

    // Per the test name, "/" should answer with a redirect (303 SEE_OTHER)
    // to the login page rather than rendering content directly.
    "should redirect to login page" in new WithApplication {
      val home = route(FakeRequest(GET, "/")).get
      status(home) must equalTo(SEE_OTHER)
    }
  }
}
// ---- end of file: test/ApplicationSpec.scala (repo tomasharkema/Plex.Scala, MIT) ----
package org.elasticmq.persistence
import spray.json.{DefaultJsonProtocol, JsonFormat}
package object sql {

  /** Flattened, JSON-serializable form of one message attribute, as
    * persisted by the SQL storage backend. */
  case class SerializableAttribute(
      key: String,
      primaryDataType: String,
      stringValue: String,
      customType: Option[String]
  )

  object SerializableAttributeProtocol extends DefaultJsonProtocol {
    // NOTE(review): the name `colorFormat` looks like a copy/paste leftover;
    // it is the spray-json format for SerializableAttribute. Renaming the
    // public implicit val would be an API change, so it is only flagged here.
    implicit val colorFormat: JsonFormat[SerializableAttribute] = jsonFormat4(SerializableAttribute)
  }

  object DeadLettersQueueProtocol extends DefaultJsonProtocol {
    implicit val DeadLettersQueueFormat: JsonFormat[DeadLettersQueue] = jsonFormat2(DeadLettersQueue)
  }

  import DeadLettersQueueProtocol._

  object CreateQueueProtocol extends DefaultJsonProtocol {
    // 12-field format; field order must match CreateQueueMetadata.apply.
    implicit val CreateQueueFormat: JsonFormat[CreateQueueMetadata] = jsonFormat12(CreateQueueMetadata.apply)
  }
}
// ---- end of file: persistence/persistence-sql/src/main/scala/org/elasticmq/persistence/sql/package.scala (repo adamw/elasticmq, Apache-2.0) ----
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.json
import java.nio.charset.{Charset, StandardCharsets}
import java.util.{Locale, TimeZone}
import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
import org.apache.commons.lang3.time.FastDateFormat
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.util._
/**
 * Options for parsing JSON data into Spark SQL rows.
 *
 * Most of these map directly to Jackson's internal options, specified in [[JsonParser.Feature]].
 */
private[sql] class JSONOptions(
    @transient private val parameters: CaseInsensitiveMap[String],
    defaultTimeZoneId: String,
    defaultColumnNameOfCorruptRecord: String)
  extends Logging with Serializable {

  // Convenience constructor from a plain Map; option names are matched
  // case-insensitively via CaseInsensitiveMap.
  def this(
      parameters: Map[String, String],
      defaultTimeZoneId: String,
      defaultColumnNameOfCorruptRecord: String = "") = {
    this(
      CaseInsensitiveMap(parameters),
      defaultTimeZoneId,
      defaultColumnNameOfCorruptRecord)
  }

  // Fraction of the input used for schema inference (1.0 = all of it).
  val samplingRatio =
    parameters.get("samplingRatio").map(_.toDouble).getOrElse(1.0)
  // Infer primitive values as strings instead of typed values.
  val primitivesAsString =
    parameters.get("primitivesAsString").map(_.toBoolean).getOrElse(false)
  val prefersDecimal =
    parameters.get("prefersDecimal").map(_.toBoolean).getOrElse(false)
  // The allow* flags below map one-to-one onto Jackson JsonParser.Feature
  // flags; they are applied in setJacksonOptions at the bottom of the class.
  val allowComments =
    parameters.get("allowComments").map(_.toBoolean).getOrElse(false)
  val allowUnquotedFieldNames =
    parameters.get("allowUnquotedFieldNames").map(_.toBoolean).getOrElse(false)
  val allowSingleQuotes =
    parameters.get("allowSingleQuotes").map(_.toBoolean).getOrElse(true)
  val allowNumericLeadingZeros =
    parameters.get("allowNumericLeadingZeros").map(_.toBoolean).getOrElse(false)
  val allowNonNumericNumbers =
    parameters.get("allowNonNumericNumbers").map(_.toBoolean).getOrElse(true)
  val allowBackslashEscapingAnyCharacter =
    parameters.get("allowBackslashEscapingAnyCharacter").map(_.toBoolean).getOrElse(false)
  private val allowUnquotedControlChars =
    parameters.get("allowUnquotedControlChars").map(_.toBoolean).getOrElse(false)
  val compressionCodec = parameters.get("compression").map(CompressionCodecs.getCodecClassName)
  // How malformed records are handled (PERMISSIVE / DROPMALFORMED / FAILFAST).
  val parseMode: ParseMode =
    parameters.get("mode").map(ParseMode.fromString).getOrElse(PermissiveMode)
  val columnNameOfCorruptRecord =
    parameters.getOrElse("columnNameOfCorruptRecord", defaultColumnNameOfCorruptRecord)

  val timeZone: TimeZone = DateTimeUtils.getTimeZone(
    parameters.getOrElse(DateTimeUtils.TIMEZONE_OPTION, defaultTimeZoneId))

  // Uses `FastDateFormat` which can be direct replacement for `SimpleDateFormat` and thread-safe.
  val dateFormat: FastDateFormat =
    FastDateFormat.getInstance(parameters.getOrElse("dateFormat", "yyyy-MM-dd"), Locale.US)

  val timestampFormat: FastDateFormat =
    FastDateFormat.getInstance(
      parameters.getOrElse("timestampFormat", "yyyy-MM-dd'T'HH:mm:ss.SSSXXX"), timeZone, Locale.US)

  val multiLine = parameters.get("multiLine").map(_.toBoolean).getOrElse(false)

  /**
   * A string between two consecutive JSON records.
   */
  val lineSeparator: Option[String] = parameters.get("lineSep").map { sep =>
    require(sep.nonEmpty, "'lineSep' cannot be an empty string.")
    sep
  }

  /**
   * Standard encoding (charset) name. For example UTF-8, UTF-16LE and UTF-32BE.
   * If the encoding is not specified (None), it will be detected automatically
   * when the multiLine option is set to `true`.
   */
  val encoding: Option[String] = parameters.get("encoding")
    .orElse(parameters.get("charset")).map { enc =>
      // The following encodings are not supported in per-line mode (multiline is false)
      // because they cause some problems in reading files with BOM which is supposed to
      // present in the files with such encodings. After splitting input files by lines,
      // only the first lines will have the BOM which leads to impossibility for reading
      // the rest lines. Besides of that, the lineSep option must have the BOM in such
      // encodings which can never present between lines.
      val blacklist = Seq(Charset.forName("UTF-16"), Charset.forName("UTF-32"))
      val isBlacklisted = blacklist.contains(Charset.forName(enc))
      require(multiLine || !isBlacklisted,
        s"""The ${enc} encoding must not be included in the blacklist when multiLine is disabled:
           | ${blacklist.mkString(", ")}""".stripMargin)

      // In per-line mode with a non-UTF-8 encoding, an explicit lineSep is mandatory.
      val isLineSepRequired = !(multiLine == false &&
        Charset.forName(enc) != StandardCharsets.UTF_8 && lineSeparator.isEmpty)
      require(isLineSepRequired, s"The lineSep option must be specified for the $enc encoding")

      enc
    }

  // The separator encoded with the chosen (or default UTF-8) charset, for reads.
  val lineSeparatorInRead: Option[Array[Byte]] = lineSeparator.map { lineSep =>
    lineSep.getBytes(encoding.getOrElse("UTF-8"))
  }
  val lineSeparatorInWrite: String = lineSeparator.getOrElse("\\n")

  /** Sets config options on a Jackson [[JsonFactory]]. */
  def setJacksonOptions(factory: JsonFactory): Unit = {
    factory.configure(JsonParser.Feature.ALLOW_COMMENTS, allowComments)
    factory.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, allowUnquotedFieldNames)
    factory.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, allowSingleQuotes)
    factory.configure(JsonParser.Feature.ALLOW_NUMERIC_LEADING_ZEROS, allowNumericLeadingZeros)
    factory.configure(JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS, allowNonNumericNumbers)
    factory.configure(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER,
      allowBackslashEscapingAnyCharacter)
    factory.configure(JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS, allowUnquotedControlChars)
  }
}
// ---- end of file: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala (repo joseph-torres/spark, Apache-2.0) ----
// Compiler negative test: a definition annotated @experimental may only be
// referenced from code that is itself @experimental. Each `// error` marker
// below asserts that the compiler reports an error on exactly that line, so
// those markers must not be moved or removed.
import scala.annotation.experimental

@experimental
class A

@experimental
class B extends A

@experimental
type X

@experimental
type Y = Int

@experimental
opaque type Z = Int

// Non-experimental aliases of experimental types: all must be rejected.
type AA = A // error
type BB = Z // error
type XX = Z // error
type YY = Z // error
type ZZ = Z // error
// ---- end of file: tests/neg-custom-args/no-experimental/experimentalType.scala (repo dotty-staging/dotty, Apache-2.0) ----
package me.ilinskiy.diningPhilosophers
import akka.actor.{ActorSystem, Props}
/** Boots the dining-philosophers simulation: a single waiter actor shared
  * by five philosopher actors, one per seat label A through E. */
object Main extends App {
  val system = ActorSystem("Dinner")
  val waiter = system.actorOf(Props[Waiter])
  // Spawn one philosopher per seat, in order.
  for (seat <- Seq("A", "B", "C", "D", "E"))
    system.actorOf(Props(new Philosopher(waiter, seat)))
}
// ---- end of file: src/main/scala/me/ilinskiy/diningPhilosophers/Main.scala (repo ilinum/dining-philosophers-akka, MIT) ----
package io.iohk.ethereum.mallet.main
import akka.http.scaladsl.model.Uri
import io.iohk.ethereum.domain.Address
import io.iohk.ethereum.mallet.common.{Constants, StringUtil}
/** Command line options and positional args parser */
object OptionParser {

  // scopt reader turning a raw string into an akka-http Uri.
  private implicit val uriRead: scopt.Read[Uri] =
    scopt.Read.reads(Uri.apply)

  // scopt reader turning a hex string into an Address; rejects empty or
  // over-long byte sequences before construction.
  private implicit val addrRead: scopt.Read[Address] =
    scopt.Read.reads { str =>
      val bytes = StringUtil.hexToBytes(str)
      require(bytes.nonEmpty && bytes.length <= Address.Length, "invalid address length")
      Address(bytes)
    }

  /** Parses the arguments into ClOptions; returns None on invalid input
    * (scopt prints the usage text itself in that case). */
  def apply(args: Seq[String]): Option[ClOptions] = {
    val parser = new scopt.OptionParser[ClOptions](Constants.AppName) {
      head(Constants.AppName, "0.1") //TODO: proper versioning

      opt[String]('d', "data-dir")
        .action((d, o) => o.copy(dataDir = d))
        .text(s"customise the directory where keys et. al. are kept (default: ~/.${Constants.AppName})")

      opt[String]('c', "command")
        .action((c, o) => o.copy(command = Some(c)))
        .text(
          "runs mallet in non-interactive mode - provided command will be executed and the app will exit. " +
            "Exit code 0 indicates success, failure otherwise"
        )

      opt[Address]('a', "account")
        .action((a, o) => o.copy(account = Some(a)))
        .text("an alternative to 'selectAccount' command")

      opt[String]('p', "password")
        .action((p, o) => o.copy(password = Some(p)))
        .text(
          "when non-interactive is used, the password for accessing account may be provided this way " +
            "(make sure no one's looking!)"
        )

      // Mandatory positional argument: the node endpoint.
      arg[Uri]("<node>")
        .action((n, o) => o.copy(node = n))
        .validate { uri =>
          if (!Set("http", "https").contains(uri.scheme))
            failure("node URI scheme must be explicit and either 'http' or 'https'")
          else
            success
        }
        .text("URI of the node that mallet connects to (HTTP or HTTPS)")

      help("help")
        .text("prints this usage text")
    }

    parser.parse(args, ClOptions())
  }
}
// ---- end of file: src/main/scala/io/iohk/ethereum/mallet/main/OptionParser.scala (repo input-output-hk/etc-client, MIT) ----
package com.github.aafa.activity
import android.app.Activity
import android.os.Bundle
import android.widget.{LinearLayout, TextView}
import com.github.aafa.model.User
import io.realm.RealmConfiguration.Builder
import io.realm._
import macroid.FullDsl._
import macroid._
/**
* Copyright (c) aafa. All rights reserved.
* Created by aafa
*/
/** Demo activity: writes a sample User into Realm, reads it back and shows
  * its name in the UI. */
class MainActivity extends Activity with Contexts[Activity] with MainActivityView {

  /** Realm configuration; wipes the database instead of migrating. */
  def realmConfiguration: RealmConfiguration = new Builder(this)
    .deleteRealmIfMigrationNeeded()
    .build()

  /** Opens a Realm instance. Every Realm.getInstance call increments Realm's
    * internal reference count, so each instance obtained here must be
    * balanced by a close(). */
  def realm: Realm = Realm.getInstance(realmConfiguration)

  override def onCreate(b: Bundle): Unit = {
    super.onCreate(b)
    setTitle("Hello world, realm-test!")
    setContentView(ui.get)
    test()
  }

  def test(): Unit = {
    val user: User = new User()
    user.name = "Hello, Realm!"
    user.id = 1

    // Acquire the Realm exactly once and release it when done. The previous
    // code called the `realm` accessor for every operation, bumping the
    // reference count on each call and never closing, leaking the instance.
    val db = realm
    try {
      db.beginTransaction()
      db.clear(classOf[User])
      db.copyToRealm(user)
      db.commitTransaction()

      val realmUser: User = db.where(classOf[User]).equalTo("id", new Integer(1)).findFirst()
      updateText(realmUser.name)
      println(realmUser.name)
    } finally {
      db.close()
    }
  }
}
/** Minimal UI for [[MainActivity]]: one TextView inside a LinearLayout. */
trait MainActivityView {
  this: MainActivity =>

  // Macroid slot that holds the TextView once `ui` has been materialized.
  var textSlot = slot[TextView]

  /** Builds the layout tree, wiring the TextView into `textSlot`. */
  def ui: Ui[LinearLayout] = {
    l[LinearLayout](
      w[TextView] <~ wire(textSlot)
    )
  }

  /** Replaces the text shown in the wired TextView. */
  def updateText(s: String) = runUi(textSlot <~ text(s))
}
// ---- end of file: realm-android/src/main/scala/com/github/aafa/activity/MainActivity.scala (repo aafa/realm-sbt-plugin, MIT) ----
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package sys
package process
import processInternal._
import ProcessBuilder._
import scala.language.implicitConversions
/** Represents a process that is running or has finished running.
 *  It may be a compound process with several underlying native processes (such as `a #&& b`).
 *
 *  This trait is often not used directly, though its companion object contains
 *  factories for [[scala.sys.process.ProcessBuilder]], the main component of this
 *  package.
 *
 *  It is used directly when calling the method `run` on a `ProcessBuilder`,
 *  which makes the process run in the background. The methods provided on `Process`
 *  make it possible for one to block until the process exits and get the exit value,
 *  or destroy the process altogether.
 *
 *  @see [[scala.sys.process.ProcessBuilder]]
 */
trait Process {
  /** Returns this process alive status (i.e. whether it has not yet terminated). */
  def isAlive(): Boolean
  /** Blocks until this process exits and returns the exit code.*/
  def exitValue(): Int
  /** Destroys this process. */
  def destroy(): Unit
}
/** Methods for constructing simple commands that can then be combined.
 *  Mixes the concrete implementations ([[ProcessImpl]]) with the public
 *  factory methods ([[ProcessCreation]]).
 */
object Process extends ProcessImpl with ProcessCreation { }
/** Factories for creating [[scala.sys.process.ProcessBuilder]]. They can be
 *  found on and used through [[scala.sys.process.Process]]'s companion object.
 */
trait ProcessCreation {
  /** Creates a [[scala.sys.process.ProcessBuilder]] from a `String`, including the
   *  parameters.
   *
   *  @example {{{ apply("cat file.txt") }}}
   */
  def apply(command: String): ProcessBuilder = apply(command, None)

  /** Creates a [[scala.sys.process.ProcessBuilder]] from a sequence of `String`,
   *  where the head is the command and each element of the tail is a parameter.
   *
   *  @example {{{ apply("cat" :: files) }}}
   */
  def apply(command: Seq[String]): ProcessBuilder = apply(command, None)

  /** Creates a [[scala.sys.process.ProcessBuilder]] from a command represented by a `String`,
   *  and a sequence of `String` representing the arguments.
   *
   *  @example {{{ apply("cat", files) }}}
   */
  def apply(command: String, arguments: Seq[String]): ProcessBuilder = apply(command +: arguments, None)

  /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra
   *  environment variables.
   *
   *  @example {{{ apply("java", new java.io.File("/opt/app"), "CLASSPATH" -> "library.jar") }}}
   */
  def apply(command: String, cwd: File, extraEnv: (String, String)*): ProcessBuilder =
    apply(command, Some(cwd), extraEnv: _*)

  /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra
   *  environment variables.
   *
   *  @example {{{ apply("java" :: javaArgs, new java.io.File("/opt/app"), "CLASSPATH" -> "library.jar") }}}
   */
  def apply(command: Seq[String], cwd: File, extraEnv: (String, String)*): ProcessBuilder =
    apply(command, Some(cwd), extraEnv: _*)

  /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to
   *  `File` and extra environment variables.
   *
   *  Note that the command string is split naively on whitespace, so quoted
   *  arguments containing spaces are not preserved; use the `Seq[String]`
   *  overload when arguments may contain spaces.
   *
   *  @example {{{ apply("java", params.get("cwd"), "CLASSPATH" -> "library.jar") }}}
   */
  def apply(command: String, cwd: Option[File], extraEnv: (String, String)*): ProcessBuilder = {
    apply(command.split("""\\s+"""), cwd, extraEnv : _*)
    // not smart to use this on windows, because CommandParser uses \\ to escape ".
    /*CommandParser.parse(command) match {
      case Left(errorMsg) => error(errorMsg)
      case Right((cmd, args)) => apply(cmd :: args, cwd, extraEnv : _*)
    }*/
  }

  /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to
   *  `File` and extra environment variables.
   *
   *  @example {{{ apply("java" :: javaArgs, params.get("cwd"), "CLASSPATH" -> "library.jar") }}}
   */
  def apply(command: Seq[String], cwd: Option[File], extraEnv: (String, String)*): ProcessBuilder = {
    val jpb = new JProcessBuilder(command.toArray: _*)
    cwd foreach (jpb directory _)
    extraEnv foreach { case (k, v) => jpb.environment.put(k, v) }
    apply(jpb)
  }

  /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.lang.ProcessBuilder`.
   *
   *  @example {{{
   *  apply((new java.lang.ProcessBuilder("ls", "-l")) directory new java.io.File(System.getProperty("user.home")))
   *  }}}
   */
  def apply(builder: JProcessBuilder): ProcessBuilder = new Simple(builder)

  /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.io.File`. This
   *  `ProcessBuilder` can then be used as a `Source` or a `Sink`, so one can
   *  pipe things from and to it.
   */
  def apply(file: File): FileBuilder = new FileImpl(file)

  /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.net.URL`. This
   *  `ProcessBuilder` can then be used as a `Source`, so that one can pipe things
   *  from it.
   */
  def apply(url: URL): URLBuilder = new URLImpl(url)

  /** Creates a [[scala.sys.process.ProcessBuilder]] from a `Boolean`. This can be
   *  to force an exit value.
   */
  def apply(value: Boolean): ProcessBuilder = apply(value.toString, if (value) 0 else 1)

  /** Creates a [[scala.sys.process.ProcessBuilder]] from a `String` name and a
   *  `Boolean`. This can be used to force an exit value, with the name being
   *  used for `toString`.
   */
  def apply(name: String, exitValue: => Int): ProcessBuilder = new Dummy(name, exitValue)

  /** Creates a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence of
   *  something else for which there's an implicit conversion to `Source`.
   */
  def applySeq[T](builders: Seq[T])(implicit convert: T => Source): Seq[Source] = builders.map(convert)

  /** Creates a [[scala.sys.process.ProcessBuilder]] from one or more
   *  [[scala.sys.process.ProcessBuilder.Source]], which can then be
   *  piped to something else.
   *
   *  This will concatenate the output of all sources. For example:
   *
   *  {{{
   *  import scala.sys.process._
   *  import scala.sys.process.Process.cat
   *  import java.net.URL
   *  import java.io.File
   *
   *  val spde = new URL("http://technically.us/spde.html")
   *  val dispatch = new URL("http://dispatch.databinder.net/Dispatch.html")
   *  val build = new File("project/build.properties")
   *  cat(spde, dispatch, build) #| "grep -i scala" !
   *  }}}
   */
  def cat(file: Source, files: Source*): ProcessBuilder = cat(file +: files)

  /** Creates a [[scala.sys.process.ProcessBuilder]] from a non-empty sequence
   *  of [[scala.sys.process.ProcessBuilder.Source]], which can then be
   *  piped to something else.
   *
   *  This will concatenate the output of all sources.
   *  Throws `IllegalArgumentException` if `files` is empty.
   */
  def cat(files: Seq[Source]): ProcessBuilder = {
    require(files.nonEmpty)
    files map (_.cat) reduceLeft (_ #&& _)
  }
}
/** Provide implicit conversions for the factories offered by [[scala.sys.process.Process]]'s
 *  companion object. These implicits can then be used to decrease the noise in a pipeline
 *  of commands, making it look more shell-like. They are available through the package object
 *  [[scala.sys.process]].
 */
trait ProcessImplicits {
  import Process._

  /** Return a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence
   *  of values for which an implicit conversion to `Source` is available.
   */
  implicit def buildersToProcess[T](builders: Seq[T])(implicit convert: T => Source): Seq[Source] = applySeq(builders)

  /** Implicitly convert a `java.lang.ProcessBuilder` into a Scala one. */
  implicit def builderToProcess(builder: JProcessBuilder): ProcessBuilder = apply(builder)

  /** Implicitly convert a `java.io.File` into a
   *  [[scala.sys.process.ProcessBuilder.FileBuilder]], which can be used as
   *  either input or output of a process. For example:
   *  {{{
   *  import scala.sys.process._
   *  "ls" #> new java.io.File("dirContents.txt") !
   *  }}}
   */
  implicit def fileToProcess(file: File): FileBuilder = apply(file)

  /** Implicitly convert a `java.net.URL` into a
   *  [[scala.sys.process.ProcessBuilder.URLBuilder]] , which can be used as
   *  input to a process. For example:
   *  {{{
   *  import scala.sys.process._
   *  Seq("xmllint", "--html", "-") #< new java.net.URL("http://www.scala-lang.org") #> new java.io.File("fixed.html") !
   *  }}}
   */
  implicit def urlToProcess(url: URL): URLBuilder = apply(url)

  /** Implicitly convert a `String` into a [[scala.sys.process.ProcessBuilder]]. */
  implicit def stringToProcess(command: String): ProcessBuilder = apply(command)

  /** Implicitly convert a sequence of `String` into a
   *  [[scala.sys.process.ProcessBuilder]]. The first argument will be taken to
   *  be the command to be executed, and the remaining will be its arguments.
   *  When using this, arguments may contain spaces.
   */
  implicit def stringSeqToProcess(command: Seq[String]): ProcessBuilder = apply(command)
}
// ---- end of file: src/library/scala/sys/process/Process.scala (repo felixmulder/scala, BSD-3-Clause) ----
package ucesoft.cbm.misc
import java.awt.{Container, GridBagConstraints, GridBagLayout, Insets}
import java.io.File
import java.util.Properties
import javax.swing._
import javax.swing.event.{DocumentEvent, DocumentListener}
import ucesoft.cbm.c128.FunctionROMType
/** Swing panel for choosing alternative system ROM images. Edits are held in
  * the private ROM records and written back to `prop` (with a registry
  * reload) only when applyUpdates is called. */
class ROMPanel(prop:Properties,c64Only:Boolean,scpu:Boolean = false) extends JPanel {
  import ucesoft.cbm.cpu.ROM._

  // Bit flags classifying each ROM entry; combined with | and tested with &.
  private val C64 = 1
  private val C128 = 2
  private val DRIVE = 4
  private val C128_I_F_ROM = 8
  private val C128_E_F_ROM = 16
  private val SCPU = 32

  /** One configurable ROM: UI label, property key, category flags, the
    * currently selected file path, and (for the internal function ROM only)
    * a sub-type item such as NORMAL/MEGABIT. */
  private case class ROM(label:String,propName:String,romType:Int,var path:Option[String] = None,var item:Option[String] = None) {
    // Serializes this ROM's selection into `prop` ("" = default/none;
    // "path" or "path,item" otherwise) and asks the ROM registry to reload.
    def apply(prop:Properties): Unit = {
      val value = if (!path.isDefined || path.get == "") "" else {
        if (item.isDefined) path.get + "," + item.get else path.get
      }
      prop.setProperty(propName,value)
      ucesoft.cbm.cpu.ROM.reload(propName)
    }
  }

  // All known ROM slots; filtered below according to the emulation mode.
  private val romList = List(ROM("SCPU ROM",SCPU64_ROM_PROP,SCPU),
    ROM("C64 Kernal",C64_KERNAL_ROM_PROP,C64),
    ROM("C64 Basic",C64_BASIC_ROM_PROP,C64),
    ROM("C64 Char",C64_CHAR_ROM_PROP,C64|SCPU),
    ROM("C128 Kernal",C128_KERNAL_ROM_PROP,C128),
    ROM("C128 Basic",C128_BASIC_ROM_PROP,C128),
    ROM("C128 Char",C128_CHAR_ROM_PROP,C128),
    ROM("C128 Internal Function",C128_INTERNAL_ROM_PROP,C128_I_F_ROM),
    ROM("C128 External Function",C128_EXTERNAL_ROM_PROP,C128_E_F_ROM),
    ROM("Drive 1541 Kernal",D1541_DOS_ROM_PROP,DRIVE),
    ROM("Drive 1571 Kernal",D1571_DOS_ROM_PROP,DRIVE),
    ROM("Drive 1581 Kernal",D1581_DOS_ROM_PROP,DRIVE))

  // Property-name -> ROM index restricted to the slots relevant for the
  // current mode (SCPU, C64-only, or everything).
  private val romMap : Map[String,ROM] = romList filter { r =>
    if (scpu) (r.romType & SCPU) == SCPU || r.romType == DRIVE
    else if (c64Only) r.romType == C64 || r.romType == DRIVE
    else true
  } map { r => r.propName -> r } toMap

  // Remembers the last directory browsed in the file chooser.
  private var lastDir = "./"

  /** Writes every ROM selection back to `prop` and reloads each entry. */
  def applyUpdates : Unit = {
    for(rom <- romList) {
      rom.apply(prop)
    }
  }

  // Builds one titled sub-panel holding a row (label, default-checkbox,
  // path field, browse button) per ROM matching `romType`.
  private def makePanel(name:String,romType:Int) : JPanel = {
    val roms = romList filter { r => (r.romType & romType) > 0 }
    val p = new JPanel(new GridBagLayout)
    p.setBorder(BorderFactory.createTitledBorder(name))
    for((rom,y) <- roms.zipWithIndex) {
      val tf = new JTextField(30)
      val cb = new JCheckBox(if (rom.romType == C128_I_F_ROM || rom.romType == C128_E_F_ROM) "none" else "default")
      val button = new JButton("Browse ...")
      // Checked box means "no custom ROM": the path widgets are disabled.
      cb.setSelected(!rom.path.isDefined)
      tf.setEnabled(rom.path.isDefined)
      tf.setText(rom.path.getOrElse(""))
      // Keep the ROM record in sync with whatever is typed in the field.
      tf.getDocument.addDocumentListener(new DocumentListener {
        override def removeUpdate(e: DocumentEvent): Unit = rom.path = Some(tf.getText)
        override def insertUpdate(e: DocumentEvent): Unit = rom.path = Some(tf.getText)
        override def changedUpdate(e: DocumentEvent): Unit = rom.path = Some(tf.getText)
      })
      button.setEnabled(rom.path.isDefined)
      cb.addActionListener(_ => {
        tf.setEnabled(!cb.isSelected)
        button.setEnabled(!cb.isSelected)
        tf.setText("")
      })
      button.addActionListener(_ => {
        val fc = new JFileChooser(if (tf.getText.isEmpty) lastDir else new File(tf.getText).getParent)
        fc.setDialogTitle("Choose ROM path")
        fc.showOpenDialog(p) match {
          case JFileChooser.APPROVE_OPTION =>
            tf.setText(fc.getSelectedFile.toString)
            tf.setToolTipText(fc.getSelectedFile.toString)
            lastDir = fc.getSelectedFile.toString
          case _ =>
        }
      })
      add(p,0,y,new JLabel(rom.label))
      if (rom.romType == C128_I_F_ROM) {
        // The internal function ROM additionally gets a NORMAL/MEGABIT combo.
        val p1 = new JPanel
        p1.setLayout(new BoxLayout(p1,BoxLayout.Y_AXIS))
        p1.add(cb)
        val combo = new JComboBox(Array(FunctionROMType.NORMAL.toString,FunctionROMType.MEGABIT.toString))
        combo.addActionListener(_ => rom.item = Some(FunctionROMType.withName(combo.getSelectedItem.toString).toString) )
        rom.item match {
          case Some(rt) if rt == FunctionROMType.NORMAL.toString =>
            combo.setSelectedIndex(0)
          case Some(rt) if rt == FunctionROMType.MEGABIT.toString =>
            combo.setSelectedIndex(1)
          case None =>
            combo.setEnabled(false)
        }
        p1.add(combo)
        cb.addActionListener(_ => {
          combo.setEnabled(!cb.isSelected)
          combo.setSelectedIndex(0)
        })
        add(p,1,y,p1)
      }
      else add(p,1,y,cb)
      add(p,2,y,tf)
      add(p,3,y,button)
    }
    p
  }

  // GridBag helper: places `comp` at cell (x, y) with uniform insets.
  private def add(p:Container,x:Int,y:Int,comp:JComponent): Unit = {
    val c = new GridBagConstraints
    c.insets = new Insets(5,5,5,5)
    c.gridx = x
    c.gridy = y
    c.fill = GridBagConstraints.NONE
    c.gridwidth = 1
    c.gridheight = 1
    p.add(comp,c)
  }

  import scala.jdk.CollectionConverters._
  // Initialization: pre-populate each relevant ROM record from `prop`.
  // Internal function ROM values may carry a ",type" suffix.
  for(kv <- prop.asScala) {
    romMap get kv._1 match {
      case Some(rom) if !kv._2.isEmpty =>
        if (rom.romType == C128_I_F_ROM) {
          kv._2.split(",") match {
            case Array(p,t) =>
              rom.path = Some(p)
              rom.item = Some(t)
            case _ =>
              rom.path = Some(kv._2)
              rom.item = None
          }
        }
        else rom.path = Some(kv._2)
      case _ =>
    }
  }

  // Lay out the sub-panels appropriate for the current emulation mode.
  setLayout(new BoxLayout(this,BoxLayout.Y_AXIS))
  if (!scpu) add(makePanel("Commodore 64",C64))
  else add(makePanel("SCPU",SCPU))
  if (!c64Only) add(makePanel("Commodore 128",C128 | C128_I_F_ROM | C128_E_F_ROM))
  add(makePanel("Drives",DRIVE))
}
object ROMPanel {
  /** Shows the ROM chooser as a modal dialog. Changes are applied (and
    * `applyCallBack` invoked) only when the user presses Apply. */
  def showROMPanel(parent:JFrame,prop:Properties,c64Only:Boolean,scpu:Boolean = false,applyCallBack : () => Unit) = {
    val f = new JDialog(parent,"System ROMs",true)
    f.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE)
    f.setLocationRelativeTo(parent)
    val contentPane = f.getContentPane
    val romPanel = new ROMPanel(prop,c64Only,scpu)
    contentPane.add("Center",romPanel)
    val buttonPanel = new JPanel
    val okB = new JButton("Apply")
    okB.setToolTipText("Apply changes")
    val cancelB = new JButton("Cancel")
    // Cancel discards: the panel never wrote anything back to `prop`.
    cancelB.addActionListener(_ => f.dispose )
    okB.addActionListener(_ => {
      romPanel.applyUpdates
      f.dispose
      applyCallBack()
    })
    buttonPanel.add(okB)
    buttonPanel.add(cancelB)
    contentPane.add("South",buttonPanel)
    f.pack()
    f.setVisible(true)
  }
}
// ---- end of file: Kernal64/src/ucesoft/cbm/misc/ROMPanel.scala (repo abbruzze/kernal64, MIT) ----
/*
* Hard-coded IR for java.lang.Object.
*/
import org.scalajs.core.ir
import ir._
import ir.Definitions._
import ir.Infos._
import ir.Trees._
import ir.Types._
import ir.Position.NoPosition
/** Hard-coded IR for java.lang.Object.
 *  We cannot so much as begin to fake a compilation of java.lang.Object,
 *  because Object is hijacked so much by scalac itself that it does not like
 *  at all to try to compile that class. So we have to bypass entirely the
 *  compiler to define java.lang.Object.
 */
object JavaLangObject {

  /** The (ClassInfo, hashed ClassDef) pair for java.lang.Object, built once
    * at initialization from the hand-written IR below. */
  val InfoAndTree = {
    implicit val DummyPos = NoPosition

    // ClassType(Object) is normally invalid, but not in this class def
    val ThisType = ClassType(ObjectClass)

    val classDef = ClassDef(
      Ident("O", Some("java.lang.Object")),
      ClassKind.Class,
      None,
      Nil,
      None,
      List(
        /* def this() = () */
        MethodDef(
          static = false,
          Ident("init___", Some("<init>")),
          Nil,
          NoType,
          Skip())(OptimizerHints.empty, None),

        /* def getClass(): java.lang.Class[_] = <getclass>(this) */
        MethodDef(
          static = false,
          Ident("getClass__jl_Class", Some("getClass__jl_Class")),
          Nil,
          ClassType(ClassClass),
          {
            GetClass(This()(ThisType))
          })(OptimizerHints.empty.withInline(true), None),

        /* def hashCode(): Int = System.identityHashCode(this) */
        MethodDef(
          static = false,
          Ident("hashCode__I", Some("hashCode__I")),
          Nil,
          IntType,
          {
            Apply(
              LoadModule(ClassType("jl_System$")),
              Ident("identityHashCode__O__I", Some("identityHashCode")),
              List(This()(ThisType)))(IntType)
          })(OptimizerHints.empty, None),

        /* def equals(that: Object): Boolean = this eq that */
        MethodDef(
          static = false,
          Ident("equals__O__Z", Some("equals__O__Z")),
          List(ParamDef(Ident("that", Some("that")), AnyType,
            mutable = false, rest = false)),
          BooleanType,
          {
            BinaryOp(BinaryOp.===,
              This()(ThisType),
              VarRef(Ident("that", Some("that")))(AnyType))
          })(OptimizerHints.empty.withInline(true), None),

        /* protected def clone(): Object =
         *   if (this.isInstanceOf[Cloneable]) <clone>(this)
         *   else throw new CloneNotSupportedException()
         */
        MethodDef(
          static = false,
          Ident("clone__O", Some("clone__O")),
          Nil,
          AnyType,
          {
            If(IsInstanceOf(This()(ThisType), ClassType("jl_Cloneable")), {
              Apply(LoadModule(ClassType("sjsr_package$")),
                Ident("cloneObject__sjs_js_Object__sjs_js_Object", Some("cloneObject")),
                List(This()(ThisType)))(AnyType)
            }, {
              Throw(New(ClassType("jl_CloneNotSupportedException"),
                Ident("init___", Some("<init>")), Nil))
            })(AnyType)
          })(OptimizerHints.empty.withInline(true), None),

        /* def toString(): String =
         *   getClass().getName() + "@" + Integer.toHexString(hashCode())
         */
        MethodDef(
          static = false,
          Ident("toString__T", Some("toString__T")),
          Nil,
          ClassType(StringClass),
          {
            BinaryOp(BinaryOp.String_+, BinaryOp(BinaryOp.String_+,
              Apply(
                Apply(This()(ThisType),
                  Ident("getClass__jl_Class", Some("getClass__jl_Class")), Nil)(
                  ClassType(ClassClass)),
                Ident("getName__T"), Nil)(ClassType(StringClass)),
              // +
              StringLiteral("@")),
              // +
              Apply(
                LoadModule(ClassType("jl_Integer$")),
                Ident("toHexString__I__T"),
                List(Apply(This()(ThisType), Ident("hashCode__I"), Nil)(IntType)))(
                ClassType(StringClass)))
          })(OptimizerHints.empty, None),

        /* Since wait() is not supported in any way, a correct implementation
         * of notify() and notifyAll() is to do nothing.
         */

        /* def notify(): Unit = () */
        MethodDef(
          static = false,
          Ident("notify__V", Some("notify__V")),
          Nil,
          NoType,
          Skip())(OptimizerHints.empty, None),

        /* def notifyAll(): Unit = () */
        MethodDef(
          static = false,
          Ident("notifyAll__V", Some("notifyAll__V")),
          Nil,
          NoType,
          Skip())(OptimizerHints.empty, None),

        /* def finalize(): Unit = () */
        MethodDef(
          static = false,
          Ident("finalize__V", Some("finalize__V")),
          Nil,
          NoType,
          Skip())(OptimizerHints.empty, None),

        // Exports

        /* JSExport for toString(). */
        MethodDef(
          static = false,
          StringLiteral("toString"),
          Nil,
          AnyType,
          {
            Apply(This()(ThisType),
              Ident("toString__T", Some("toString__T")),
              Nil)(ClassType(StringClass))
          })(OptimizerHints.empty, None)
      ))(OptimizerHints.empty)

    // Hash the tree so downstream consumers can cache/compare it, then
    // derive the ClassInfo from the hashed definition.
    val hashedClassedDef = Hashers.hashClassDef(classDef)
    val info = generateClassInfo(hashedClassedDef)

    (info, hashedClassedDef)
  }
}
| japgolly/scala-js | project/JavaLangObject.scala | Scala | bsd-3-clause | 5,448 |
package com.scala.bala.ftp
import java.io.DataInputStream
import java.io.File
import java.io.FileWriter
import java.io.IOException
import java.io.PrintStream
import java.net.Socket
import scala.util.control.Exception.catching
import com.scala.bala.ftp.exception.BException
import com.scala.bala.ftp.util.FTPConfigurationReader
import com.scala.bala.ftp.util.FTPUtil
/** A minimal FTP client driving the control/data-socket protocol directly
 *  (RFC 959 style command/reply exchanges over a control connection, with
 *  passive-mode data connections for transfers).
 *
 *  Usage: `initgetFile` performs the whole login -> CWD -> PASV -> RETR ->
 *  write-to-disk -> logout sequence in one call.
 */
class FTPClient extends BException{
  private var ftpIn:DataInputStream = null;  // control-connection input (server replies)
  private var ftpOut:PrintStream = null;     // control-connection output (commands)
  private var transferMode = TransferType.BINARY
  private var code:Int = -1;                 // last reply code read
  private var serverConnection = false;      // control connection established?
  private var loggedin = false;
  private var isPasv = false;                // NOTE(review): never written after init — presumably meant to track PASV state
  private var socket:Socket = null;          // passive-mode *data* connection
  private var remoteHost:String = null;      // data-connection host parsed from the PASV reply
  private var port:Int = 0;                  // data-connection port parsed from the PASV reply

  /** Issues PASV and, on a 227 reply, opens the data socket the server
   *  advertised. A 425 reply is reported via `getConnectionException`.
   */
  def createDataSocketPASV = {
    doCmd("PASV\\n")
    val pasv = ftpIn.readLine();
    val code = FTPUtil.getCode(pasv)
    if(code == 425 ) getConnectionException(pasv)
    if(code == 227 ) {
      val ipAddressPort = FTPUtil.getIPAddressFromPASV(pasv)
      remoteHost = ipAddressPort._1
      port = ipAddressPort._2
      socket = new PassiveDataSocket(remoteHost, port ).socket
    }
  }

  /** Logs in, changes into the configured log folder, retrieves
   *  `remotefile` over a passive data connection, writes it under
   *  `localPath/<server-address>/`, then logs out.
   *
   *  @param credential (serverIp, userName, password) — positional
   *  @param localPath  local directory under which a per-server folder is created
   *  @param remotefile file name to RETR from the remote log folder
   */
  def initgetFile(credential:Array[String], localPath:String, remotefile: String) = {
    login(credential(0), credential(1), credential(2))
    val remoteLocation = getCurrentLocation+"/"
    val serverLogLocation=remoteLocation + FTPConfigurationReader.folderLocation
    changeDirectory( serverLogLocation)
    setTransferMode(TransferType.BINARY)
    createDataSocketPASV
    doCmd("RETR "+serverLogLocation+"/"+remotefile+"\\r\\n")
    val joop = scala.io.Source.fromInputStream(socket.getInputStream())
    val serverFolder = new File(localPath+socket.getInetAddress())
    serverFolder.mkdir()
    val writer = new FileWriter(serverFolder.getAbsolutePath()+"\\\\"+remotefile)
    try{
      joop.foreach(writer.write(_))
    }finally{
      // fix: close the Source (and with it the data-socket stream) — it was
      // previously leaked on every transfer
      joop.close()
      writer.flush
      writer.close
      println("Done writing ..." + remotefile)
    }
    logout
  }

  /** Opens the control connection and wires up the reply/command streams.
   *  Expects the server greeting (reply code 220).
   *  Note: the local `socket` is the *control* socket and intentionally
   *  shadows the data-socket field.
   */
  private def connectToServer(serverIp:String) = {
    serverConnection = false;
    val connection = new OpenConnection;
    val socket = connection.openConnectionWithServer(serverIp)
    ftpIn = new DataInputStream(socket.getInputStream());
    ftpOut = new PrintStream (socket.getOutputStream());
    var code:Int = readLine
    if(code != 220 ) getConnectionException
    serverConnection = true
  }

  /** USER/PASS login (expects 331 then 230), followed by a PWD.
   *  @throws nothing directly; failures are routed through `getLoginExcepion`
   */
  def login(serverIp:String, userName:String, password:String )= {
    // security fix: never echo the password to the log
    println("Connecting to.... "+ serverIp + " as user " +userName );
    connectToServer(serverIp)
    if(serverConnection){
      doCmd("USER "+userName+"\\n")
      code = readLine
      if(code != 331 ) getLoginExcepion(code)
      doCmd("PASS "+password+"\\n")
      code = readLine
      if(code != 230 ) getLoginExcepion(code)
      loggedin = true;
      doCmd("PWD\\n")
      readLine
    }else{
      println("Server connection was/is not successful")
    }
  }

  /** Issues PWD and parses the current remote path from the reply. */
  def getCurrentLocation = {
    doCmd("PWD\\n")
    FTPUtil.currentPath(ftpIn.readLine())
  }

  /** CWD to `logLocation`; accepts reply codes 250 and 200. */
  def changeDirectory(logLocation:String) = {
    doCmd("CWD "+logLocation+"\\n")
    code = readLine
    if(code != 250 && code != 200) getConnectionException("Couldn't change dir to "+ logLocation)
  }

  /** Sends QUIT and consumes the goodbye reply. */
  def logout = {
    doCmd("QUIT\\n")
    readLine
    loggedin = false;
  }

  /** Closes the control-connection streams. */
  def disConnectServer = {
    ftpIn.close()
    ftpOut.close()
    serverConnection = false
  }

  /** Sends the TYPE command matching the requested transfer mode. */
  def setTransferMode(mode:TransferType.Value) = mode match {
    case TransferType.BINARY => changeMode("TYPE I\\n");
    case TransferType.ASCII => changeMode("TYPE A\\n");
    case TransferType.EBCDIC => changeMode("TYPE E\\n");
    case TransferType.LOCAL => changeMode("TYPE L\\n");
  }

  /** Sends a TYPE command and consumes the reply; returns the reply code. */
  private def changeMode(mode:String) = {
    doCmd(mode);
    readLine;
  }

  /** Reads one reply line from the control connection and extracts its code. */
  private def readLine:Int = {
    FTPUtil.getCode( ftpIn.readLine() )
  }

  /** Logs and sends a raw command on the control connection. */
  def doCmd(cmd:String) = {
    // security fix: mask the PASS argument so credentials never reach the log
    if (cmd.startsWith("PASS ")) println("PASS ****")
    else println( cmd)
    ftpOut.print(cmd)
  }

  /** Best-effort close of both control streams, swallowing IOExceptions. */
  def hardOut = {
    catching(classOf[IOException]).apply({
      ftpOut.close();
      ftpIn.close();
    })
  }

  /** Re-issues the transfer mode and verifies a 200 reply.
   *  NOTE(review): `setTransferMode` already consumes the TYPE reply via
   *  `changeMode`, so the `readLine` below reads the *next* reply line —
   *  confirm against real server traffic before relying on this method.
   */
  def getFile(){
    setTransferMode(transferMode)
    if(readLine != 200) getConnectionException("Could not change xfer")
  }
} | bbalajisg/scala-projects | ftp-client/src/main/scala/com/scala/bala/ftp/FTPClient.scala | Scala | gpl-2.0 | 4,510 |
package java.lang
import java.util.Arrays
/** Boxed wrapper for `scala.Char`, emulating `java.lang.Character`.
 *  All behavior delegates to the companion object so the instance stays a
 *  thin, inlineable shell around `charValue`.
 */
class Character(val charValue: scala.Char)
    extends AnyRef
    with java.io.Serializable
    with Comparable[Character] {

  /** Value equality: true iff `that` is a Character boxing the same Char. */
  @inline override def equals(that: Any): scala.Boolean =
    that match {
      case that: Character =>
        charValue == that.charValue
      case _ =>
        false
    }

  /** Numeric comparison of the underlying Char values. */
  @inline override def compareTo(that: Character): Int =
    Character.compare(charValue, that.charValue)

  /** Single-character string representation. */
  @inline override def toString(): String =
    Character.toString(charValue)

  /** Hash is the Char's integer value (delegated to the companion). */
  @inline override def hashCode(): Int =
    Character.hashCode(charValue)
}
object Character {
final val TYPE = classOf[scala.Char]
final val MIN_VALUE = '\u0000'
final val MAX_VALUE = '\uffff'
final val SIZE = 16
final val BYTES = 2
/* These are supposed to be final vals of type Byte, but that's not possible.
* So we implement them as def's, which are binary compatible with final vals.
*/
@inline def UNASSIGNED: scala.Byte = 0
@inline def UPPERCASE_LETTER: scala.Byte = 1
@inline def LOWERCASE_LETTER: scala.Byte = 2
@inline def TITLECASE_LETTER: scala.Byte = 3
@inline def MODIFIER_LETTER: scala.Byte = 4
@inline def OTHER_LETTER: scala.Byte = 5
@inline def NON_SPACING_MARK: scala.Byte = 6
@inline def ENCLOSING_MARK: scala.Byte = 7
@inline def COMBINING_SPACING_MARK: scala.Byte = 8
@inline def DECIMAL_DIGIT_NUMBER: scala.Byte = 9
@inline def LETTER_NUMBER: scala.Byte = 10
@inline def OTHER_NUMBER: scala.Byte = 11
@inline def SPACE_SEPARATOR: scala.Byte = 12
@inline def LINE_SEPARATOR: scala.Byte = 13
@inline def PARAGRAPH_SEPARATOR: scala.Byte = 14
@inline def CONTROL: scala.Byte = 15
@inline def FORMAT: scala.Byte = 16
@inline def PRIVATE_USE: scala.Byte = 18
@inline def SURROGATE: scala.Byte = 19
@inline def DASH_PUNCTUATION: scala.Byte = 20
@inline def START_PUNCTUATION: scala.Byte = 21
@inline def END_PUNCTUATION: scala.Byte = 22
@inline def CONNECTOR_PUNCTUATION: scala.Byte = 23
@inline def OTHER_PUNCTUATION: scala.Byte = 24
@inline def MATH_SYMBOL: scala.Byte = 25
@inline def CURRENCY_SYMBOL: scala.Byte = 26
@inline def MODIFIER_SYMBOL: scala.Byte = 27
@inline def OTHER_SYMBOL: scala.Byte = 28
@inline def INITIAL_QUOTE_PUNCTUATION: scala.Byte = 29
@inline def FINAL_QUOTE_PUNCTUATION: scala.Byte = 30
final val MIN_RADIX = 2
final val MAX_RADIX = 36
final val MIN_HIGH_SURROGATE = '\uD800'
final val MAX_HIGH_SURROGATE = '\uDBFF'
final val MIN_LOW_SURROGATE = '\uDC00'
final val MAX_LOW_SURROGATE = '\uDFFF'
final val MIN_SURROGATE = MIN_HIGH_SURROGATE
final val MAX_SURROGATE = MAX_LOW_SURROGATE
final val MIN_CODE_POINT = 0
final val MAX_CODE_POINT = 0x10ffff
final val MIN_SUPPLEMENTARY_CODE_POINT = 0x10000
/** Number of Chars needed to encode `codePoint` in UTF-16: 2 for
 *  supplementary code points (which need a surrogate pair), otherwise 1. */
@inline def charCount(codePoint: Int): Int =
  if (codePoint < MIN_SUPPLEMENTARY_CODE_POINT) 1 else 2
/** Returns the code point at `_index` in `seq`, reading at most up to
 *  `limit` (exclusive). A high surrogate followed (within `limit`) by a
 *  low surrogate combines into one supplementary code point; an unpaired
 *  surrogate is returned as its own char value.
 *  @throws ArrayIndexOutOfBoundsException on an invalid index/limit combination
 */
def codePointAt(seq: Array[scala.Char],
    _index: scala.Int,
    limit: scala.Int): scala.Int = {
  var index = _index
  if (index < 0 || index >= limit || limit < 0 || limit > seq.length) {
    throw new ArrayIndexOutOfBoundsException()
  }
  val high = seq(index)
  index += 1
  if (index >= limit) {
    // `high` is the last readable char: return it even if it is a lone surrogate.
    high
  } else {
    val low = seq(index)
    if (isSurrogatePair(high, low))
      toCodePoint(high, low)
    else
      high
  }
}
/** Returns the code point ending just before `_index` in `seq`: if the char
 *  at `_index - 1` is a low surrogate preceded by a high surrogate, the
 *  pair combines; otherwise that single char value is returned.
 *  @throws ArrayIndexOutOfBoundsException if `_index` is not in 1..seq.length
 */
def codePointBefore(seq: Array[scala.Char], _index: scala.Int): scala.Int = {
  var index = _index
  val len = seq.length
  if (index < 1 || index > len) {
    throw new ArrayIndexOutOfBoundsException(index)
  }
  index -= 1
  // NOTE(review): `charAt` on an Array[Char] relies on Predef's implicit
  // ArrayCharSequence conversion — equivalent to seq(index); confirm intended.
  val low = seq.charAt(index)
  index -= 1
  if (index < 0) {
    // No preceding char to pair with: return the (possibly lone) low surrogate.
    low
  } else {
    val high = seq(index)
    if (isSurrogatePair(high, low))
      toCodePoint(high, low)
    else
      low
  }
}
/** Counts the code points in `seq(offset until offset + count)`. Each
 *  well-formed surrogate pair counts once; each unpaired surrogate counts
 *  as one code point.
 *  @throws IndexOutOfBoundsException for a negative or out-of-range window
 */
def codePointCount(seq: Array[scala.Char],
    offset: scala.Int,
    count: scala.Int): scala.Int = {
  val len = seq.length
  val endIndex = offset + count
  if (offset < 0 || count < 0 || endIndex > len) {
    throw new IndexOutOfBoundsException()
  }
  var result = 0
  var i = offset
  // fix: loop bound was `i <= endIndex`, which read seq(endIndex) out of
  // bounds and over-counted every range by one (e.g. "ab" counted as 3).
  while (i < endIndex) {
    var c = seq(i)
    if (isHighSurrogate(c)) {
      i += 1
      if (i < endIndex) {
        c = seq(i)
        if (!isLowSurrogate(c)) {
          // High surrogate not followed by a low one: the following char is
          // its own code point and must be counted here, since `i` already
          // advanced past it.
          result += 1
        }
      }
    }
    result += 1
    i += 1
  }
  result
}
/** Returns the index within `seq(start until start + count)` that is
 *  `codePointOffset` code points away from `index`, stepping over surrogate
 *  pairs as single code points. Negative offsets step backwards.
 *  @throws IndexOutOfBoundsException if the window is invalid or the walk
 *          leaves the window
 */
def offsetByCodePoints(seq: Array[scala.Char],
    start: scala.Int,
    count: scala.Int,
    index: scala.Int,
    codePointOffset: scala.Int): scala.Int = {
  val end = start + count
  if (start < 0 || count < 0 || end > seq.length || index < start || index > end) {
    throw new IndexOutOfBoundsException()
  }
  if (codePointOffset == 0) {
    index
  } else if (codePointOffset > 0) {
    // Walk forward, consuming one extra char when a high surrogate is
    // followed by a low surrogate inside the window.
    var codePoints = codePointOffset
    var i = index
    while (codePoints > 0) {
      codePoints -= 1
      if (i >= end) {
        throw new IndexOutOfBoundsException()
      }
      if (isHighSurrogate(seq(i))) {
        val next = i + 1
        // fix: was `next <= end`, which read seq(end) — one past the last
        // valid index of the window (and possibly past the array).
        if (next < end && isLowSurrogate(seq(next))) {
          i += 1
        }
      }
      i += 1
    }
    i
  } else {
    // Walk backward, pairing a low surrogate with the preceding high one.
    var codePoints = -codePointOffset
    var i = index
    while (codePoints > 0) {
      codePoints -= 1
      i -= 1
      if (i < start) {
        throw new IndexOutOfBoundsException()
      }
      if (isLowSurrogate(seq(i))) {
        val prev = i - 1
        if (prev >= start && isHighSurrogate(seq(prev))) {
          i -= 1
        }
      }
    }
    i
  }
}
/** JDK semantics: the hash of a Char is its integer value. */
def hashCode(value: scala.Char): scala.Int = value.toInt

/** Boxes a Char into a Character. */
def valueOf(charValue: scala.Char): Character = new Character(charValue)

/** Unicode general category of `ch`. */
def getType(ch: scala.Char): Int = getType(ch.toInt)

/** Unicode general category of `codePoint`: UNASSIGNED for negative input,
 *  a dense-table lookup below 256, a range-table search above. */
def getType(codePoint: Int): Int =
  if (codePoint < 0) UNASSIGNED.toInt
  else if (codePoint < 256) getTypeLT256(codePoint).toInt
  else getTypeGE256(codePoint).toInt
/** Category lookup for code points 0..255 via the dense table. */
@inline
private[this] def getTypeLT256(codePoint: Int): scala.Byte =
  charTypesFirst256(codePoint)

/** Category lookup for code points >= 256: binary search over the range
 *  boundary table, then index the parallel category table. */
private[this] def getTypeGE256(codePoint: Int): scala.Byte = {
  // the idx is increased by 1 due to the differences in indexing
  // between charTypeIndices and charType
  val idx = Arrays.binarySearch(charTypeIndices, codePoint) + 1
  // in the case where idx is negative (-insertionPoint - 1)
  charTypes(Math.abs(idx))
}
/** Numeric value of `c` as a digit in `radix`, or -1 if `c` is not a valid
 *  digit or `radix` is outside MIN_RADIX..MAX_RADIX. Accepts ASCII digits
 *  and letters plus their fullwidth forms (U+FF21.. and U+FF41..).
 */
def digit(c: scala.Char, radix: Int): Int = {
  if (radix > MAX_RADIX || radix < MIN_RADIX)
    -1
  else if (c >= '0' && c <= '9' && c - '0' < radix)
    c - '0'
  else if (c >= 'A' && c <= 'Z' && c - 'A' < radix - 10)
    c - 'A' + 10
  else if (c >= 'a' && c <= 'z' && c - 'a' < radix - 10)
    c - 'a' + 10
  else if (c >= '\uFF21' && c <= '\uFF3A' &&
      c - '\uFF21' < radix - 10)
    c - '\uFF21' + 10
  else if (c >= '\uFF41' && c <= '\uFF5A' &&
      c - '\uFF41' < radix - 10)
    // fix: was `c - '\uFF21' + 10` (uppercase base copy-pasted), which
    // mapped fullwidth 'a' (U+FF41) to 42 instead of 10.
    c - '\uFF41' + 10
  else -1
}
// ported from https://github.com/gwtproject/gwt/blob/master/user/super/com/google/gwt/emul/java/lang/Character.java
/** Character representing `digit` in `radix`: '0'-'9' then 'a'-'z'.
 *  Returns the NUL char when the radix or digit is out of range. */
def forDigit(digit: Int, radix: Int): Char = {
  val inRange =
    radix >= MIN_RADIX && radix <= MAX_RADIX && digit >= 0 && digit < radix
  if (!inRange) 0
  else if (digit < 10) ('0' + digit).toChar
  else ('a' + (digit - 10)).toChar
}
/** True iff `c` is an ISO control character (C0 or C1 range). */
def isISOControl(c: scala.Char): scala.Boolean = isISOControl(c.toInt)

/** True iff `codePoint` lies in U+0000–U+001F or U+007F–U+009F. */
def isISOControl(codePoint: Int): scala.Boolean =
  (codePoint >= 0x00 && codePoint <= 0x1F) ||
  (codePoint >= 0x7F && codePoint <= 0x9F)
@deprecated("Replaced by isWhitespace(char)", "")
def isSpace(c: scala.Char): scala.Boolean =
  c == '\t' || c == '\n' || c == '\f' || c == '\r' || c == ' '

/** Java whitespace test (not the same as Unicode space): control
 *  whitespace plus separator-category chars that are not no-break spaces. */
def isWhitespace(c: scala.Char): scala.Boolean =
  isWhitespace(c.toInt)

def isWhitespace(codePoint: scala.Int): scala.Boolean = {
  // A separator-category char qualifies unless explicitly excluded below.
  def isSeparator(tpe: Int): scala.Boolean =
    tpe == SPACE_SEPARATOR || tpe == LINE_SEPARATOR || tpe == PARAGRAPH_SEPARATOR
  if (codePoint < 256) {
    // Fast path: control whitespace, the 1C-1F file/group/record/unit
    // separators, and separators other than NO-BREAK SPACE (U+00A0).
    codePoint == '\t' || codePoint == '\n' || codePoint == '\u000B' ||
    codePoint == '\f' || codePoint == '\r' ||
    ('\u001C' <= codePoint && codePoint <= '\u001F') ||
    (codePoint != '\u00A0' && isSeparator(getTypeLT256(codePoint)))
  } else {
    // U+2007 (figure space) and U+202F (narrow no-break space) are
    // separator-category but excluded from Java whitespace.
    (codePoint != '\u2007' && codePoint != '\u202F') &&
    isSeparator(getTypeGE256(codePoint))
  }
}

/** Unicode space test: true iff the general category is one of the three
 *  separator categories (no exclusions, unlike isWhitespace). */
def isSpaceChar(ch: scala.Char): scala.Boolean =
  isSpaceChar(ch.toInt)

def isSpaceChar(codePoint: Int): scala.Boolean =
  isSpaceCharImpl(getType(codePoint))

@inline private[this] def isSpaceCharImpl(tpe: Int): scala.Boolean =
  tpe == SPACE_SEPARATOR || tpe == LINE_SEPARATOR || tpe == PARAGRAPH_SEPARATOR
// --- UTF-16 surrogate pairs handling ---
// See http://en.wikipedia.org/wiki/UTF-16
private final val HighSurrogateMask = 0xfc00 // 111111 00 00000000
private final val HighSurrogateID = 0xd800 // 110110 00 00000000
private final val LowSurrogateMask = 0xfc00 // 111111 00 00000000
private final val LowSurrogateID = 0xdc00 // 110111 00 00000000
private final val SurrogateUsefulPartMask = 0x03ff // 000000 11 11111111

/** True iff `c` is a leading (high) surrogate, U+D800–U+DBFF. */
@inline def isHighSurrogate(c: scala.Char): scala.Boolean =
  (c & HighSurrogateMask) == HighSurrogateID

/** True iff `c` is a trailing (low) surrogate, U+DC00–U+DFFF. */
@inline def isLowSurrogate(c: scala.Char): scala.Boolean =
  (c & LowSurrogateMask) == LowSurrogateID

/** True iff `high` then `low` form a well-ordered surrogate pair. */
@inline
def isSurrogatePair(high: scala.Char, low: scala.Char): scala.Boolean =
  isHighSurrogate(high) && isLowSurrogate(low)

/** Combines a surrogate pair into its supplementary code point: 10 payload
 *  bits from each half, plus the 0x10000 plane offset. */
@inline def toCodePoint(high: scala.Char, low: scala.Char): Int =
  ((high & SurrogateUsefulPartMask) << 10) + (low & SurrogateUsefulPartMask) + 0x10000
// --- End of UTF-16 surrogate pairs handling ---
def isLowerCase(c: scala.Char): scala.Boolean =
  isLowerCase(c.toInt)

/** Lowercase test: general category Ll, plus the additional
 *  "Other_Lowercase" code points listed explicitly below. */
def isLowerCase(c: Int): scala.Boolean = {
  if (c < 256)
    // U+00AA (ª) and U+00BA (º) are lowercase despite their Lo category.
    c == '\u00AA' || c == '\u00BA' || getTypeLT256(c) == LOWERCASE_LETTER
  else
    isLowerCaseGE256(c)
}

/** Hard-coded Other_Lowercase ranges (modifier letters, small roman
 *  numerals, circled letters, ...) in addition to category Ll. */
private[this] def isLowerCaseGE256(c: Int): scala.Boolean = {
  ('\u02B0' <= c && c <= '\u02B8') || ('\u02C0' <= c && c <= '\u02C1') ||
  ('\u02E0' <= c && c <= '\u02E4') || c == '\u0345' || c == '\u037A' ||
  ('\u1D2C' <= c && c <= '\u1D6A') || c == '\u1D78' ||
  ('\u1D9B' <= c && c <= '\u1DBF') || c == '\u2071' || c == '\u207F' ||
  ('\u2090' <= c && c <= '\u209C') || ('\u2170' <= c && c <= '\u217F') ||
  ('\u24D0' <= c && c <= '\u24E9') || ('\u2C7C' <= c && c <= '\u2C7D') ||
  c == '\uA770' || ('\uA7F8' <= c && c <= '\uA7F9') ||
  getTypeGE256(c) == LOWERCASE_LETTER
}
def isUpperCase(c: scala.Char): scala.Boolean =
  isUpperCase(c.toInt)

/** Uppercase test: category Lu plus the Other_Uppercase ranges (roman
 *  numerals U+2160–216F and circled letters U+24B6–24CF). */
def isUpperCase(c: Int): scala.Boolean = {
  ('\u2160' <= c && c <= '\u216F') || ('\u24B6' <= c && c <= '\u24CF') ||
  getType(c) == UPPERCASE_LETTER
}

/** True iff `codePoint` is within the legal Unicode code point range. */
@inline def isValidCodePoint(codePoint: Int): scala.Boolean =
  codePoint >= MIN_CODE_POINT && codePoint <= MAX_CODE_POINT

/** True iff `codePoint` fits in one Char (Basic Multilingual Plane). */
@inline def isBmpCodePoint(codePoint: Int): scala.Boolean =
  codePoint >= MIN_VALUE && codePoint <= MAX_VALUE

/** True iff `codePoint` requires a surrogate pair in UTF-16. */
@inline def isSupplementaryCodePoint(codePoint: Int): scala.Boolean =
  codePoint >= MIN_SUPPLEMENTARY_CODE_POINT && codePoint <= MAX_CODE_POINT

def isTitleCase(c: scala.Char): scala.Boolean =
  isTitleCase(c.toInt)

// Below 256 there are no titlecase letters, so the table lookup is skipped.
def isTitleCase(cp: Int): scala.Boolean =
  if (cp < 256) false
  else isTitleCaseImpl(getTypeGE256(cp))

@inline private[this] def isTitleCaseImpl(tpe: Int): scala.Boolean =
  tpe == TITLECASE_LETTER

def isDigit(c: scala.Char): scala.Boolean =
  isDigit(c.toInt)

// Fast path: the only decimal digits below 256 are ASCII '0'-'9'.
def isDigit(cp: Int): scala.Boolean =
  if (cp < 256) '0' <= cp && cp <= '9'
  else isDigitImpl(getTypeGE256(cp))

@inline private[this] def isDigitImpl(tpe: Int): scala.Boolean =
  tpe == DECIMAL_DIGIT_NUMBER

def isDefined(c: scala.Char): scala.Boolean =
  isDefined(c.toInt)

/** True iff the code point is assigned. All code points below 888 are
 *  assigned in the table's Unicode version, hence the fast path. */
def isDefined(c: scala.Int): scala.Boolean = {
  if (c < 0) false
  else if (c < 888) true
  else getTypeGE256(c) != UNASSIGNED
}
def isLetter(c: scala.Char): scala.Boolean = isLetter(c.toInt)

/** True iff the general category is one of the five letter categories. */
def isLetter(cp: Int): scala.Boolean = isLetterImpl(getType(cp))

@inline private[this] def isLetterImpl(tpe: Int): scala.Boolean = {
  tpe == UPPERCASE_LETTER || tpe == LOWERCASE_LETTER ||
  tpe == TITLECASE_LETTER || tpe == MODIFIER_LETTER || tpe == OTHER_LETTER
}

def isLetterOrDigit(c: scala.Char): scala.Boolean =
  isLetterOrDigit(c.toInt)

def isLetterOrDigit(cp: Int): scala.Boolean =
  isLetterOrDigitImpl(getType(cp))

@inline private[this] def isLetterOrDigitImpl(tpe: Int): scala.Boolean =
  isDigitImpl(tpe) || isLetterImpl(tpe)

/** Letters plus letter numbers, currency symbols and connector
 *  punctuation (the legacy "Java letter" notion). */
def isJavaLetter(ch: scala.Char): scala.Boolean =
  isJavaLetterImpl(getType(ch))

@inline private[this] def isJavaLetterImpl(tpe: Int): scala.Boolean = {
  isLetterImpl(tpe) || tpe == LETTER_NUMBER || tpe == CURRENCY_SYMBOL ||
  tpe == CONNECTOR_PUNCTUATION
}

/** Java letters plus combining marks and identifier-ignorable chars. */
def isJavaLetterOrDigit(ch: scala.Char): scala.Boolean =
  isJavaLetterOrDigitImpl(ch, getType(ch))

@inline private[this] def isJavaLetterOrDigitImpl(
    codePoint: Int,
    tpe: Int): scala.Boolean = {
  isJavaLetterImpl(tpe) || tpe == COMBINING_SPACING_MARK ||
  tpe == NON_SPACING_MARK || isIdentifierIgnorableImpl(codePoint, tpe)
}

/** Alphabetic test: the five letter categories plus letter numbers. */
def isAlphabetic(codePoint: Int): scala.Boolean = {
  val tpe = getType(codePoint)
  tpe == UPPERCASE_LETTER || tpe == LOWERCASE_LETTER ||
  tpe == TITLECASE_LETTER || tpe == MODIFIER_LETTER ||
  tpe == OTHER_LETTER || tpe == LETTER_NUMBER
}
/** Hard-coded ideographic ranges (decimal code points): CJK punctuation
 *  ideographs, unified ideographs (incl. extensions A–C), and
 *  compatibility ideographs. */
def isIdeographic(c: Int): scala.Boolean = {
  (12294 <= c && c <= 12295) || (12321 <= c && c <= 12329) ||
  (12344 <= c && c <= 12346) || (13312 <= c && c <= 19893) ||
  (19968 <= c && c <= 40908) || (63744 <= c && c <= 64109) ||
  (64112 <= c && c <= 64217) || (131072 <= c && c <= 173782) ||
  (173824 <= c && c <= 177972) || (177984 <= c && c <= 178205) ||
  (194560 <= c && c <= 195101)
}
def isJavaIdentifierStart(ch: scala.Char): scala.Boolean =
  isJavaIdentifierStart(ch.toInt)

/** May start a Java identifier: letters, letter numbers, currency symbols
 *  and connector punctuation (e.g. '_', '$'). */
def isJavaIdentifierStart(codePoint: Int): scala.Boolean =
  isJavaIdentifierStartImpl(getType(codePoint))

@inline
private[this] def isJavaIdentifierStartImpl(tpe: Int): scala.Boolean = {
  isLetterImpl(tpe) || tpe == LETTER_NUMBER || tpe == CURRENCY_SYMBOL ||
  tpe == CONNECTOR_PUNCTUATION
}

def isJavaIdentifierPart(ch: scala.Char): scala.Boolean =
  isJavaIdentifierPart(ch.toInt)

/** May continue a Java identifier: start chars plus digits, combining
 *  marks and identifier-ignorable chars. */
def isJavaIdentifierPart(codePoint: Int): scala.Boolean =
  isJavaIdentifierPartImpl(codePoint, getType(codePoint))

@inline private[this] def isJavaIdentifierPartImpl(
    codePoint: Int,
    tpe: Int): scala.Boolean = {
  isLetterImpl(tpe) || tpe == CURRENCY_SYMBOL ||
  tpe == CONNECTOR_PUNCTUATION || tpe == DECIMAL_DIGIT_NUMBER ||
  tpe == LETTER_NUMBER || tpe == COMBINING_SPACING_MARK ||
  tpe == NON_SPACING_MARK || isIdentifierIgnorableImpl(codePoint, tpe)
}

def isUnicodeIdentifierStart(ch: scala.Char): scala.Boolean =
  isUnicodeIdentifierStart(ch.toInt)

/** May start a Unicode identifier: letters or letter numbers only
 *  (no currency symbols or connector punctuation, unlike Java). */
def isUnicodeIdentifierStart(codePoint: Int): scala.Boolean =
  isUnicodeIdentifierStartImpl(getType(codePoint))

@inline
private[this] def isUnicodeIdentifierStartImpl(tpe: Int): scala.Boolean =
  isLetterImpl(tpe) || tpe == LETTER_NUMBER

def isUnicodeIdentifierPart(ch: scala.Char): scala.Boolean =
  isUnicodeIdentifierPart(ch.toInt)

def isUnicodeIdentifierPart(codePoint: Int): scala.Boolean =
  isUnicodeIdentifierPartImpl(codePoint, getType(codePoint))

// NOTE(review): unlike its siblings this impl is public — presumably an
// oversight, but narrowing it now would be a binary-incompatible change.
def isUnicodeIdentifierPartImpl(codePoint: Int, tpe: Int): scala.Boolean = {
  tpe == CONNECTOR_PUNCTUATION || tpe == DECIMAL_DIGIT_NUMBER ||
  tpe == COMBINING_SPACING_MARK || tpe == NON_SPACING_MARK ||
  isUnicodeIdentifierStartImpl(tpe) ||
  isIdentifierIgnorableImpl(codePoint, tpe)
}
def isIdentifierIgnorable(c: scala.Char): scala.Boolean =
  isIdentifierIgnorable(c.toInt)

/** Ignorable within identifiers: non-whitespace ISO controls and
 *  FORMAT-category characters. */
def isIdentifierIgnorable(codePoint: Int): scala.Boolean =
  isIdentifierIgnorableImpl(codePoint, getType(codePoint))

@inline private[this] def isIdentifierIgnorableImpl(
    codePoint: Int,
    tpe: Int): scala.Boolean = {
  ('\u0000' <= codePoint && codePoint <= '\u0008') ||
  ('\u000E' <= codePoint && codePoint <= '\u001B') ||
  ('\u007F' <= codePoint && codePoint <= '\u009F') ||
  tpe == FORMAT
}

def isMirrored(c: scala.Char): scala.Boolean =
  isMirrored(c.toInt)

/** Membership test against `isMirroredIndices`, a table of alternating
 *  range boundaries: after the binary-search insertion-point adjustment,
 *  an odd slot means the code point falls inside a mirrored range. */
def isMirrored(codePoint: Int): scala.Boolean = {
  val idx = Arrays.binarySearch(isMirroredIndices, codePoint) + 1
  (Math.abs(idx) & 1) != 0
}
/* Conversions */

// Case-maps via String and keeps only the first resulting char.
// NOTE(review): one-to-many mappings (e.g. 'ß'.toUpperCase == "SS") are
// truncated to their first char here — confirm this matches the intent.
def toUpperCase(c: scala.Char): scala.Char = c.toString.toUpperCase()(0)
def toLowerCase(c: scala.Char): scala.Char = c.toString.toLowerCase()(0)

/** UTF-16 encoding of `codePoint`: a 2-element surrogate pair for
 *  supplementary code points, otherwise a single-element array.
 *  @throws IllegalArgumentException for an invalid code point */
def toChars(codePoint: Int): Array[Char] = {
  if (!isValidCodePoint(codePoint))
    throw new IllegalArgumentException()

  if (isSupplementaryCodePoint(codePoint)) {
    val dst = new Array[Char](2)
    toSurrogate(codePoint, dst, 0)
    dst
  } else {
    Array(codePoint.toChar)
  }
}
/** Writes the UTF-16 encoding of `codePoint` into `dst` at `dstIndex` and
 *  returns the number of chars written (1 or 2).
 *  @throws IllegalArgumentException for an invalid code point */
def toChars(codePoint: Int, dst: Array[Char], dstIndex: Int): Int = {
  if (!isValidCodePoint(codePoint))
    throw new IllegalArgumentException()

  if (isSupplementaryCodePoint(codePoint)) {
    toSurrogate(codePoint, dst, dstIndex)
    2
  } else {
    dst(dstIndex) = codePoint.toChar
    1
  }
}

/** Splits a supplementary code point into its surrogate pair: subtract the
 *  plane offset, then 10 high bits -> lead surrogate, 10 low bits -> trail. */
@inline private[this] def toSurrogate(codePoint: Int,
    dst: Array[Char],
    dstIndex: Int): Unit = {
  val cpPrime = codePoint - 0x10000
  val high = 0xD800 | ((cpPrime >> 10) & 0x3FF)
  val low = 0xDC00 | (cpPrime & 0x3FF)
  dst(dstIndex) = high.toChar
  dst(dstIndex + 1) = low.toChar
}

/** Single-character string for `c`. */
@inline def toString(c: scala.Char): String =
  String.valueOf(c)

/** Numeric comparison; negative, zero or positive as x <, ==, > y. */
@inline def compare(x: scala.Char, y: scala.Char): Int =
  x - y
// Based on Unicode 7.0.0
// Scalafmt doesn't like long integer arrays, so we turn
// it off for the arrays below.
//
// format: off
// Types of characters from 0 to 255
private[this] lazy val charTypesFirst256 = Array[scala.Byte](15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 12, 24, 24, 24, 26, 24, 24, 24,
21, 22, 24, 25, 24, 20, 24, 24, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 24, 24, 25,
25, 25, 24, 24, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 21, 24, 22, 27, 23, 27, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 21, 25, 22, 25, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 12, 24, 26, 26, 26,
26, 28, 24, 27, 28, 5, 29, 25, 16, 28, 27, 28, 25, 11, 11, 27, 2, 24, 24,
27, 11, 5, 30, 11, 11, 11, 24, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 25, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 25, 2, 2, 2, 2, 2, 2,
2, 2)
// Character type data by ranges of types
// charTypeIndices: contains the index where the range ends
// charTypes: contains the type of the characters in the range that ends there
// note that charTypeIndices.length + 1 = charType.length and that the
// range 0 to 255 is not included because it is contained in charTypesFirst256
// They were generated with the following script:
//
// val indicesAndTypes = (256 to Character.MAX_CODE_POINT)
// .map(i => (i, Character.getType(i)))
// .foldLeft[List[(Int, Int)]](Nil) {
// case (x :: xs, elem) if x._2 == elem._2 => x :: xs
// case (prevs, elem) => elem :: prevs
// }.reverse
// val charTypeIndices = indicesAndTypes.map(_._1).tail
// val charTypeIndicesDeltas = charTypeIndices.zip(0 :: charTypeIndices.init)
// .map(tup => tup._1 - tup._2)
// val charTypes = indicesAndTypes.map(_._2)
// println(charTypeIndicesDeltas.mkString(
// "charTypeIndices: val deltas = Array[Int](", ", ", ")"))
// println(charTypes.mkString("val charTypes = Array[scala.Byte](", ", ", ")"))
//
// format: off
/** Absolute code points at which each run of identical character type ends,
 *  for code points above 255; decoded lazily from the delta encoding below.
 *  See the generator script in the comment above this definition.
 */
private[this] lazy val charTypeIndices = {
// Deltas between consecutive range-end indices; expanded to absolute
// values by uncompressDeltas on first access.
val deltas = Array[Int](257, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 3, 2, 1, 1, 1, 2, 1, 3,
2, 4, 1, 2, 1, 3, 3, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1, 1, 2, 1, 1, 2, 1,
3, 1, 1, 1, 2, 2, 1, 1, 3, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 7, 2, 1, 2, 2, 1, 1, 4, 1, 1, 1, 1, 1, 1, 1, 1, 69, 1, 27, 18,
4, 12, 14, 5, 7, 1, 1, 1, 17, 112, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 3, 1,
5, 2, 1, 1, 3, 1, 1, 1, 2, 1, 17, 1, 9, 35, 1, 2, 3, 3, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 1, 1, 1,
1, 2, 2, 51, 48, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 2, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 38, 2, 1, 6, 1, 39, 1, 1,
1, 4, 1, 1, 45, 1, 1, 1, 2, 1, 2, 1, 1, 8, 27, 5, 3, 2, 11, 5, 1, 3,
2, 1, 2, 2, 11, 1, 2, 2, 32, 1, 10, 21, 10, 4, 2, 1, 99, 1, 1, 7, 1,
1, 6, 2, 2, 1, 4, 2, 10, 3, 2, 1, 14, 1, 1, 1, 1, 30, 27, 2, 89, 11,
1, 14, 10, 33, 9, 2, 1, 3, 1, 5, 22, 4, 1, 9, 1, 3, 1, 5, 2, 15, 1,
25, 3, 2, 1, 65, 1, 1, 11, 55, 27, 1, 3, 1, 54, 1, 1, 1, 1, 3, 8, 4,
1, 2, 1, 7, 10, 2, 2, 10, 1, 1, 6, 1, 7, 1, 1, 2, 1, 8, 2, 2, 2, 22,
1, 7, 1, 1, 3, 4, 2, 1, 1, 3, 4, 2, 2, 2, 2, 1, 1, 8, 1, 4, 2, 1, 3,
2, 2, 10, 2, 2, 6, 1, 1, 5, 2, 1, 1, 6, 4, 2, 2, 22, 1, 7, 1, 2, 1, 2,
1, 2, 2, 1, 1, 3, 2, 4, 2, 2, 3, 3, 1, 7, 4, 1, 1, 7, 10, 2, 3, 1, 11,
2, 1, 1, 9, 1, 3, 1, 22, 1, 7, 1, 2, 1, 5, 2, 1, 1, 3, 5, 1, 2, 1, 1,
2, 1, 2, 1, 15, 2, 2, 2, 10, 1, 1, 15, 1, 2, 1, 8, 2, 2, 2, 22, 1, 7,
1, 2, 1, 5, 2, 1, 1, 1, 1, 1, 4, 2, 2, 2, 2, 1, 8, 1, 1, 4, 2, 1, 3,
2, 2, 10, 1, 1, 6, 10, 1, 1, 1, 6, 3, 3, 1, 4, 3, 2, 1, 1, 1, 2, 3, 2,
3, 3, 3, 12, 4, 2, 1, 2, 3, 3, 1, 3, 1, 2, 1, 6, 1, 14, 10, 3, 6, 1,
1, 6, 3, 1, 8, 1, 3, 1, 23, 1, 10, 1, 5, 3, 1, 3, 4, 1, 3, 1, 4, 7, 2,
1, 2, 6, 2, 2, 2, 10, 8, 7, 1, 2, 2, 1, 8, 1, 3, 1, 23, 1, 10, 1, 5,
2, 1, 1, 1, 1, 5, 1, 1, 2, 1, 2, 2, 7, 2, 7, 1, 1, 2, 2, 2, 10, 1, 2,
15, 2, 1, 8, 1, 3, 1, 41, 2, 1, 3, 4, 1, 3, 1, 3, 1, 1, 8, 1, 8, 2, 2,
2, 10, 6, 3, 1, 6, 2, 2, 1, 18, 3, 24, 1, 9, 1, 1, 2, 7, 3, 1, 4, 3,
3, 1, 1, 1, 8, 18, 2, 1, 12, 48, 1, 2, 7, 4, 1, 6, 1, 8, 1, 10, 2, 37,
2, 1, 1, 2, 2, 1, 1, 2, 1, 6, 4, 1, 7, 1, 3, 1, 1, 1, 1, 2, 2, 1, 4,
1, 2, 6, 1, 2, 1, 2, 5, 1, 1, 1, 6, 2, 10, 2, 4, 32, 1, 3, 15, 1, 1,
3, 2, 6, 10, 10, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 8, 1, 36, 4, 14, 1,
5, 1, 2, 5, 11, 1, 36, 1, 8, 1, 6, 1, 2, 5, 4, 2, 37, 43, 2, 4, 1, 6,
1, 2, 2, 2, 1, 10, 6, 6, 2, 2, 4, 3, 1, 3, 2, 7, 3, 4, 13, 1, 2, 2, 6,
1, 1, 1, 10, 3, 1, 2, 38, 1, 1, 5, 1, 2, 43, 1, 1, 332, 1, 4, 2, 7, 1,
1, 1, 4, 2, 41, 1, 4, 2, 33, 1, 4, 2, 7, 1, 1, 1, 4, 2, 15, 1, 57, 1,
4, 2, 67, 2, 3, 9, 20, 3, 16, 10, 6, 85, 11, 1, 620, 2, 17, 1, 26, 1,
1, 3, 75, 3, 3, 15, 13, 1, 4, 3, 11, 18, 3, 2, 9, 18, 2, 12, 13, 1, 3,
1, 2, 12, 52, 2, 1, 7, 8, 1, 2, 11, 3, 1, 3, 1, 1, 1, 2, 10, 6, 10, 6,
6, 1, 4, 3, 1, 1, 10, 6, 35, 1, 52, 8, 41, 1, 1, 5, 70, 10, 29, 3, 3,
4, 2, 3, 4, 2, 1, 6, 3, 4, 1, 3, 2, 10, 30, 2, 5, 11, 44, 4, 17, 7, 2,
6, 10, 1, 3, 34, 23, 2, 3, 2, 2, 53, 1, 1, 1, 7, 1, 1, 1, 1, 2, 8, 6,
10, 2, 1, 10, 6, 10, 6, 7, 1, 6, 82, 4, 1, 47, 1, 1, 5, 1, 1, 5, 1, 2,
7, 4, 10, 7, 10, 9, 9, 3, 2, 1, 30, 1, 4, 2, 2, 1, 1, 2, 2, 10, 44, 1,
1, 2, 3, 1, 1, 3, 2, 8, 4, 36, 8, 8, 2, 2, 3, 5, 10, 3, 3, 10, 30, 6,
2, 64, 8, 8, 3, 1, 13, 1, 7, 4, 1, 4, 2, 1, 2, 9, 44, 63, 13, 1, 34,
37, 39, 21, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9,
8, 6, 2, 6, 2, 8, 8, 8, 8, 6, 2, 6, 2, 8, 1, 1, 1, 1, 1, 1, 1, 1, 8,
8, 14, 2, 8, 8, 8, 8, 8, 8, 5, 1, 2, 4, 1, 1, 1, 3, 3, 1, 2, 4, 1, 3,
4, 2, 2, 4, 1, 3, 8, 5, 3, 2, 3, 1, 2, 4, 1, 2, 1, 11, 5, 6, 2, 1, 1,
1, 2, 1, 1, 1, 8, 1, 1, 5, 1, 9, 1, 1, 4, 2, 3, 1, 1, 1, 11, 1, 1, 1,
10, 1, 5, 5, 6, 1, 1, 2, 6, 3, 1, 1, 1, 10, 3, 1, 1, 1, 13, 3, 27, 21,
13, 4, 1, 3, 12, 15, 2, 1, 4, 1, 2, 1, 3, 2, 3, 1, 1, 1, 2, 1, 5, 6,
1, 1, 1, 1, 1, 1, 4, 1, 1, 4, 1, 4, 1, 2, 2, 2, 5, 1, 4, 1, 1, 2, 1,
1, 16, 35, 1, 1, 4, 1, 6, 5, 5, 2, 4, 1, 2, 1, 2, 1, 7, 1, 31, 2, 2,
1, 1, 1, 31, 268, 8, 4, 20, 2, 7, 1, 1, 81, 1, 30, 25, 40, 6, 18, 12,
39, 25, 11, 21, 60, 78, 22, 183, 1, 9, 1, 54, 8, 111, 1, 144, 1, 103,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 30, 44, 5, 1, 1, 31, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 16, 256, 131, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 63, 1, 1, 1, 1, 32, 1, 1, 258, 48,
21, 2, 6, 3, 10, 166, 47, 1, 47, 1, 1, 1, 3, 2, 1, 1, 1, 1, 1, 1, 4,
1, 1, 2, 1, 6, 2, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 6, 1, 1, 1, 1, 3, 1, 1, 5,
4, 1, 2, 38, 1, 1, 5, 1, 2, 56, 7, 1, 1, 14, 1, 23, 9, 7, 1, 7, 1, 7,
1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 32, 2, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1,
9, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 10, 2, 68,
26, 1, 89, 12, 214, 26, 12, 4, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 9, 4, 2, 1, 5, 2, 3,
1, 1, 1, 2, 1, 86, 2, 2, 2, 2, 1, 1, 90, 1, 3, 1, 5, 41, 3, 94, 1, 2,
4, 10, 27, 5, 36, 12, 16, 31, 1, 10, 30, 8, 1, 15, 32, 10, 39, 15, 63,
1, 256, 6582, 10, 64, 20941, 51, 21, 1, 1143, 3, 55, 9, 40, 6, 2, 268,
1, 3, 16, 10, 2, 20, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 10, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 7, 1, 70, 10, 2, 6, 8,
23, 9, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 8, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 77, 2, 1, 7, 1, 3, 1, 4, 1, 23, 2, 2, 1, 4, 4, 6, 2, 1, 1, 6,
52, 4, 8, 2, 50, 16, 1, 9, 2, 10, 6, 18, 6, 3, 1, 4, 10, 28, 8, 2, 23,
11, 2, 11, 1, 29, 3, 3, 1, 47, 1, 2, 4, 2, 1, 4, 13, 1, 1, 10, 4, 2,
32, 41, 6, 2, 2, 2, 2, 9, 3, 1, 8, 1, 1, 2, 10, 2, 4, 16, 1, 6, 3, 1,
1, 4, 48, 1, 1, 3, 2, 2, 5, 2, 1, 1, 1, 24, 2, 1, 2, 11, 1, 2, 2, 2,
1, 2, 1, 1, 10, 6, 2, 6, 2, 6, 9, 7, 1, 7, 145, 35, 2, 1, 2, 1, 2, 1,
1, 1, 2, 10, 6, 11172, 12, 23, 4, 49, 4, 2048, 6400, 366, 2, 106, 38,
7, 12, 5, 5, 1, 1, 10, 1, 13, 1, 5, 1, 1, 1, 2, 1, 2, 1, 108, 16, 17,
363, 1, 1, 16, 64, 2, 54, 40, 12, 1, 1, 2, 16, 7, 1, 1, 1, 6, 7, 9, 1,
2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 4, 3,
3, 1, 4, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 3, 1, 1, 1, 2, 4, 5, 1, 135, 2,
1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 2, 10, 2, 3, 2, 26, 1, 1, 1, 1, 1, 1,
26, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 10, 1, 45, 2, 31, 3, 6, 2, 6, 2, 6,
2, 3, 3, 2, 1, 1, 1, 2, 1, 1, 4, 2, 10, 3, 2, 2, 12, 1, 26, 1, 19, 1,
2, 1, 15, 2, 14, 34, 123, 5, 3, 4, 45, 3, 9, 53, 4, 17, 1, 5, 12, 52,
45, 1, 130, 29, 3, 49, 47, 31, 1, 4, 12, 17, 1, 8, 1, 53, 30, 1, 1,
36, 4, 8, 1, 5, 42, 40, 40, 78, 2, 10, 854, 6, 2, 1, 1, 44, 1, 2, 3,
1, 2, 23, 1, 1, 8, 160, 22, 6, 3, 1, 26, 5, 1, 64, 56, 6, 2, 64, 1, 3,
1, 2, 5, 4, 4, 1, 3, 1, 27, 4, 3, 4, 1, 8, 8, 9, 7, 29, 2, 1, 128, 54,
3, 7, 22, 2, 8, 19, 5, 8, 128, 73, 535, 31, 385, 1, 1, 1, 53, 15, 7,
4, 20, 10, 16, 2, 1, 45, 3, 4, 2, 2, 2, 1, 4, 14, 25, 7, 10, 6, 3, 36,
5, 1, 8, 1, 10, 4, 60, 2, 1, 48, 3, 9, 2, 4, 4, 7, 10, 1190, 43, 1, 1,
1, 2, 6, 1, 1, 8, 10, 2358, 879, 145, 99, 13, 4, 2956, 1071, 13265,
569, 1223, 69, 11, 1, 46, 16, 4, 13, 16480, 2, 8190, 246, 10, 39, 2,
60, 2, 3, 3, 6, 8, 8, 2, 7, 30, 4, 48, 34, 66, 3, 1, 186, 87, 9, 18,
142, 26, 26, 26, 7, 1, 18, 26, 26, 1, 1, 2, 2, 1, 2, 2, 2, 4, 1, 8, 4,
1, 1, 1, 7, 1, 11, 26, 26, 2, 1, 4, 2, 8, 1, 7, 1, 26, 2, 1, 4, 1, 5,
1, 1, 3, 7, 1, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 28, 2,
25, 1, 25, 1, 6, 25, 1, 25, 1, 6, 25, 1, 25, 1, 6, 25, 1, 25, 1, 6,
25, 1, 25, 1, 6, 1, 1, 2, 50, 5632, 4, 1, 27, 1, 2, 1, 1, 2, 1, 1, 10,
1, 4, 1, 1, 1, 1, 6, 1, 4, 1, 1, 1, 1, 1, 1, 3, 1, 2, 1, 1, 2, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 2, 4, 1, 7, 1, 4, 1, 4, 1, 1, 1, 10,
1, 17, 5, 3, 1, 5, 1, 17, 52, 2, 270, 44, 4, 100, 12, 15, 2, 14, 2,
15, 1, 15, 32, 11, 5, 31, 1, 60, 4, 43, 75, 29, 13, 43, 5, 9, 7, 2,
174, 33, 15, 6, 1, 70, 3, 20, 12, 37, 1, 5, 21, 17, 15, 63, 1, 1, 1,
182, 1, 4, 3, 62, 2, 4, 12, 24, 147, 70, 4, 11, 48, 70, 58, 116, 2188,
42711, 41, 4149, 11, 222, 16354, 542, 722403, 1, 30, 96, 128, 240,
65040, 65534, 2, 65534)
uncompressDeltas(deltas)
}
// Generic character category for each of the ranges delimited by
// `charTypeIndices`; entry i is the type of the characters in range i
// (the first range starts at code point 256).
private[this] lazy val charTypes = Array[scala.Byte](1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 5, 1, 2, 5, 1, 3, 2,
1, 3, 2, 1, 3, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 5, 2, 4, 27, 4, 27, 4, 27, 4, 27, 4, 27, 6, 1, 2, 1,
2, 4, 27, 1, 2, 0, 4, 2, 24, 0, 27, 1, 24, 1, 0, 1, 0, 1, 2, 1, 0, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 25, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 28, 6, 7, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 0, 1, 0, 4, 24, 0, 2, 0, 24, 20, 0, 26, 0, 6, 20, 6, 24, 6, 24, 6,
24, 6, 0, 5, 0, 5, 24, 0, 16, 0, 25, 24, 26, 24, 28, 6, 24, 0, 24, 5,
4, 5, 6, 9, 24, 5, 6, 5, 24, 5, 6, 16, 28, 6, 4, 6, 28, 6, 5, 9, 5,
28, 5, 24, 0, 16, 5, 6, 5, 6, 0, 5, 6, 5, 0, 9, 5, 6, 4, 28, 24, 4, 0,
5, 6, 4, 6, 4, 6, 4, 6, 0, 24, 0, 5, 6, 0, 24, 0, 5, 0, 5, 0, 6, 0, 6,
8, 5, 6, 8, 6, 5, 8, 6, 8, 6, 8, 5, 6, 5, 6, 24, 9, 24, 4, 5, 0, 5, 0,
6, 8, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 6, 5, 8, 6, 0, 8, 0, 8,
6, 5, 0, 8, 0, 5, 0, 5, 6, 0, 9, 5, 26, 11, 28, 26, 0, 6, 8, 0, 5, 0,
5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 6, 0, 8, 6, 0, 6, 0, 6, 0, 6, 0,
5, 0, 5, 0, 9, 6, 5, 6, 0, 6, 8, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
0, 6, 5, 8, 6, 0, 6, 8, 0, 8, 6, 0, 5, 0, 5, 6, 0, 9, 24, 26, 0, 6, 8,
0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 6, 5, 8, 6, 8, 6, 0, 8, 0, 8,
6, 0, 6, 8, 0, 5, 0, 5, 6, 0, 9, 28, 5, 11, 0, 6, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 8, 6, 8, 0, 8, 0, 8, 6, 0, 5,
0, 8, 0, 9, 11, 28, 26, 28, 0, 8, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
6, 8, 0, 6, 0, 6, 0, 6, 0, 5, 0, 5, 6, 0, 9, 0, 11, 28, 0, 8, 0, 5, 0,
5, 0, 5, 0, 5, 0, 5, 0, 6, 5, 8, 6, 8, 0, 6, 8, 0, 8, 6, 0, 8, 0, 5,
0, 5, 6, 0, 9, 0, 5, 0, 8, 0, 5, 0, 5, 0, 5, 0, 5, 8, 6, 0, 8, 0, 8,
6, 5, 0, 8, 0, 5, 6, 0, 9, 11, 0, 28, 5, 0, 8, 0, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 6, 0, 8, 6, 0, 6, 0, 8, 0, 8, 24, 0, 5, 6, 5, 6, 0, 26, 5, 4,
6, 24, 9, 24, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0,
5, 0, 5, 0, 5, 6, 5, 6, 0, 6, 5, 0, 5, 0, 4, 0, 6, 0, 9, 0, 5, 0, 5,
28, 24, 28, 24, 28, 6, 28, 9, 11, 28, 6, 28, 6, 28, 6, 21, 22, 21, 22,
8, 5, 0, 5, 0, 6, 8, 6, 24, 6, 5, 6, 0, 6, 0, 28, 6, 28, 0, 28, 24,
28, 24, 0, 5, 8, 6, 8, 6, 8, 6, 8, 6, 5, 9, 24, 5, 8, 6, 5, 6, 5, 8,
5, 8, 5, 6, 5, 6, 8, 6, 8, 6, 5, 8, 9, 8, 6, 28, 1, 0, 1, 0, 1, 0, 5,
24, 4, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 6, 24, 11, 0, 5, 28, 0, 5, 0, 20, 5,
24, 5, 12, 5, 21, 22, 0, 5, 24, 10, 0, 5, 0, 5, 6, 0, 5, 6, 24, 0, 5,
6, 0, 5, 0, 5, 0, 6, 0, 5, 6, 8, 6, 8, 6, 8, 6, 24, 4, 24, 26, 5, 6,
0, 9, 0, 11, 0, 24, 20, 24, 6, 12, 0, 9, 0, 5, 4, 5, 0, 5, 6, 5, 0, 5,
0, 5, 0, 6, 8, 6, 8, 0, 8, 6, 8, 6, 0, 28, 0, 24, 9, 5, 0, 5, 0, 5, 0,
8, 5, 8, 0, 9, 11, 0, 28, 5, 6, 8, 0, 24, 5, 8, 6, 8, 6, 0, 6, 8, 6,
8, 6, 8, 6, 0, 6, 9, 0, 9, 0, 24, 4, 24, 0, 6, 8, 5, 6, 8, 6, 8, 6, 8,
6, 8, 5, 0, 9, 24, 28, 6, 28, 0, 6, 8, 5, 8, 6, 8, 6, 8, 6, 8, 5, 9,
5, 6, 8, 6, 8, 6, 8, 6, 8, 0, 24, 5, 8, 6, 8, 6, 0, 24, 9, 0, 5, 9, 5,
4, 24, 0, 24, 0, 6, 24, 6, 8, 6, 5, 6, 5, 8, 6, 5, 0, 2, 4, 2, 4, 2,
4, 6, 0, 6, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 0, 1, 0, 2, 1, 2, 1, 2, 0, 1, 0, 2, 0, 1, 0, 1, 0, 1, 0, 1, 2, 1,
2, 0, 2, 3, 2, 3, 2, 3, 2, 0, 2, 1, 3, 27, 2, 27, 2, 0, 2, 1, 3, 27,
2, 0, 2, 1, 0, 27, 2, 1, 27, 0, 2, 0, 2, 1, 3, 27, 0, 12, 16, 20, 24,
29, 30, 21, 29, 30, 21, 29, 24, 13, 14, 16, 12, 24, 29, 30, 24, 23,
24, 25, 21, 22, 24, 25, 24, 23, 24, 12, 16, 0, 16, 11, 4, 0, 11, 25,
21, 22, 4, 11, 25, 21, 22, 0, 4, 0, 26, 0, 6, 7, 6, 7, 6, 0, 28, 1,
28, 1, 28, 2, 1, 2, 1, 2, 28, 1, 28, 25, 1, 28, 1, 28, 1, 28, 1, 28,
1, 28, 2, 1, 2, 5, 2, 28, 2, 1, 25, 1, 2, 28, 25, 28, 2, 28, 11, 10,
1, 2, 10, 11, 0, 25, 28, 25, 28, 25, 28, 25, 28, 25, 28, 25, 28, 25,
28, 25, 28, 25, 28, 25, 28, 25, 28, 25, 28, 21, 22, 28, 25, 28, 25,
28, 25, 28, 0, 28, 0, 28, 0, 11, 28, 11, 28, 25, 28, 25, 28, 25, 28,
25, 28, 0, 28, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22,
11, 28, 25, 21, 22, 25, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 25,
28, 25, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21,
22, 21, 22, 21, 22, 21, 22, 25, 21, 22, 21, 22, 25, 21, 22, 25, 28,
25, 28, 25, 0, 28, 0, 1, 0, 2, 0, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 4, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 28, 1, 2, 1, 2, 6, 1, 2, 0, 24,
11, 24, 2, 0, 2, 0, 2, 0, 5, 0, 4, 24, 0, 6, 5, 0, 5, 0, 5, 0, 5, 0,
5, 0, 5, 0, 5, 0, 5, 0, 6, 24, 29, 30, 29, 30, 24, 29, 30, 24,
29, 30, 24, 20, 24, 20, 24, 29, 30, 24, 29, 30, 21, 22, 21, 22, 21,
22, 21, 22, 24, 4, 24, 20, 0, 28, 0, 28, 0, 28, 0, 28, 0, 12, 24, 28,
4, 5, 10, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 28, 21, 22, 21, 22,
21, 22, 21, 22, 20, 21, 22, 28, 10, 6, 8, 20, 4, 28, 10, 4, 5, 24, 28,
0, 5, 0, 6, 27, 4, 5, 20, 5, 24, 4, 5, 0, 5, 0, 5, 0, 28, 11, 28, 5,
0, 28, 0, 5, 28, 0, 11, 28, 11, 28, 11, 28, 11, 28, 11, 28, 0, 28, 5,
0, 28, 5, 0, 5, 4, 5, 0, 28, 0, 5, 4, 24, 5, 4, 24, 5, 9, 5, 0, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 5, 6,
7, 24, 6, 24, 4, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 0, 6, 5, 10, 6, 24, 0, 27, 4, 27, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
2, 4, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 4, 27, 1, 2, 1, 2,
0, 1, 2, 1, 2, 0, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 0, 4, 2, 5, 6, 5,
6, 5, 6, 5, 8, 6, 8, 28, 0, 11, 28, 26, 28, 0, 5, 24, 0, 8, 5, 8, 6,
0, 24, 9, 0, 6, 5, 24, 5, 0, 9, 5, 6, 24, 5, 6, 8, 0, 24, 5, 0, 6, 8,
5, 6, 8, 6, 8, 6, 8, 24, 0, 4, 9, 0, 24, 0, 5, 6, 8, 6, 8, 6, 0, 5, 6,
5, 6, 8, 0, 9, 0, 24, 5, 4, 5, 28, 5, 8, 0, 5, 6, 5, 6, 5, 6, 5, 6, 5,
6, 5, 0, 5, 4, 24, 5, 8, 6, 8, 24, 5, 4, 8, 6, 0, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 5, 8, 6, 8, 6, 8, 24, 8, 6, 0, 9, 0, 5, 0, 5, 0, 5, 0, 19,
18, 5, 0, 5, 0, 2, 0, 2, 0, 5, 6, 5, 25, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0,
5, 27, 0, 5, 21, 22, 0, 5, 0, 5, 0, 5, 26, 28, 0, 6, 24, 21, 22, 24,
0, 6, 0, 24, 20, 23, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22, 21, 22,
21, 22, 21, 22, 24, 21, 22, 24, 23, 24, 0, 24, 20, 21, 22, 21, 22, 21,
22, 24, 25, 20, 25, 0, 24, 26, 24, 0, 5, 0, 5, 0, 16, 0, 24, 26, 24,
21, 22, 24, 25, 24, 20, 24, 9, 24, 25, 24, 1, 21, 24, 22, 27, 23, 27,
2, 21, 25, 22, 25, 21, 22, 24, 21, 22, 24, 5, 4, 5, 4, 5, 0, 5, 0, 5,
0, 5, 0, 5, 0, 26, 25, 27, 28, 26, 0, 28, 25, 28, 0, 16, 28, 0, 5, 0,
5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 24, 0, 11, 0, 28, 10, 11, 28, 11,
0, 28, 0, 28, 6, 0, 5, 0, 5, 0, 5, 0, 11, 0, 5, 10, 5, 10, 0, 5, 0,
24, 5, 0, 5, 24, 10, 0, 1, 2, 5, 0, 9, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 24, 11, 0, 5, 11, 0, 24, 5, 0, 24, 0, 5, 0, 5, 0, 5, 6, 0, 6,
0, 6, 5, 0, 5, 0, 5, 0, 6, 0, 6, 11, 0, 24, 0, 5, 11, 24, 0, 5, 0, 24,
5, 0, 11, 5, 0, 11, 0, 5, 0, 11, 0, 8, 6, 8, 5, 6, 24, 0, 11, 9, 0, 6,
8, 5, 8, 6, 8, 6, 24, 16, 24, 0, 5, 0, 9, 0, 6, 5, 6, 8, 6, 0, 9, 24,
0, 6, 8, 5, 8, 6, 8, 5, 24, 0, 9, 0, 5, 6, 8, 6, 8, 6, 8, 6, 0, 9, 0,
5, 0, 10, 0, 24, 0, 5, 0, 5, 0, 5, 0, 5, 8, 0, 6, 4, 0, 5, 0, 28, 0,
28, 0, 28, 8, 6, 28, 8, 16, 6, 28, 6, 28, 6, 28, 0, 28, 6, 28, 0, 28,
0, 11, 0, 1, 2, 1, 2, 0, 2, 1, 2, 1, 0, 1, 0, 1, 0, 1, 0, 1, 2,
0, 2, 0, 2, 0, 2, 1, 2, 1, 0, 1, 0, 1, 0, 1, 0, 2, 1, 0, 1, 0, 1, 0,
1, 0, 1, 0, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 0, 1, 25, 2, 25, 2,
1, 25, 2, 25, 2, 1, 25, 2, 25, 2, 1, 25, 2, 25, 2, 1, 25, 2, 25, 2, 1,
2, 0, 9, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0,
5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5, 0, 5,
0, 25, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 11, 0, 28, 0, 28,
0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28,
0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28, 0, 28,
0, 28, 0, 28, 0, 28, 0, 5, 0, 5, 0, 5, 0, 5, 0, 16, 0, 16, 0, 6, 0,
18, 0, 18, 0)
// Indices representing the start of ranges of codePoint that have the same
// `isMirrored` result. It is true for the first range
// (i.e. isMirrored(40)==true, isMirrored(41)==true, isMirrored(42)==false)
// They were generated with the following script:
//
// val indicesAndRes = (0 to Character.MAX_CODE_POINT)
// .map(i => (i, Character.isMirrored(i))).foldLeft[List[(Int, Boolean)]](Nil) {
// case (x :: xs, elem) if x._2 == elem._2 => x :: xs
// case (prevs, elem) => elem :: prevs
// }.reverse
// val isMirroredIndices = indicesAndRes.map(_._1).tail
// val isMirroredIndicesDeltas = isMirroredIndices.zip(
// 0 :: isMirroredIndices.init).map(tup => tup._1 - tup._2)
// println(isMirroredIndicesDeltas.mkString(
// "isMirroredIndices: val deltas = Array[Int](", ", ", ")"))
/** Starts of the ranges of code points sharing the same `isMirrored`
 *  result; the first range (starting at code point 40) is mirrored and
 *  consecutive ranges alternate. Decoded lazily from the deltas below.
 */
private[this] lazy val isMirroredIndices = {
// Deltas between consecutive range starts; expanded to absolute values
// by uncompressDeltas on first access.
val deltas = Array[Int](40, 2, 18, 1, 1, 1, 28, 1, 1, 1, 29, 1, 1, 1,
45, 1, 15, 1, 3710, 4, 1885, 2, 2460, 2, 10, 2, 54, 2, 14, 2, 177, 1,
192, 4, 3, 6, 3, 1, 3, 2, 3, 4, 1, 4, 1, 1, 1, 1, 4, 9, 5, 1, 1, 18,
5, 4, 9, 2, 1, 1, 1, 8, 2, 31, 2, 4, 5, 1, 9, 2, 2, 19, 5, 2, 9, 5, 2,
2, 4, 24, 2, 16, 8, 4, 20, 2, 7, 2, 1085, 14, 74, 1, 2, 4, 1, 2, 1, 3,
5, 4, 5, 3, 3, 14, 403, 22, 2, 21, 8, 1, 7, 6, 3, 1, 4, 5, 1, 2, 2, 5,
4, 1, 1, 3, 2, 2, 10, 6, 2, 2, 12, 19, 1, 4, 2, 1, 1, 1, 2, 1, 1, 4,
5, 2, 6, 3, 24, 2, 11, 2, 4, 4, 1, 2, 2, 2, 4, 43, 2, 8, 1, 40, 5, 1,
1, 1, 3, 5, 5, 3, 4, 1, 3, 5, 1, 1, 772, 4, 3, 2, 1, 2, 14, 2, 2, 10,
478, 10, 2, 8, 52797, 6, 5, 2, 162, 2, 18, 1, 1, 1, 28, 1, 1, 1, 29,
1, 1, 1, 1, 2, 1, 2, 55159, 1, 57, 1, 57, 1, 57, 1, 57, 1)
uncompressDeltas(deltas)
}
// format: on
/** Turns an array of consecutive deltas into absolute values, in place.
 *
 *  After the call, entry i holds the running sum of the original entries
 *  0 to i. The input array is mutated and returned for convenience.
 */
private[this] def uncompressDeltas(deltas: Array[Int]): Array[Int] = {
  var i = 1
  while (i < deltas.length) {
    deltas(i) += deltas(i - 1)
    i += 1
  }
  deltas
}
// TODO:
// def getDirectionality(c: scala.Char): scala.Byte
// def toTitleCase(c: scala.Char): scala.Char
// def getNumericValue(c: scala.Char): Int
// def reverseBytes(ch: scala.Char): scala.Char
// ...
}
| cedricviaccoz/scala-native | javalib/src/main/scala/java/lang/Character.scala | Scala | bsd-3-clause | 44,961 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.booleantype
import java.io.File
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.util.CarbonProperties
class BooleanDataTypesInsertTest extends QueryTest with BeforeAndAfterEach with BeforeAndAfterAll {
/** Resets state before every test: drops any table a previous test may
 *  have left behind, then recreates the shared one-boolean-column table.
 */
override def beforeEach(): Unit = {
  val tables = Seq("boolean_one_column", "boolean_table", "boolean_table2",
    "boolean_table3", "boolean_table4", "carbon_table", "hive_table")
  tables.foreach(name => sql(s"drop table if exists $name"))
  sql("CREATE TABLE if not exists boolean_one_column(booleanField BOOLEAN) STORED AS carbondata")
}
/** Final cleanup once the suite finishes: drops every table it created. */
override def afterAll(): Unit = {
  Seq("boolean_one_column", "boolean_table", "boolean_table2",
    "boolean_table3", "boolean_table4", "carbon_table", "hive_table")
    .foreach(name => sql(s"drop table if exists $name"))
}
// Smoke test: a single TRUE literal round-trips through the
// one-boolean-column table created in beforeEach.
test("Inserting and selecting table: one column boolean, should support") {
sql("insert into boolean_one_column values(true)")
checkAnswer(
sql("select * from boolean_one_column"),
Seq(Row(true))
)
}
// Inserts every accepted spelling of a boolean literal plus several
// malformed ones; malformed values are expected to load as null.
test("Inserting and selecting table: one column boolean and many rows, should support") {
  val literals = Seq(
    "true", "True", "TRUE", "'true'",
    "False", "false", "FALSE", "'false'",
    "'tr'", "null", "'truEe'", "'falsEe'", "'t'", "'f'")
  literals.foreach(lit => sql(s"insert into boolean_one_column values($lit)"))
  checkAnswer(
    sql("select * from boolean_one_column"),
    Seq(Row(true), Row(true), Row(true), Row(true),
      Row(false), Row(false), Row(false), Row(false),
      Row(true), Row(false), Row(null), Row(null), Row(null), Row(null))
  )
}
test("Inserting and selecting table: create one column boolean table and insert two columns") {
// send to old flow, as for one column two values are inserted.
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_ENABLE_BAD_RECORD_HANDLING_FOR_INSERT, "true")
// Rows carrying a surplus value keep only the first column's value, as
// shown by the expected answer below.
sql("insert into boolean_one_column values(true,false)")
sql("insert into boolean_one_column values(True)")
sql("insert into boolean_one_column values(false,true)")
checkAnswer(
sql("select * from boolean_one_column"),
Seq(Row(true), Row(true), Row(false))
)
// Restore the property so later tests see the default behaviour.
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_ENABLE_BAD_RECORD_HANDLING_FOR_INSERT,
CarbonCommonConstants.CARBON_ENABLE_BAD_RECORD_HANDLING_FOR_INSERT_DEFAULT)
}
// Exercises both case spellings of TRUE/FALSE, a quoted literal and a
// null across a table with two boolean columns.
test("Inserting and selecting table: two columns boolean and many rows, should support") {
  sql("CREATE TABLE if not exists boolean_table2(col1 BOOLEAN, col2 BOOLEAN) STORED AS carbondata")
  Seq("true,true", "True,false", "TRUE,false", "false,true",
    "FALSE,false", "'false',false", "null,true")
    .foreach(pair => sql(s"insert into boolean_table2 values($pair)"))
  checkAnswer(
    sql("select * from boolean_table2"),
    Seq(Row(true, true), Row(true, false), Row(true, false),
      Row(false, true), Row(false, false), Row(false, false), Row(null, true))
  )
}
// A boolean column combined with an INT column must round-trip as well.
test("Inserting and selecting table: two columns and other data type, should support") {
  sql("CREATE TABLE if not exists boolean_table2(col1 INT, col2 BOOLEAN) STORED AS carbondata")
  val rows = Seq(1 -> true, 100 -> true, 1991 -> false,
    906 -> false, 218 -> false, 1011 -> false)
  rows.foreach { case (i, b) => sql(s"insert into boolean_table2 values($i,$b)") }
  checkAnswer(
    sql("select * from boolean_table2"),
    Seq(Row(1, true), Row(100, true), Row(1991, false),
      Row(906, false), Row(218, false), Row(1011, false))
  )
}
test("Inserting into table with another table: support boolean data type and other format") {
// Source table with the full set of supported column types, including
// two boolean columns; no sort columns.
sql(
s"""
| CREATE TABLE boolean_table(
| shortField SHORT,
| booleanField BOOLEAN,
| intField INT,
| bigintField LONG,
| doubleField DOUBLE,
| stringField STRING,
| timestampField TIMESTAMP,
| decimalField DECIMAL(18,2),
| dateField DATE,
| charField CHAR(5),
| floatField FLOAT,
| complexData ARRAY<STRING>,
| booleanField2 BOOLEAN
| )
| STORED AS carbondata
| TBLPROPERTIES('sort_columns'='')
""".stripMargin)
// Target with an identical schema, for a full "insert ... select *".
sql(
s"""
| CREATE TABLE boolean_table2(
| shortField SHORT,
| booleanField BOOLEAN,
| intField INT,
| bigintField LONG,
| doubleField DOUBLE,
| stringField STRING,
| timestampField TIMESTAMP,
| decimalField DECIMAL(18,2),
| dateField DATE,
| charField CHAR(5),
| floatField FLOAT,
| complexData ARRAY<STRING>,
| booleanField2 BOOLEAN
| )
| STORED AS carbondata
| TBLPROPERTIES('sort_columns'='')
""".stripMargin)
// Narrower targets used to verify projected and filtered inserts.
sql(
s"""
| CREATE TABLE boolean_table3(
| shortField SHORT,
| booleanField BOOLEAN,
| intField INT,
| stringField STRING,
| booleanField2 BOOLEAN
| )
| STORED AS carbondata
| TBLPROPERTIES('sort_columns'='')
""".stripMargin)
sql(
s"""
| CREATE TABLE boolean_table4(
| shortField SHORT,
| booleanField BOOLEAN,
| intField INT,
| stringField STRING,
| booleanField2 BOOLEAN
| )
| STORED AS carbondata
| TBLPROPERTIES('sort_columns'='')
""".stripMargin)
// Locate the CSV fixture relative to the repository root.
val rootPath = new File(this.getClass.getResource("/").getPath
+ "../../../..").getCanonicalPath
val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
sql(
s"""
| LOAD DATA LOCAL INPATH '${storeLocation}'
| INTO TABLE boolean_table
| options('FILEHEADER'='shortField,booleanField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData,booleanField2')
""".stripMargin)
// Full copy, projected copy, and filtered projected copy.
sql("insert into boolean_table2 select * from boolean_table")
sql("insert into boolean_table3 select shortField,booleanField,intField,stringField,booleanField2 from boolean_table")
sql("insert into boolean_table4 select shortField,booleanField,intField,stringField,booleanField2 from boolean_table where shortField > 3")
// All three targets must reflect the boolean values from the CSV.
checkAnswer(
sql("select booleanField,intField from boolean_table2"),
Seq(Row(true, 10), Row(false, 17), Row(false, 11),
Row(true, 10), Row(true, 10), Row(true, 14),
Row(false, 10), Row(false, 10), Row(false, 16), Row(false, 10))
)
checkAnswer(
sql("select booleanField,intField from boolean_table3"),
Seq(Row(true, 10), Row(false, 17), Row(false, 11),
Row(true, 10), Row(true, 10), Row(true, 14),
Row(false, 10), Row(false, 10), Row(false, 16), Row(false, 10))
)
checkAnswer(
sql("select booleanField,intField from boolean_table4"),
Seq(Row(false, 17), Row(false, 16))
)
checkAnswer(
sql("select booleanField,intField,booleanField2 from boolean_table2"),
Seq(Row(true, 10, true), Row(false, 17, true), Row(false, 11, true),
Row(true, 10, true), Row(true, 10, true), Row(true, 14, false),
Row(false, 10, false), Row(false, 10, false), Row(false, 16, false), Row(false, 10, false))
)
checkAnswer(
sql("select booleanField,intField,booleanField2 from boolean_table3"),
Seq(Row(true, 10, true), Row(false, 17, true), Row(false, 11, true),
Row(true, 10, true), Row(true, 10, true), Row(true, 14, false),
Row(false, 10, false), Row(false, 10, false), Row(false, 16, false), Row(false, 10, false))
)
}
test("Inserting with the order of data type in source and target table columns being different") {
// Source table: booleanField is the second column.
sql(
s"""
| CREATE TABLE boolean_table(
| shortField SHORT,
| booleanField BOOLEAN,
| intField INT,
| bigintField LONG,
| doubleField DOUBLE,
| stringField STRING,
| timestampField TIMESTAMP,
| decimalField DECIMAL(18,2),
| dateField DATE,
| charField CHAR(5),
| floatField FLOAT,
| complexData ARRAY<STRING>,
| booleanField2 BOOLEAN
| )
| STORED AS carbondata
| TBLPROPERTIES('sort_columns'='')
""".stripMargin)
// Target table: the first two columns are swapped relative to the
// source (booleanField first, shortField second).
sql(
s"""
| CREATE TABLE boolean_table2(
| booleanField BOOLEAN,
| shortField SHORT,
| intField INT,
| bigintField LONG,
| doubleField DOUBLE,
| stringField STRING,
| timestampField TIMESTAMP,
| decimalField DECIMAL(18,2),
| dateField DATE,
| charField CHAR(5),
| floatField FLOAT,
| complexData ARRAY<STRING>,
| booleanField2 BOOLEAN
| )
| STORED AS carbondata
| TBLPROPERTIES('sort_columns'='')
""".stripMargin)
// Locate the CSV fixture relative to the repository root.
val rootPath = new File(this.getClass.getResource("/").getPath
+ "../../../..").getCanonicalPath
val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
sql(
s"""
| LOAD DATA LOCAL INPATH '${storeLocation}'
| INTO TABLE boolean_table
| options('FILEHEADER'='shortField,booleanField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData,booleanField2')
""".stripMargin)
// Positional insert: source shortField lands in target booleanField,
// hence every booleanField in the expected answer is true.
sql("insert into boolean_table2 select * from boolean_table")
checkAnswer(
sql("select booleanField,intField from boolean_table2"),
Seq(Row(true, 10), Row(true, 17), Row(true, 11),
Row(true, 10), Row(true, 10), Row(true, 14),
Row(true, 10), Row(true, 10), Row(true, 16), Row(true, 10))
)
checkAnswer(
sql("select booleanField,intField,booleanField2 from boolean_table2"),
Seq(Row(true, 10, true), Row(true, 17, true), Row(true, 11, true),
Row(true, 10, true), Row(true, 10, true), Row(true, 14, false),
Row(true, 10, false), Row(true, 10, false), Row(true, 16, false), Row(true, 10, false))
)
}
ignore("Inserting with the number of data type in source and target table columns being different, source more than target") {
sql(
s"""
| CREATE TABLE boolean_table(
| shortField SHORT,
| booleanField BOOLEAN,
| intField INT,
| bigintField LONG,
| doubleField DOUBLE,
| stringField STRING,
| timestampField TIMESTAMP,
| decimalField DECIMAL(18,2),
| dateField DATE,
| charField CHAR(5),
| floatField FLOAT,
| complexData ARRAY<STRING>,
| booleanField2 BOOLEAN
| )
| STORED AS carbondata
| TBLPROPERTIES('sort_columns'='')
""".stripMargin)
sql(
s"""
| CREATE TABLE boolean_table2(
| shortField SHORT,
| booleanField BOOLEAN,
| intField INT,
| bigintField LONG,
| doubleField DOUBLE,
| stringField STRING,
| timestampField TIMESTAMP,
| decimalField DECIMAL(18,2),
| dateField DATE,
| charField CHAR(5),
| floatField FLOAT,
| booleanField2 BOOLEAN
| )
| STORED AS carbondata
| TBLPROPERTIES('sort_columns'='')
""".stripMargin)
val rootPath = new File(this.getClass.getResource("/").getPath
+ "../../../..").getCanonicalPath
val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
sql(
s"""
| LOAD DATA LOCAL INPATH '${storeLocation}'
| INTO TABLE boolean_table
| options('FILEHEADER'='shortField,booleanField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData,booleanField2')
""".stripMargin)
sql("insert into boolean_table2 select * from boolean_table")
}
// INSERT-SELECT where the source has FEWER columns than the target must be
// rejected with a column-count mismatch error.
//
// Fixed: previously the table creation and data load were also wrapped inside
// intercept[Exception], so an unrelated setup failure would be caught and then
// fail the message assertion, obscuring the real cause. Setup now runs outside
// the intercept; only the mismatched insert is expected to throw.
test("Inserting with the number of data type in source and target table columns being different, source less than target") {
  // Source table: 12 columns (no booleanField2).
  sql(
    s"""
       | CREATE TABLE boolean_table(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>
       | )
       | STORED AS carbondata
       | TBLPROPERTIES('sort_columns'='')
     """.stripMargin)
  // Target table: 13 columns (adds booleanField2).
  sql(
    s"""
       | CREATE TABLE boolean_table2(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>,
       | booleanField2 BOOLEAN
       | )
       | STORED AS carbondata
       | TBLPROPERTIES('sort_columns'='')
     """.stripMargin)
  val rootPath = new File(this.getClass.getResource("/").getPath
    + "../../../..").getCanonicalPath
  val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
  sql(
    s"""
       | LOAD DATA LOCAL INPATH '${storeLocation}'
       | INTO TABLE boolean_table
       | options('FILEHEADER'='shortField,booleanField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData,booleanField2')
     """.stripMargin)
  // Only the column-count-mismatched insert itself is expected to fail.
  val exception_insert: Exception = intercept[Exception] {
    sql("insert into boolean_table2 select * from boolean_table")
  }
  assert(exception_insert.getMessage.contains("Cannot insert into target table because number of columns mismatch"))
}
// Cross-format insert: carbon -> hive. Loads the CSV into a carbon table, copies
// a filtered subset into a plain Hive table, then checks both tables and a
// correlated-EXISTS join between them.
test("Inserting into Hive table from carbon table: support boolean data type and other format") {
  sql(
    s"""
       | CREATE TABLE carbon_table(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>,
       | booleanField2 BOOLEAN
       | )
       | STORED AS carbondata
       | TBLPROPERTIES('sort_columns'='')
     """.stripMargin)
  // No STORED AS clause: defaults to a plain Hive-format table.
  sql(
    s"""
       | CREATE TABLE hive_table(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>,
       | booleanField2 BOOLEAN
       | )
     """.stripMargin)
  val rootPath = new File(this.getClass.getResource("/").getPath
    + "../../../..").getCanonicalPath
  val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
  sql(
    s"""
       | LOAD DATA LOCAL INPATH '${storeLocation}'
       | INTO TABLE carbon_table
       | options('FILEHEADER'='shortField,booleanField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData,booleanField2')
     """.stripMargin)
  // Copy only the three rows with shortField = 1 and booleanField = true.
  sql("insert into hive_table select * from carbon_table where shortField = 1 and booleanField = true")
  // Source keeps all ten rows.
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from carbon_table"),
    Seq(Row(true, 10, true), Row(false, 17, true), Row(false, 11, true),
      Row(true, 10, true), Row(true, 10, true), Row(true, 14, false),
      Row(false, 10, false), Row(false, 10, false), Row(false, 16, false), Row(false, 10, false))
  )
  // Target received only the filtered subset.
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from hive_table"),
    Seq(Row(true, 10, true), Row(true, 10, true), Row(true, 10, true))
  )
  // Correlated EXISTS across formats: every carbon row whose intField (10) also
  // appears in hive_table qualifies.
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from carbon_table where exists (select booleanField,intField,booleanField2 " +
      "from hive_table where carbon_table.intField=hive_table.intField)"),
    Seq(Row(true, 10, true), Row(true, 10, true), Row(true, 10, true), Row(false, 10, false), Row(false, 10, false), Row(false, 10, false))
  )
  sql("drop table if exists carbon_table")
  sql("drop table if exists hive_table")
}
// Cross-format insert in the opposite direction: hive -> carbon. The Hive table
// is CSV-delimited so LOAD DATA can ingest the raw file without FILEHEADER options.
// NOTE(review): unlike the previous test this one does not drop its tables at the
// end — presumably a shared afterEach/teardown cleans them up; confirm.
test("Inserting into carbon table from Hive table: support boolean data type and other format") {
  sql(
    s"""
       | CREATE TABLE hive_table(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>,
       | booleanField2 BOOLEAN
       | )
       | ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
     """.stripMargin)
  sql(
    s"""
       | CREATE TABLE carbon_table(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>,
       | booleanField2 BOOLEAN
       | )
       | STORED AS carbondata
       | TBLPROPERTIES('sort_columns'='')
     """.stripMargin)
  val rootPath = new File(this.getClass.getResource("/").getPath
    + "../../../..").getCanonicalPath
  val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
  // getUpdatedFilePath normalizes the path for the current filesystem/platform.
  sql(
    s"""
       | LOAD DATA LOCAL INPATH '${FileFactory.getUpdatedFilePath(storeLocation)}'
       | INTO TABLE hive_table
     """.stripMargin)
  sql("insert into carbon_table select * from hive_table where shortField = 1 and booleanField = true")
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from hive_table"),
    Seq(Row(true, 10, true), Row(false, 17, true), Row(false, 11, true),
      Row(true, 10, true), Row(true, 10, true), Row(true, 14, false),
      Row(false, 10, false), Row(false, 10, false), Row(false, 16, false), Row(false, 10, false))
  )
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from carbon_table"),
    Seq(Row(true, 10, true), Row(true, 10, true), Row(true, 10, true))
  )
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from hive_table where exists (select booleanField,intField,booleanField2 " +
      "from carbon_table where hive_table.intField=carbon_table.intField)"),
    Seq(Row(true, 10, true), Row(true, 10, true), Row(true, 10, true), Row(false, 10, false), Row(false, 10, false), Row(false, 10, false))
  )
}
// INSERT OVERWRITE on a single-boolean-column table: each overwrite replaces the
// full table contents, so a single row remains after every statement. Also checks
// literal forms: True/false/null and the string 't' (parsed as true).
// NOTE(review): boolean_one_column is created elsewhere (presumably in a shared
// beforeEach) — confirm.
test("Inserting overwrite: one column boolean and many rows, should support") {
  sql("insert into boolean_one_column values(True)")
  sql("insert overwrite table boolean_one_column values(false)")
  checkAnswer(
    sql("select * from boolean_one_column"),
    Seq(Row(false))
  )
  sql("insert overwrite table boolean_one_column values(true)")
  checkAnswer(
    sql("select * from boolean_one_column"),
    Seq(Row(true))
  )
  sql("insert overwrite table boolean_one_column values(null)")
  checkAnswer(
    sql("select * from boolean_one_column"),
    Seq(Row(null))
  )
  sql("insert overwrite table boolean_one_column values(true)")
  checkAnswer(
    sql("select * from boolean_one_column"),
    Seq(Row(true))
  )
  // The string literal 't' is accepted as boolean true.
  sql("insert overwrite table boolean_one_column values('t')")
  checkAnswer(
    sql("select * from boolean_one_column"),
    Seq(Row(true))
  )
}
// Overwriting a one-column table with two-value tuples: with bad-record handling
// enabled, the surplus value is dropped and only the first value is kept.
test("Inserting overwrite: create one column boolean table and insert two columns") {
  // send to old flow, as for one column two values are inserted.
  CarbonProperties.getInstance()
    .addProperty(CarbonCommonConstants.CARBON_ENABLE_BAD_RECORD_HANDLING_FOR_INSERT, "true")
  sql("insert overwrite table boolean_one_column values(true,false)")
  checkAnswer(
    sql("select * from boolean_one_column"),
    Seq(Row(true))
  )
  sql("insert overwrite table boolean_one_column values(True)")
  sql("insert overwrite table boolean_one_column values(false,true)")
  checkAnswer(
    sql("select * from boolean_one_column"),
    Seq(Row(false))
  )
  // Restore the property so later tests see the default behavior.
  CarbonProperties.getInstance()
    .addProperty(CarbonCommonConstants.CARBON_ENABLE_BAD_RECORD_HANDLING_FOR_INSERT,
      CarbonCommonConstants.CARBON_ENABLE_BAD_RECORD_HANDLING_FOR_INSERT_DEFAULT)
}
// INSERT OVERWRITE on a two-boolean-column table, exercising mixed-case literals
// ('True', FALSE, 'false') and a null in the first column.
test("Inserting overwrite: two columns boolean and many rows, should support") {
  sql("CREATE TABLE if not exists boolean_table2(col1 BOOLEAN, col2 BOOLEAN) STORED AS carbondata")
  sql("insert overwrite table boolean_table2 values(true,true)")
  checkAnswer(
    sql("select * from boolean_table2"),
    Seq(Row(true, true))
  )
  sql("insert overwrite table boolean_table2 values(True,false)")
  checkAnswer(
    sql("select * from boolean_table2"),
    Seq(Row(true, false))
  )
  sql("insert overwrite table boolean_table2 values(FALSE,false)")
  sql("insert overwrite table boolean_table2 values('false',false)")
  checkAnswer(
    sql("select * from boolean_table2"),
    Seq(Row(false, false))
  )
  sql("insert overwrite table boolean_table2 values(null,true)")
  checkAnswer(
    sql("select * from boolean_table2"),
    Seq(Row(null, true))
  )
}
// INSERT OVERWRITE on a mixed-type (INT, BOOLEAN) table: each overwrite replaces
// the single stored row.
test("Inserting overwrite: two columns and other data type, should support") {
  sql("CREATE TABLE if not exists boolean_table2(col1 INT, col2 BOOLEAN) STORED AS carbondata")
  sql("insert overwrite table boolean_table2 values(1,true)")
  checkAnswer(
    sql("select * from boolean_table2"),
    Seq(Row(1, true))
  )
  sql("insert overwrite table boolean_table2 values(100,true)")
  sql("insert overwrite table boolean_table2 values(1991,false)")
  checkAnswer(
    sql("select * from boolean_table2"),
    Seq(Row(1991, false))
  )
  sql("insert overwrite table boolean_table2 values(906,false)")
  checkAnswer(
    sql("select * from boolean_table2"),
    Seq(Row(906, false))
  )
  sql("insert overwrite table boolean_table2 values(218,false)")
  checkAnswer(
    sql("select * from boolean_table2"),
    Seq(Row(218, false))
  )
  sql("insert overwrite table boolean_table2 values(1011,true)")
  checkAnswer(
    sql("select * from boolean_table2"),
    Seq(Row(1011, true))
  )
}
// INSERT OVERWRITE from one carbon table into others: full-width copy into
// boolean_table2, projected copies into the narrower boolean_table3/4 (the
// latter filtered on shortField > 3). Expected rows mirror the CSV contents.
test("Inserting overwrite: overwrite table with another table: support boolean data type and other format") {
  sql(
    s"""
       | CREATE TABLE boolean_table(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>,
       | booleanField2 BOOLEAN
       | )
       | STORED AS carbondata
       | TBLPROPERTIES('sort_columns'='')
     """.stripMargin)
  // Same 13-column schema as the source, same column order.
  sql(
    s"""
       | CREATE TABLE boolean_table2(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>,
       | booleanField2 BOOLEAN
       | )
       | STORED AS carbondata
       | TBLPROPERTIES('sort_columns'='')
     """.stripMargin)
  // Narrow 5-column targets for projected overwrites.
  sql(
    s"""
       | CREATE TABLE boolean_table3(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | stringField STRING,
       | booleanField2 BOOLEAN
       | )
       | STORED AS carbondata
       | TBLPROPERTIES('sort_columns'='')
     """.stripMargin)
  sql(
    s"""
       | CREATE TABLE boolean_table4(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | stringField STRING,
       | booleanField2 BOOLEAN
       | )
       | STORED AS carbondata
       | TBLPROPERTIES('sort_columns'='')
     """.stripMargin)
  val rootPath = new File(this.getClass.getResource("/").getPath
    + "../../../..").getCanonicalPath
  val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
  sql(
    s"""
       | LOAD DATA LOCAL INPATH '${storeLocation}'
       | INTO TABLE boolean_table
       | options('FILEHEADER'='shortField,booleanField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData,booleanField2')
     """.stripMargin)
  sql("insert overwrite table boolean_table2 select * from boolean_table")
  sql("insert overwrite table boolean_table3 select shortField,booleanField,intField,stringField,booleanField2 from boolean_table")
  sql("insert overwrite table boolean_table4 select shortField,booleanField,intField,stringField,booleanField2 from boolean_table where shortField > 3")
  // Full copy: all ten rows with original boolean values.
  checkAnswer(
    sql("select booleanField,intField from boolean_table2"),
    Seq(Row(true, 10), Row(false, 17), Row(false, 11),
      Row(true, 10), Row(true, 10), Row(true, 14),
      Row(false, 10), Row(false, 10), Row(false, 16), Row(false, 10))
  )
  checkAnswer(
    sql("select booleanField,intField from boolean_table3"),
    Seq(Row(true, 10), Row(false, 17), Row(false, 11),
      Row(true, 10), Row(true, 10), Row(true, 14),
      Row(false, 10), Row(false, 10), Row(false, 16), Row(false, 10))
  )
  // Filtered copy: only the two rows with shortField > 3.
  checkAnswer(
    sql("select booleanField,intField from boolean_table4"),
    Seq(Row(false, 17), Row(false, 16))
  )
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from boolean_table2"),
    Seq(Row(true, 10, true), Row(false, 17, true), Row(false, 11, true),
      Row(true, 10, true), Row(true, 10, true), Row(true, 14, false),
      Row(false, 10, false), Row(false, 10, false), Row(false, 16, false), Row(false, 10, false))
  )
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from boolean_table3"),
    Seq(Row(true, 10, true), Row(false, 17, true), Row(false, 11, true),
      Row(true, 10, true), Row(true, 10, true), Row(true, 14, false),
      Row(false, 10, false), Row(false, 10, false), Row(false, 16, false), Row(false, 10, false))
  )
}
// INSERT OVERWRITE across formats: carbon -> hive. Mirrors the plain-insert
// variant above but uses OVERWRITE semantics on the Hive target.
test("Inserting overwrite: overwrite table Hive table from carbon table: support boolean data type and other format") {
  sql(
    s"""
       | CREATE TABLE carbon_table(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>,
       | booleanField2 BOOLEAN
       | )
       | STORED AS carbondata
       | TBLPROPERTIES('sort_columns'='')
     """.stripMargin)
  // Plain Hive-format table (no STORED AS clause).
  sql(
    s"""
       | CREATE TABLE hive_table(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>,
       | booleanField2 BOOLEAN
       | )
     """.stripMargin)
  val rootPath = new File(this.getClass.getResource("/").getPath
    + "../../../..").getCanonicalPath
  val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
  sql(
    s"""
       | LOAD DATA LOCAL INPATH '${storeLocation}'
       | INTO TABLE carbon_table
       | options('FILEHEADER'='shortField,booleanField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData,booleanField2')
     """.stripMargin)
  // Overwrite the Hive table with the three rows matching the filter.
  sql("insert overwrite table hive_table select * from carbon_table where shortField = 1 and booleanField = true")
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from carbon_table"),
    Seq(Row(true, 10, true), Row(false, 17, true), Row(false, 11, true),
      Row(true, 10, true), Row(true, 10, true), Row(true, 14, false),
      Row(false, 10, false), Row(false, 10, false), Row(false, 16, false), Row(false, 10, false))
  )
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from hive_table"),
    Seq(Row(true, 10, true), Row(true, 10, true), Row(true, 10, true))
  )
  // Correlated EXISTS across formats on the shared intField value (10).
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from carbon_table where exists (select booleanField,intField,booleanField2 " +
      "from hive_table where carbon_table.intField=hive_table.intField)"),
    Seq(Row(true, 10, true), Row(true, 10, true), Row(true, 10, true), Row(false, 10, false), Row(false, 10, false), Row(false, 10, false))
  )
}
// INSERT OVERWRITE across formats in the opposite direction: hive -> carbon.
// The Hive source is CSV-delimited so LOAD DATA ingests the raw file directly.
test("Inserting overwrite: overwrite table carbon table from Hive table: support boolean data type and other format") {
  sql(
    s"""
       | CREATE TABLE hive_table(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>,
       | booleanField2 BOOLEAN
       | )
       | ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
     """.stripMargin)
  sql(
    s"""
       | CREATE TABLE carbon_table(
       | shortField SHORT,
       | booleanField BOOLEAN,
       | intField INT,
       | bigintField LONG,
       | doubleField DOUBLE,
       | stringField STRING,
       | timestampField TIMESTAMP,
       | decimalField DECIMAL(18,2),
       | dateField DATE,
       | charField CHAR(5),
       | floatField FLOAT,
       | complexData ARRAY<STRING>,
       | booleanField2 BOOLEAN
       | )
       | STORED AS carbondata
       | TBLPROPERTIES('sort_columns'='')
     """.stripMargin)
  val rootPath = new File(this.getClass.getResource("/").getPath
    + "../../../..").getCanonicalPath
  val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
  // getUpdatedFilePath normalizes the path for the current filesystem/platform.
  sql(
    s"""
       | LOAD DATA LOCAL INPATH '${FileFactory.getUpdatedFilePath(storeLocation)}'
       | INTO TABLE hive_table
     """.stripMargin)
  sql("insert overwrite table carbon_table select * from hive_table where shortField = 1 and booleanField = true")
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from hive_table"),
    Seq(Row(true, 10, true), Row(false, 17, true), Row(false, 11, true),
      Row(true, 10, true), Row(true, 10, true), Row(true, 14, false),
      Row(false, 10, false), Row(false, 10, false), Row(false, 16, false), Row(false, 10, false))
  )
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from carbon_table"),
    Seq(Row(true, 10, true), Row(true, 10, true), Row(true, 10, true))
  )
  checkAnswer(
    sql("select booleanField,intField,booleanField2 from hive_table where exists (select booleanField,intField,booleanField2 " +
      "from carbon_table where hive_table.intField=carbon_table.intField)"),
    Seq(Row(true, 10, true), Row(true, 10, true), Row(true, 10, true), Row(false, 10, false), Row(false, 10, false), Row(false, 10, false))
  )
}
// Bad-record handling with a BOOLEAN SORT_COLUMNS column and auto load merge
// enabled: valid literals (true/false in any case, 'true'/'false', 't'/'f')
// parse as booleans; unparseable strings ('tr', 'truEe', 'falSee') and null
// become null. Auto load merge compacts the 14 single-row segments.
test("Inserting table with bad records, and SORT_COLUMNS is boolean column") {
  CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE, "true")
  sql("DROP TABLE IF EXISTS carbon_table")
  sql(
    s"""
       | CREATE TABLE if not exists carbon_table(
       | cc BOOLEAN
       | )
       | STORED AS carbondata
       | TBLPROPERTIES('SORT_COLUMNS'='cc')
     """.stripMargin)
  // Valid true literals in several spellings.
  sql("insert into carbon_table values(true)")
  sql("insert into carbon_table values(True)")
  sql("insert into carbon_table values(TRUE)")
  sql("insert into carbon_table values('true')")
  // Valid false literals.
  sql("insert into carbon_table values(False)")
  sql("insert into carbon_table values(false)")
  sql("insert into carbon_table values(FALSE)")
  sql("insert into carbon_table values('false')")
  // Bad records and null — expected to surface as null.
  sql("insert into carbon_table values('tr')")
  sql("insert into carbon_table values(null)")
  sql("insert into carbon_table values('truEe')")
  sql("insert into carbon_table values('falSee')")
  // Single-letter abbreviations parse as booleans.
  sql("insert into carbon_table values('t')")
  sql("insert into carbon_table values('f')")
  checkAnswer(
    sql("select * from carbon_table"),
    Seq(
      Row(true), Row(true), Row(true), Row(true),
      Row(false), Row(false), Row(false), Row(false),
      Row(true), Row(false), Row(null), Row(null), Row(null), Row(null)))
  // Restore the auto-load-merge default for subsequent tests.
  CarbonProperties.getInstance()
    .addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE,
      CarbonCommonConstants.DEFAULT_ENABLE_AUTO_LOAD_MERGE)
}
}
| jackylk/incubator-carbondata | integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesInsertTest.scala | Scala | apache-2.0 | 36,996 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.descriptors
import org.apache.flink.table.descriptors.MetadataValidator.{METADATA_COMMENT, METADATA_CREATION_TIME, METADATA_LAST_ACCESS_TIME, METADATA_PROPERTY_VERSION}
/**
  * Validator for [[Metadata]].
  *
  * Checks the optional metadata descriptor properties: the property version
  * (a non-negative integer), the comment (a string), and the creation and
  * last-access times (long values — presumably epoch timestamps; confirm
  * against the producers of these properties).
  */
class MetadataValidator extends DescriptorValidator {

  override def validate(properties: DescriptorProperties): Unit = {
    // NOTE(review): the boolean argument presumably marks each key as optional —
    // confirm against DescriptorProperties' validate* signatures.
    properties.validateInt(METADATA_PROPERTY_VERSION, true, 0, Integer.MAX_VALUE)
    properties.validateString(METADATA_COMMENT, true)
    properties.validateLong(METADATA_CREATION_TIME, true)
    properties.validateLong(METADATA_LAST_ACCESS_TIME, true)
  }
}
/** Property keys validated by [[MetadataValidator]]. */
object MetadataValidator {

  /** Version of the metadata property format. */
  val METADATA_PROPERTY_VERSION = "metadata.property-version"
  /** Free-form comment attached to the described object. */
  val METADATA_COMMENT = "metadata.comment"
  /** Creation time (long value; presumably epoch millis — confirm). */
  val METADATA_CREATION_TIME = "metadata.creation-time"
  /** Last access time (long value; presumably epoch millis — confirm). */
  val METADATA_LAST_ACCESS_TIME = "metadata.last-access-time"
}
| mylog00/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/descriptors/MetadataValidator.scala | Scala | apache-2.0 | 1,690 |
package org.eigengo.scalalp.streams
import java.io.InputStream
import java.util.zip.ZipEntry
import scala.io.{Codec, Source}
/**
 * Trains a 20-newsgroups text classifier from a zipped corpus and saves the
 * resulting model to ``20news.classify``.
 *
 * Fixed: ``main`` used Scala 2 procedure syntax (``def main(args: ...) { }``),
 * which is deprecated and removed in Scala 3; it now declares ``: Unit =``
 * explicitly.
 */
object TwentyNewsGroupsTrain {

  import nak.NakContext._
  import nak.data._
  import nak.liblinear.LiblinearConfig
  import java.io.File

  /**
   * Converts one zip entry into a labelled training example, if the entry is a
   * message file.
   *
   * Entry names have the shape ``top/label/fileName``; only entries with
   * exactly three non-empty path components (i.e. actual files, not directory
   * entries, whose names end in '/') produce an example. The directory name is
   * the label, the file body the features, and the file name the id.
   *
   * The body is read as ISO-8859-1: the corpus pre-dates UTF-8 and contains
   * raw 8-bit bytes.
   */
  def processEntry(entry: ZipEntry, contents: InputStream): Option[Example[String, String]] = {
    val names = entry.getName.split('/')
    if (names.length == 3 && !names(2).isEmpty) {
      val text = Source.fromInputStream(contents)(Codec.ISO8859).mkString
      Some(Example(names(1), text, names(2)))
    } else None
  }

  /** Trains the classifier on the bundled training zip and saves it to disk. */
  def main(args: Array[String]): Unit = {
    // Exercise scaffolding: intentionally unimplemented variant of processEntry.
    def fromLabelled(top: String)(entry: ZipEntry, contents: InputStream): Option[Example[String, String]] = ???

    // Example stopword set (you should use a more extensive list for actual classifiers).
    val stopwords = Set("the", "a", "an", "of", "in", "for", "by", "on")

    // Train
    print("Training... ")
    val zipFile = new ZipArchive(new File(getClass.getResource("/20news-bydate-train.zip").toURI))
    /*
    The zip file contains top-level directory 20news-bydate-train with sub-directories like ``alt.atheism``,
    ``comp.graphics``, ..., ``sci.space`` and such like. Inside each of those directories, there are the newsgroup
    messages, each in its own file. We will use the directory name as the label, the body of the message as the
    features, and the file name as the id.

    So, for

    20news-bydate-train
    +--foo
    |  +--message-1.txt (contents=1)
    |  +--message-2.txt (contents=2)
    +--bar
    |  +--message-x.txt (contents=x)
    +--baz
       +--message-y.txt (contents=y)

    trainingExamples should be

    List(
      Example(foo, 1, message-1.txt), Example(foo, 2, message-2.txt),
      Example(bar, x, message-x.txt),
      Example(baz, y, message-y.txt))

    Hint: use the ZipFile#flatMap, giving it appropriate operation.
    */
    val trainingExamples = zipFile.flatMap(processEntry)

    // Configure and train the liblinear classifier over bag-of-words features,
    // hashing the feature space to 50000 buckets.
    val config = LiblinearConfig(cost = 5.0, eps = 0.01)
    val featurizer = new BowFeaturizer(stopwords)
    val classifier = trainClassifierHashed(config, featurizer, trainingExamples, 50000)
    saveClassifier(classifier, "20news.classify")
    println("done.")
  }

}
| eigengo/scala-launchpad | src/main/scala/org/eigengo/scalalp/streams/TwentyNewsGroupsTrain.scala | Scala | apache-2.0 | 2,255 |
package com.typesafe.slick.testkit.tests
import org.junit.Assert._
import scala.slick.model._
import scala.slick.ast.ColumnOption
import scala.slick.jdbc.meta.MTable
import scala.slick.jdbc.meta
import com.typesafe.slick.testkit.util.{JdbcTestDB, TestkitTest}
/**
 * Verifies that the driver's reverse-engineered meta model (`createModel`)
 * faithfully reflects tables created via Slick DDL: column counts, primary
 * keys, foreign keys, auto-increment flags, and filtering of dangling
 * foreign keys.
 */
class MetaModelTest extends TestkitTest[JdbcTestDB] {
  import tdb.profile.simple._

  def test { ifCap(jcap.createModel){
    // "categories": auto-increment PK plus a named index on "name".
    class Categories(tag: Tag) extends Table[(Int, String)](tag, "categories") {
      def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
      def name = column[String]("name")
      def * = (id, name)
      def idx = index("IDX_NAME",name)
    }
    val categories = TableQuery[Categories]

    // "posts": composite primary key and a foreign key into categories.
    class Posts(tag: Tag) extends Table[(Int, String, Option[Int])](tag, "posts") {
      def id = column[Int]("id")
      def title = column[String]("title")
      def category = column[Option[Int]]("category")
      def * = (id, title, category)
      def pk = primaryKey("posts_pk", (id,title))
      def categoryFK = foreignKey("category_fk", category, categories)(_.id)
    }
    val posts = TableQuery[Posts]

    val ddl = posts.ddl ++ categories.ddl
    ddl.create

    // The model built from the live database must be internally consistent.
    tdb.profile.createModel.assertConsistency
    val tables = tdb.profile.getTables.list
    def createModel(tables:Seq[MTable]): Model = meta.createModel(tables,tdb.profile)
    createModel(tables).assertConsistency
    ;{
      // checks that createModel filters out foreign keys pointing out
      // (POSTS alone: its FK target CATEGORIES is absent, so the FK is dropped)
      val model = createModel(tables.filter(_.name.name.toUpperCase=="POSTS"))
      model.assertConsistency
      assertEquals( 0, model.tables.map(_.foreignKeys.size).sum )
    }
    createModel(tables.filter(_.name.name.toUpperCase=="CATEGORIES")).assertConsistency
    try{
      // checks that assertConsistency fails when manually feeding the model with inconsistent tables
      // (POSTS keeps its FK here because the full model was built first, then filtered)
      Model( createModel(tables).tables.filter(_.name.table.toUpperCase=="POSTS") ).assertConsistency
      fail("Consistency assertion should have failed")
    } catch {
      case _:AssertionError =>
    }

    // check that the model matches the table classes
    val model = tdb.profile.createModel
    assertEquals( model.tables.toString, 2, model.tables.size )
    ;{
      val categories = model.tables.filter(_.name.table.toUpperCase=="CATEGORIES").head
      assertEquals( 2, categories.columns.size )
      // The auto-increment PK is reported as a column option, not a table-level key.
      assertEquals( None, categories.primaryKey )
      assertEquals( 0, categories.foreignKeys.size )
      assertEquals( List("id"), categories.columns.filter(_.options.exists(_ == ColumnOption.PrimaryKey)).map(_.name).toList )
      //assertEquals( categories.indices.toString, 1, categories.indices.size ) // Removed until made sure all dbs actually produce indices model
      //assertEquals( "IDX_NAME", categories.indices.head.name.get.toUpperCase )
    }
    ;{
      val posts = model.tables.filter(_.name.table.toUpperCase=="POSTS").head
      assertEquals( 3, posts.columns.size )
      assertEquals( posts.indices.toString, 0, posts.indices.size )
      assertEquals( 2, posts.primaryKey.get.columns.size )
      assertEquals( 1, posts.foreignKeys.size )
      // SQLite does not report FK constraint names, so skip the name check there.
      if(tdb.profile != slick.driver.SQLiteDriver){
        assertEquals( "CATEGORY_FK", posts.foreignKeys.head.name.get.toUpperCase )
      }
      assert( !posts.columns.exists(_.options.exists(_ == ColumnOption.PrimaryKey)) )
    }
  }}
}
| dvinokurov/slick | slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/MetaModelTest.scala | Scala | bsd-2-clause | 3,349 |
/*******************************************************************************
Copyright (c) 2013, KAIST, S-Core.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.DOMHtml
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T}
import kr.ac.kaist.jsaf.analysis.typing.models._
import org.w3c.dom.Node
import org.w3c.dom.Element
import kr.ac.kaist.jsaf.analysis.cfg.CFG
import kr.ac.kaist.jsaf.analysis.typing.models.AbsConstValue
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
// Limitation : we do not support multiple documents by <iframe> in one execution
object HTMLIFrameElement extends DOM {
  // Base name used to derive the system locations below.
  private val name = "HTMLIFrameElement"

  /* predefined locations */
  val loc_cons = newSystemRecentLoc(name + "Cons")
  val loc_proto = newSystemRecentLoc(name + "Proto")

  /* constructor: abstract property map of the HTMLIFrameElement constructor function */
  private val prop_cons: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Function")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(BoolTrue))),
    ("@hasinstance", AbsConstValue(PropValue(Value(NullTop)))),
    ("length", AbsConstValue(PropValue(ObjectValue(Value(AbsNumber.alpha(0)), F, F, F)))),
    ("prototype", AbsConstValue(PropValue(ObjectValue(Value(loc_proto), F, F, F))))
  )

  /* prototype: inherits from HTMLElement's prototype */
  private val prop_proto: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(HTMLElement.loc_proto), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(BoolTrue)))
  )

  /* global: exposes the constructor as a writable/configurable global property */
  private val prop_global: List[(String, AbsProperty)] = List(
    (name, AbsConstValue(PropValue(ObjectValue(loc_cons, T, F, T))))
  )

  // Initial (location, property-list) pairs installed into the abstract heap.
  def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
    (loc_cons, prop_cons), (loc_proto, prop_proto), (GlobalLoc, prop_global)
  )

  // HTMLIFrameElement defines no built-in functions, so all four maps are empty.
  def getSemanticMap(): Map[String, SemanticFun] = {
    Map()
  }
  def getPreSemanticMap(): Map[String, SemanticFun] = {
    Map()
  }
  def getDefMap(): Map[String, AccessFun] = {
    Map()
  }
  def getUseMap(): Map[String, AccessFun] = {
    Map()
  }
  /* semantics */
  // no function

  /* instance */
  // Each instance gets a fresh recent location in the abstract heap.
  override def getInstance(cfg: CFG): Option[Loc] = Some(newRecentLoc())

  /* list of properties in the instance object, built from a concrete DOM node */
  override def getInsList(node: Node): List[(String, PropValue)] = node match {
    case e: Element =>
      // This object has all properties of the HTMLElement object
      HTMLElement.getInsList(node) ++ List(
        ("@class", PropValue(AbsString.alpha("Object"))),
        ("@proto", PropValue(ObjectValue(loc_proto, F, F, F))),
        ("@extensible", PropValue(BoolTrue)),
        // DOM Level 1: each attribute is abstracted to its concrete string value,
        // writable/enumerable/configurable.
        ("align", PropValue(ObjectValue(AbsString.alpha(e.getAttribute("align")), T, T, T))),
        ("frameBorder", PropValue(ObjectValue(AbsString.alpha(e.getAttribute("frameBorder")), T, T, T))),
        ("height", PropValue(ObjectValue(AbsString.alpha(e.getAttribute("height")), T, T, T))),
        ("longDesc", PropValue(ObjectValue(AbsString.alpha(e.getAttribute("longDesc")), T, T, T))),
        ("marginHeight", PropValue(ObjectValue(AbsString.alpha(e.getAttribute("marginHeight")), T, T, T))),
        ("marginWidth", PropValue(ObjectValue(AbsString.alpha(e.getAttribute("marginWidth")), T, T, T))),
        ("name", PropValue(ObjectValue(AbsString.alpha(e.getAttribute("name")), T, T, T))),
        ("scrolling", PropValue(ObjectValue(AbsString.alpha(e.getAttribute("scrolling")), T, T, T))),
        ("src", PropValue(ObjectValue(AbsString.alpha(e.getAttribute("src")), T, T, T))),
        ("width", PropValue(ObjectValue(AbsString.alpha(e.getAttribute("width")), T, T, T))),
        // TODO: 'contentWindow' should be a window object of the nested document in the <iframe> tag
        ("contentWindow", PropValue(ObjectValue(NullTop, F, T, T)))
      )
    // TODO: 'contentDocument' in DOM Level 2
    case _ => {
      // Non-element nodes cannot back an HTMLIFrameElement instance.
      System.err.println("* Warning: " + node.getNodeName + " cannot have instance objects.")
      List()
    }
  }

  // Variant used when property values are supplied directly (no DOM node),
  // e.g. for dynamically created elements; mirrors the element-backed list above.
  def getInsList(align: PropValue, frameBorder: PropValue, height: PropValue, longDesc: PropValue, marginHeight: PropValue,
                 marginWidth: PropValue, name: PropValue, scrolling: PropValue, src: PropValue, width: PropValue,
                 contentWindow: PropValue): List[(String, PropValue)] = List(
    ("@class", PropValue(AbsString.alpha("Object"))),
    ("@proto", PropValue(ObjectValue(loc_proto, F, F, F))),
    ("@extensible", PropValue(BoolTrue)),
    // DOM Level 1
    ("align", align),
    ("frameBorder", frameBorder),
    ("height", height),
    ("longDesc", longDesc),
    ("marginHeight", marginHeight),
    ("marginWidth", marginWidth),
    ("name", name),
    ("scrolling", scrolling),
    ("src", src),
    ("width", width),
    ("contentWindow", contentWindow)
  )
override def default_getInsList(): List[(String, PropValue)] = {
val align = PropValue(ObjectValue(AbsString.alpha(""), T, T, T))
val frameBorder = PropValue(ObjectValue(AbsString.alpha(""), T, T, T))
val height = PropValue(ObjectValue(AbsString.alpha(""), T, T, T))
val longDesc = PropValue(ObjectValue(AbsString.alpha(""), T, T, T))
val marginHeight = PropValue(ObjectValue(AbsString.alpha(""), T, T, T))
val marginWidth = PropValue(ObjectValue(AbsString.alpha(""), T, T, T))
val name = PropValue(ObjectValue(AbsString.alpha(""), T, T, T))
val scrolling = PropValue(ObjectValue(AbsString.alpha(""), T, T, T))
val src = PropValue(ObjectValue(AbsString.alpha(""), T, T, T))
val width = PropValue(ObjectValue(AbsString.alpha(""), T, T, T))
val contentWindow = PropValue(ObjectValue(NullTop, F, T, T))
// This object has all properties of the HTMLElement object
HTMLElement.default_getInsList :::
getInsList(align, frameBorder, height, longDesc, marginHeight, marginWidth, name, scrolling, src, width, contentWindow)
}
}
| daejunpark/jsaf | src/kr/ac/kaist/jsaf/analysis/typing/models/DOMHtml/HTMLIFrameElement.scala | Scala | bsd-3-clause | 6,294 |
package spark
import akka.actor.ActorSystem
import akka.actor.ActorSystemImpl
import akka.remote.RemoteActorRefProvider
import serializer.Serializer
import spark.broadcast.BroadcastManager
import spark.storage.BlockManager
import spark.storage.BlockManagerMaster
import spark.network.ConnectionManager
import spark.util.AkkaUtils
/**
 * Holds all the runtime environment objects for a running Spark instance (either master or worker),
 * including the serializer, Akka actor system, block manager, map output tracker, etc. Currently
 * Spark code finds the SparkEnv through a thread-local variable, so each thread that accesses these
 * objects needs to have the right SparkEnv set. You can get the current environment with
 * SparkEnv.get (e.g. after creating a SparkContext) and set it with SparkEnv.set.
 */
class SparkEnv (
    val actorSystem: ActorSystem,
    val serializer: Serializer,
    val closureSerializer: Serializer,
    val cacheTracker: CacheTracker,
    val mapOutputTracker: MapOutputTracker,
    val shuffleFetcher: ShuffleFetcher,
    val broadcastManager: BroadcastManager,
    val blockManager: BlockManager,
    val connectionManager: ConnectionManager,
    val httpFileServer: HttpFileServer
  ) {

  /** No-parameter constructor for unit tests. */
  def this() = {
    this(null, new JavaSerializer, new JavaSerializer, null, null, null, null, null, null, null)
  }

  /** Tears down every service owned by this environment.
    * Actor-based services are stopped before the actor system itself is shut down.
    */
  def stop() {
    httpFileServer.stop()
    mapOutputTracker.stop()
    cacheTracker.stop()
    shuffleFetcher.stop()
    broadcastManager.stop()
    blockManager.stop()
    blockManager.master.stop()
    actorSystem.shutdown()
    // Unfortunately Akka's awaitTermination doesn't actually wait for the Netty server to shut
    // down, but let's call it anyway in case it gets fixed in a later release
    actorSystem.awaitTermination()
  }
}
/** Companion holding the thread-local current environment and the factory that
  * wires an environment together from Java system properties. */
object SparkEnv extends Logging {
  // Per-thread current environment; each thread must call set() before get().
  private val env = new ThreadLocal[SparkEnv]

  /** Installs `e` as the current environment for the calling thread. */
  def set(e: SparkEnv) {
    env.set(e)
  }

  /** Returns the calling thread's environment (null if set() was never called on this thread). */
  def get: SparkEnv = {
    env.get()
  }

  /** Builds a full SparkEnv, configuring each component from system properties.
    *
    * @param hostname host to bind the actor system to
    * @param port     port for the actor system (0 = pick any free port)
    * @param isMaster whether this environment belongs to the master
    * @param isLocal  whether this is a local (single-process) deployment
    */
  def createFromSystemProperties(
      hostname: String,
      port: Int,
      isMaster: Boolean,
      isLocal: Boolean
    ) : SparkEnv = {

    val (actorSystem, boundPort) = AkkaUtils.createActorSystem("spark", hostname, port)

    // Bit of a hack: If this is the master and our port was 0 (meaning bind to any free port),
    // figure out which port number Akka actually bound to and set spark.master.port to it.
    if (isMaster && port == 0) {
      System.setProperty("spark.master.port", boundPort.toString)
    }

    val classLoader = Thread.currentThread.getContextClassLoader

    // Create an instance of the class named by the given Java system property, or by
    // defaultClassName if the property is not set, and return it as a T
    def instantiateClass[T](propertyName: String, defaultClassName: String): T = {
      val name = System.getProperty(propertyName, defaultClassName)
      Class.forName(name, true, classLoader).newInstance().asInstanceOf[T]
    }

    val serializer = instantiateClass[Serializer]("spark.serializer", "spark.JavaSerializer")

    val blockManagerMaster = new BlockManagerMaster(actorSystem, isMaster, isLocal)
    val blockManager = new BlockManager(blockManagerMaster, serializer)

    // The connection manager is owned by the block manager; reuse it rather than creating one.
    val connectionManager = blockManager.connectionManager

    val broadcastManager = new BroadcastManager(isMaster)

    val closureSerializer = instantiateClass[Serializer](
      "spark.closure.serializer", "spark.JavaSerializer")

    val cacheTracker = new CacheTracker(actorSystem, isMaster, blockManager)
    blockManager.cacheTracker = cacheTracker

    val mapOutputTracker = new MapOutputTracker(actorSystem, isMaster)

    val shuffleFetcher = instantiateClass[ShuffleFetcher](
      "spark.shuffle.fetcher", "spark.BlockStoreShuffleFetcher")

    val httpFileServer = new HttpFileServer()
    httpFileServer.initialize()
    // Publish the server URI so executors can locate it.
    System.setProperty("spark.fileserver.uri", httpFileServer.serverUri)

    // Warn about deprecated spark.cache.class property
    if (System.getProperty("spark.cache.class") != null) {
      logWarning("The spark.cache.class property is no longer being used! Specify storage " +
        "levels using the RDD.persist() method instead.")
    }

    new SparkEnv(
      actorSystem,
      serializer,
      closureSerializer,
      cacheTracker,
      mapOutputTracker,
      shuffleFetcher,
      broadcastManager,
      blockManager,
      connectionManager,
      httpFileServer)
  }
}
| joeywen/spark_cpp_api | core/src/main/scala/spark/SparkEnv.scala | Scala | bsd-3-clause | 4,487 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.chill
import scala.collection.immutable.SortedSet
/** Kryo serializer for immutable [[SortedSet]]: writes the size, the ordering,
  * then each element in iteration order; [[read]] reverses that layout.
  */
class SortedSetSerializer[T] extends KSerializer[SortedSet[T]] {

  /** Serializes `set` as: element count (varint), ordering, then each element,
    * flushing the output after every element.
    */
  def write(kser: Kryo, out: Output, set: SortedSet[T]): Unit = {
    out.writeInt(set.size, true)
    kser.writeClassAndObject(out, set.ordering.asInstanceOf[AnyRef])
    for (elem <- set) {
      kser.writeClassAndObject(out, elem.asInstanceOf[AnyRef])
      // After each intermediate object, flush
      out.flush()
    }
  }

  /** Reconstructs a SortedSet previously written by [[write]]. */
  def read(kser: Kryo, in: Input, cls: Class[SortedSet[T]]): SortedSet[T] = {
    val count = in.readInt(true)
    val ord = kser.readClassAndObject(in).asInstanceOf[Ordering[T]]
    // Build via a mutable builder for speed; the result is still immutable.
    val acc = SortedSet.newBuilder[T](ord)
    acc.sizeHint(count)
    var remaining = count
    while (remaining > 0) {
      acc += kser.readClassAndObject(in).asInstanceOf[T]
      remaining -= 1
    }
    acc.result()
  }
}
| twitter/chill | chill-scala/src/main/scala/com/twitter/chill/SortedSetSerializer.scala | Scala | apache-2.0 | 1,611 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.utils
import org.apache.flink.table.api.ValidationException
import org.apache.flink.table.planner.calcite.FlinkTypeFactory.{isProctimeIndicatorType, isRowtimeIndicatorType}
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalJoin
import org.apache.flink.table.planner.plan.nodes.exec.spec.JoinSpec
import org.apache.flink.util.Preconditions.checkState
import org.apache.calcite.rel.core.{JoinInfo, JoinRelType}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rex._
import org.apache.calcite.sql.`type`.{OperandTypes, ReturnTypes}
import org.apache.calcite.sql.{SqlFunction, SqlFunctionCategory, SqlKind}
import scala.collection.JavaConversions._
/**
 * Utilities for temporal join.
 */
object TemporalJoinUtil {

  // ----------------------------------------------------------------------------------------
  // Temporal Join Condition Utilities
  // ----------------------------------------------------------------------------------------

  /**
   * [[TEMPORAL_JOIN_CONDITION]] is a specific join condition which correctly defines
   * references to rightTimeAttribute, rightPrimaryKeyExpression and leftTimeAttribute.
   * The condition is used to mark this is a temporal table join and ensure columns these
   * expressions depends on will not be pruned.
   *
   * The join key pair is necessary for temporal table join to ensure the
   * the condition will not be pushed down.
   *
   * The rightTimeAttribute, rightPrimaryKeyExpression and leftTimeAttribute will be
   * extracted from the condition in physical phase.
   */
  val TEMPORAL_JOIN_CONDITION = new SqlFunction(
    "__TEMPORAL_JOIN_CONDITION",
    SqlKind.OTHER_FUNCTION,
    ReturnTypes.BOOLEAN_NOT_NULL,
    null,
    OperandTypes.or(
      /**------------------------ Temporal table join condition ------------------------**/
      // right time attribute and primary key are required in event-time temporal table join,
      OperandTypes.sequence(
        "'(LEFT_TIME_ATTRIBUTE, RIGHT_TIME_ATTRIBUTE, PRIMARY_KEY, LEFT_KEY, RIGHT_KEY)'",
        OperandTypes.DATETIME,
        OperandTypes.DATETIME,
        OperandTypes.ANY,
        OperandTypes.ANY,
        OperandTypes.ANY),
      // right primary key is required for processing-time temporal table join
      OperandTypes.sequence(
        "'(LEFT_TIME_ATTRIBUTE, PRIMARY_KEY, LEFT_KEY, RIGHT_KEY)'",
        OperandTypes.DATETIME,
        OperandTypes.ANY,
        OperandTypes.ANY,
        OperandTypes.ANY),
      /**------------------ Temporal table function join condition ---------------------**/
      // Event-time temporal function join condition
      OperandTypes.sequence(
        "'(LEFT_TIME_ATTRIBUTE, RIGHT_TIME_ATTRIBUTE, PRIMARY_KEY)'",
        OperandTypes.DATETIME,
        OperandTypes.DATETIME,
        OperandTypes.ANY),
      // Processing-time temporal function join condition
      OperandTypes.sequence(
        "'(LEFT_TIME_ATTRIBUTE, PRIMARY_KEY)'",
        OperandTypes.DATETIME,
        OperandTypes.ANY)),
    SqlFunctionCategory.SYSTEM)

  /**
   * Initial temporal condition used in rewrite phase of logical plan, this condition will
   * be replaced with [[TEMPORAL_JOIN_CONDITION]] after the primary key inferred.
   */
  val INITIAL_TEMPORAL_JOIN_CONDITION = new SqlFunction(
    "__INITIAL_TEMPORAL_JOIN_CONDITION",
    SqlKind.OTHER_FUNCTION,
    ReturnTypes.BOOLEAN_NOT_NULL,
    null,
    OperandTypes.or(
      // initial Event-time temporal table join condition, will fill PRIMARY_KEY later,
      OperandTypes.sequence(
        "'(LEFT_TIME_ATTRIBUTE, RIGHT_TIME_ATTRIBUTE, LEFT_KEY, RIGHT_KEY)'",
        OperandTypes.DATETIME,
        OperandTypes.DATETIME,
        OperandTypes.ANY,
        OperandTypes.ANY),
      // initial Processing-time temporal table join condition, will fill PRIMARY_KEY later,
      OperandTypes.sequence(
        "'(LEFT_TIME_ATTRIBUTE, LEFT_KEY, RIGHT_KEY)'",
        OperandTypes.DATETIME,
        OperandTypes.ANY,
        OperandTypes.ANY)),
    SqlFunctionCategory.SYSTEM)

  // Marker function wrapping the left-side join key expressions.
  val TEMPORAL_JOIN_LEFT_KEY = new SqlFunction(
    "__TEMPORAL_JOIN_LEFT_KEY",
    SqlKind.OTHER_FUNCTION,
    ReturnTypes.BOOLEAN_NOT_NULL,
    null,
    OperandTypes.ARRAY,
    SqlFunctionCategory.SYSTEM)

  // Marker function wrapping the right-side join key expressions.
  val TEMPORAL_JOIN_RIGHT_KEY = new SqlFunction(
    "__TEMPORAL_JOIN_RIGHT_KEY",
    SqlKind.OTHER_FUNCTION,
    ReturnTypes.BOOLEAN_NOT_NULL,
    null,
    OperandTypes.ARRAY,
    SqlFunctionCategory.SYSTEM)

  // Marker function wrapping the right table's primary-key expressions.
  val TEMPORAL_JOIN_CONDITION_PRIMARY_KEY = new SqlFunction(
    "__TEMPORAL_JOIN_CONDITION_PRIMARY_KEY",
    SqlKind.OTHER_FUNCTION,
    ReturnTypes.BOOLEAN_NOT_NULL,
    null,
    OperandTypes.ARRAY,
    SqlFunctionCategory.SYSTEM)

  /** Wraps primary-key expressions in the PRIMARY_KEY marker call. */
  private def makePrimaryKeyCall(
      rexBuilder: RexBuilder,
      rightPrimaryKeyExpression: Seq[RexNode]): RexNode = {
    rexBuilder.makeCall(
      TEMPORAL_JOIN_CONDITION_PRIMARY_KEY,
      rightPrimaryKeyExpression)
  }

  /** Wraps left join-key expressions in the LEFT_KEY marker call. */
  private def makeLeftJoinKeyCall(
      rexBuilder: RexBuilder,
      keyExpression: Seq[RexNode]): RexNode = {
    rexBuilder.makeCall(
      TEMPORAL_JOIN_LEFT_KEY,
      keyExpression)
  }

  /** Wraps right join-key expressions in the RIGHT_KEY marker call. */
  private def makeRightJoinKeyCall(
      rexBuilder: RexBuilder,
      keyExpression: Seq[RexNode]): RexNode = {
    rexBuilder.makeCall(
      TEMPORAL_JOIN_RIGHT_KEY,
      keyExpression)
  }

  /** Builds a processing-time temporal *function* join condition:
    * (LEFT_TIME_ATTRIBUTE, PRIMARY_KEY). */
  def makeProcTimeTemporalFunctionJoinConCall(
      rexBuilder: RexBuilder,
      leftTimeAttribute: RexNode,
      rightPrimaryKeyExpression: RexNode): RexNode = {
    rexBuilder.makeCall(
      TEMPORAL_JOIN_CONDITION,
      leftTimeAttribute,
      makePrimaryKeyCall(rexBuilder, Array(rightPrimaryKeyExpression)))
  }

  /** Builds an event-time temporal *function* join condition:
    * (LEFT_TIME_ATTRIBUTE, RIGHT_TIME_ATTRIBUTE, PRIMARY_KEY). */
  def makeRowTimeTemporalFunctionJoinConCall(
      rexBuilder: RexBuilder,
      leftTimeAttribute: RexNode,
      rightTimeAttribute: RexNode,
      rightPrimaryKeyExpression: RexNode): RexNode = {
    rexBuilder.makeCall(
      TEMPORAL_JOIN_CONDITION,
      leftTimeAttribute,
      rightTimeAttribute,
      makePrimaryKeyCall(rexBuilder, Array(rightPrimaryKeyExpression)))
  }

  /** Builds the *initial* event-time temporal table join condition
    * (no primary key yet — it is filled in later). */
  def makeInitialRowTimeTemporalTableJoinCondCall(
      rexBuilder: RexBuilder,
      leftTimeAttribute: RexNode,
      rightTimeAttribute: RexNode,
      leftJoinKeyExpression: Seq[RexNode],
      rightJoinKeyExpression: Seq[RexNode]): RexNode = {
    rexBuilder.makeCall(
      INITIAL_TEMPORAL_JOIN_CONDITION,
      leftTimeAttribute,
      rightTimeAttribute,
      makeLeftJoinKeyCall(rexBuilder, leftJoinKeyExpression),
      makeRightJoinKeyCall(rexBuilder, rightJoinKeyExpression))
  }

  /** Builds the final event-time temporal table join condition with inferred primary key. */
  def makeRowTimeTemporalTableJoinConCall(
      rexBuilder: RexBuilder,
      leftTimeAttribute: RexNode,
      rightTimeAttribute: RexNode,
      rightPrimaryKeyExpression: Seq[RexNode],
      leftJoinKeyExpression: Seq[RexNode],
      rightJoinKeyExpression: Seq[RexNode]): RexNode = {
    rexBuilder.makeCall(
      TEMPORAL_JOIN_CONDITION,
      leftTimeAttribute,
      rightTimeAttribute,
      makePrimaryKeyCall(rexBuilder, rightPrimaryKeyExpression),
      makeLeftJoinKeyCall(rexBuilder, leftJoinKeyExpression),
      makeRightJoinKeyCall(rexBuilder, rightJoinKeyExpression))
  }

  /** Builds the *initial* processing-time temporal table join condition
    * (no primary key yet — it is filled in later). */
  def makeInitialProcTimeTemporalTableJoinConCall(
      rexBuilder: RexBuilder,
      leftTimeAttribute: RexNode,
      leftJoinKeyExpression: Seq[RexNode],
      rightJoinKeyExpression: Seq[RexNode]): RexNode = {
    rexBuilder.makeCall(
      INITIAL_TEMPORAL_JOIN_CONDITION,
      leftTimeAttribute,
      makeLeftJoinKeyCall(rexBuilder, leftJoinKeyExpression),
      makeRightJoinKeyCall(rexBuilder, rightJoinKeyExpression))
  }

  /** Builds the final processing-time temporal table join condition with inferred primary key. */
  def makeProcTimeTemporalTableJoinConCall(
      rexBuilder: RexBuilder,
      leftTimeAttribute: RexNode,
      rightPrimaryKeyExpression: Seq[RexNode],
      leftJoinKeyExpression: Seq[RexNode],
      rightJoinKeyExpression: Seq[RexNode]): RexNode = {
    rexBuilder.makeCall(
      TEMPORAL_JOIN_CONDITION,
      leftTimeAttribute,
      makePrimaryKeyCall(rexBuilder, rightPrimaryKeyExpression),
      makeLeftJoinKeyCall(rexBuilder, leftJoinKeyExpression),
      makeRightJoinKeyCall(rexBuilder, rightJoinKeyExpression))
  }

  /** True for the 4-operand initial event-time temporal table join call. */
  def isInitialRowTimeTemporalTableJoin(rexCall: RexCall): Boolean = {
    //(LEFT_TIME_ATTRIBUTE, RIGHT_TIME_ATTRIBUTE, LEFT_KEY, RIGHT_KEY)
    rexCall.getOperator == INITIAL_TEMPORAL_JOIN_CONDITION && rexCall.operands.length == 4
  }

  /** True for the 3-operand initial processing-time temporal table join call. */
  def isInitialProcTimeTemporalTableJoin(rexCall: RexCall): Boolean = {
    //(LEFT_TIME_ATTRIBUTE, LEFT_KEY, RIGHT_KEY)
    rexCall.getOperator == INITIAL_TEMPORAL_JOIN_CONDITION && rexCall.operands.length == 3
  }

  /** True if `condition` contains either the final or the initial temporal join marker. */
  private def containsTemporalJoinCondition(condition: RexNode): Boolean = {
    var hasTemporalJoinCondition: Boolean = false
    condition.accept(new RexVisitorImpl[Void](true) {
      override def visitCall(call: RexCall): Void = {
        if (call.getOperator != TEMPORAL_JOIN_CONDITION &&
          call.getOperator != INITIAL_TEMPORAL_JOIN_CONDITION) {
          super.visitCall(call)
        } else {
          hasTemporalJoinCondition = true
          null
        }
      }
    })
    hasTemporalJoinCondition
  }

  /** True if `condition` contains the *initial* temporal join marker only. */
  def containsInitialTemporalJoinCondition(condition: RexNode): Boolean = {
    var hasTemporalJoinCondition: Boolean = false
    condition.accept(new RexVisitorImpl[Void](true) {
      override def visitCall(call: RexCall): Void = {
        if (call.getOperator != INITIAL_TEMPORAL_JOIN_CONDITION) {
          super.visitCall(call)
        } else {
          hasTemporalJoinCondition = true
          null
        }
      }
    })
    hasTemporalJoinCondition
  }

  /** True if the join's non-equi condition marks an event-time (rowtime) temporal join. */
  def isRowTimeJoin(joinSpec: JoinSpec): Boolean = {
    val nonEquiJoinRex = joinSpec.getNonEquiCondition().orElse(null)

    var rowtimeJoin: Boolean = false
    val visitor = new RexVisitorImpl[Unit](true) {
      override def visitCall(call: RexCall): Unit = {
        if (isRowTimeTemporalTableJoinCon(call) ||
          isRowTimeTemporalFunctionJoinCon(call)) {
          rowtimeJoin = true
        } else {
          super.visitCall(call)
        }
      }
    }
    // NOTE(review): assumes a temporal join always has a non-equi condition;
    // a missing condition would NPE here — confirm with callers.
    nonEquiJoinRex.accept(visitor)
    rowtimeJoin
  }

  /** True for the 5-operand final event-time temporal table join call. */
  def isRowTimeTemporalTableJoinCon(rexCall: RexCall): Boolean = {
    //(LEFT_TIME_ATTRIBUTE, RIGHT_TIME_ATTRIBUTE, PRIMARY_KEY, LEFT_KEY, RIGHT_KEY)
    rexCall.getOperator == TEMPORAL_JOIN_CONDITION && rexCall.operands.length == 5
  }

  /** True for the 3-operand event-time temporal *function* join call. */
  def isRowTimeTemporalFunctionJoinCon(rexCall: RexCall): Boolean = {
    //(LEFT_TIME_ATTRIBUTE, RIGHT_TIME_ATTRIBUTE, PRIMARY_KEY)
    rexCall.getOperator == TEMPORAL_JOIN_CONDITION && rexCall.operands.length == 3
  }

  /** True if the join's remaining condition contains a temporal *function* join marker. */
  def isTemporalFunctionJoin(rexBuilder: RexBuilder, joinInfo: JoinInfo): Boolean = {
    val nonEquiJoinRex = joinInfo.getRemaining(rexBuilder)
    var isTemporalFunctionJoin: Boolean = false
    val visitor = new RexVisitorImpl[Unit](true) {
      override def visitCall(call: RexCall): Unit = {
        if (isTemporalFunctionCon(call)) {
          isTemporalFunctionJoin = true
        } else {
          super.visitCall(call)
        }
      }
    }
    nonEquiJoinRex.accept(visitor)
    isTemporalFunctionJoin
  }

  /** True for a temporal function join call (2 or 3 operands of TEMPORAL_JOIN_CONDITION). */
  def isTemporalFunctionCon(rexCall: RexCall): Boolean = {
    //(LEFT_TIME_ATTRIBUTE, PRIMARY_KEY)
    //(LEFT_TIME_ATTRIBUTE, RIGHT_TIME_ATTRIBUTE, PRIMARY_KEY)
    rexCall.getOperator == TEMPORAL_JOIN_CONDITION &&
      (rexCall.operands.length == 2 || rexCall.operands.length == 3)
  }

  /** Validates a temporal *function* join condition: checks the primary key and that
    * the time attributes are of the kind (rowtime/proctime) the call shape requires.
    *
    * @throws ValidationException when the condition is malformed.
    */
  def validateTemporalFunctionCondition(
      call: RexCall,
      leftTimeAttribute: RexNode,
      rightTimeAttribute: Option[RexNode],
      rightPrimaryKey: Option[Array[RexNode]],
      rightKeysStartingOffset: Int,
      joinSpec: JoinSpec,
      textualRepresentation: String): Unit = {

    if (TemporalJoinUtil.isRowTimeTemporalFunctionJoinCon(call)) {
      validateTemporalFunctionPrimaryKey(
        rightKeysStartingOffset,
        rightPrimaryKey,
        joinSpec,
        textualRepresentation)

      if (!isRowtimeIndicatorType(rightTimeAttribute.get.getType)) {
        throw new ValidationException(
          s"Non rowtime timeAttribute [${rightTimeAttribute.get.getType}] " +
            s"used to create TemporalTableFunction")
      }

      if (!isRowtimeIndicatorType(leftTimeAttribute.getType)) {
        throw new ValidationException(
          s"Non rowtime timeAttribute [${leftTimeAttribute.getType}] " +
            s"passed as the argument to TemporalTableFunction")
      }
    }
    else {
      validateTemporalFunctionPrimaryKey(
        rightKeysStartingOffset,
        rightPrimaryKey,
        joinSpec,
        textualRepresentation)

      if (!isProctimeIndicatorType(leftTimeAttribute.getType)) {
        throw new ValidationException(
          s"Non processing timeAttribute [${leftTimeAttribute.getType}] " +
            s"passed as the argument to TemporalTableFunction")
      }
    }
  }

  /** Checks that there is exactly one join key and exactly one primary key, and that the
    * right join key references the same input field as the primary key. */
  private def validateTemporalFunctionPrimaryKey(
      rightKeysStartingOffset: Int,
      rightPrimaryKey: Option[Array[RexNode]],
      joinInfo: JoinSpec,
      textualRepresentation: String): Unit = {

    if (joinInfo.getRightKeys.length != 1) {
      throw new ValidationException(
        s"Only single column join key is supported. " +
          s"Found ${joinInfo.getRightKeys} in [$textualRepresentation]")
    }

    if (rightPrimaryKey.isEmpty || rightPrimaryKey.get.length != 1) {
      throw new ValidationException(
        s"Only single primary key is supported. " +
          s"Found $rightPrimaryKey in [$textualRepresentation]")
    }

    val pk = rightPrimaryKey.get(0)
    // The right key's field index is relative; shift it into the joined-row index space.
    val rightJoinKeyInputReference = joinInfo.getRightKeys()(0) + rightKeysStartingOffset

    val rightPrimaryKeyInputReference = extractInputRef(
      pk,
      textualRepresentation)

    if (rightPrimaryKeyInputReference != rightJoinKeyInputReference) {
      throw new ValidationException(
        s"Join key [$rightJoinKeyInputReference] must be the same as " +
          s"temporal table's primary key [$pk] " +
          s"in [$textualRepresentation]")
    }
  }

  /** Extracts the single input reference index from `rexNode`;
    * fails if it does not reference exactly one field. */
  def extractInputRef(rexNode: RexNode, textualRepresentation: String): Int = {
    val inputReferenceVisitor = new InputRefVisitor
    rexNode.accept(inputReferenceVisitor)
    checkState(
      inputReferenceVisitor.getFields.length == 1,
      "Failed to find input reference in [%s]",
      textualRepresentation)
    inputReferenceVisitor.getFields.head
  }

  /**
   * Check whether input join node satisfy preconditions to convert into temporal join.
   *
   * @param join input join to analyze.
   * @return True if input join node satisfy preconditions to convert into temporal join,
   *         else false.
   */
  def satisfyTemporalJoin(join: FlinkLogicalJoin): Boolean = {
    satisfyTemporalJoin(join, join.getLeft, join.getRight)
  }

  /** Variant of [[satisfyTemporalJoin]] that analyzes the join against replacement inputs. */
  def satisfyTemporalJoin(join: FlinkLogicalJoin, newLeft: RelNode, newRight: RelNode): Boolean = {
    if (!containsTemporalJoinCondition(join.getCondition)) {
      return false
    }
    val joinInfo = JoinInfo.of(newLeft, newRight, join.getCondition)
    if (isTemporalFunctionJoin(join.getCluster.getRexBuilder, joinInfo)) {
      // Temporal table function join currently only support INNER JOIN
      join.getJoinType match {
        case JoinRelType.INNER => true
        case _ => false
      }
    } else {
      // Temporal table join currently only support INNER JOIN and LEFT JOIN
      join.getJoinType match {
        case JoinRelType.INNER | JoinRelType.LEFT => true
        case _ => false
      }
    }
  }
}
| apache/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/utils/TemporalJoinUtil.scala | Scala | apache-2.0 | 16,300 |
/*
* The MIT License
*
* Copyright (c) 2015 Fulcrum Genomics LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package dagr.core.tasksystem
object EitherTask {

  /** Marker for which branch an [[EitherTask]] should take. */
  sealed trait Choice
  object Left extends Choice
  object Right extends Choice

  /**
   * Creates an [[EitherTask]] that wraps `choice` into a function that will be evaluated lazily
   * when the [[EitherTask]] needs to make its choice.
   *
   * @param left   the left task.
   * @param right  the right task.
   * @param choice an expression that returns either Left or Right when evaluated.
   */
  def apply(left: Task, right: Task, choice: => Choice): EitherTask =
    new EitherTask(left, right, () => choice)

  /**
   * Creates an [[EitherTask]] from a lazily-evaluated boolean: when the task decides,
   * a true `goLeft` selects the left task and a false one selects the right task.
   *
   * @param left   the left task.
   * @param right  the right task.
   * @param goLeft an expression indicating Left when true and Right when false.
   */
  def of(left: Task, right: Task, goLeft: => Boolean): EitherTask =
    apply(left, right, if (goLeft) Left else Right)
}
/** A task that defers the decision between two alternative tasks until execution time.
  * The `choice` function is not evaluated until all dependencies have been met and the
  * task must decide which branch to return from [[getTasks]].
  *
  * @param left   task returned when the choice evaluates to [[EitherTask.Left]].
  * @param right  task returned when the choice evaluates to [[EitherTask.Right]].
  * @param choice deferred decision, invoked once per [[getTasks]] call.
  */
class EitherTask private (private val left: Task, private val right: Task, private val choice: () => EitherTask.Choice) extends Task {

  /** Evaluates the deferred choice and yields the selected branch as a one-element collection. */
  override def getTasks: Iterable[Task] = choice() match {
    case EitherTask.Left => Seq(left)
    case _               => Seq(right)
  }
}
| fulcrumgenomics/dagr | core/src/main/scala/dagr/core/tasksystem/EitherTask.scala | Scala | mit | 3,016 |
package scadla.backends.amf
import scadla._
import dzufferey.utils._
import dzufferey.utils.LogLevel._
import scala.xml._
import squants.space.{LengthUnit, Millimeters, Microns, Meters, Inches}
/** Default AMF printer instance: emits coordinates in millimeters. */
object Printer extends Printer(Millimeters) {
}
/** Serializes a scadla [[Polyhedron]] to the AMF (Additive Manufacturing File) XML format,
  * converting all coordinates to the configured length `unit`. */
class Printer(unit: LengthUnit = Millimeters) {

  // AMF "unit" attribute value for the configured squants unit;
  // aborts via logAndThrow for units AMF does not support.
  val targetUnit: String = unit match {
    case Millimeters => "millimeter"
    case Meters => "meter"
    case Microns => "micrometer"
    case Inches => "inch"
    case other => Logger.logAndThrow("amf.Printer", Error, "unsupported unit: " + other)
  }

  /** Writes `obj` as a single-object AMF document to `fileName`
    * (UTF-8 with an XML declaration). */
  def store(obj: Polyhedron, fileName: String) = {
    // de-duplicated vertex list plus faces expressed as vertex-index triples
    val (points, faces) = obj.indexed
    val pointNodes =
      new Group(points.map{ p =>
        <vertex><coordinates><x>{p.x.to(unit)}</x><y>{p.y.to(unit)}</y><z>{p.z.to(unit)}</z></coordinates></vertex>
      })
    val faceNodes =
      new Group(faces.map{ case (a,b,c) =>
        <triangle><v1>{a}</v1><v2>{b}</v2><v3>{c}</v3></triangle>
      }.toSeq)
    val node =
      <amf unit={ targetUnit }>
        <metadata type="producer">Scadla</metadata>
        <object id="0">
          <mesh>
            <vertices>
              { pointNodes }
            </vertices>
            <volume>
              { faceNodes }
            </volume>
          </mesh>
        </object>
      </amf>
    XML.save(fileName, node, "UTF-8", true)
  }
}
| dzufferey/scadla | src/main/scala/scadla/backends/amf/Printer.scala | Scala | apache-2.0 | 1,352 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio
import java.io.PrintWriter
import java.nio.file.Files
import com.spotify.scio.io.TextIO
import com.spotify.scio.metrics.Metrics
import com.spotify.scio.options.ScioOptions
import com.spotify.scio.testing.{PipelineSpec, TestValidationOptions}
import com.spotify.scio.util.ScioUtil
import com.spotify.scio.testing.TestUtil
import java.nio.charset.StandardCharsets
import org.apache.beam.runners.direct.DirectRunner
import org.apache.beam.sdk.options.{PipelineOptions, PipelineOptionsFactory}
import org.apache.beam.sdk.testing.PAssert
import org.apache.beam.sdk.transforms.Create
import scala.concurrent.duration.Duration
import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions
import org.apache.beam.sdk.options.Validation.Required
import scala.jdk.CollectionConverters._
class ScioContextTest extends PipelineSpec {
"ScioContext" should "support pipeline" in {
val pipeline = ScioContext().pipeline
val p = pipeline.apply(Create.of(List(1, 2, 3).asJava))
PAssert.that(p).containsInAnyOrder(List(1, 2, 3).asJava)
pipeline.run()
}
it should "have temp location for default runner" in {
val sc = ScioContext()
sc.prepare()
val opts = sc.options
opts.getTempLocation should not be null
}
it should "have temp location for DirectRunner" in {
val opts = PipelineOptionsFactory.create()
opts.setRunner(classOf[DirectRunner])
val sc = ScioContext(opts)
sc.prepare()
sc.options.getTempLocation should not be null
}
it should "support user defined temp location" in {
val expected = "/expected"
val opts = PipelineOptionsFactory.create()
opts.setTempLocation(expected)
ScioContext(opts).options.getTempLocation shouldBe expected
}
it should "support user defined job name via options" in {
val jobName = "test-job-1"
val opts = PipelineOptionsFactory.create()
opts.setJobName(jobName)
val pipelineOpts = ScioContext(opts).options
pipelineOpts.getJobName shouldBe jobName
}
it should "support user defined job name via context" in {
val jobName = "test-job-1"
val opts = PipelineOptionsFactory.create()
val sc = ScioContext(opts)
sc.setJobName(jobName)
val pipelineOpts = ScioContext(opts).options
pipelineOpts.getJobName shouldBe jobName
}
it should "support user defined job name via options then context" in {
val jobName1 = "test-job-1"
val jobName2 = "test-job-2"
val opts = PipelineOptionsFactory.create()
opts.setJobName(jobName1)
val sc = ScioContext(opts)
sc.setJobName(jobName2)
val pipelineOpts = ScioContext(opts).options
pipelineOpts.getJobName shouldBe jobName2
}
it should "create local output directory on close()" in {
val output = Files.createTempDirectory("scio-output-").toFile
output.delete()
val sc = ScioContext()
sc.parallelize(Seq("a", "b", "c")).saveAsTextFile(output.toString)
output.exists() shouldBe false
sc.run()
output.exists() shouldBe true
output.delete()
}
it should "[io] create local output directory on close()" in {
val output = Files.createTempDirectory("scio-output-").toFile
output.delete()
val sc = ScioContext()
val textIO = TextIO(output.getAbsolutePath)
sc.parallelize(Seq("a", "b", "c")).write(textIO)(TextIO.WriteParam())
output.exists() shouldBe false
sc.run()
output.exists() shouldBe true
output.delete()
}
it should "support save metrics on close for finished pipeline" in {
val metricsFile = Files.createTempFile("scio-metrics-dump-", ".json").toFile
val opts = PipelineOptionsFactory.create()
opts.setRunner(classOf[DirectRunner])
opts.as(classOf[ScioOptions]).setMetricsLocation(metricsFile.toString)
val sc = ScioContext(opts)
sc.run().waitUntilFinish() // block non-test runner
val mapper = ScioUtil.getScalaJsonMapper
val metrics = mapper.readValue(metricsFile, classOf[Metrics])
metrics.version shouldBe BuildInfo.version
}
// A ScioContext is single-shot: the underlying pipeline is frozen once executed,
// so a second run() must be rejected with the documented requirement failure.
it should "fail to run() on closed context" in {
  val sc = ScioContext()
  sc.run()
  the[IllegalArgumentException] thrownBy {
    sc.run()
  } should have message "requirement failed: Pipeline cannot be modified once ScioContext has been executed"
}

// Arguments listed in a file referenced via --optionsFile must be parsed as if
// they had been passed directly on the command line.
it should "support options from optionsFile" in {
  val optionsFile = Files.createTempFile("scio-options-", ".txt").toFile
  val pw = new PrintWriter(optionsFile)
  try {
    pw.append("--foo=bar")
    pw.flush()
  } finally {
    pw.close()
  }
  val (_, arg) = ScioContext.parseArguments[PipelineOptions](
    Array(s"--optionsFile=${optionsFile.getAbsolutePath}")
  )
  arg("foo") shouldBe "bar"
}

// With validation enabled, parseArguments must reject option interfaces whose
// required members are not supplied.
it should "invalidate options where required arguments are missing" in {
  assertThrows[IllegalArgumentException] {
    ScioContext.parseArguments[TestValidationOptions](Array("--foo=bar"), true)
  }
}

// --blockFor drives ScioContext.awaitDuration: a parsable value is honored, a
// missing value defaults to Duration.Inf, and an unparsable value fails fast
// when the context is created.
it should "parse valid, invalid, and missing blockFor argument passed from command line" in {
  val (validOpts, _) =
    ScioContext.parseArguments[PipelineOptions](Array(s"--blockFor=1h"))
  ScioContext.apply(validOpts).awaitDuration shouldBe Duration("1h")
  val (missingOpts, _) = ScioContext.parseArguments[PipelineOptions](Array())
  ScioContext.apply(missingOpts).awaitDuration shouldBe Duration.Inf
  val (invalidOpts, _) =
    ScioContext.parseArguments[PipelineOptions](Array(s"--blockFor=foo"))
  the[IllegalArgumentException] thrownBy { ScioContext.apply(invalidOpts) } should have message
    s"blockFor param foo cannot be cast to type scala.concurrent.duration.Duration"
}

// Overly long app arguments are truncated to 50000 bytes plus a " [...]" marker.
it should "truncate app arguments when they are overly long" in {
  val longArg = "--argument=" + ("a" * 55000)
  val (opts, _) = ScioContext.parseArguments[ScioOptions](Array(longArg))
  def numBytes(s: String): Int = s.getBytes(StandardCharsets.UTF_8.name).length
  val expectedNumBytes = 50000 + numBytes(" [...]")
  numBytes(opts.getAppArguments) shouldBe expectedNumBytes
}

behavior of "Counter initialization in ScioContext"

// A counter registered via initCounter must appear in the job result with a
// committed value of 0 even though no step ever incremented it.
it should "initialize Counters which are registered by name" in {
  val sc = ScioContext()
  sc.initCounter(name = "named-counter")
  val res = sc.run().waitUntilDone()
  val actualCommitedCounterValue = res
    .counter(ScioMetrics.counter(name = "named-counter"))
    .committed
  actualCommitedCounterValue shouldBe Some(0)
}

// Same as above, but the counter is addressed by namespace + name.
it should "initialize Counters which are registered by name and namespace" in {
  val sc = ScioContext()
  sc.initCounter(namespace = "ns", name = "name-spaced-counter")
  val res = sc.run().waitUntilDone()
  val actualCommitedCounterValue = res
    .counter(ScioMetrics.counter(namespace = "ns", name = "name-spaced-counter"))
    .committed
  actualCommitedCounterValue shouldBe Some(0)
}

// Same as above, but a pre-built Counter instance is registered directly.
it should "initialize Counters which are registered" in {
  val scioCounter = ScioMetrics.counter(name = "some-counter")
  val sc = ScioContext()
  sc.initCounter(scioCounter)
  val res = sc.run().waitUntilDone()
  val actualCommitedCounterValue = res
    .counter(scioCounter)
    .committed
  actualCommitedCounterValue shouldBe Some(0)
}

// Options passed at construction must be visible, unchanged, on the pipeline
// that transformed SCollections ultimately execute on.
"PipelineOptions" should "propagate" in {
  trait Options extends DataflowPipelineOptions {
    @Required
    def getStringValue: String
    def setStringValue(value: String): Unit
  }
  val (opts, _) = ScioContext.parseArguments[Options](
    // test appName will switch ScioContext into test mode
    Array("--stringValue=foobar", s"--appName=${TestUtil.newTestId()}", "--project=dummy"),
    withValidation = true
  )
  val sc = ScioContext(opts)
  val internalOptions =
    sc.parallelize(Seq(1, 2, 3, 4))
      .map(_ + 1)
      .internal
      .getPipeline()
      .getOptions()
      .as(classOf[Options])
  internalOptions.getStringValue shouldBe "foobar"
}

// Regression test for #1323: auto-generated SCollection names must stay unique;
// Beam's stable-unique-names check is made a hard error to enforce this.
it should "#1323: generate unique SCollection names" in {
  val options = PipelineOptionsFactory.create()
  options.setStableUniqueNames(PipelineOptions.CheckEnabled.ERROR)
  val sc = ScioContext(options)
  val s1 = sc.empty[(String, Int)]()
  val s2 = sc.empty[(String, Double)]()
  s1.join(s2)
  noException shouldBe thrownBy(sc.run())
}
}
| regadas/scio | scio-test/src/test/scala/com/spotify/scio/ScioContextTest.scala | Scala | apache-2.0 | 8,891 |
package org.hello

import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner

// NOTE(review): the suite is named World212Suite but exercises World2_13 (and the
// repository path places it in a 2_13 project) -- the "212" looks like a leftover
// from a copied 2.12 fixture; confirm intent before renaming.
@RunWith(classOf[JUnitRunner])
class World212Suite extends FunSuite {
  // Smoke test: constructing World2_13 and calling foo() must not throw.
  test("foo") {
    new World2_13().foo()
  }
} | scoverage/gradle-scoverage | src/crossScalaVersionTest/resources/projects/scala-multi-module-cross-version/2_13/src/test/scala/org/hello/World2_13Suite.scala | Scala | apache-2.0 | 239 |
package com.overviewdocs.blobstorage
/** Identifies a logical bucket in the blob store. */
sealed trait BlobBucketId {
  /** Stable string key under which this bucket is addressed. */
  val id: String
}

/** The closed set of buckets the application stores blobs in. */
object BlobBucketId {
  /** Bucket holding per-page data blobs. */
  case object PageData extends BlobBucketId {
    override val id: String = "pageData"
  }

  /** Bucket holding raw uploaded file contents. */
  case object FileContents extends BlobBucketId {
    override val id: String = "fileContents"
  }

  /** Bucket holding viewable representations of files. */
  case object FileView extends BlobBucketId {
    override val id: String = "fileView"
  }
}
| overview/overview-server | common/src/main/scala/com/overviewdocs/blobstorage/BlobBucketId.scala | Scala | agpl-3.0 | 369 |
package scala.meta
package internal
package tokenizers
import Chars._
import scala.meta.inputs._
/** Mutable cursor state shared by [[CharArrayReader]] and its lookahead readers. */
trait CharArrayReaderData {
  /** the last read character */
  var ch: Char = _

  /** The offset one past the last read character */
  var charOffset: Int = 0

  /** The start offset of the current line */
  var lineStartOffset: Int = 0

  /** The start offset of the line before the current one */
  var lastLineStartOffset: Int = 0

  // Offset one past the most recently decoded unicode escape, or -1 if none;
  // CharArrayReader.nextChar resumes from here after a multi-character escape.
  protected var lastUnicodeOffset = -1
}
/**
 * Character-level reader used by the tokenizer: walks `input.chars`, decoding
 * unicode escapes, collapsing CR;LF pairs, tracking line-start offsets, and
 * enforcing dialect restrictions on single-line quasiquotes.
 */
class CharArrayReader(input: Input, dialect: Dialect, reporter: Reporter) extends CharArrayReaderData { self =>
  val buf = input.chars
  import reporter._

  /** Is last character a unicode escape \\\\uxxxx? */
  var isUnicodeEscape = false

  /** Advance one character; reducing CR;LF pairs to just LF */
  final def nextChar(): Unit = {
    // If the last character is a unicode escape, skip charOffset to the end of
    // the last character. In case `potentialUnicode` restores charOffset
    // to the head of last character.
    if(isUnicodeEscape) charOffset = lastUnicodeOffset
    isUnicodeEscape = false
    if (charOffset >= buf.length) {
      ch = SU
    } else {
      val c = buf(charOffset)
      ch = c
      charOffset += 1
      if (c == '\\\\') potentialUnicode()
      if (ch < ' ') {
        skipCR()
        potentialLineEnd()
      }
      // Dialect restriction: single-line quasiquotes may not contain double quotes.
      if (ch == '"' && !dialect.allowMultilinePrograms) {
        readerError("double quotes are not allowed in single-line quasiquotes", at = charOffset - 1)
      }
    }
  }

  /** Advance one character, leaving CR;LF pairs intact.
   * This is for use in multi-line strings, so there are no
   * "potential line ends" here.
   */
  // NOTE(review): procedure syntax (`def f() { ... }`) is deprecated; consider
  // `final def nextRawChar(): Unit = { ... }` (same below in potentialLineEnd).
  final def nextRawChar() {
    if(isUnicodeEscape) charOffset = lastUnicodeOffset
    isUnicodeEscape = false
    if (charOffset >= buf.length) {
      ch = SU
    } else {
      val c = buf(charOffset)
      ch = c
      charOffset += 1
      if (c == '\\\\') potentialUnicode()
    }
  }

  /** Interpret \\\\uxxxx escapes */
  private def potentialUnicode() = {
    // A unicode escape is only valid when preceded by an even number of backslashes.
    def evenSlashPrefix: Boolean = {
      var p = charOffset - 2
      while (p >= 0 && buf(p) == '\\\\') p -= 1
      (charOffset - p) % 2 == 0
    }
    // Read one hex digit, advancing the offset, or report an error.
    def udigit: Int = {
      if (charOffset >= buf.length) {
        // Since the positioning code is very insistent about throwing exceptions,
        // we have to decrement the position so our error message can be seen, since
        // we are one past EOF. This happens with e.g. val x = \\ u 1 <EOF>
        readerError("incomplete unicode escape", at = charOffset - 1)
        SU
      }
      else {
        val d = digit2int(buf(charOffset), 16)
        if (d >= 0) charOffset += 1
        else readerError("error in unicode escape", at = charOffset)
        d
      }
    }
    // Save the end of the current token (exclusive) in case this method
    // advances the offset more than once. See UnicodeEscapeSuite and
    // https://github.com/scalacenter/scalafix/issues/593 for
    // an example of why this is necessary.
    val end = charOffset
    if (charOffset < buf.length && buf(charOffset) == 'u' && evenSlashPrefix) {
      // The spec allows repeated 'u's: \uu...XXXX is still a single escape.
      do charOffset += 1
      while (charOffset < buf.length && buf(charOffset) == 'u')
      val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit
      lastUnicodeOffset = charOffset
      isUnicodeEscape = true
      ch = code.toChar
    }
    // restore the charOffset to the saved position
    if (end < buf.length) charOffset = end
  }

  /** replace CR;LF by LF */
  // NOTE(review): only an escaped LF following the CR is handled here; a literal
  // LF after CR falls through the default case -- confirm this is intended.
  private def skipCR() =
    if (ch == CR && charOffset < buf.length)
      buf(charOffset) match {
        case '\\\\' =>
          if (lookaheadReader.getu == LF)
            potentialUnicode()
        case _ =>
      }

  /** Handle line ends */
  private def potentialLineEnd() {
    if (ch == LF || ch == FF) {
      if (!dialect.allowMultilinePrograms) {
        readerError("line breaks are not allowed in single-line quasiquotes", at = charOffset - 1)
      }
      // Shift line bookkeeping: the new line starts right after this character.
      lastLineStartOffset = lineStartOffset
      lineStartOffset = charOffset
    }
  }

  /** A new reader that takes off at the current character position */
  def lookaheadReader = new CharArrayLookaheadReader

  class CharArrayLookaheadReader extends CharArrayReader(input, dialect, reporter) {
    charOffset = self.charOffset
    ch = self.ch
    /** A mystery why CharArrayReader.nextChar() returns Unit */
    def getc() = { nextChar() ; ch }
    // Read a character that must start with a backslash, decoding any escape.
    def getu() = { require(buf(charOffset) == '\\\\') ; ch = '\\\\' ; charOffset += 1 ; potentialUnicode() ; ch }
  }
}
| olafurpg/scalameta | scalameta/tokenizers/shared/src/main/scala/scala/meta/internal/tokenizers/CharArrayReader.scala | Scala | bsd-3-clause | 4,542 |
// NOTE(review): per the repository path this file lives under test/files/neg, i.e.
// it is a negative compilation fixture (t8777) -- the override below is expected
// to be rejected by the compiler, so it must be kept exactly as written.
trait Foo extends scala.tools.nsc.Global {
  override def newCodePrinter(out: java.io.PrintWriter, tree: Tree, printRootPkg: Boolean): TreePrinter =
    super.newCodePrinter(out, tree, printRootPkg)
}
| martijnhoekstra/scala | test/files/neg/t8777.scala | Scala | apache-2.0 | 201 |
package net.xylophones.planetoid.game.logic
import net.xylophones.planetoid.game.model._
/**
 * Resolves collisions for one game tick: a player whose rocket hits the planet,
 * the opposing rocket, or an opposing missile loses a life and concedes a point,
 * and both players' missile lists are cleared.
 */
class GameCollisionUpdater(collisionCalculator: CollisionCalculator) extends GameModelResultUpdater {

  // Outcome of a collision check: whether anything collided and which missiles hit.
  private case class CollisionResult(val isCollision: Boolean = false,
                                     val impactMissiles: IndexedSeq[Missile] = IndexedSeq.empty)

  private object CollisionResult {
    def empty = CollisionResult()
  }

  override def update(initialResult: GameModelUpdateResult, physics: GamePhysics, playerInputs: IndexedSeq[PlayerInput]): GameModelUpdateResult = {
    val model = initialResult.model
    val player1 = model.players.p1
    val player2 = model.players.p2

    // Each rocket is checked against the opponent's missiles, the planet, and the opponent.
    val p1Result = checkMissileOrPlanetCollision(player1, model.players.p2.missiles, model.planet, player2)
    val p2Result = checkMissileOrPlanetCollision(player2, model.players.p1.missiles, model.planet, player1)

    if (p1Result.isCollision || p2Result.isCollision) {
      // A colliding player loses one life; the opponent gains one point.
      val p1Lives = if (p1Result.isCollision) player1.numLives - 1
      else player1.numLives
      val p2Points = if (p1Result.isCollision) player2.points + 1
      else player2.points
      val p2Lives = if (p2Result.isCollision) player2.numLives - 1
      else player2.numLives
      val p1Points = if (p2Result.isCollision) player1.points + 1
      else player1.points

      // All in-flight missiles are discarded whenever any collision occurs.
      val p1 = Player(player1.rocket, p1Lives, p1Points, Vector.empty)
      val p2 = Player(player2.rocket, p2Lives, p2Points, Vector.empty)

      val player1Event = if (p1Lives != player1.numLives) Some(GameEvent.Player1LoseLife)
      else None
      val player2Event = if (p2Lives != player2.numLives) Some(GameEvent.Player2LoseLife)
      else None

      val newModel = model.copy(players = Players(p1, p2))
      // NOTE(review): a generic PlayerLoseLife event is appended unconditionally here
      // in addition to the player-specific events -- confirm consumers expect both.
      val events = initialResult.events ++ player1Event ++ player2Event + GameEvent.PlayerLoseLife
      new GameModelUpdateResult(newModel, events)
    } else {
      new GameModelUpdateResult(model, initialResult.events)
    }
  }

  // A player collides when hit by any opposing missile, the planet, or the opposing rocket.
  private def checkMissileOrPlanetCollision(player: Player, missiles: IndexedSeq[Missile], planet: Planet, opponent: Player) = {
    val missileResult = checkMissileCollision(player, missiles)
    val planetResult = checkTwoCircularObjectCollision(player.rocket, planet)
    val playerResults = checkTwoCircularObjectCollision(player.rocket, opponent.rocket)
    mergeCollisionResults(playerResults, mergeCollisionResults(missileResult, planetResult))
  }

  private def checkMissileCollision(player: Player, missiles: IndexedSeq[Missile]) = {
    val collidingMissiles = missiles.filter((m: Missile) => collisionCalculator.isCollision(m, player.rocket))
    if (collidingMissiles.nonEmpty) CollisionResult(isCollision = true, impactMissiles = collidingMissiles)
    else CollisionResult.empty
  }

  private def checkTwoCircularObjectCollision(c1: Circular, c2: Circular): CollisionResult = {
    val isColliding = collisionCalculator.isCollision(c1, c2)
    if (isColliding) CollisionResult(isCollision = true)
    else CollisionResult.empty
  }

  // Combines two results: any collision wins; missile lists concatenate when both collide.
  private def mergeCollisionResults(r1: CollisionResult, r2: CollisionResult) = {
    if (r1.isCollision && !r2.isCollision) {
      r1
    } else if (r2.isCollision && !r1.isCollision) {
      r2
    } else if (r1.isCollision && r2.isCollision) {
      CollisionResult(isCollision = true, r1.impactMissiles ++ r2.impactMissiles)
    } else {
      CollisionResult.empty
    }
  }
}
| wjsrobertson/planetoid3d | game/src/main/scala/net/xylophones/planetoid/game/logic/GameCollisionUpdater.scala | Scala | apache-2.0 | 3,522 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.util
import java.lang.{Iterable => JIterable}
import java.util.{Map => JMap}
import scala.jdk.CollectionConverters._
/**
* Immutable wrappers for [[java.util.Map]]. Java `Map`s are mutable and `.asJava` returns
* `mutable.Map[K, V]` which is inconsistent and not idiomatic Scala. When wrapping Beam API, in
* many cases the underlying [[java.util.Map]] is immutable in nature and it's safe to wrap them
* with this.
*/
/**
 * Immutable wrappers for [[java.util.Map]]. Java `Map`s are mutable and `.asJava` returns
 * `mutable.Map[K, V]` which is inconsistent and not idiomatic Scala. When wrapping Beam API, in
 * many cases the underlying [[java.util.Map]] is immutable in nature and it's safe to wrap them
 * with this.
 */
private[scio] object JMapWrapper {

  /**
   * Wraps a Java multi-map (`Map[A, Iterable[B]]`) as an immutable Scala map.
   * Lookups and iteration are lazy views over the underlying Java map; only
   * `removed`/`updated` materialize an eager copy.
   */
  def ofMultiMap[A, B](self: JMap[A, JIterable[B]]): Map[A, Iterable[B]] =
    new Map[A, Iterable[B]] {
      // Removal cannot be expressed lazily over the Java map, so build an eager
      // copy that skips the removed key.
      override def removed(key: A): Map[A, Iterable[B]] = {
        val b = Map.newBuilder[A, Iterable[B]]
        self.asScala.foreach { case (k, v) =>
          if (k != key) b += k -> v.asScala
        }
        b.result()
      }

      // Lazy lookup against the underlying j.u.Map; Java `null` means absent.
      override def get(key: A): Option[Iterable[B]] =
        Option(self.get(key)).map(_.asScala)

      override def iterator: Iterator[(A, Iterable[B])] =
        self.asScala.iterator.map { case (k, vs) => (k, vs.asScala) }

      // Like `removed`, updating requires an eager copy of the entries.
      override def updated[V1 >: Iterable[B]](key: A, value: V1): Map[A, V1] =
        self.asScala
          .foldLeft(Map.empty[A, V1]) { case (acc, (k, v)) => acc.updated(k, v.asScala) }
          .updated(key, value)
    }

  /** Wraps a Java `Map[K, V]` as an immutable Scala map with lazy reads. */
  def of[K, V](self: JMap[K, V]): Map[K, V] =
    new Map[K, V] {
      // Eager copy of the entries minus the removed key.
      override def removed(key: K): Map[K, V] =
        self.asScala.toMap - key

      // Lazy lookup; Java `null` signals a missing key.
      override def get(key: K): Option[V] = Option(self.get(key))

      override def iterator: Iterator[(K, V)] = self.asScala.iterator

      // Eager copy of the entries plus the new binding.
      override def updated[V1 >: V](key: K, value: V1): Map[K, V1] =
        Map.from(self.asScala).updated(key, value)
    }
}
| spotify/scio | scio-core/src/main/scala-2.13/com/spotify/scio/util/JMapWrapper.scala | Scala | apache-2.0 | 2,469 |
package feiyu.com.util
class ScalaHelloWorld {
def greetFromScala() {
val javaHW = new JavaHelloWorld
javaHW.greetingFromJava()
}
} | faustineinsun/SparkPlayground | Java/src/main/scala/feiyu/com/util/ScalaHelloWorld.scala | Scala | apache-2.0 | 153 |
package com.twitter.finagle.factory
import com.twitter.finagle._
import com.twitter.finagle.loadbalancer.LoadBalancerFactory
import com.twitter.finagle.naming.NameInterpreter
import com.twitter.finagle.param.{Label, Stats}
import com.twitter.finagle.stats.{StatsReceiver, NullStatsReceiver}
import com.twitter.finagle.tracing.Trace
import com.twitter.finagle.util.{Drv, Rng, Showable}
import com.twitter.util._
import scala.collection.immutable
/**
* Proxies requests to the current definiton of 'name', queueing
* requests while it is pending.
*/
private class DynNameFactory[Req, Rep](
    name: Activity[NameTree[Name.Bound]],
    cache: ServiceFactoryCache[NameTree[Name.Bound], Req, Rep])
  extends ServiceFactory[Req, Rep] {

  // Resolution lifecycle:
  //   Pending -- no binding yet; service acquisitions are queued.
  //   Named   -- a binding exists; acquisitions go through the factory cache.
  //   Failed  -- the last resolution failed; acquisitions fail fast.
  //   Closed  -- this factory was closed; acquisitions are rejected.
  private sealed trait State
  private case class Pending(q: immutable.Queue[(ClientConnection, Promise[Service[Req, Rep]])])
    extends State
  private case class Named(name: NameTree[Name.Bound]) extends State
  private case class Failed(exc: Throwable) extends State
  private case class Closed() extends State

  override def status = state match {
    case Pending(_) => Status.Busy
    case Named(name) => cache.status(name)
    case Failed(_) | Closed() => Status.Closed
  }

  @volatile private[this] var state: State = Pending(immutable.Queue.empty)

  // Subscription to name resolution updates; transitions drain or fail the queue.
  private[this] val sub = name.run.changes respond {
    case Activity.Ok(name) => synchronized {
      state match {
        case Pending(q) =>
          state = Named(name)
          // Satisfy every queued acquisition against the newly bound name.
          for ((conn, p) <- q) p.become(apply(conn))
        case Failed(_) | Named(_) =>
          state = Named(name)
        case Closed() =>
      }
    }

    case Activity.Failed(exc) => synchronized {
      state match {
        case Pending(q) =>
          // wrap the exception in a Failure.Naming, so that it can
          // be identified for tracing
          for ((_, p) <- q) p.setException(Failure.adapt(exc, Failure.Naming))
          state = Failed(exc)
        case Failed(_) =>
          // if already failed, just update the exception; the promises
          // must already be satisfied.
          state = Failed(exc)
        case Named(_) | Closed() =>
      }
    }

    case Activity.Pending =>
  }

  def apply(conn: ClientConnection): Future[Service[Req, Rep]] = {
    state match {
      case Named(name) => cache(name, conn)

      // wrap the exception in a Failure.Naming, so that it can
      // be identified for tracing
      case Failed(exc) => Future.exception(Failure.adapt(exc, Failure.Naming))

      // don't trace these, since they're not a namer failure
      case Closed() => Future.exception(new ServiceClosedException)

      case Pending(_) => applySync(conn)
    }
  }

  // Slow path: enqueue the acquisition while resolution is still pending. The
  // entry is removed from the queue if the caller interrupts the promise.
  private[this] def applySync(conn: ClientConnection): Future[Service[Req, Rep]] = synchronized {
    state match {
      case Pending(q) =>
        val p = new Promise[Service[Req, Rep]]
        val el = (conn, p)
        p setInterruptHandler { case exc =>
          synchronized {
            state match {
              case Pending(q) if q contains el =>
                state = Pending(q filter (_ != el))
                p.setException(new CancelledConnectionException(exc))
              case _ =>
            }
          }
        }
        state = Pending(q enqueue el)
        p

      // The state changed between the unsynchronized check in apply() and
      // acquiring the lock here; retry via the fast path.
      case other => apply(conn)
    }
  }

  def close(deadline: Time) = {
    val prev = synchronized {
      val prev = state
      state = Closed()
      prev
    }
    // Fail any acquisitions still queued at close time.
    prev match {
      case Pending(q) =>
        val exc = new ServiceClosedException
        for ((_, p) <- q)
          p.setException(exc)
      case _ =>
    }
    sub.close(deadline)
  }
}
/**
* Builds a factory from a [[com.twitter.finagle.NameTree]]. Leaves
* are taken from the given
* [[com.twitter.finagle.factory.ServiceFactoryCache]]; Unions become
* random weighted distributors.
*/
private[finagle] object NameTreeFactory {
  def apply[Key, Req, Rep](
    path: Path,
    tree: NameTree[Key],
    factoryCache: ServiceFactoryCache[Key, Req, Rep],
    rng: Rng = Rng.threadLocal
  ): ServiceFactory[Req, Rep] = {

    // Shared terminal node for Neg/Fail/Empty resolutions.
    lazy val noBrokersAvailableFactory = Failed(new NoBrokersAvailableException(path.show))

    // Terminal factory: every acquisition fails with `exn`.
    case class Failed(exn: Throwable) extends ServiceFactory[Req, Rep] {
      val service: Future[Service[Req, Rep]] = Future.exception(exn)

      def apply(conn: ClientConnection) = service
      override def status = Status.Closed
      def close(deadline: Time) = Future.Done
    }

    // Leaf: delegate acquisitions to the shared factory cache, keyed by `key`.
    case class Leaf(key: Key) extends ServiceFactory[Req, Rep] {
      def apply(conn: ClientConnection) = factoryCache.apply(key, conn)
      override def status = factoryCache.status(key)
      def close(deadline: Time) = Future.Done
    }

    // Union: per-acquisition, pick a child at random according to the discrete
    // distribution `drv` built from the union's weights.
    case class Weighted(
      drv: Drv,
      factories: Seq[ServiceFactory[Req, Rep]]
    ) extends ServiceFactory[Req, Rep] {
      def apply(conn: ClientConnection) = factories(drv(rng)).apply(conn)

      override def status = Status.worstOf[ServiceFactory[Req, Rep]](factories, _.status)
      def close(deadline: Time) = Future.Done
    }

    def factoryOfTree(tree: NameTree[Key]): ServiceFactory[Req, Rep] =
      tree match {
        case NameTree.Neg | NameTree.Fail | NameTree.Empty => noBrokersAvailableFactory
        case NameTree.Leaf(key) => Leaf(key)

        // it's an invariant of Namer.bind that it returns no Alts
        case NameTree.Alt(_*) => Failed(new IllegalArgumentException("NameTreeFactory"))

        case NameTree.Union(weightedTrees@_*) =>
          val (weights, trees) = weightedTrees.unzip { case NameTree.Weighted(w, t) => (w, t) }
          Weighted(Drv.fromWeights(weights), trees.map(factoryOfTree))
      }

    factoryOfTree(tree)
  }
}
/**
* A factory that routes to the local binding of the passed-in
* [[com.twitter.finagle.Path Path]]. It calls `newFactory` to mint a
* new [[com.twitter.finagle.ServiceFactory ServiceFactory]] for novel
* name evaluations.
*
* A three-level caching scheme is employed for efficiency:
*
* First, the [[ServiceFactory]] for a [[Path]] is cached by the local
* [[com.twitter.finagle.Dtab Dtab]]. This permits sharing in the
* common case that no local [[Dtab]] is given. (It also papers over the
* mutability of [[Dtab.base]].)
*
* Second, the [[ServiceFactory]] for a [[Path]] (relative to a
* [[Dtab]]) is cached by the [[com.twitter.finagle.NameTree
* NameTree]] it is bound to by that [[Dtab]]. Binding a path results
* in an [[com.twitter.util.Activity Activity]], so this cache permits
* sharing when the same tree is returned in different updates of the
* [[Activity]]. (In particular it papers over nuisance updates of the
* [[Activity]] where the value is unchanged.)
*
* Third, the ServiceFactory for a [[com.twitter.finagle.Name.Bound
* Name.Bound]] appearing in a [[NameTree]] is cached by its
* [[Name.Bound]]. This permits sharing when the same [[Name.Bound]]
* appears in different [[NameTree]]s (or the same [[NameTree]]
* resulting from different bindings of the [[Path]]).
*
* @bug This is far too complicated, though it seems necessary for
* efficiency when namers are occasionally overriden.
*
* @bug 'status' has a funny definition.
*/
private[finagle] class BindingFactory[Req, Rep](
    path: Path,
    newFactory: Name.Bound => ServiceFactory[Req, Rep],
    baseDtab: () => Dtab = BindingFactory.DefaultBaseDtab,
    statsReceiver: StatsReceiver = NullStatsReceiver,
    maxNameCacheSize: Int = 8,
    maxNameTreeCacheSize: Int = 8,
    maxNamerCacheSize: Int = 4,
    record: (String, String) => Unit = Trace.recordBinary)
  extends ServiceFactory[Req, Rep] {

  // NOTE(review): `tree` appears unused within this class -- candidate for removal.
  private[this] val tree = NameTree.Leaf(path)

  // Innermost cache (see class comment): one factory per Name.Bound, annotating
  // each acquisition with the bound name for tracing.
  private[this] val nameCache =
    new ServiceFactoryCache[Name.Bound, Req, Rep](
      bound => new ServiceFactoryProxy(newFactory(bound)) {
        private val boundShow = Showable.show(bound)
        override def apply(conn: ClientConnection) = {
          record("namer.name", boundShow)
          super.apply(conn)
        }
      },
      statsReceiver.scope("namecache"),
      maxNameCacheSize)

  // Middle cache: one factory per bound NameTree, built over the name cache.
  private[this] val nameTreeCache =
    new ServiceFactoryCache[NameTree[Name.Bound], Req, Rep](
      tree => new ServiceFactoryProxy(NameTreeFactory(path, tree, nameCache)) {
        private val treeShow = tree.show
        override def apply(conn: ClientConnection) = {
          record("namer.tree", treeShow)
          super.apply(conn)
        }
      },
      statsReceiver.scope("nametreecache"),
      maxNameTreeCacheSize)

  // Outermost cache: one DynNameFactory per (base, local) dtab pair. Records
  // binding latency and annotates naming failures on the way out.
  private[this] val dtabCache = {
    val latencyStat = statsReceiver.stat("bind_latency_us")
    val newFactory: ((Dtab, Dtab)) => ServiceFactory[Req, Rep] = { case (baseDtab, localDtab) =>
      val factory = new DynNameFactory(
        NameInterpreter.bind(baseDtab ++ localDtab, path),
        nameTreeCache)

      new ServiceFactoryProxy(factory) {
        private val pathShow = path.show
        private val baseDtabShow = baseDtab.show
        override def apply(conn: ClientConnection) = {
          val elapsed = Stopwatch.start()
          record("namer.path", pathShow)
          record("namer.dtab.base", baseDtabShow)
          // dtab.local is annotated on the client & server tracers.

          super.apply(conn) rescue {
            // DynNameFactory wraps naming exceptions for tracing
            case f@Failure(maybeExc) if f.isFlagged(Failure.Naming) =>
              record("namer.failure", maybeExc.getOrElse(f.show).getClass.getName)
              Future.exception(f)

            // we don't have the dtabs handy at the point we throw
            // the exception; fill them in on the way out
            case e: NoBrokersAvailableException =>
              Future.exception(new NoBrokersAvailableException(e.name, baseDtab, localDtab))
          } respond { _ =>
            latencyStat.add(elapsed().inMicroseconds)
          }
        }
      }
    }

    new ServiceFactoryCache[(Dtab, Dtab), Req, Rep](
      newFactory,
      statsReceiver.scope("dtabcache"),
      maxNamerCacheSize)
  }

  def apply(conn: ClientConnection): Future[Service[Req, Rep]] =
    dtabCache((baseDtab(), Dtab.local), conn)

  def close(deadline: Time) =
    Closable.sequence(dtabCache, nameTreeCache, nameCache).close(deadline)

  override def status = dtabCache.status((baseDtab(), Dtab.local))
}
object BindingFactory {
  val role = Stack.Role("Binding")

  /**
   * A class eligible for configuring a
   * [[com.twitter.finagle.Stackable]]
   * [[com.twitter.finagle.factory.BindingFactory]] with a destination
   * [[com.twitter.finagle.Name]] to bind.
   */
  case class Dest(dest: Name) {
    def mk(): (Dest, Stack.Param[Dest]) =
      (this, Dest.param)
  }
  object Dest {
    // Default destination is the "/$/fail" sentinel path.
    implicit val param = Stack.Param(Dest(Name.Path(Path.read("/$/fail"))))
  }

  private[finagle] val DefaultBaseDtab = () => Dtab.base

  /**
   * A class eligible for configuring a [[com.twitter.finagle.Stackable]]
   * [[com.twitter.finagle.factory.BindingFactory]] with a
   * [[com.twitter.finagle.Dtab]].
   */
  case class BaseDtab(baseDtab: () => Dtab) {
    def mk(): (BaseDtab, Stack.Param[BaseDtab]) =
      (this, BaseDtab.param)
  }
  object BaseDtab {
    implicit val param = Stack.Param(BaseDtab(DefaultBaseDtab))
  }

  /**
   * Base type for BindingFactory modules. Implementers may handle
   * bound residual paths in a protocol-specific way.
   *
   * The module creates a new `ServiceFactory` based on the module
   * above it for each distinct [[com.twitter.finagle.Name.Bound]]
   * resolved from `BindingFactory.Dest` (with caching of previously
   * seen `Name.Bound`s).
   */
  private[finagle] trait Module[Req, Rep] extends Stack.Module[ServiceFactory[Req, Rep]] {
    val role = BindingFactory.role
    val description = "Bind destination names to endpoints"
    val parameters = Seq(
      implicitly[Stack.Param[BaseDtab]],
      implicitly[Stack.Param[Dest]],
      implicitly[Stack.Param[Label]],
      implicitly[Stack.Param[Stats]])

    /**
     * A request filter that is aware of the bound residual path.
     *
     * The returned filter is applied around the ServiceFactory built from the rest of the stack.
     */
    protected[this] def boundPathFilter(path: Path): Filter[Req, Rep, Req, Rep]

    def make(params: Stack.Params, next: Stack[ServiceFactory[Req, Rep]]) = {
      val Label(label) = params[Label]
      val Stats(stats) = params[Stats]
      val Dest(dest) = params[Dest]

      // Builds the downstream stack for a concretely bound name, wrapped in the
      // protocol-specific residual-path filter.
      def newStack(errorLabel: String, bound: Name.Bound) = {
        val client = next.make(
          params +
            // replace the possibly unbound Dest with the definitely bound
            // Dest because (1) it's needed by AddrMetadataExtraction and
            // (2) it seems disingenuous not to.
            Dest(bound) +
            LoadBalancerFactory.Dest(bound.addr) +
            LoadBalancerFactory.ErrorLabel(errorLabel))

        boundPathFilter(bound.path) andThen client
      }

      // Already-bound names skip path binding entirely; unbound paths resolve
      // through a BindingFactory against the configured base dtab.
      val factory = dest match {
        case bound@Name.Bound(addr) => newStack(label, bound)

        case Name.Path(path) =>
          val BaseDtab(baseDtab) = params[BaseDtab]
          new BindingFactory(path, newStack(path.show, _), baseDtab, stats.scope("namer"))
      }

      Stack.Leaf(role, factory)
    }
  }

  /**
   * Creates a [[com.twitter.finagle.Stackable]]
   * [[com.twitter.finagle.factory.BindingFactory]].
   *
   * Ignores bound residual paths.
   */
  private[finagle] def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] =
    new Module[Req, Rep] {
      private[this] val f = Filter.identity[Req, Rep]
      protected[this] def boundPathFilter(path: Path) = f
    }
}
| kingtang/finagle | finagle-core/src/main/scala/com/twitter/finagle/factory/BindingFactory.scala | Scala | apache-2.0 | 13,618 |
/*
Copyright (c) 2016, Rice University
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Rice University
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.apache.spark.rdd.cl
import java.util.concurrent.ConcurrentLinkedQueue
import scala.reflect.ClassTag
import org.apache.spark.{Partition, TaskContext}
import org.apache.spark.rdd._
/**
 * RDD that applies `f` to the values of a key-value parent RDD, preserving keys.
 * When `useSwat` is true, value computation is routed through a SWAT/OpenCL
 * processor; otherwise a plain Scala iterator is used.
 */
class CLMappedValuesRDD[K: ClassTag, V: ClassTag, U : ClassTag](val prev: RDD[Tuple2[K, V]],
    val f: V => U, val useSwat : Boolean) extends RDD[Tuple2[K, U]](prev) {
  override def getPartitions: Array[Partition] = firstParent[Tuple2[K, V]].partitions

  override def compute(split: Partition, context: TaskContext) :
      Iterator[Tuple2[K, U]] = {
    val nested = firstParent[Tuple2[K, V]].iterator(split, context)

    if (useSwat) {
      /*
       * If bufferedKeysIter == bufferedKeysLimit && bufferedKeysFull, then the
       * buffer is full. Otherwise, it is empty.
       */
      // Keys are stashed here as values are pulled by the processor, then
      // re-paired with outputs below. NOTE(review): correctness relies on
      // PullCLRDDProcessor emitting exactly one output per input, in input
      // order -- confirm that guarantee.
      val bufferedKeys : ConcurrentLinkedQueue[K] = new ConcurrentLinkedQueue[K]
      /*
       * These next and hasNext methods will be called by a separate thread, so
       * they must be thread-safe with respect to this thread.
       */
      val valueIter : Iterator[V] = new Iterator[V] {
        override def next() : V = {
          val n : Tuple2[K, V] = nested.next
          bufferedKeys.add(n._1)
          n._2
        }

        override def hasNext() : Boolean = {
          nested.hasNext
        }
      }
      val valueProcessor : CLRDDProcessor[V, U] = new PullCLRDDProcessor[V, U](
        valueIter, f, context, firstParent[Tuple2[K, V]].id, split.index)

      return new Iterator[Tuple2[K, U]] {
        def next() : Tuple2[K, U] = {
          val nextVal : U = valueProcessor.next
          // poll returns null when empty: a missing key means the processor got
          // out of step with the key buffer.
          val nextKey : K = bufferedKeys.poll
          assert(nextKey != null)
          (nextKey, nextVal)
        }

        def hasNext() : Boolean = {
          valueProcessor.hasNext
        }
      }
    } else {
      // CPU fallback: map values lazily with a plain iterator.
      return new Iterator[Tuple2[K, U]] {
        def next() : Tuple2[K, U] = {
          val v : Tuple2[K, V] = nested.next
          (v._1, f(v._2))
        }

        def hasNext() : Boolean = {
          nested.hasNext
        }
      }
    }
  }
}
| agrippa/spark-swat | swat/src/main/scala/org/apache/spark/rdd/cl/CLMappedValuesRDD.scala | Scala | bsd-3-clause | 3,569 |
package sisdn.test
import org.scalatest.{FlatSpec, Matchers}
import sisdn.admission.utils._
class AuthorizationSpec extends FlatSpec with Matchers {
  // TODO(review): this suite contains no tests; the commented-out (and incomplete)
  // line below suggests an AuthorizeAdmission test was planned but never written.
  //val auth = AuthorizeAdmission(RequestContext.)
}
| mhashimm/sisdn-admission-service | src/test/scala/sisdn/test/AuthorizationSpec.scala | Scala | agpl-3.0 | 204 |
package nz.wicker.autoencoder.neuralnet.rbm
import nz.wicker.autoencoder.math.matrix.Mat
/**
* This class provides settings necessary for rbm training.
* It stores information about number of epochs, dependence of
* the momentum and number of steps of the gibbs sampling on the
* current epoch, as well as functions that determine how big
* weights will be penalized.
*/
abstract class RbmTrainingConfiguration(
  val epochs: Int,
  val minibatchSize: Int,
  val learningRate: Double,
  val initialBiasScaling: Double,
  val initialWeightScaling: Double,
  val initialMomentum: Double,
  val finalMomentum: Double,
  val initialGibbsSamplingSteps: Int,
  val finalGibbsSamplingSteps: Int,
  val weightPenaltyFactor: Double,
  val sampleVisibleUnitsDeterministically: Boolean
) {
  /** Momentum to use in the given epoch (expected to move from
    * `initialMomentum` towards `finalMomentum`; the exact schedule is left to
    * concrete subclasses). */
  def momentum(epoch: Int): Double

  /** Number of Gibbs sampling steps to run in the given epoch. */
  def gibbsSamplingSteps(epoch: Int): Int

  /** Penalty term for large weights: uniform scaling of the whole weight matrix
    * by `weightPenaltyFactor`. */
  def weightPenalty(weights: Mat): Mat = weights * weightPenaltyFactor
}
| joergwicker/autoencoder | src/main/scala/nz/wicker/autoencoder/neuralnet/rbm/RbmTrainingConfiguration.scala | Scala | gpl-3.0 | 941 |
import scala.concurrent.Future
import scala.language.implicitConversions
import scavenger.categories.formalccc
/** Contains the API and an Akka-backend implementation of the
* Scavenger framework.
*/
package object scavenger {
  //type Identifier = formalccc.Elem

  // Three castings into the canonical form of a morphism.
  // NOTE(review): this implicit deliberately throws as soon as it is applied: an
  // `X => Future[Y]` necessarily closes over an ExecutionContext, which cannot be
  // serialized and shipped to workers. The commented-out line below shows the
  // naive implementation that is being forbidden.
  implicit def withoutContextToFull[X, Y](f: X => Future[Y]):
    ((X, Context) => Future[Y]) = {
    // case (x, ctx) => f(x)
    throw new UnsupportedOperationException(
      "Attempted to use a function of type X => Future[Y] as Scavenger-Algorithm. " +
      "Notice that the definition of the function closes over some execution context, " +
      "which can not be easily serialized and sent over the wire. Please change the " +
      "type to (X, Context) => Future[Y], and use the executionContext provided by " +
      "the Scavenger `Context`."
    )
  }

  // Lifts a synchronous (X, Context) => Y into the asynchronous canonical form,
  // running it on the execution context carried by the Scavenger `Context`.
  implicit def synchronousToFull[X, Y](f: (X, Context) => Y):
    ((X, Context) => Future[Y]) = {
    case (x, ctx) => Future(f(x, ctx))(ctx.executionContext)
  }

  // Lifts a plain X => Y into the canonical form.
  implicit def simpleToFull[X, Y](f: X => Y):
    ((X, Context) => Future[Y]) = {
    case (x, ctx) => Future(f(x))(ctx.executionContext)
  }

  // Generic atomic algorithm constructor that builds
  // Atomic algorithms from functions.
  private def atomicAlgorithmConstructor[X, Y](d: Difficulty)(
    algorithmId: String, f: (X, Context) => Future[Y]
  ): Algorithm[X, Y] = new AtomicAlgorithm[X, Y] {
    def identifier = formalccc.Atom(algorithmId)
    def difficulty = d
    def apply(x: X, ctx: Context) = f(x, ctx)
  }

  // Three different constructors for atomic algorithms

  /** Constructs a cheap atomic algorithm with specified identifier */
  def cheap[X, Y](algorithmId: String)(f: (X, Context) => Future[Y]):
    Algorithm[X, Y] = atomicAlgorithmConstructor(Cheap)(algorithmId, f)

  /** Constructs an expensive atomic algorithm with specified identifier */
  def expensive[X, Y](algorithmId: String)(f: (X, Context) => Future[Y]):
    Algorithm[X, Y] = atomicAlgorithmConstructor(Expensive)(algorithmId, f)

  /** Constructs a parallelizable atomic algorithm with specified identifier */
  def parallel[X, Y](algorithmId: String)(f: (X, Context) => Future[Y]):
    Algorithm[X, Y] = atomicAlgorithmConstructor(Parallel)(algorithmId, f)

  /** Provides implicit `CanApplyTo`s
    * for the `apply` method of `Computation` that allows to
    * build `Y`-valued computations from `X => Y`-valued and `X`-valued ones.
    */
  implicit def canApplyFunctionToArg[X, Y]: CanApplyTo[X => Y, X, Y] =
    new CanApplyTo[X => Y, X, Y] {
      def apply(f: Computation[X => Y], x: Computation[X], d: Difficulty):
        Computation[Y] =
        Eval[X, Y](d)(ComputationPair(f, x))
    }

  // Pairs two computations into a single tuple-valued computation.
  implicit def canBuildCouple[A, B]: CanBuildProduct[A, B, (A, B)] =
    new CanBuildProduct[A, B, (A, B)] {
      def apply(a: Computation[A], b: Computation[B]): Computation[(A, B)] =
        ComputationPair(a, b)
    }
}
| tyukiand/scavenger_2_x | src/main/scala/scavenger/package.scala | Scala | gpl-3.0 | 2,975 |
/*
* Copyright (C) 2010-2014 GRNET S.A.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gr.grnet.pithosj.core.command
import com.twitter.io.Buf
import gr.grnet.common.date.ParsedDate
/** Result data of the "get object" (v2) Pithos command.
  *
  * Underscored field names mirror HTTP response header names
  * (e.g. `Content_Type` ~ `Content-Type`, `X_Object_UUID` ~ `X-Object-UUID`).
  * NOTE(review): presumably these are populated from the object-storage
  * service response — confirm against the command that constructs this
  * value. Optional fields are `None` when no value was available.
  */
case class GetObject2ResultData(
  objBuf: Buf,           // the object's raw bytes
  container: String,     // owning container
  path: String,          // object path inside the container
  ETag: Option[String],
  Content_Type: Option[String],
  Content_Length: Option[Long],
  Last_Modified: Option[ParsedDate],
  X_Object_Hash: Option[String],
  X_Object_Modified_By: Option[String],
  X_Object_Version_Timestamp: Option[ParsedDate],
  X_Object_UUID: Option[String],
  X_Object_Version: Option[String]
)
| grnet/pithos-j | src/main/scala/gr/grnet/pithosj/core/command/GetObject2ResultData.scala | Scala | gpl-3.0 | 1,203 |
package model
import model.impl.{Player, PlayerNameEnum, Tile, TileNameEnum}
import org.scalatest.{FlatSpec, Matchers}
import util.position.Position
class PlayerSpec extends FlatSpec with Matchers {

  /** Builds a GOLD player owning the given tiles.
    *
    * Factored out because every test repeated
    * `new Player(PlayerNameEnum.GOLD, tiles)` verbatim.
    */
  private def goldPlayer(tiles: Set[Tile]): Player =
    new Player(PlayerNameEnum.GOLD, tiles)

  "A Player" should "have a name" in {
    val player: Player = goldPlayer(Set())
    player.name should be(PlayerNameEnum.GOLD)
  }
  // (typo fixed in the test name: "tales" -> "tiles")
  it should "have a set of tiles" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 2)),
      new Tile(TileNameEnum.HORSE, new Position(1, 3)))
    val player: Player = goldPlayer(tiles)
    player.getTiles should contain theSameElementsAs tiles
  }
  "toString" should "have given output" in {
    val playerString: String = "Gold:Set(R:{1,2})"
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 2)))
    val player: Player = goldPlayer(tiles)
    player.toString should be(playerString)
  }
  it should "say if has a tile on a given position" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 2)))
    val player: Player = goldPlayer(tiles)
    player.isATileThere(new Position(1, 2)) should be(true)
    player.isATileThere(new Position(1, 3)) should be(false)
  }
  it should "give the name of the tile on a position if there is one" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 1)))
    val player: Player = goldPlayer(tiles)
    player.getTileName(new Position(1, 1)) should be(TileNameEnum.RABBIT)
    player.getTileName(new Position(1, 2)) should be(TileNameEnum.NONE)
  }
  "equal" should "objects, if the tile name and position is the same" in {
    val tiles1: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 2)))
    val player1: Player = goldPlayer(tiles1)
    val tiles2: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 2)))
    val player2: Player = goldPlayer(tiles2)
    player1 should be(player2)
  }
  it should "not equal, if the tile name or position is different" in {
    val tiles1: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 2)),
      new Tile(TileNameEnum.RABBIT, new Position(1, 3)))
    val player1: Player = goldPlayer(tiles1)
    val tiles2: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 2)))
    val player2: Player = goldPlayer(tiles2)
    player1 should not be player2
  }
  it should "not equal, if its not a player object" in {
    val tiles1: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 2)),
      new Tile(TileNameEnum.RABBIT, new Position(1, 3)))
    val player1: Player = goldPlayer(tiles1)
    player1 should not be 1
  }
  "move" should "move a tile to a new position" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 1)))
    val player: Player = goldPlayer(tiles)
    player.getTileName(new Position(1, 1)) should be(TileNameEnum.RABBIT)
    player.getTileName(new Position(1, 2)) should be(TileNameEnum.NONE)
    player.moveTile(new Position(1, 1), new Position(1, 2)) should be(true)
    player.getTileName(new Position(1, 1)) should be(TileNameEnum.NONE)
    player.getTileName(new Position(1, 2)) should be(TileNameEnum.RABBIT)
  }
  it should "be false if on given position is no tile" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 1)))
    val player: Player = goldPlayer(tiles)
    player.getTileName(new Position(1, 1)) should be(TileNameEnum.RABBIT)
    player.getTileName(new Position(1, 2)) should be(TileNameEnum.NONE)
    player.moveTile(new Position(1, 2), new Position(1, 1)) should be(false)
    player.getTileName(new Position(1, 1)) should be(TileNameEnum.RABBIT)
    player.getTileName(new Position(1, 2)) should be(TileNameEnum.NONE)
  }
  "remove" should "remove tile on given position" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 1)))
    val player: Player = goldPlayer(tiles)
    player.remove(new Position(1, 1)) should be(true)
    player.getTileName(new Position(1, 1)) should be(TileNameEnum.NONE)
  }
  it should "do nothing if tile pos not in set" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 1)))
    val player: Player = goldPlayer(tiles)
    player.remove(new Position(2, 2)) should be(false)
    player.getTileName(new Position(1, 1)) should be(TileNameEnum.RABBIT)
  }
  "add" should "add the given tile on the position" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 1)))
    val player: Player = goldPlayer(tiles)
    player.add(TileNameEnum.RABBIT, new Position(1, 3)) should be(true)
    player.getTileName(new Position(1, 3)) should be(TileNameEnum.RABBIT)
  }
  it should "do nothing if the position is already occupied" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 1)))
    val player: Player = goldPlayer(tiles)
    player.add(TileNameEnum.RABBIT, new Position(1, 1)) should be(false)
    player.getTileName(new Position(1, 1)) should be(TileNameEnum.RABBIT)
    player.getTiles should contain theSameElementsAs tiles
  }
  "hasNoRabbit" should "true if player has no rabbit" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.CAT, new Position(1, 1)))
    val player: Player = goldPlayer(tiles)
    player.hasNoRabbits should be(true)
  }
  it should "false if not" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 1)))
    val player: Player = goldPlayer(tiles)
    player.hasNoRabbits should be(false)
  }
  "hasRabbitOnRow" should "true if a Rabbit is on given row" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 8)),
      new Tile(TileNameEnum.DOG, new Position(2, 8)),
      new Tile(TileNameEnum.DOG, new Position(2, 7)))
    val player: Player = goldPlayer(tiles)
    player.hasRabbitOnRow(8) should be(true)
  }
  it should "false if no Rabbit on given row" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 8)),
      new Tile(TileNameEnum.DOG, new Position(2, 8)),
      new Tile(TileNameEnum.DOG, new Position(2, 7)))
    val player: Player = goldPlayer(tiles)
    player.hasRabbitOnRow(7) should be(false)
  }
  it should "false if row out of 1-8 bound" in {
    val tiles: Set[Tile] = Set(
      new Tile(TileNameEnum.RABBIT, new Position(1, 8)),
      new Tile(TileNameEnum.DOG, new Position(2, 8)),
      new Tile(TileNameEnum.DOG, new Position(2, 7)))
    val player: Player = goldPlayer(tiles)
    player.hasRabbitOnRow(0) should be(false)
    player.hasRabbitOnRow(9) should be(false)
  }
}
| MartinLei/Arimaa | src/test/scala/model/PlayerSpec.scala | Scala | mit | 7,191 |
package codecheck.github
package operations
import org.scalatest.FunSpec
import org.scalatest.BeforeAndAfterAll
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
/** Integration-style tests for the branch operations of the GitHub API
  * client (`getBranch`, `listBranches`).
  */
class BranchOpSpec extends FunSpec
  with api.Constants
  with BeforeAndAfterAll
{
  describe("getBranch") {
    it("with valid repo and branch should succeed") {
      // Blocking with Await is acceptable at the edge of a test.
      val branchOp = Await.result(api.getBranch(user, userRepo, "master"), TIMEOUT)
      // Avoid Option.get: assert presence and value in one idiomatic check.
      assert(branchOp.map(_.name).contains("master"))
    }
    it("with invalid branch should be None") {
      val branchOp = Await.result(api.getBranch(user, userRepo, "unknown"), TIMEOUT)
      assert(branchOp.isEmpty)
    }
  }
  describe("listBranches") {
    it("with valid repo should succeed") {
      val list = Await.result(api.listBranches(user, userRepo), TIMEOUT)
      // nonEmpty is the idiomatic form of `length > 0` and avoids
      // computing the full length on list-like collections.
      assert(list.nonEmpty)
      assert(list.exists(_.name == "master"))
    }
  }
}
| code-check/github-api-scala | src/test/scala/BranchOpSpec.scala | Scala | mit | 939 |
package scala.pickling.share.json
import org.scalatest.FunSuite
import scala.pickling._, scala.pickling.Defaults._, json._
class C(val name: String, val desc: String, var c: C, val arr: Array[Int])
/** Tests the JSON pickler's object-sharing modes on a cyclic object graph.
  *
  * Each test first (re)assigns `c1.c` to establish the exact graph it
  * needs, since the fixtures below are shared mutable state.
  */
class ShareJsonTest extends FunSuite {
  import scala.pickling.internal.currentRuntime
  // Shared fixtures: c2 links to c1, c3 links to c2; c1.c starts out null
  // and is mutated per test to close (or break) the cycle.
  val c1 = new C("c1", "desc", null, Array(1))
  val c2 = new C("c2", "desc", c1, Array(1))
  val c3 = new C("c3", "desc", c2, Array(1))
  test("loop-share-nonprimitives") {
    // Clear cached picklers so each test exercises pickler generation afresh.
    currentRuntime.picklers.clearRegisteredPicklerUnpicklerFor[C]
    c1.c = c3 // close the cycle: c1 -> c3 -> c2 -> c1
    val pickle = c1.pickle
    // The back-reference to the already-pickled c1 is emitted as
    // { "$ref": 0 } instead of being serialized again.
    assert(pickle.toString === """
      |JSONPickle({
      |  "$type": "scala.pickling.share.json.C",
      |  "arr": [
      |    1
      |  ],
      |  "c": {
      |    "$type": "scala.pickling.share.json.C",
      |    "arr": [
      |      1
      |    ],
      |    "c": {
      |      "$type": "scala.pickling.share.json.C",
      |      "arr": [
      |        1
      |      ],
      |      "c": { "$ref": 0 },
      |      "desc": "desc",
      |      "name": "c2"
      |    },
      |    "desc": "desc",
      |    "name": "c3"
      |  },
      |  "desc": "desc",
      |  "name": "c1"
      |})
    """.trim.stripMargin)
    // Walk the unpickled cycle: c11 ("c1") -> c13 ("c3") -> c12 ("c2").
    val c11 = pickle.unpickle[C]
    val c13 = c11.c
    val c12 = c13.c
    assert(c11.name === "c1")
    assert(c11.desc === "desc")
    assert(c11.arr.toList === List(1))
    assert(c12.name === "c2")
    assert(c12.desc === "desc")
    assert(c12.arr.toList === List(1))
    assert(c13.name === "c3")
    assert(c13.desc === "desc")
    assert(c13.arr.toList === List(1))
    // The cycle must be restored: c2's link points back at the root object.
    assert(c12.c === c11)
  }
  test("loop-share-nothing") {
    currentRuntime.picklers.clearRegisteredPicklerUnpicklerFor[C]
    // With sharing disabled, pickling a cyclic graph recurses without
    // termination, so the expected outcome is a resource-exhaustion error.
    // Note we've been running out of memory on this test in jenkins,
    // which is also a legitimate success case
    try {
      import shareNothing._
      c1.c = c3
      c2.pickle
      fail("Expected a stack overflow or out of memory error")
    } catch {
      case x: StackOverflowError => ()
      case x: OutOfMemoryError => ()
    }
  }
  test("loop-share-everything") {
    currentRuntime.picklers.clearRegisteredPicklerUnpicklerFor[C]
    import shareEverything._
    c1.c = c3 // close the cycle again: c1 -> c3 -> c2 -> c1
    val pickle = c1.pickle
    // Expected output is identical to the share-nonprimitives case here,
    // since the shared node (c1) is a non-primitive object.
    assert(pickle.toString === """
      |JSONPickle({
      |  "$type": "scala.pickling.share.json.C",
      |  "arr": [
      |    1
      |  ],
      |  "c": {
      |    "$type": "scala.pickling.share.json.C",
      |    "arr": [
      |      1
      |    ],
      |    "c": {
      |      "$type": "scala.pickling.share.json.C",
      |      "arr": [
      |        1
      |      ],
      |      "c": { "$ref": 0 },
      |      "desc": "desc",
      |      "name": "c2"
      |    },
      |    "desc": "desc",
      |    "name": "c3"
      |  },
      |  "desc": "desc",
      |  "name": "c1"
      |})
    """.trim.stripMargin)
    val c11 = pickle.unpickle[C]
    val c13 = c11.c
    val c12 = c13.c
    assert(c11.name === "c1")
    assert(c11.desc === "desc")
    assert(c11.arr.toList === List(1))
    assert(c12.name === "c2")
    assert(c12.desc === "desc")
    assert(c12.arr.toList === List(1))
    assert(c13.name === "c3")
    assert(c13.desc === "desc")
    assert(c13.arr.toList === List(1))
    assert(c12.c === c11)
  }
  test("noloop-share-non-primitives") {
    currentRuntime.picklers.clearRegisteredPicklerUnpicklerFor[C]
    import shareNothing._
    c1.c = null // break the cycle: the chain is now c3 -> c2 -> c1 -> null
    val pickle = c3.pickle
    // Acyclic graph: no "$ref" entries, the chain is serialized in full.
    assert(pickle.toString === """
      |JSONPickle({
      |  "$type": "scala.pickling.share.json.C",
      |  "arr": [
      |    1
      |  ],
      |  "c": {
      |    "$type": "scala.pickling.share.json.C",
      |    "arr": [
      |      1
      |    ],
      |    "c": {
      |      "$type": "scala.pickling.share.json.C",
      |      "arr": [
      |        1
      |      ],
      |      "c": null,
      |      "desc": "desc",
      |      "name": "c1"
      |    },
      |    "desc": "desc",
      |    "name": "c2"
      |  },
      |  "desc": "desc",
      |  "name": "c3"
      |})
    """.trim.stripMargin)
    // Walk the unpickled chain: c23 ("c3") -> c22 ("c2") -> c21 ("c1").
    val c23 = pickle.unpickle[C]
    val c22 = c23.c
    val c21 = c22.c
    assert(c23.name === "c3")
    assert(c23.desc === "desc")
    assert(c23.arr.toList === List(1))
    assert(c22.name === "c2")
    assert(c22.desc === "desc")
    assert(c22.arr.toList === List(1))
    assert(c21.name === "c1")
    assert(c21.desc === "desc")
    assert(c21.arr.toList === List(1))
  }
}
| scala/pickling | core/src/test/scala/scala/pickling/json/ShareJsonTest.scala | Scala | bsd-3-clause | 4,474 |
/**
* Copyright (c) 2014 Rafael Brandão <rafa.bra@gmail.com>
*
* This is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 3 of the License, or (at
* your option) any later version.
*
* This software is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
*/
package ecurrencies.integration.rabbitmq.internal
import ecurrencies.integration.rabbitmq.RabbitMQ
import akka.actor.{ActorContext, ActorRef, ExtendedActorSystem}
import com.rabbitmq.client.Channel
import com.thenewmotion.akka.rabbitmq.ConnectionActor
/** Akka-backed [[RabbitMQ]] implementation: owns the RabbitMQ connection
  * actor and declares the messaging topology on each channel.
  */
private[rabbitmq] class RabbitMQProvider(implicit val system: ExtendedActorSystem)
  extends RabbitMQ {

  /** Settings read lazily from the actor system's configuration. */
  lazy val settings = new RabbitMQSettings(system.settings.config)

  /** The connection actor, created lazily under the name "rabbitmq". */
  lazy val connectionActor: ActorRef =
    system.actorOf(ConnectionActor.props(settings.connection.buildFactory), "rabbitmq")

  /** Declares the exchange and queue, binds them, sets the prefetch count,
    * and starts consuming with a [[ChannelConsumer]] that forwards to `self`.
    *
    * Note: fixed deprecated Scala procedure syntax by adding `: Unit =`.
    */
  def configureChannel(channel: Channel, self: ActorRef): Unit = {
    import settings.{exchange, queue, bindingKey, channel => channelSettings}
    // `null` means "no extra declaration arguments" in the RabbitMQ Java client.
    channel.exchangeDeclare(
      exchange.name, exchange.`type`, exchange.durable, exchange.autoDelete, exchange.internal, null
    )
    channel.queueDeclare(queue.name, queue.durable, queue.exclusive, queue.autoDelete, null)
    channel.queueBind(queue.name, exchange.name, bindingKey)
    channel.basicQos(channelSettings.prefetchCount)
    // autoAck = false: messages must be acknowledged explicitly by the consumer.
    channel.basicConsume(queue.name, false, new ChannelConsumer(channel, self))
  }
}
| rafael-brandao/ecurrencies | integration/rabbitmq/src/main/scala/ecurrencies/integration/rabbitmq/internal/RabbitMQProvider.scala | Scala | lgpl-3.0 | 1,855 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.